Example #1
0
    async def _execute_sql_scripts(self, reindex: bool) -> None:
        """Execute SQL included with project"""
        sql_path = join(self._config.package_path, 'sql')
        if not exists(sql_path):
            return

        # Only the two well-known subdirectories may appear under sql/.
        allowed = ('on_reindex', 'on_restart')
        if any(entry not in allowed for entry in listdir(sql_path)):
            raise ConfigurationError(
                f'SQL scripts must be placed either to `{self._config.package}/sql/on_restart` or to `{self._config.package}/sql/on_reindex` directory'
            )

        if not isinstance(self._config.database, PostgresDatabaseConfig):
            self._logger.warning(
                'Execution of user SQL scripts is supported on PostgreSQL only, skipping'
            )
            return

        # Pick the subdirectory matching the current startup mode.
        sql_path = join(sql_path, 'on_reindex' if reindex else 'on_restart')
        if not exists(sql_path):
            return

        self._logger.info('Executing SQL scripts from `%s`', sql_path)
        for script_name in sorted(listdir(sql_path)):
            if not script_name.endswith('.sql'):
                continue
            with open(join(sql_path, script_name)) as script_file:
                script_sql = script_file.read()
            self._logger.info('Executing `%s`', script_name)
            await get_connection(None).execute_script(script_sql)
Example #2
0
def _predict(exp_cfg: dict, collect_dir: str) -> None:
    """Download sample image and make predictions on it using the model bundle.
    """
    key = exp_cfg['key']
    console_heading(f'Testing model bundle for {key}...')

    model_bundle_uri = join(collect_dir, 'bundle', 'model-bundle.zip')
    if not exists(model_bundle_uri):
        console_failure(f'Bundle does not exist: {model_bundle_uri}',
                        bold=True)
        exit(1)

    pred_dir = join(collect_dir, 'sample-predictions')
    sample_uri = exp_cfg['sample_img']
    sample_ext = splitext(basename(sample_uri))[1]
    sample_final_uri = join(pred_dir, f'sample-img-{key}{sample_ext}')
    if not exists(sample_final_uri):
        # download_or_copy stores the file at <pred_dir>/[s3|http]/.../<sample>
        # and additionally drops a flat copy at <pred_dir>/<sample>
        downloaded_uri = download_or_copy(sample_uri, pred_dir)
        copied_uri = join(pred_dir, basename(sample_uri))
        # remove the intermediate <pred_dir>/[s3|http]/ tree
        top_level = normpath(relpath(downloaded_uri, pred_dir)).split(os.sep)[0]
        shutil.rmtree(join(pred_dir, top_level))
        # give the flat copy its final name
        if copied_uri != sample_final_uri:
            os.rename(copied_uri, sample_final_uri)

    pred_ext = exp_cfg['pred_ext']
    out_uri = join(pred_dir, f'sample-pred-{key}{pred_ext}')
    run_command(
        ['rastervision', 'predict', model_bundle_uri, sample_final_uri, out_uri])
    def processThumbnail(self, source, destination, width=None, height=None):
        '''
        @see: IThumbnailProcessor.processThumbnail

        Generates a thumbnail from `source` into `destination` by shelling out
        to GraphicsMagick (`self.gm_path`). When both `width` and `height` are
        given the resize command template is used, otherwise the plain
        transform template. On failure any partial output file is removed.
        '''
        assert isinstance(source, str), 'Invalid source path %s' % source
        assert isinstance(destination, str), 'Invalid destination path %s' % destination

        # Values substituted into the configured %-style command templates.
        params = dict(gm=abspath(self.gm_path), source=source, destination=destination)
        if width and height:
            assert isinstance(width, int), 'Invalid width %s' % width
            assert isinstance(height, int), 'Invalid height %s' % height

            params.update(width=width, height=height)
            command = self.command_resize % params
        else: command = self.command_transform % params

        # Make sure the destination directory exists before launching the tool.
        destDir = dirname(destination)
        if not exists(destDir): makedirs(destDir)
        try:
            p = Popen(shlex.split(command), stdin=PIPE, stdout=PIPE, stderr=PIPE)
            error = p.wait() != 0  # non-zero exit status means failure
        except Exception as e:
            log.exception('Problems while executing command:\n%s \n%s' % (command, e))
            error = True

        if error:
            # NOTE(review): the partial output is deleted but no exception is
            # raised here; sibling implementations raise IOError afterwards —
            # confirm whether the tail of this method was truncated.
            if exists(destination): os.remove(destination)
Example #4
0
def fs_render(fs_metadata, json=None, **kwargs):
    """render node based on json option passed renders to file, stdout or deletes json at root

    Parameters
    ----------
    fs_metadata: dict
      Metadata json to be rendered
    json: str ('file', 'display', 'delete')
      Render to file, stdout or delete json
    """

    metadata_file = metadata_locator(fs_metadata, **kwargs)

    if json == 'file':
        # make sure the metadata_root directory is present before writing
        metadata_dir = dirname(metadata_file)
        if not exists(metadata_dir):
            makedirs(metadata_dir)
        # persist the directory metadata as json
        with open(metadata_file, 'w') as out:
            js.dump(fs_metadata, out)
    elif json == 'delete':
        # drop the .dir.json of the current directory, when present
        if exists(metadata_file):
            remove(metadata_file)
    elif json == 'display':
        # dump json to stdout
        safe_print(js.dumps(fs_metadata) + '\n')
    def processThumbnail(self, source, destination, width=None, height=None):
        '''
        @see: IThumbnailProcessor.processThumbnail

        Renders a thumbnail from `source` into `destination` by invoking the
        configured avconv command line (resize template when both dimensions
        are supplied, plain transform template otherwise). Removes any partial
        output and raises IOError on failure.
        '''
        assert isinstance(source, str), 'Invalid source path %s' % source
        assert isinstance(destination,
                          str), 'Invalid destination path %s' % destination

        import shlex  # local import: split the command string into an argv list

        params = dict(avconv=abspath(self.avconv_path),
                      source=source,
                      destination=destination)
        if width and height:
            assert isinstance(width, int), 'Invalid width %s' % width
            assert isinstance(height, int), 'Invalid height %s' % height

            params.update(width=width, height=height)
            command = self.command_resize % params
        else:
            command = self.command_transform % params

        # Make sure the destination directory exists before launching avconv.
        destDir = dirname(destination)
        if not exists(destDir): makedirs(destDir)
        try:
            # Bug fix: Popen was given the raw command string, which fails on
            # POSIX unless shell=True; sibling implementations use shlex.split.
            p = Popen(shlex.split(command))
            error = p.wait() != 0  # non-zero exit status means failure
        except Exception:
            # Was a bare `except:` (also swallowed SystemExit/KeyboardInterrupt)
            # and the '% s' in the log format was a typo for '%s'.
            log.exception('Problems while executing command:\n%s', command)
            error = True

        if error:
            if exists(destination): os.remove(destination)
            raise IOError('Cannot process thumbnail from \'%s\' to \'%s\'' %
                          (source, destination))
Example #6
0
def cached_file_reader(dir, flat, flon):
    """Read the 1°×1° SRTM elevation tile containing (flat, flon).

    Looks for <tile>.hgt in `dir`, then for the zipped tile; raises when
    neither is present (download is manual — see the logged URL).
    Returns a (SAMPLES, SAMPLES) int16 array, flipped along axis 0.
    """
    # https://wiki.openstreetmap.org/wiki/SRTM
    # The official 3-arc-second and 1-arc-second data for versions 2.1 and 3.0 are divided into 1°×1° data tiles.
    # The tiles are distributed as zip files containing HGT files labeled with the coordinate of the southwest cell.
    # For example, the file N20E100.hgt contains data from 20°N to 21°N and from 100°E to 101°E inclusive.
    root = '%s%02d%s%03d' % ('S' if flat < 0 else 'N', abs(flat),
                             'W' if flon < 0 else 'E', abs(flon))
    hgt_file = root + '.hgt'
    hgt_path = join(dir, hgt_file)
    zip_path = join(dir, root + EXTN)
    if exists(hgt_path):
        log.debug(f'Reading {hgt_path}')
        # renamed from `input`, which shadowed the builtin
        with open(hgt_path, 'rb') as source:
            data = source.read()
    elif exists(zip_path):
        log.debug(f'Reading {zip_path}')
        # context managers close the archive and member (the original leaked
        # both); `archive` was named `zip`, shadowing the builtin
        with open(zip_path, 'rb') as source, ZipFile(source) as archive:
            log.debug(f'Found {archive.filelist}')
            with archive.open(hgt_file) as member:
                data = member.read()
    else:
        # i tried automating download, but couldn't get ouath2 to work
        log.warning(f'Download {BASE_URL + root + EXTN}')
        raise Exception(f'Missing {hgt_file}')
    # big-endian 16-bit samples; flipped along axis 0 — presumably so row 0 is
    # the southern edge (TODO confirm against callers)
    return np.flip(
        np.frombuffer(data, np.dtype('>i2'), SAMPLES * SAMPLES).reshape(
            (SAMPLES, SAMPLES)), 0)
    def processThumbnail(self, source, destination, width=None, height=None):
        """
        @see: IThumbnailProcessor.processThumbnail

        Renders a thumbnail from `source` into `destination` via the
        configured avconv command line (resize template when both dimensions
        are supplied, plain transform template otherwise). Removes any partial
        output and raises IOError on failure.
        """
        assert isinstance(source, str), "Invalid source path %s" % source
        assert isinstance(destination, str), "Invalid destination path %s" % destination

        import shlex  # local import: split the command string into an argv list

        params = dict(avconv=abspath(self.avconv_path), source=source, destination=destination)
        if width and height:
            assert isinstance(width, int), "Invalid width %s" % width
            assert isinstance(height, int), "Invalid height %s" % height

            params.update(width=width, height=height)
            command = self.command_resize % params
        else:
            command = self.command_transform % params

        # Make sure the destination directory exists before launching avconv.
        destDir = dirname(destination)
        if not exists(destDir):
            makedirs(destDir)
        try:
            # Bug fix: Popen was given the raw command string, which fails on
            # POSIX unless shell=True; split it into an argument list.
            p = Popen(shlex.split(command))
            error = p.wait() != 0  # non-zero exit status means failure
        except Exception:
            # Was a bare `except:` (also swallowed SystemExit/KeyboardInterrupt)
            # and the '% s' in the log format was a typo for '%s'.
            log.exception("Problems while executing command:\n%s", command)
            error = True

        if error:
            if exists(destination):
                os.remove(destination)
            raise IOError("Cannot process thumbnail from '%s' to '%s'" % (source, destination))
def make_parameters(p_out_dir, p_parameter_path, p_Ed_type, p_Cn, p_Ce, p_alpha, p_norm_type_list):
    """Create the nested output-directory tree for a parameter sweep.

    Layout: <p_out_dir>/<p_parameter_path>/<ed>/"Cn=.., Ce=.., a=.."/<norm>/
    for every combination drawn from the supplied lists.
    """
    # Converted from Python 2 `print` statements to the print() function.
    print(p_Ed_type)
    print(p_Ce)
    print(p_Cn)
    print(p_out_dir)
    print(p_parameter_path)

    print("start of parameters")
    # exist_ok avoids the check-then-create race of the original exists()+mkdir
    os.makedirs(p_out_dir, exist_ok=True)
    local_root = os.path.join(p_out_dir, p_parameter_path)
    os.makedirs(local_root, exist_ok=True)

    for ed_alg in p_Ed_type:
        ed_path = os.path.join(local_root, ed_alg)
        os.makedirs(ed_path, exist_ok=True)
        for cn in p_Cn:
            for ce in p_Ce:
                for alpha in p_alpha:
                    # directory-name format must stay identical to old runs
                    param_str = "Cn=" + str(cn) + ", Ce=" + str(ce) + ", a=" + str(alpha)
                    param_path = os.path.join(ed_path, param_str)
                    os.makedirs(param_path, exist_ok=True)

                    for norm_type in p_norm_type_list:
                        norm_path = os.path.join(param_path, norm_type)
                        os.makedirs(norm_path, exist_ok=True)
Example #9
0
def initDB(drop=False):
    """Initialise (and optionally recreate) the stockflow SQLite database.

    Parameters
    ----------
    drop : bool
        When True, drop all tables and recreate them.
    """
    from elixir import metadata, setup_all, drop_all, create_all
    # use the canonical os/os.path facade instead of the internal
    # genericpath/posixpath modules the original imported from
    from os.path import exists, expanduser, join
    from os import makedirs

    DB_NAME = "stockflow.sqlite"
    log = logging.getLogger(__name__)
    log.info("Inicializando o Core")
    dbpath = expanduser("~/.stockflow/")
    if not exists(dbpath):
        try:
            makedirs(dbpath)
        except OSError:
            # fall back to the user's home directory
            log.warning("Nao foi possivel criar os diretorios, \
                usando o home do usuário.")
            dbpath = expanduser("~")

    # Bug fix: the original concatenated dbpath and DB_NAME directly, which in
    # the fallback branch produced "<home>stockflow.sqlite" (no separator).
    db_file = join(dbpath, DB_NAME)
    metadata.bind = "sqlite:///" + db_file
    metadata.bind.echo = False

    setup_all()
    if drop:
        drop_all()

    if not exists(db_file) or drop:
        log.debug("Criando tabelas...")
        create_all()
Example #10
0
def fs_render(fs_metadata, json=None, **kwargs):
    """render node based on json option passed renders to file, stdout or deletes json at root

    Parameters
    ----------
    fs_metadata: dict
      Metadata json to be rendered
    json: str ('file', 'display', 'delete')
      Render to file, stdout or delete json
    """

    metadata_file = metadata_locator(fs_metadata, **kwargs)

    if json == 'display':
        # dump json to stdout
        safe_print(js.dumps(fs_metadata) + '\n')
        return

    if json == 'delete':
        # remove the .dir.json of the current directory, when present
        if exists(metadata_file):
            remove(metadata_file)
        return

    if json == 'file':
        # create the metadata_root directory if it doesn't exist yet
        metadata_dir = dirname(metadata_file)
        if not exists(metadata_dir):
            makedirs(metadata_dir)
        # write directory metadata to json
        with open(metadata_file, 'w') as out:
            js.dump(fs_metadata, out)
    def processThumbnail(self, source, destination, width=None, height=None):
        '''
        @see: IThumbnailProcessor.processThumbnail

        Renders a thumbnail from `source` into `destination` by running the
        configured ffmpeg command line (resize template when both dimensions
        are given, plain transform template otherwise). On failure any partial
        output file is removed.
        '''
        assert isinstance(source, str), 'Invalid source path %s' % source
        assert isinstance(destination, str), 'Invalid destination path %s' % destination

        # Values substituted into the configured %-style command templates.
        params = dict(ffmpeg=abspath(self.ffmpeg_path), source=source, destination=destination)
        if width and height:
            assert isinstance(width, int), 'Invalid width %s' % width
            assert isinstance(height, int), 'Invalid height %s' % height

            params.update(width=width, height=height)
            command = self.command_resize % params
        else: command = self.command_transform % params

        # Make sure the destination directory exists before launching ffmpeg.
        destDir = dirname(destination)
        if not exists(destDir): makedirs(destDir)
        try:
            p = Popen(shlex.split(command), stdin=PIPE, stdout=PIPE, stderr=PIPE)
            error = p.wait() != 0  # non-zero exit status means failure
        except Exception as e:
            log.exception('Problems while executing command:\n%s \n%s' % (command, e))
            error = True

        if error:
            # NOTE(review): the partial output is deleted but no exception is
            # raised here; sibling implementations raise IOError afterwards —
            # confirm whether the tail of this method was truncated.
            if exists(destination): os.remove(destination)
Example #12
0
    def drop_jobs_logs(self, taskid):
        """Delete the crawler job directory and the log file kept for `taskid`."""
        jobdir = join(settings.CRAWLER_DIRS['jobdir'], taskid)
        if exists(jobdir):
            rmtree(jobdir)

        logfile = join(settings.CRAWLER_DIRS['logdir'], '%s.log' % taskid)
        if exists(logfile):
            unlink(logfile)
Example #13
0
def init_logging(name='',
                 basic=True,
                 to_stdout=False,
                 to_file=True,
                 log_file=None,
                 log_dir='../logs'):
    """Configure and return a logger, then emit a start-up banner.

    Parameters
    ----------
    name : logger name; also used to derive the default log-file name
    basic : use logging.basicConfig on the root logger instead of a named one
    to_stdout : add a stdout StreamHandler (non-basic mode only)
    to_file : log to `log_dir`/`log_file`
    log_file : file name; defaults to '<name>.log' or 'train.log'
    log_dir : directory for the log file, created when missing
    """
    # The format/datefmt pair was duplicated four times in the original.
    fmt = '%(asctime)s - %(name)s - %(levelname)s | %(message)s'
    datefmt = '%Y-%m-%d %H:%M:%S'

    if log_file is None:
        log_file = name + '.log' if name else 'train.log'

    if basic:
        if to_file:
            # exist_ok avoids the exists()/makedirs() race of the original
            makedirs(log_dir, exist_ok=True)
            file_path = join(log_dir, log_file)
            logging.basicConfig(filename=file_path,
                                format=fmt,
                                datefmt=datefmt,
                                level=logging.INFO)
        else:
            logging.basicConfig(format=fmt,
                                datefmt=datefmt,
                                level=logging.INFO)
        logger = logging.getLogger()
    else:
        logger = logging.getLogger(name)
        logger.setLevel(logging.DEBUG)
        formatter = logging.Formatter(fmt, datefmt=datefmt)
        if to_file:
            makedirs(log_dir, exist_ok=True)
            fh = logging.FileHandler(join(log_dir, log_file))
            fh.setLevel(logging.DEBUG)
            fh.setFormatter(formatter)
            logger.addHandler(fh)
        if to_stdout:
            ch = logging.StreamHandler(sys.stdout)
            ch.setLevel(logging.DEBUG)
            ch.setFormatter(formatter)
            logger.addHandler(ch)

    # start-up banner with the relevant library versions
    logger.info('')
    logger.info('#' * 50)
    logger.info('----- %s -----' % name.upper())
    logger.info('----- start -----')
    logger.info('python: ' + sys.version.replace('\n', ' '))
    logger.info('pandas: ' + pd.__version__)
    logger.info('gensim: ' + gensim.__version__)

    return logger
def get_system_path(cnf,
                    interpreter_or_name,
                    name=None,
                    extra_warning='',
                    suppress_warn=False,
                    is_critical=False):
    """ "name" can be:
        - key in system_into.yaml
        - relative path in the project (e.g. external/...)
        - anything in system path
    """
    # One-argument form: the single argument is the tool name itself.
    if name is None:
        interpreter, name = None, interpreter_or_name
    else:
        interpreter = interpreter_or_name

    if interpreter == 'java':
        return get_java_tool_cmdline(cnf,
                                     name,
                                     extra_warning,
                                     suppress_warn,
                                     is_critical=is_critical)
    if interpreter:
        return get_script_cmdline(cnf,
                                  interpreter,
                                  name,
                                  extra_warning=extra_warning,
                                  suppress_warn=suppress_warn,
                                  is_critical=is_critical)

    # 1) declared in the system config?
    key = name.lower()
    if cnf and (cnf.resources is not None and key in cnf.resources
                and 'path' in cnf.resources[key]):
        tool_path = adjust_system_path(cnf.resources[key]['path'])
        return verify_obj_by_path(tool_path, name, is_critical=is_critical)

    # 2) somewhere under the project tree (e.g. external/)?
    for dirpath in [code_base_path]:
        candidate = join(dirpath, name)
        if exists(candidate):
            return verify_obj_by_path(candidate, name, is_critical=is_critical)

    # 3) on the PATH?
    candidate = which(name)
    if candidate and exists(candidate):
        return verify_obj_by_path(candidate, name, is_critical=is_critical)

    msg = (name + ' was not found. You may either specify path in the system '
           'config, or load into your PATH environment variable. ' +
           extra_warning)
    if not suppress_warn:
        err(msg)
    if is_critical:
        critical(msg)
    return None
Example #15
0
def _load(train_file, test_file, name):
    if not exists(train_file) or \
            (test_file is not None and not exists(test_file)):
        raise IOError("Dataset missing! %s" % name)

    train_dataset = np.genfromtxt(train_file, delimiter=',', dtype=np.int32)
    test_dataset = np.genfromtxt(test_file, delimiter=',', dtype=np.int32)

    return train_dataset, test_dataset
Example #16
0
    def maybe_makedir(*args):
        """Create base_dir/<args...> if needed; fail if a non-directory blocks it."""
        target = join(base_dir, *args)

        if exists(target):
            if not isdir(target):
                raise IOError("File '{}' exists but is not a directory ".format(target))
        else:
            makedirs(target)
Example #17
0
 def __check_empty(self):
     """Fail fast if any on-disk artefact from a previous run already exists."""
     checks = (
         (self.__config.dir, DATADIR, 'The data directory already exists (%s)'),
         (self.__config.file, FILE, 'The configuration file already exists (%s)'),
         (self.__config.dir, LOGDIR, 'The log directory already exists (%s)'),
     )
     for resolve, key, message in checks:
         path = resolve(key, create_dir=False)
         if exists(path):
             raise Exception(message % path)
Example #18
0
 def test_initdb(self):
     """initdb() must create a fresh DB file and hand back live handles."""
     # start from a clean slate: drop any leftover DB file
     if exists(TEST_TEMP_DBFILE_PATH):
         os.remove(TEST_TEMP_DBFILE_PATH)
     self.assertFalse(exists(TEST_TEMP_DBFILE_PATH))
     conn, curr = BukuDb.initdb()
     self.assertIsInstance(conn, sqlite3.Connection)
     self.assertIsInstance(curr, sqlite3.Cursor)
     self.assertTrue(exists(TEST_TEMP_DBFILE_PATH))
     curr.close()
     conn.close()
Example #19
0
 def test_initdb(self):
     """initdb() must create the DB file and return an open connection/cursor."""
     # remove any stale DB file left over from a previous run
     if exists(TEST_TEMP_DBFILE_PATH):
         os.remove(TEST_TEMP_DBFILE_PATH)
     self.assertIs(False, exists(TEST_TEMP_DBFILE_PATH))
     conn, curr = BukuDb.initdb()
     self.assertIsInstance(conn, sqlite3.Connection)
     self.assertIsInstance(curr, sqlite3.Cursor)
     # initdb must have created the DB file as a side effect
     self.assertIs(True, exists(TEST_TEMP_DBFILE_PATH))
     curr.close()
     conn.close()
Example #20
0
    def execute(self, context):
        """Export the active action of the selected armature as a clip resource.

        Validates the clip/actor/source names set on the operator, backs up an
        existing output file, merges onto a basis clip when one exists,
        optionally strips IK chains, and writes the clip to ``self.filepath``.
        Returns a Blender operator status set.
        """
        skeleton = find_armature(context)
        action = skeleton.animation_data.action
        # Stamp the operator's inputs onto the action's s3py metadata.
        action.s3py.actor_name = self.actor_name
        action.s3py.source_name = self.source_name
        action.s3py.name = self.clip_name

        if not action.s3py.name:
            self.report({'ERROR_INVALID_INPUT'},
                        "Animation must have an name i.e. 'a_myAnimation'")
            return {'CANCELLED'}

        if not action.s3py.actor_name:
            self.report({'ERROR_INVALID_INPUT'},
                        "Animation must have an actor name i.e. 'x'")
            return {'CANCELLED'}

        if not action.s3py.source_name:
            self.report(
                {'ERROR_INVALID_INPUT'},
                "Animation must have a source name i.e. 'myAnimation.blend'")
            return {'CANCELLED'}

        # if saving over an existing, make a backup
        if exists(self.filepath):
            ext = os.path.splitext(self.filepath)[1]
            i = 1
            backup = str(self.filepath).replace(ext, '_backup_%i_' % i + ext)
            while exists(backup):  # find the first free backup slot
                i += 1
                backup = str(self.filepath).replace(ext,
                                                    '_backup_%i_' % i + ext)
            copyfile(self.filepath, backup)

        clip_resource = s3py.animation.ClipResource()
        # if not starting from scratch, find original clip to preserve events
        if exists(action.s3py.basis):
            with io.open(action.s3py.basis, 'rb') as clip_stream:
                clip_resource.read(clip_stream)
            # force a 30 fps frame duration on the merged clip
            clip_resource.clip.frame_duration = 1.0 / 30.0
        else:
            print('No basis clip found, creating new one from scratch.')

        # Remove IK chains unless user specifies to keep them
        if self.remove_ik and clip_resource.ik_info:
            clip_resource.ik_info.chains = []
        #rig_scale = 0.9 if self.clip_name[0] == 't' else 1.0
        #print('Animation scale detected: %s'%rig_scale)
        # Write animation data
        save_clip(clip_resource, skeleton, 1.0)
        with io.open(self.filepath, 'wb') as output_stream:
            clip_resource.write(output_stream)
        return {'FINISHED'}
Example #21
0
def synchronizeURIToDir(path, dirPath):
    '''
    Publishes the entire contents from the URI path to the provided directory path.

    Only files whose source timestamp is newer than the already-synchronized
    copy are transferred; files ending in '.exe' are additionally marked
    executable.

    @param path: string
        The path to a resource: a file system path, a ZIP path
    @param dirPath: string
        The directory path to synchronize with.
    '''
    assert isinstance(path, str) and path, 'Invalid content path %s' % path
    assert isinstance(dirPath, str), 'Invalid directory path value %s' % dirPath

    if not isdir(path):
        # not a directory, see if it's a entry in a zip file
        zipFilePath, inDirPath = getZipFilePath(path)
        zipFile = ZipFile(zipFilePath)
        if not inDirPath.endswith(ZIPSEP): inDirPath = inDirPath + ZIPSEP

        tmpDir = TemporaryDirectory()

        # the zip archive's own mtime serves as the freshness reference
        lenPath, zipTime = len(inDirPath), datetime.fromtimestamp(stat(zipFilePath).st_mtime)
        for zipInfo in zipFile.filelist:
            assert isinstance(zipInfo, ZipInfo), 'Invalid zip info %s' % zipInfo
            if zipInfo.filename.startswith(inDirPath):
                # strip a leading '/' so join() does not discard dirPath
                if zipInfo.filename[0] == '/': dest = zipInfo.filename[1:]
                else: dest = zipInfo.filename

                dest = normpath(join(dirPath, dest[lenPath:]))

                # skip members no newer than the existing destination file
                if exists(dest) and zipTime <= datetime.fromtimestamp(stat(dest).st_mtime): continue
                destDir = dirname(dest)
                if not exists(destDir): makedirs(destDir)

                # extract into a temp dir first, then move to the final spot
                zipFile.extract(zipInfo.filename, tmpDir.name)
                move(join(tmpDir.name, normOSPath(zipInfo.filename)), dest)
                if zipInfo.filename.endswith('.exe'): os.chmod(dest, stat(dest).st_mode | S_IEXEC)
        return

    path = normpath(path)
    assert os.access(path, os.R_OK), 'Unable to read the directory path %s' % path
    lenPath = len(path) + 1
    for root, _dirs, files in os.walk(path):
        for file in files:
            src, dest = join(root, file), join(dirPath, root[lenPath:], file)

            # copy only when the source is strictly newer than the destination
            if exists(dest) and \
            datetime.fromtimestamp(stat(src).st_mtime) <= datetime.fromtimestamp(stat(dest).st_mtime): continue

            destDir = dirname(dest)
            if not exists(destDir): makedirs(destDir)
            copy(src, dest)
            if file.endswith('.exe'): os.chmod(dest, stat(dest).st_mode | S_IEXEC)
Example #22
0
def pandoc_html(path):
    """Render the file at `path` to HTML with pandoc.

    Falls back to /bin/echo when pandoc is installed in neither standard
    location; returns an error string when `path` does not exist.
    """
    if exists('/usr/bin/pandoc'):
        pandoc = '/usr/bin/pandoc'
    elif exists('/usr/local/bin/pandoc'):
        pandoc = '/usr/local/bin/pandoc'
    else:
        pandoc = '/bin/echo'
    if exists(path):
        script = [pandoc, '-t', 'html', path]
        # Bug fix: the original read p.stdout without ever waiting on the
        # process, leaking the pipe fd and leaving a zombie; communicate()
        # drains stdout and reaps the child.
        proc = Popen(script, stdout=PIPE)
        output, _ = proc.communicate()
        return output.decode(encoding='UTF-8')
    else:
        return ("Path NOT found,  %s" % path)
Example #23
0
    def execute(self, context):
        """Export the active action of the selected armature as a clip resource.

        Validates the clip/actor/source names set on the operator, backs up an
        existing output file, merges onto a basis clip when one exists,
        optionally strips IK chains, and writes the clip to ``self.filepath``.
        Returns a Blender operator status set.
        """
        skeleton =  find_armature(context)
        action = skeleton.animation_data.action
        # Stamp the operator's inputs onto the action's s3py metadata.
        action.s3py.actor_name = self.actor_name
        action.s3py.source_name = self.source_name
        action.s3py.name = self.clip_name

        if not action.s3py.name:
            self.report({'ERROR_INVALID_INPUT'}, "Animation must have an name i.e. 'a_myAnimation'")
            return{'CANCELLED'}


        if not action.s3py.actor_name:
            self.report({'ERROR_INVALID_INPUT'}, "Animation must have an actor name i.e. 'x'")
            return{'CANCELLED'}

        if not action.s3py.source_name:
            self.report({'ERROR_INVALID_INPUT'}, "Animation must have a source name i.e. 'myAnimation.blend'")
            return{'CANCELLED'}

        # if saving over an existing, make a backup
        if exists(self.filepath):
            ext = os.path.splitext(self.filepath)[1]
            i = 1
            backup =  str(self.filepath).replace(ext,'_backup_%i_'%i +ext)
            while exists(backup):  # find the first free backup slot
                i+=1
                backup =  str(self.filepath).replace(ext,'_backup_%i_'%i +ext)
            copyfile(self.filepath,backup)


        clip_resource = s3py.animation.ClipResource()
        # if not starting from scratch, find original clip to preserve events
        if exists(action.s3py.basis):
            with io.open(action.s3py.basis, 'rb') as clip_stream:
                clip_resource.read(clip_stream)
            # force a 30 fps frame duration on the merged clip
            clip_resource.clip.frame_duration = 1.0/30.0
        else:
            print('No basis clip found, creating new one from scratch.')

        # Remove IK chains unless user specifies to keep them
        if self.remove_ik and clip_resource.ik_info:
            clip_resource.ik_info.chains = []
        #rig_scale = 0.9 if self.clip_name[0] == 't' else 1.0
        #print('Animation scale detected: %s'%rig_scale)
        # Write animation data
        save_clip(clip_resource,skeleton,1.0)
        with io.open(self.filepath, 'wb') as output_stream:
            clip_resource.write(output_stream)
        return {'FINISHED'}
Example #24
0
 def quickCompareFilesInFolder(self, refFolder, desFolder, fileNames):
     """Compare each named file between refFolder and desFolder.

     Returns False immediately when a file is missing on either side;
     otherwise ANDs together the per-file quick comparisons.
     """
     result = True
     for name in fileNames:
         reference = refFolder + "/" + name
         candidate = desFolder + "/" + name
         if not exists(candidate):
             self.logger.error("Missing file %s" % candidate)
             return False
         if not exists(reference):
             self.logger.error("Missing file %s" % reference)
             return False
         result = self.quickCompareFile(reference, candidate) and result
     return result
Example #25
0
def CreateDB():
    """Create the Sqlite3 database from the backup SQL when it does not exist
    yet; otherwise run schema migrations on the existing database."""
    if ini.DB_TYPE == "Sqlite3":
        if not exists(ini.DB_NAME_SQLITE3):
            fileCont = readBackupFile()

            conn = sqlite3.connect(ini.DB_NAME_SQLITE3)
            try:
                cu = conn.cursor()
                try:
                    for sql in fileCont:
                        print(sql)  # was a Python 2 print statement
                        if sql:
                            cu.execute(sql)

                    conn.commit()

                    # migrate to a later version when needed
                    DoMigration()

                except sqlite3.OperationalError:
                    print(sql)
                    conn.rollback()
            finally:
                conn.close()  # the original leaked the connection

        else:
            # database exists: check the version and update when necessary
            DoMigration()
Example #26
0
    def sync_paths(self, paths):
        """Synchronise changed files reported by self._get_changes(paths).

        Merges into existing remote files, copies new ones; returns True when
        at least one file was processed.
        """
        change = False

        for local_file, remote_file in self._get_changes(paths):
            # exist_ok closes the exists()/makedirs() race of the original
            os.makedirs(dirname(remote_file), exist_ok=True)

            if exists(remote_file):
                self.merge_file(local_file, remote_file)
            else:
                copy_file(local_file, remote_file)

            change = True

        return change
Example #27
0
def html_file(name):
    """Return the path of `name` inside the _HTML directory, creating the
    directory when it is missing."""
    # exist_ok replaces the original `if exists: pass / else: makedirs`
    # check-then-act race with a single atomic call.
    makedirs(_HTML, exist_ok=True)

    return join(_HTML, name)
Example #28
0
def retrieve_docs():
    """Download the html-2.2.1 docset and unpack it into PKGPATH/Documents."""
    # exist_ok closes the exists()/makedirs() race of the original
    os.makedirs(PKGPATH, exist_ok=True)
    filepath, headers = urlretrieve(DOCURL, "html-2.2.1.zip")
    with ZipFile(filepath, 'r') as zf:
        zf.extractall(join(PKGPATH, 'Documents'))
    os.unlink(filepath)  # remove the downloaded archive
 def setUp(self):
     """Initialize values for this testcase class."""
     self.hass = get_test_home_assistant()
     # Remove data persisted by an earlier run so each test starts clean.
     pickle_path = self.hass.config.path("{}.pickle".format('feedreader'))
     if exists(pickle_path):
         remove(pickle_path)
Example #30
0
    def __init__(self):
        """Parse the command line, then bootstrap config, logging and the DB."""
        argparse = Arguments()
        args, self.__config = argparse.parse_args()

        # there's a pile of ugliness here so that we delay error handling until we have logs.
        # see also comments in parse_args.
        self.__error = self.__config and not exists(
            self.__config)  # see logic in parse_args
        full_config = self.__config and not self.__error

        # Special case of initializing repository, set config directory
        configdir = None
        # Bug fix: the original tested `args.command in (INIT_REPOSITORY)`;
        # parentheses without a comma do not make a tuple, so that was a
        # substring test against the command string.
        if args.command and args.command == INIT_REPOSITORY:
            if not args.args:
                configdir = getcwd()
            elif len(args.args) == 1:
                configdir = args.args[0]
            else:
                raise Exception(
                    'Command %s takes at most one argument - the directory to initialise'
                    % INIT_REPOSITORY)
            configdir = canonify(configdir)

        # this is a bit ugly, but we need to use the base methods to construct the log and db
        # note that log is not used in base!
        super().__init__(None, None, args, None,
                         dirname(self.__config) if full_config else configdir)
        self.log, self.log_path, self.__log_stream = \
            init_log(self.dir(LOGDIR) if full_config else None, self.arg(LOGSIZE), self.arg(LOGCOUNT),
                     self.arg(LOGVERBOSITY), self.arg(VERBOSITY), self.arg(COMMAND) or 'rover',
                     self.arg(LOGUNIQUE), self.arg(LOGUNIQUEEXPIRE))
        if full_config:  # if initializing, we have no database...
            self.db = init_db(timeseries_db(self), self.log)
Example #31
0
def retrieve_docs():
    """Download the html-2.2.1 docset and unpack it into PKGPATH/Documents."""
    # exist_ok closes the exists()/makedirs() race of the original
    os.makedirs(PKGPATH, exist_ok=True)
    filepath, headers = urlretrieve(DOCURL, "html-2.2.1.zip")
    with ZipFile(filepath, 'r') as zf:
        zf.extractall(join(PKGPATH, 'Documents'))
    os.unlink(filepath)  # remove the downloaded archive
    def draw_charts(self):
        '''
        Draw charts
        apply filter of displayed list if any
        if not draw for all non-zero elements

        '''
        if not self.result_elements:
            self.read_concentrations()

        self.messenger('Data loaded sucessfully, starting drawing the charts')

        charts_dir = self.work_dir + SEPAR + 'charts' + SEPAR
        if not exists(charts_dir):
            mkdir(charts_dir)

        # Restrict to the displayed meshes when a display filter is active.
        if self.displayed_mesh_list:
            selection = grafLinear.list_filter(
                self.result_elements, self.displayed_mesh_list.keys())
        else:
            selection = self.result_elements

        for element, values in selection.items():
            self.draw_routine(element, values, charts_dir)

        self.messenger('all charts sucessfully created')
Example #33
0
def importarMuchoCine(path, newPath):
    """Build a sentiment corpus from MuchoCine XML review files.

    Reads every regular file in *path*, tidies it into well-formed XML and
    keeps only clearly polarised reviews: rank <= 2 -> 'neg' (target 0),
    rank >= 4 -> 'pos' (target 1).  Neutral reviews (rank 3) are skipped.
    Each kept review text is re-written under newPath/<sentiment>/<name>.

    Args:
        path: Directory containing the raw review files.
        newPath: Destination root; per-sentiment subdirectories are created.

    Returns:
        dict with 'data' (utf-8 encoded texts), 'target' (0/1 labels),
        'filenames' and 'target_names' — scikit-learn bunch style.
    """
    data = []
    target = []
    target_names = ['neg', 'pos']
    filenames = []

    onlyfiles = [f for f in listdir(path) if isfile(join(path, f))]

    for file in onlyfiles:
        route = realpath(join(path, file))
        filename = splitext(file)[0]

        # Tidy the raw file into valid XML before parsing.
        with open(route, 'r') as myfile:
            tidied = tidylib.tidy_document(myfile.read(), {
                'input_xml': True,
                'output_xml': True
            })[0]

        root = ET.XML(tidied)

        rank = int(root.get('rank'))
        summary = xstr(root.find('summary').text)
        body = xstr(root.find('body').text)

        # BUG FIX: the original guarded this region with
        # `if rank <= 2 or rank >= 4:` and then carried an inner
        # `else: sentiment = 'neu'` branch that could never execute.
        # The unreachable branch is removed; rank-3 reviews are skipped.
        if rank <= 2:
            sentiment, rankTarget = 'neg', 0
        elif rank >= 4:
            sentiment, rankTarget = 'pos', 1
        else:
            continue

        content = (summary + ' ' + body).encode('utf-8')

        data.append(content)
        target.append(rankTarget)
        filenames.append(filename)

        newFilePath = join(newPath, sentiment)
        if not exists(newFilePath):
            makedirs(newFilePath)

        # Persist the review under its sentiment directory (context manager
        # replaces the original open/write/close triple).
        with open(join(newFilePath, filename), 'w') as newFile:
            newFile.write(content)

    return {
        'data': data,
        'filenames': filenames,
        'target': target,
        'target_names': target_names
    }
Example #34
0
    def __str__(self):
        """Render a human-readable description of a failed module import.

        Expects self.args == (dotted module path, error, traceback text).
        Tries to locate the module file on PYTHONPATH to include a file://
        link in the message.
        """
        try:
            search_paths = os.environ['PYTHONPATH'].split(os.pathsep)
        except KeyError:
            search_paths = []

        module_full_path = self.args[0]
        error = self.args[1]
        traceback_text = self.args[2]
        relative_path = module_full_path.replace('.', '/') + ".py"

        # Resolve the dotted module name to an actual file, if possible.
        resolved = None
        for base in search_paths:
            candidate = join(base, relative_path)
            if exists(candidate):
                resolved = abspath(candidate)
                break

        if resolved:
            return "Import module [b]{0}[/b] failed (file://{1})." \
                   "\n[b]Error when import: {2}." \
                   "[/b]\n----------------------\n{3}".format(
                module_full_path, resolved,
                str(error),
                traceback_text
            )

        return "Could not find module {0}. PYTHONPATH: {1}".format(
            module_full_path, ':'.join(search_paths))
Example #35
0
    def do(self, theEnv, aPath, *args, **kargs):
        """
        function handler implementation

        Resolve *aPath* (argument 1, a symbol or string) to an absolute
        file path and feed the file's contents to the environment's parser,
        with the process cwd temporarily switched to the file's directory.
        Returns the symbol FALSE when parsing fails.
        (Python 2 source: `except Exception, e` / `print >>`.)
        """

        aPath = self.resolve(theEnv, 
                             self.semplify(theEnv, aPath, types.Lexeme, ('1', 'symbol or string')))
        
        aPath = os.path.abspath(aPath)
        
        if not exists(aPath):
            raise InvalidArgValueError("Function load was unable to open file %s"%aPath)
        
        # Remember the cwd so the failure path can restore it.
        oldcwd = os.getcwd()
        
        os.chdir(os.path.dirname(aPath))
        
        # 'rU': universal-newlines read mode (Python 2 idiom).
        aFile = open(aPath, 'rU')
        aString = aFile.read()

        try:
            parsed = theEnv.network.getParser().parse(aString, extended=True)
        except Exception, e:
            # Pretty-print the parse error to the error resource, restore
            # the original cwd and signal failure to the caller.
            print >> theEnv.RESOURCES['werror'], theEnv.network.getParser().ExceptionPPrint(e, aString)
            os.chdir(oldcwd)
            return types.Symbol('FALSE')
Example #36
0
def load(default):
    """Return the object pickled in PICKLE_FILE, or *default* if absent.

    Args:
        default: Value returned when PICKLE_FILE does not exist.

    Returns:
        The unpickled object, or *default*.
    """
    if not exists(PICKLE_FILE):
        return default
    # Context manager guarantees the handle is closed even when
    # pickle.load raises (the original leaked it on error).
    with open(PICKLE_FILE, 'rb') as pkl_file:
        return pickle.load(pkl_file)
Example #37
0
def stage_config_file(filename, input_dir, input_mount_point, home_mounted=False):
    """Stage config file inside container

    Translates a host-side config file path into the path it will have
    inside the container, copying the file into the input dir when needed.

    Args:
        filename (str): Path to config file
        input_dir (str): The input directory outside the container
        input_mount_point (str): The input directory inside the container
        home_mounted (bool): True if home directory is mounted inside container

    Returns:
        str: Path of the config file as seen from inside the container.

    Raises:
        Exception: When the file exists outside the container but there is
            no input directory to copy it into.
    """
    fn = filename
    is_filename_inside_input_dir = input_dir and abspath(filename).startswith(abspath(input_dir))
    is_filename_inside_home_dir = home_mounted and abspath(filename).startswith(expanduser('~'))
    if is_filename_inside_input_dir:
        # Replace input dir outside container by input dir inside container
        fn = abspath(filename).replace(abspath(input_dir), input_mount_point)
    elif is_filename_inside_home_dir:
        # Singularity has home dir mounted, so valid filename should be available inside container
        # Make absolute because current working dir can be different inside image
        fn = abspath(filename)
    elif exists(filename):
        if input_dir is not None:
            # Copy the file into the input dir so the container can see it
            shutil.copy(filename, input_dir)
            fn = os.path.join(input_mount_point, os.path.basename(filename))
        else:
            # BUG FIX: original raised a placeholder-less f-string
            # ('Unable to copy (unknown), without a input_dir'); include the
            # offending filename so the failure is actionable.
            raise Exception(f'Unable to copy {filename}, without an input_dir')
    else:
        # Assume filename exists inside container or model does not need a file to initialize
        pass
    return fn
Example #38
0
 def read_config(self):
     """Restore track_text and backlight_map from the pickled config file.

     Does nothing when self.PICKLE_FILE does not exist yet.
     """
     if exists(self.PICKLE_FILE):
         # `with` closes the handle even if unpickling raises (the
         # original open/close pair leaked it on error).
         with open(self.PICKLE_FILE, 'rb') as pkl_file:
             data = pickle.load(pkl_file)
         self.track_text = data["track_text"]
         self.backlight_map = data["backlight_map"]
Example #39
0
def get_tour(codehydro):
    """Build (or reuse) the KML downstream tour for the given hydro code."""
    resFileName = "/tour_%s.kml" % codehydro
    resPath = DATA_DIR + resFileName
    # Serve the cached KML when it was already generated.
    if exists(resPath):
        return resFileName
    placemarks = get_coursdo_placemarks(codehydro, "Cours_Eau_default")
    placemarks += get_confluent_placemarks(codehydro)
    tron = get_troncons(codehydro)
    con = psycopg2.connect("dbname='eau_france' user='******' password='******'")
    # Sample every fifth segment and emit one fly-to step per sample.
    flyto_steps = ""
    ini = tron[0][0]
    for segment in tron[1::5]:
        fin = segment[1]
        nd = get_noeud_ini_fin(con, ini, fin)
        ini = fin
        (lat, lon, head) = compute_heading(strip_point(nd[0]), strip_point(nd[1]))
        flyto_steps += flyto_lookat % {"duration": 1, "lon": lon, "lat": lat, "heading": head, "tilt": 75, "range": 5000, "alt": 0}
    con.close()
    tr = tour % {"flyto": flyto_steps, "name": "dowstream tour"}
    doc = kmldoc % {"style": linestyles, "placemarks": placemarks, "tour": tr}
    out = open(resPath, "w")
    out.write(doc.encode("utf-8"))
    out.close()
    return resFileName
Example #40
0
def cancelDefaulters(zoomWebinarId):
    """Cancel webinar registrations for every e-mail in the defaulters file.

    Returns the number of registrants cancelled; 0 when the file is missing,
    empty, or the operator declines the confirmation prompt.
    """
    defaultersFilePath = getDefaultersFilePath(zoomWebinarId)
    if not exists(defaultersFilePath):
        _logger.error('File not found: %s', defaultersFilePath)
        return 0

    # Strip contents; Split lines; Skip header; Strip each row; Convert to lowercase
    with open(defaultersFilePath) as dfd:
        rows = dfd.read().strip().split('\n')[1:]
    emails = [row.strip().lower() for row in rows]

    if not emails:
        return 0

    # Require explicit operator confirmation before touching the webinar.
    answer = raw_input('Cancel %s defaulters from attending webinar %s? (Y/N) > ' %
                       (len(emails), zoomWebinarId))
    if answer.upper().strip() not in ('Y', 'YES'):
        return 0

    r2c = getRegistrantsToUpdateStatus(zoomWebinarId, emails)
    ret = cancelRegistrants(zoomWebinarId, r2c)
    _logger.info('ret=%s', ret)

    return len(r2c)
Example #41
0
    def __init__(self):
        """Build the input side-panel: parameter tabs, progress bars and
        calculate/cancel buttons, wired to the global task events."""
        # Crude singleton bookkeeping: remember the first instance created.
        if not InputForm.__instance:
            InputForm.__instance = self

        super(InputForm, self).__init__()

        # Subscribe to the task lifecycle / progress events on the global bus.
        Global.event.task_started.connect(self._on_task_started)
        Global.event.task_completed.connect(self._on_task_completed)
        Global.event.task_progressed.connect(self._on_task_progressed)
        Global.event.task_range_progressed.connect(self._on_task_range_progressed)
        Global.event.interface_load_task_params.connect(self._on_interface_load_task_params)

        # Fixed-width vertical panel hosting a tab widget plus the controls.
        self.vl = QVBoxLayout()
        self.vl.setContentsMargins(0,0,0,0)

        self.setLayout(self.vl)
        self.setFixedWidth(290)

        self.tab = QTabWidget()
        self.vl.addWidget(self.tab)

        # Tab 1: model input parameters (supports value ranges).
        self.input_parameters = InputParameters()
        self.input_parameters.ranges_state_changed.connect(self._on_ranges_state_changed)
        self.tab.addTab(self.input_parameters, 'Parameters')

        # Tab 2: observation import parameters.
        self.import_parameters = ImportParameters()
        self.tab.addTab(self.import_parameters, 'Observation')

        # Bottom strip: progress bars (hidden until a task runs) + buttons.
        control_buttons = QWidget()
        control_buttons.setLayout(QVBoxLayout())
        control_buttons.layout().setContentsMargins(0, 0, 0, 0)
        control_buttons.layout().setAlignment(Qt.AlignBottom)

        # Overall task progress.
        self._progress = QProgressBar()
        self._progress.setValue(0)
        self._progress.setTextVisible(True)
        self._progress.setAlignment(Qt.AlignCenter)
        self._progress.hide()
        control_buttons.layout().addWidget(self._progress)

        # Per-range progress, shown for ranged tasks.
        self._range_progress = QProgressBar()
        self._range_progress.setValue(0)
        self._range_progress.setTextVisible(True)
        self._range_progress.setAlignment(Qt.AlignCenter)
        self._range_progress.hide()
        control_buttons.layout().addWidget(self._range_progress)

        self._calculate = QPushButton('Calculate')
        self._calculate.clicked.connect(self._on_calculate)
        control_buttons.layout().addWidget(self._calculate)

        # Cancel stays hidden until a task is actually running.
        self._cancel = QPushButton('Cancel')
        self._cancel.hide()
        self._cancel.clicked.connect(self._on_cancel)
        control_buttons.layout().addWidget(self._cancel)

        self.vl.addWidget(control_buttons)

        # Restore parameters from the previous session when available.
        if exists("./config/last-session.ini") :
            self.load_params("./config/last-session.ini")
Example #42
0
 def read_config(self):
     """Load track_text and backlight_map from the pickle at self.PICKLE_FILE.

     A missing pickle file is treated as "nothing saved yet" and ignored.
     """
     if exists(self.PICKLE_FILE):
         # Context manager ensures the file is closed even when
         # pickle.load fails (original leaked the handle on error).
         with open(self.PICKLE_FILE, 'rb') as pkl_file:
             data = pickle.load(pkl_file)
         self.track_text = data["track_text"]
         self.backlight_map = data["backlight_map"]
Example #43
0
 def setUp(self):
     """Initialize values for this testcase class.

     Starts a fresh test Home Assistant instance and removes any pickle
     left behind by a previous run so every test starts clean.
     """
     self.hass = get_test_home_assistant()
     stale_data = self.hass.config.path("{}.pickle".format('feedreader'))
     if exists(stale_data):
         remove(stale_data)
Example #44
0
def _upload_to_zoo(exp_cfg: dict, collect_dir: str, upload_dir: str) -> None:
    """Upload collected eval, bundle and sample predictions to the model zoo.

    Files are copied one-to-one, directories are synced recursively.
    A missing source artifact is reported and skipped rather than
    aborting the whole upload.

    Args:
        exp_cfg: Experiment config (unused here; kept for interface parity).
        collect_dir: Directory the artifacts were collected into.
        upload_dir: Destination URI prefix for the zoo.
    """
    src_uris = {}
    dst_uris = {}

    src_uris['eval'] = join(collect_dir, 'eval', 'eval.json')
    src_uris['bundle'] = join(collect_dir, 'bundle', 'model-bundle.zip')
    src_uris['sample_predictions'] = join(collect_dir, 'sample-predictions')

    dst_uris['eval'] = join(upload_dir, 'eval.json')
    dst_uris['bundle'] = join(upload_dir, 'model-bundle.zip')
    dst_uris['sample_predictions'] = join(upload_dir, 'sample-predictions')

    assert len(src_uris) == len(dst_uris)

    for k, src in src_uris.items():
        dst = dst_uris[k]
        if not exists(src):
            # BUG FIX: the original logged the failure and then fell
            # through to a bare ValueError() with no message; skip the
            # missing artifact instead.
            console_failure(f'{k}: {src} not found.')
            continue
        if isfile(src):
            console_info(f'Uploading {k} file: {src} to {dst}.')
            upload_or_copy(src, dst)
        elif isdir(src):
            console_info(f'Syncing {k} dir: {src} to {dst}.')
            sync_to_dir(src, dst)
        else:
            raise ValueError(f'{src} is neither a file nor a directory.')
Example #45
0
    def test_exists(self):
        # exists() is False before the file is created, True after.
        self.assertIs(genericpath.exists(support.TESTFN), False)
        f = open(support.TESTFN, "wb")
        try:
            f.write(b"foo")
            f.close()
            self.assertIs(genericpath.exists(support.TESTFN), True)
        finally:
            # NOTE(review): f.close() returns None, so `not f.close()` is
            # always True and close() runs a second time; closing an
            # already-closed file is a no-op, so this is merely quirky
            # cleanup, not a bug.
            if not f.close():
                f.close()
            try:
                os.remove(support.TESTFN)
            except os.error:
                pass

        # Calling exists() with no argument must raise TypeError.
        self.assertRaises(TypeError, genericpath.exists)
Example #46
0
    def do(self, theEnv, aPath, *args, **kargs):
        """
        function handler implementation

        Resolve *aPath* (argument 1, a symbol or string) to an absolute
        file path and feed the file's contents to the environment's
        parser, with the process cwd temporarily switched to the file's
        directory.  Returns the symbol FALSE when parsing fails.
        (Python 2 source: `except Exception, e` / `print >>`.)
        """

        aPath = self.resolve(
            theEnv,
            self.semplify(theEnv, aPath, types.Lexeme,
                          ('1', 'symbol or string')))

        aPath = os.path.abspath(aPath)

        if not exists(aPath):
            raise InvalidArgValueError(
                "Function load was unable to open file %s" % aPath)

        # Remember the cwd so the failure path can restore it.
        oldcwd = os.getcwd()

        os.chdir(os.path.dirname(aPath))

        # 'rU': universal-newlines read mode (Python 2 idiom).
        aFile = open(aPath, 'rU')
        aString = aFile.read()

        try:
            parsed = theEnv.network.getParser().parse(aString, extended=True)
        except Exception, e:
            # Pretty-print the parse error to the error resource, restore
            # the original cwd and signal failure to the caller.
            print >> theEnv.RESOURCES['werror'], theEnv.network.getParser(
            ).ExceptionPPrint(e, aString)
            os.chdir(oldcwd)
            return types.Symbol('FALSE')
def last_few(pscore):
    """Append the current score to output/last-100.txt and print the last 10.

    Keeps a rolling window of the 100 most recent
    (timestamp, git hash, model name, score) rows.
    """
    history = []
    lastfile = 'output/last-100.txt'
    # Load previously recorded rows, if any.
    if exists(lastfile):
        with open(lastfile, 'r') as f:
            for line in f:
                dt, ghash, name, score = [i.strip() for i in line.split('\t')]
                history.append([dt, ghash, name, float(score)])
    # Record the new score, then clip to the most recent 100 entries.
    history.append([
        datetime.utcnow().strftime('%Y%m%d%H%M%S'),
        git_hash,
        model_name,
        pscore
    ])
    history = history[-100:]
    with open(lastfile, 'w') as f:
        for row in history:
            f.write('{}\n'.format('\t'.join([str(i) for i in row])))
        f.flush()
    print('')
    print('last 10 scores:')
    for line in history[-10:]:
        print('  {} {:>15} {:>25} {:>20}'.format(
            line[0],
            line[1],
            line[2],
            '{:.5f}'.format(line[3])
        ))
Example #48
0
def load(default):
    """Return the object pickled in PICKLE_FILE, or *default* when absent.

    Args:
        default: Value returned when PICKLE_FILE does not exist.

    Returns:
        The unpickled object, or *default*.
    """
    if not exists(PICKLE_FILE):
        return default
    # `with` guarantees the handle is closed even when pickle.load raises
    # (the original open/close pair leaked it on error).
    with open(PICKLE_FILE, 'rb') as pkl_file:
        return pickle.load(pkl_file)
    def test_exists(self):
        # exists() reports False before the file is created, True after.
        self.assertIs(genericpath.exists(support.TESTFN), False)
        f = open(support.TESTFN, "wb")
        try:
            f.write(b"foo")
            f.close()
            self.assertIs(genericpath.exists(support.TESTFN), True)
        finally:
            # NOTE(review): close() returns None, so the condition is always
            # True and close() is invoked twice; harmless since closing a
            # closed file is a no-op.
            if not f.close():
                f.close()
            try:
                os.remove(support.TESTFN)
            except os.error:
                pass

        # exists() without an argument must raise TypeError.
        self.assertRaises(TypeError, genericpath.exists)
def logstash_filter_run(inputs,
                        filter_def,
                        logstash_bin=None,
                        remove_tempdir=True):
    """
    Run a bunch of json through logstash given the filter definition
    :param inputs: a list of dicts
    :param filter_def: logstash filter definition as a string
    :param logstash_bin: logstash executable path. By default will try
        LOGSTASH_BIN_ALTERNATIVES
    :param remove_tempdir: remove temporary working directory after done
    :return: a list of dicts, the results
    :raises RuntimeError: when no logstash binary is found, logstash exits
        non-zero, or the output line count differs from the input count
    """
    input_jsons = [json.dumps(d) for d in inputs]
    assert all(s[0] == '{'
               for s in input_jsons), "inputs must be a list of dicts"
    if logstash_bin is None:
        for fn in LOGSTASH_BIN_ALTERNATIVES:
            if exists(fn):
                logstash_bin = fn
                break
        else:
            raise RuntimeError("Couldn't find logstash executable")

    # Lay out an isolated logstash home: data, config and pipeline dirs.
    workdir = mkdtemp(prefix='logstash-test-')
    data_dir = join(workdir, 'data')
    config_dir = join(workdir, 'config')
    pipeline_dir = join(workdir, 'pipeline.d')
    os.mkdir(data_dir)
    os.mkdir(config_dir)
    os.mkdir(pipeline_dir)
    # Empty logstash.yml keeps logstash from complaining about settings.
    open(join(config_dir, 'logstash.yml'), 'w').close()
    with open(join(config_dir, 'pipelines.yml'), 'w') as f:
        f.write(PIPELINES_YML.format(pipeline_dir))
    output_fn = join(workdir, 'output')
    with open(join(pipeline_dir, 'io.conf'), 'w') as f:
        f.write(INPUT_OUTPUT_CONF % output_fn)
    with open(join(pipeline_dir, 'filter.conf'), 'w') as f:
        f.write(filter_def)
    inputs_s = ''.join(s + '\n' for s in input_jsons)
    args = [
        logstash_bin, '--log.level=warn', '--path.settings', config_dir,
        '--path.data', data_dir
    ]
    print(' '.join(args))
    popen = Popen(args, stdin=PIPE)
    popen.communicate(inputs_s.encode('utf8'))
    # communicate() already waited for exit; wait() just fetches the rc.
    rc = popen.wait()
    if rc != 0:
        raise RuntimeError(
            "logstash returned non-zero return code {}".format(rc))
    # BUG FIX: read the results with a context manager — the original
    # `list(open(output_fn))` leaked the file handle.
    with open(output_fn) as f:
        output_lines = f.readlines()
    if len(output_lines) != len(inputs):
        raise RuntimeError("Received {} outputs, expecting {}".format(
            len(output_lines), len(inputs)))
    outputs = [json.loads(line) for line in output_lines]
    if remove_tempdir:
        rmtree(workdir)
    return outputs
Example #51
0
def main():
    """
    The main function is the entry point for the program. It is called when embyupdate starts up
    and checks to see if there are any updates available. If there are, it will download them
    and install them.
    """

    # pylint: disable=C0103

    # Sets the version # for the command line -v/--version response
    VERSIONNUM = f"{__version__} - {__status__}"

    # Checks for python version, exit if not greater than 3.6
    pythonversion.python_version_check()

    # Creates the default config object
    configfix = config.Config()

    # Fixes pre version 4.0 config files
    configfix.config_fix(VERSIONNUM)

    # First we're going to force the working path to be where the script lives
    os.chdir(sys.path[0])

    # This will test to see if the DB exist. If it doesn't it will create it and
    # launch the config setup process

    if not exists('./db/embyupdate.db'):

        print()
        print(
            f"Database does {c.fg.red}NOT{c.end} exist, creating database...")
        createdb.create_db(VERSIONNUM)
        print()
        print("Starting config setup...")
        configsetup.config_setup()

    else:
        print(f"Database exists! {c.fg.green}CHECK PASSED{c.end}!")

    # Checks for command line arguments
    arguments.read_args(VERSIONNUM)

    # We'll get the config from the DB
    configobj: dbobjects.ConfigObj = dbobjects.ConfigObj().get_config()
    configobj.selfupdate.version = VERSIONNUM

    # Now well try and update the app if the user chose that option
    if configobj.selfupdate.runupdate is True:
        selfupdate.self_update(configobj)

    # Ask the server/API for the newest available Emby version number.
    configobj = api.get_main_online_version(configobj)

    # Ok, we've got all the info we need. Now we'll test if we even need to update or not

    update_needed = updatecheck.check_for_update(configobj)  # pylint: disable=E1111

    if update_needed:
        install.update_emby(configobj)
Example #52
0
 def __load(config, location):
     """Merge the commented-JSON config at *location* into *config*.

     Missing files are skipped silently; parse errors are logged but not
     raised, so a broken config file cannot crash startup.
     (Python 2 source: `except Exception, e`.)
     """
     if exists(location) and isfile(location):
         try:
             config.update(load_commented_json(location))
             LOG.debug("Configuration '%s' loaded" % location)
         except Exception, e:
             LOG.error("Error loading configuration '%s'" % location)
             LOG.error(repr(e))
Example #53
0
 def file(self, name, index=None):
     """Return the resolved path for *name*, creating its parent directory.

     The sqlite3 in-memory sentinel (MEMORY) is passed through untouched.
     """
     resolved = self.path(name, index=index)
     # special case sqlite3 in-memory database
     if resolved == MEMORY:
         return resolved
     parent = dirname(resolved)
     if not exists(parent):
         makedirs(parent)
     return resolved
Example #54
0
    def setUp(self):
        """Point XDG data at the test directory and wipe any stale DB file."""
        os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH

        # start every test from a clean state
        stale_db = TEST_TEMP_DBFILE_PATH
        if exists(stale_db):
            os.remove(stale_db)

        self.bookmarks = TEST_BOOKMARKS
Example #55
0
def create_virtualenv():
    """Create the project's virtualenv on the remote host (Fabric task).

    When a virtualenv named env.proj_name already exists, asks the operator
    for confirmation before removing and recreating it; returns False when
    the operator declines.  (Python 2 source: `print` statement.)
    """
    with cd(env.venv_home):
        if exists(env.proj_name):
            if not confirm("Virtualenv exists: %s\n Do you want to replace it?" % env.proj_name):
                print "\nAborting!"
                return False
            removeapp()
        sudo("virtualenv %s --distribute" % env.proj_name)
Example #56
0
	def __init__(self, name):
		"""Validate theme *name*, locate its base template and load metadata.

		NOTE(review): validation uses `assert`, which is stripped under
		`python -O`; explicit raises would be safer, but callers may rely
		on AssertionError, so this is left unchanged.
		"""
		assert match(r'^[a-zA-Z][a-zA-Z0-9\.\-,_]+$', name), \
			'Theme names should contain only letters, numbers and "_-+,." (got "{0:s}")'.format(name)
		# A theme is only usable when its base.html template exists on disk.
		self.base_template = join(settings.SV_THEMES_DIR, 'templates', name, 'base.html')
		assert exists(self.base_template), 'theme {0:s}: can\'t find "{1:s}"'.format(name, self.base_template)
		# Optional metadata, filled in by load().
		self.readme = self.description = self.credits = self.license = None
		self.name = name
		self.load()
Example #57
0
    def main(self):
        """ Runs all methods to gather, align, analyse and save results """
        startTime = time.time()
        self.log.info("Starting")

        # Stage 1: fetch sequences from Entrez only when no cached download.
        if (not exists('HPV81.fa')):
            self.log.info("Generating _query")
            self.generate_entrez_query()

            self.log.info("Retrieving Ids")
            self.retrieve_ids()

            self.log.info("Retrieving Corrfinates for gene")
            self.retrieve_data()

        # Stage 2: align only when no cached alignment file exists.
        if (not exists('HPV-aligned.fa')):
            self.log.info("Removing duplicates")
            self.remove_duplicates()

            self.log.info("Saving all to common file")
            self.save_common_unaligned()

            self.log.info("Aligning")
            self.align()
        else:
            self.log.info("Using cached alignment")

        # FIXME: debug mode
        self.brute_force()

        difference = round(time.time() - startTime, 3)
        self.log.info("Finished: " + str(difference) + " seconds")

        # NOTE(review): the return below makes everything after it
        # unreachable — deliberately left in place while debugging
        # (see the FIXME above); restore by removing this return.
        return

        self.log.info("Loading sequences for analysis")
        self.load_aligned()
        self.show_sequences()

        self.log.info("Calculating similarity")
        self.update_conservation()

        self.log.info("Candidates")
        self.update_candidates()

        self.show_gui()
Example #58
0
def load(default):
    """Return the object pickled in PICKLE_FILE, or *default* when absent.

    Args:
        default: Value returned when PICKLE_FILE does not exist.

    Returns:
        The unpickled object, or *default*.
    """
    if not exists(PICKLE_FILE):
        return default
    # `with` closes the handle even on error; the dead `cnt = len(data)`
    # local from the original has been dropped.
    with open(PICKLE_FILE, 'rb') as pcklfile:
        return pickle.load(pcklfile)
Example #59
0
 def get():
     """Return the saved version info dict, or a dict of None defaults.

     Reads VersionManager.__location as JSON.  Any read/parse failure is
     logged and the default dict is returned instead.
     """
     if exists(VersionManager.__location) and isfile(VersionManager.__location):
         try:
             with open(VersionManager.__location) as f:
                 return json.load(f)
         except Exception:
             # BUG FIX: narrowed from a bare `except:`, which also
             # swallowed KeyboardInterrupt and SystemExit.
             LOG.error("Failed to load version from '%s'" % VersionManager.__location)
     return {"coreVersion": None, "enclosureVersion": None}
Example #60
0
def ds_traverse(rootds, parent=None, json=None,
                recurse_datasets=False, recurse_directories=False,
                long_=False):
    """Hierarchical dataset traverser

    Parameters
    ----------
    rootds: Dataset
      Root dataset to be traversed
    parent: Dataset
      Parent dataset of the current rootds
    json: str, optional
      Rendering mode, passed through to fs_traverse/fs_render
      (presumably 'file'/'display'-style output selection — confirm
      against those helpers' signatures)
    recurse_datasets: bool
      Recurse into subdatasets of the root dataset
    recurse_directories: bool
      Recurse into subdirectories of the current dataset
      In both of above cases, if False, they will not be explicitly
      recursed but data would be loaded from their meta-data files
    long_: bool
      Accepted for interface compatibility; not used in this function body

    Returns
    -------
    list of dict
      extracts and returns a (recursive) list of dataset(s) info at path
    """
    # extract parent info to pass to traverser
    fsparent = fs_extract(parent.path, parent.repo, basepath=rootds.path) \
        if parent else None

    # (recursively) traverse file tree of current dataset
    fs = fs_traverse(
        rootds.path, rootds.repo,
        subdatasets=list(rootds.subdatasets(result_xfm='relpaths')),
        render=False,
        parent=fsparent,
        # XXX note that here I kinda flipped the notions!
        recurse_datasets=recurse_datasets,
        recurse_directories=recurse_directories,
        json=json
    )

    # BUT if we are recurse_datasets but not recurse_directories
    #     we need to handle those subdatasets then somehow since
    #     otherwise we might not even get to them?!

    fs['nodes'][0]['size'] = fs['size']  # update self's updated size in nodes sublist too!

    # add dataset specific entries to its dict
    rootds_model = GitModel(rootds.repo)
    fs['tags'] = rootds_model.describe
    fs['branch'] = rootds_model.branch
    # Git index mtime doubles as a cheap "last modified" stamp; empty
    # string when the index file is absent.
    index_file = opj(rootds.path, '.git', 'index')
    fs['index-mtime'] = time.strftime(
        u"%Y-%m-%d %H:%M:%S",
        time.localtime(getmtime(index_file))) if exists(index_file) else ''

    # render current dataset
    lgr.info('Dataset: %s' % rootds.path)
    fs_render(fs, json=json, ds_path=rootds.path)
    return fs