Example #1
    def processAlgorithm(self, progress):
        commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lasground_new")]
        self.addParametersVerboseCommands(commands)
        self.addParametersPointInputCommands(commands)
        self.addParametersIgnoreClass1Commands(commands)
        self.addParametersHorizontalAndVerticalFeetCommands(commands)
        method = self.getParameterValue(lasground_new.TERRAIN)
        if (method == 5):
            commands.append("-step")
            commands.append(str(self.getParameterValue(lasground_new.STEP)))
            commands.append("-bulge")
            commands.append(str(self.getParameterValue(lasground_new.BULGE)))
            commands.append("-spike")
            commands.append(str(self.getParameterValue(lasground_new.SPIKE)))
            commands.append("-spike_down")
            commands.append(str(self.getParameterValue(lasground_new.DOWN_SPIKE)))
            commands.append("-offset")
            commands.append(str(self.getParameterValue(lasground_new.OFFSET)))
        else:
            commands.append("-" + lasground_new.TERRAINS[method])
        granularity = self.getParameterValue(lasground_new.GRANULARITY)
        if (granularity != 1):
            commands.append("-" + lasground_new.GRANULARITIES[granularity])
        self.addParametersPointOutputCommands(commands)
        self.addParametersAdditionalCommands(commands)

        LAStoolsUtils.runLAStools(commands, progress)
Example #2
    def processAlgorithm(self, progress):
        commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lasheight")]
        self.addParametersVerboseCommands(commands)
        self.addParametersPointInputCommands(commands)
        self.addParametersIgnoreClass1Commands(commands)
        self.addParametersIgnoreClass2Commands(commands)
        if self.getParameterValue(lasheight_classify.REPLACE_Z):
            commands.append("-replace_z")
        classify = self.getParameterValue(lasheight_classify.CLASSIFY_BELOW)
        if (classify != 0):
            commands.append("-classify_below")
            commands.append(str(self.getParameterValue(lasheight_classify.CLASSIFY_BELOW_HEIGHT)))
            commands.append(str(classify))
        classify = self.getParameterValue(lasheight_classify.CLASSIFY_BETWEEN1)
        if (classify != 0):
            commands.append("-classify_between")
            commands.append(str(self.getParameterValue(lasheight_classify.CLASSIFY_BETWEEN1_HEIGHT_FROM)))
            commands.append(str(self.getParameterValue(lasheight_classify.CLASSIFY_BETWEEN1_HEIGHT_TO)))
            commands.append(str(classify))
        classify = self.getParameterValue(lasheight_classify.CLASSIFY_BETWEEN2)
        if (classify != 0):
            commands.append("-classify_between")
            commands.append(str(self.getParameterValue(lasheight_classify.CLASSIFY_BETWEEN2_HEIGHT_FROM)))
            commands.append(str(self.getParameterValue(lasheight_classify.CLASSIFY_BETWEEN2_HEIGHT_TO)))
            commands.append(str(classify))
        classify = self.getParameterValue(lasheight_classify.CLASSIFY_ABOVE)
        if (classify != 0):
            commands.append("-classify_above")
            commands.append(str(self.getParameterValue(lasheight_classify.CLASSIFY_ABOVE_HEIGHT)))
            commands.append(str(classify))
        self.addParametersPointOutputCommands(commands)
        self.addParametersAdditionalCommands(commands)

        LAStoolsUtils.runLAStools(commands, progress)
Example #3
    def get(self, section, key, **kwargs):
        section = str(section).lower()
        key = str(key).lower()

        d = self.defaults

        # first check environment variables
        option = self._get_env_var_option(section, key)
        if option:
            return option

        # ...then the config file
        if self.has_option(section, key):
            return expand_env_var(
                ConfigParser.get(self, section, key, **kwargs))

        # ...then commands
        option = self._get_cmd_option(section, key)
        if option:
            return option

        # ...then the defaults
        if section in d and key in d[section]:
            return expand_env_var(d[section][key])

        else:
            logging.warning("section/key [{section}/{key}] not found "
                            "in config".format(**locals()))

            raise AirflowConfigException(
                "section/key [{section}/{key}] not found "
                "in config".format(**locals()))
Example #4
  def test_urls(self):
    default_version_tool = DefaultVersion.global_instance()
    self.assertIsNone(default_version_tool.get_external_url_generator())

    with self.assertRaises(BinaryUtil.BinaryResolutionError) as cm:
      default_version_tool.select()
    err_msg = str(cm.exception)
    self.assertIn(BinaryToolFetcher.BinaryNotFound.__name__, err_msg)
    self.assertIn(
      "Failed to fetch default_version_test_tool binary from any source:",
      err_msg)
    self.assertIn(
      "Failed to fetch binary from https://binaries.example.org/bin/default_version_test_tool/XXX/default_version_test_tool:",
      err_msg)

    custom_urls_tool = CustomUrls.global_instance()
    self.assertEqual(custom_urls_tool.version(), 'v2.3')

    with self.assertRaises(BinaryUtil.BinaryResolutionError) as cm:
      custom_urls_tool.select()
    err_msg = str(cm.exception)
    self.assertIn(BinaryToolFetcher.BinaryNotFound.__name__, err_msg)
    self.assertIn(
      "Failed to fetch custom_urls_tool binary from any source:",
      err_msg)
    self.assertIn(
      "Failed to fetch binary from https://custom-url.example.org/files/custom_urls_tool-v2.3-zzz:",
      err_msg)
    self.assertIn(
      "Failed to fetch binary from https://custom-url.example.org/files/custom_urls_tool-v2.3-zzz-alternate:",
      err_msg)
Example #5
    def selectFile(self):
        fileFilter = self.output.getFileFilter(self.alg)

        settings = QSettings()
        if settings.contains('/Processing/LastOutputPath'):
            path = settings.value('/Processing/LastOutputPath')
        else:
            path = ProcessingConfig.getSetting(ProcessingConfig.OUTPUT_FOLDER)

        encoding = settings.value('/Processing/encoding', 'System')
        fileDialog = QgsEncodingFileDialog(
            self, self.tr('Save file'), path, fileFilter, encoding)
        fileDialog.setFileMode(QFileDialog.AnyFile)
        fileDialog.setAcceptMode(QFileDialog.AcceptSave)
        fileDialog.setOption(QFileDialog.DontConfirmOverwrite, False)

        if fileDialog.exec_() == QDialog.Accepted:
            files = fileDialog.selectedFiles()
            encoding = str(fileDialog.encoding())
            self.output.encoding = encoding
            fileName = str(files[0])
            selectedFileFilter = str(fileDialog.selectedNameFilter())
            if not fileName.lower().endswith(
                    tuple(re.findall("\\*(\\.[a-z]{1,10})", fileFilter))):
                ext = re.search("\\*(\\.[a-z]{1,10})", selectedFileFilter)
                if ext:
                    fileName += ext.group(1)
            self.leText.setText(fileName)
            settings.setValue('/Processing/LastOutputPath',
                              os.path.dirname(fileName))
            settings.setValue('/Processing/encoding', encoding)
Example #6
File: aspect.py Project: fritsvanveen/QGIS
    def getConsoleCommands(self):
        arguments = ['aspect']
        arguments.append(str(self.getParameterValue(self.INPUT)))
        output = str(self.getOutputValue(self.OUTPUT))
        arguments.append(output)

        arguments.append('-of')
        arguments.append(GdalUtils.getFormatShortNameFromFilename(output))

        arguments.append('-b')
        arguments.append(str(self.getParameterValue(self.BAND)))

        if self.getParameterValue(self.COMPUTE_EDGES):
            arguments.append('-compute_edges')

        if self.getParameterValue(self.ZEVENBERGEN):
            arguments.append('-alg')
            arguments.append('ZevenbergenThorne')

        if self.getParameterValue(self.TRIG_ANGLE):
            arguments.append('-trigonometric')

        if self.getParameterValue(self.ZERO_FLAT):
            arguments.append('-zero_for_flat')

        return ['gdaldem', GdalUtils.escapeAndJoin(arguments)]
Example #7
    def extract_images(self, overwrite=False):
        from neon.data import CIFAR10
        from PIL import Image
        dataset = dict()
        cifar10 = CIFAR10(path=self.out_dir, normalize=False)
        dataset['train'], dataset['val'], _ = cifar10.load_data()

        for setn in ('train', 'val'):
            data, labels = dataset[setn]

            img_dir = os.path.join(self.out_dir, setn)
            ulabels = np.unique(labels)
            for ulabel in ulabels:
                subdir = os.path.join(img_dir, str(ulabel))
                if not os.path.exists(subdir):
                    os.makedirs(subdir)

            for idx in range(data.shape[0]):
                im = np.pad(data[idx].reshape((3, 32, 32)), self.pad_width, mode='mean')
                im = np.uint8(np.transpose(im, axes=[1, 2, 0]).copy())
                im = Image.fromarray(im)
                path = os.path.join(img_dir, str(labels[idx][0]), str(idx) + '.png')
                im.save(path, format='PNG')

            if setn == 'train':
                self.pixel_mean = list(data.mean(axis=0).reshape(3, -1).mean(axis=1))
                self.pixel_mean.reverse()  # We will see this in BGR order b/c of opencv
Example #8
File: GridInvDist.py Project: medspx/QGIS
    def getConsoleCommands(self):
        arguments = ["-l"]
        arguments.append(os.path.basename(os.path.splitext(str(self.getParameterValue(self.INPUT)))[0]))

        fieldName = self.getParameterValue(self.Z_FIELD)
        if fieldName is not None and fieldName != "":
            arguments.append("-zfield")
            arguments.append(fieldName)

        params = "invdist"
        params += ":power=%s" % self.getParameterValue(self.POWER)
        params += ":smothing=%s" % self.getParameterValue(self.SMOTHING)
        params += ":radius1=%s" % self.getParameterValue(self.RADIUS_1)
        params += ":radius2=%s" % self.getParameterValue(self.RADIUS_2)
        params += ":angle=%s" % self.getParameterValue(self.ANGLE)
        params += ":max_points=%s" % self.getParameterValue(self.MAX_POINTS)
        params += ":min_points=%s" % self.getParameterValue(self.MIN_POINTS)
        params += ":nodata=%s" % self.getParameterValue(self.NODATA)

        arguments.append("-a")
        arguments.append(params)
        arguments.append("-ot")
        arguments.append(self.TYPE[self.getParameterValue(self.RTYPE)])
        arguments.append(str(self.getParameterValue(self.INPUT)))
        arguments.append(str(self.getOutputValue(self.OUTPUT)))

        return ["gdal_grid", GdalUtils.escapeAndJoin(arguments)]
Example #9
    def processAlgorithm(self, progress):
        commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lascontrol")]
        self.addParametersVerboseCommands(commands)
        self.addParametersPointInputCommands(commands)
        file = self.getParameterValue(lascontrol.CONTROL_POINT_FILE)
        if file is not None:
            commands.append("-cp")
            commands.append('"' + file + '"')
        parse = self.getParameterValue(lascontrol.PARSE_STRING)
        if parse is not None:
            commands.append("-parse")
            commands.append(parse)
        use_point = self.getParameterValue(lascontrol.USE_POINTS)
        if use_point > 0:
            commands.append("-keep_class")
            commands.append(str(2))
            if use_point > 1:
                commands.append(str(8))
                if use_point > 2:
                    commands.append(str(6))
        if self.getParameterValue(lascontrol.ADJUST_Z):
            commands.append("-adjust_z")
            commands.append("-odix _adjusted")
            commands.append("-olaz")
        self.addParametersAdditionalCommands(commands)

        LAStoolsUtils.runLAStools(commands, progress)
Example #10
  def execute(self):
    if self.get_options().skip:
      return

    if self.get_options().transitive:
      targets = self.context.targets(self._is_errorprone_target)
    else:
      targets = [t for t in self.context.target_roots if self._is_errorprone_target(t)]

    targets = list(set(targets))

    target_count = 0
    errorprone_failed = False
    with self.invalidated(targets, invalidate_dependents=True) as invalidation_check:
      total_targets = len(invalidation_check.invalid_vts)
      for vt in invalidation_check.invalid_vts:
        target_count += 1
        self.context.log.info('[{}/{}] {}'.format(
          str(target_count).rjust(len(str(total_targets))),
          total_targets,
          vt.target.address.spec))

        result = self.errorprone(vt.target)
        if result != 0:
          errorprone_failed = True
          if self.get_options().fail_fast:
            break
        else:
          vt.update()

      if errorprone_failed:
        raise TaskError('ErrorProne checks failed')
Example #11
File: las2iso.py Project: 3liz/Quantum-GIS
    def processAlgorithm(self, progress):
        commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "las2iso")]
        self.addParametersVerboseCommands(commands)
        self.addParametersPointInputCommands(commands)
        smooth = self.getParameterValue(las2iso.SMOOTH)
        if smooth != 0:
            commands.append("-smooth")
            commands.append(str(smooth))
        commands.append("-iso_every")
        commands.append(str(self.getParameterValue(las2iso.ISO_EVERY)))
        simplify_length = self.getParameterValue(las2iso.SIMPLIFY_LENGTH)
        if simplify_length != 0:
            commands.append("-simplify_length")
            commands.append(str(simplify_length))
        simplify_area = self.getParameterValue(las2iso.SIMPLIFY_AREA)
        if simplify_area != 0:
            commands.append("-simplify_area")
            commands.append(str(simplify_area))
        clean = self.getParameterValue(las2iso.CLEAN)
        if clean != 0:
            commands.append("-clean")
            commands.append(str(clean))
        self.addParametersVectorOutputCommands(commands)
        self.addParametersAdditionalCommands(commands)

        LAStoolsUtils.runLAStools(commands, progress)
Example #12
def _dump_model(model, attrs=None):
    """
    Dump the model fields for debugging.
    """

    fields = []

    for field in model._meta.fields:
        fields.append((field.name, str(getattr(model, field.name))))

    if attrs is not None:
        for attr in attrs:
            fields.append((attr, str(getattr(model, attr))))

    for field in model._meta.many_to_many:
        vals = getattr(model, field.name)
        fields.append((field.name, '{val} ({count})'.format(
            val=', '.join(map(str, vals.all())),
            count=vals.count(),
        )))

    print(', '.join(
        '{0}={1}'.format(field, value)
        for field, value in fields
    ))
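A quick way to exercise _dump_model without a database is a duck-typed stand-in exposing the _meta attributes the function reads (illustrative only, not a real Django model):

from types import SimpleNamespace

meta = SimpleNamespace(
    fields=[SimpleNamespace(name='id'), SimpleNamespace(name='title')],
    many_to_many=[],
)
fake = SimpleNamespace(_meta=meta, id=1, title='Example')

_dump_model(fake)                   # prints: id=1, title=Example
_dump_model(fake, attrs=['title'])  # also dumps explicitly listed attributes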
Example #13
def build_models(corpus, corpus_filename, model_path, context_type, krange,
                 n_iterations=200, n_proc=1, seed=None, dry_run=False):
    basefilename = os.path.basename(corpus_filename).replace('.npz', '')
    basefilename += "-LDA-K%s-%s-%d.npz" % ('{0}', context_type, n_iterations)
    basefilename = os.path.join(model_path, basefilename)

    if n_proc == 1 and type(seed) == int:
        seeds = seed
        fileparts = basefilename.split('-')
        fileparts.insert(-1, str(seed))
        basefilename = '-'.join(fileparts)
    elif type(seed) == int:
        seeds = [seed + p for p in range(n_proc)]
        fileparts = basefilename.split('-')
        fileparts.insert(-1, str(seed))
        basefilename = '-'.join(fileparts)
    else:
        seeds = None

    if not dry_run:
        from vsm.model.lda import LDA
        for k in krange:
            print("Training model for k={0} Topics with {1} Processes".format(k, n_proc))
            m = LDA(corpus, context_type, K=k, multiprocessing=(n_proc > 1),
                    seed_or_seeds=seeds, n_proc=n_proc)
            m.train(n_iterations=n_iterations)
            m.save(basefilename.format(k))
            print(" ")

    return basefilename
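A hypothetical invocation (the corpus object and paths are placeholders supplied by the vsm package); with dry_run=True nothing is trained, but the returned template shows where each k's model would be saved:

template = build_models(corpus, 'corpus.npz', '/tmp/models',
                        context_type='document', krange=[20, 40],
                        n_iterations=200, n_proc=1, seed=42, dry_run=True)
print(template.format(20))  # path the k=20 model would be saved under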
Example #14
    def write(self, message):

        is_stream = hasattr(message, "read_bytes")

        # a message must either be a stream or expose a callable to_bytes()
        if not callable(getattr(message, "to_bytes", None)) and not is_stream:
            raise TypeError("invalid message: ({0})".format(message))

        logger.debug("=> %s", message)
        try:

            if not is_stream:
                self._socket().sendall(message.to_bytes())
            else:
                # read to end in chunks
                while True:
                    data = message.read_bytes()
                    if len(data) == 0:
                        break

                    self._socket().sendall(data)

        except Exception as e:
            self.close_socket()
            if str(e) == "unsupported authentication method: 9":
                raise errors.ConnectionError("Error during authentication. Your password might be expired.")
            else:
                raise errors.ConnectionError(str(e))
Example #15
 def clearMemory(self):
     """
     """
     self.sendMessage('SPIE')
     self.pause()
     reply = self.getResponse(timeout=10)
     # should return either FRMT or ESEC to indicate it started
     if reply.startswith('FRMT'):
         logging.info("BBTK.clearMemory(): "
                      "Starting full format of BBTK memory")
     elif reply.startswith('ESEC'):
         logging.info("BBTK.clearMemory(): "
                      "Starting quick erase of BBTK memory")
     else:
         logging.error("BBTK.clearMemory(): "
                       "didn't get a reply from %s" % str(self.com))
         return False
     # we aren't in a time-critical period so flush messages
     logging.flush()
     # now wait until we get told 'DONE'
     self.com.timeout = 20
     retVal = self.com.readline()
     if retVal.startswith("DONE"):
         logging.info("BBTK.clearMemory(): completed")
         # we aren't in a time-critical period so flush messages
         logging.flush()
         return True
     else:
         logging.error("BBTK.clearMemory(): "
                       "Stalled waiting for %s" % str(self.com))
         # we aren't in a time-critical period so flush messages
         logging.flush()
         return False
Example #16
File: parameters.py Project: ndavid/QGIS
def getParameterFromString(s):
    # Try the parameter definitions used in description files
    if '|' in s and (s.startswith("Parameter") or s.startswith("*Parameter")):
        isAdvanced = False
        if s.startswith("*"):
            s = s[1:]
            isAdvanced = True
        tokens = s.split("|")
        params = [t if str(t) != str(None) else None for t in tokens[1:]]
        try:
            clazz = getattr(sys.modules[__name__], tokens[0])
            param = clazz(*params)
            if isAdvanced:
                param.setFlags(param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
            return param
        except Exception:
            return None
    else:  # try script syntax

        # try native method
        param = QgsProcessingParameters.parameterFromScriptCode(s)
        if param:
            return param

        # try Python duck-typed method
        for paramClass in paramClasses:
            try:
                param = paramClass.fromScriptCode(s)
                if param is not None:
                    return param
            except Exception:
                pass
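For description-file lines, the first token names a parameter class and the remaining tokens are passed positionally to its constructor; a leading '*' marks the parameter as advanced. A hedged usage sketch (the exact token layout depends on the target class's signature):

param = getParameterFromString('*ParameterBoolean|SKIP_CHECKS|Skip checks|False')
if param is not None:
    print(param.flags())  # FlagAdvanced is set because of the leading '*'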
Example #17
 def processAlgorithm(self, progress):
     commands = [os.path.join(FusionUtils.FusionPath(), 'TINSurfaceCreate.exe')]
     commands.append('/verbose')
     class_var = self.getParameterValue(self.CLASS)
     if str(class_var).strip():
         commands.append('/class:' + str(class_var))
     return_sel = self.getParameterValue(self.RETURN)
     if str(return_sel).strip():
         commands.append('/return:' + str(return_sel))
     outFile = self.getOutputValue(self.OUTPUT)
     commands.append(outFile)
     commands.append(str(self.getParameterValue(self.CELLSIZE)))
     commands.append(self.UNITS[self.getParameterValue(self.XYUNITS)][0])
     commands.append(self.UNITS[self.getParameterValue(self.ZUNITS)][0])
     commands.append('0')
     commands.append('0')
     commands.append('0')
     commands.append('0')
     files = self.getParameterValue(self.INPUT).split(';')
     if len(files) == 1:
         commands.append(self.getParameterValue(self.INPUT))
     else:
         commands.extend(files)
     FusionUtils.runFusion(commands, progress)
     commands = [os.path.join(FusionUtils.FusionPath(), 'DTM2ASCII.exe')]
     commands.append('/raster')
     commands.append(outFile)
     commands.append(self.getOutputValue(self.OUTPUT))
     p = subprocess.Popen(commands, shell=True)
     p.wait()
Example #18
def draw_all_training( dataDirectory ):
    test_samples = draw_test_samples( dataDirectory )

    availableGames = []
    fileinfos = index_processor.get_fileInfos( dataDirectory )
    for fileinfo in fileinfos:
        filename = fileinfo['filename']
        year = int( filename.split('-')[1].split('_')[0] )
        if year > 2014:
            continue  # ignore after 2014, to keep the set of games fixed
        numgames = fileinfo['numGames']
        for i in range( numgames ):
            availableGames.append( ( filename, i ) )
    print( 'total num games: ' + str( len( availableGames ) ) )

    # need to seed random first
    random.seed(0)
    # I suppose the first 100 samples will be the testing ones :-P
    # anyway, just skip those....
    
    samplesSet = set()
    for sample in availableGames:
        if sample not in test_samples:
            samplesSet.add( sample )
    print( 'Drawn all samples, ie ' + str( len( samplesSet ) ) + ' samples:' )
    # copy to list
    samples = list( samplesSet )
    return samples
Example #19
def draw_samples( dataDirectory, numSamples ):
    # draws filename, and game index number, from the available games
    # without replacement (so we should check for dupes :-( )

    # first we should create a single list, containing pairs of ( filename, gameindex )
    # then we will draw samples from this
    # we should restrict the available games to something static, eg everything up to dec 2014, inclusive
    availableGames = []
    fileinfos = index_processor.get_fileInfos( dataDirectory )
    for fileinfo in fileinfos:
        filename = fileinfo['filename']
        year = int( filename.split('-')[1].split('_')[0] )
        if year > 2014:
            continue  # ignore after 2014, to keep the set of games fixed
        numgames = fileinfo['numGames']
        for i in range( numgames ):
            availableGames.append( ( filename, i ) )
    print( 'total num games: ' + str( len( availableGames ) ) )

    # need to seed random first
    random.seed(0)
    
    samplesSet = set()
    while len( samplesSet ) < numSamples:
        sample = random.choice( availableGames )
        if sample not in samplesSet:
            samplesSet.add( sample )
    print( 'Drawn ' + str( numSamples ) + ' samples:' )
    # copy to list
    samples = list( samplesSet )
    return samples
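A hypothetical call (the data directory is a placeholder and assumes the index_processor module used above is importable). Because the RNG is reseeded with 0 on every call, the draw is reproducible:

samples = draw_samples('/data/kgs', numSamples=100)
print(len(samples))  # 100 unique (filename, game_index) pairs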
Example #20
    def processAlgorithm(self, progress):
        commands = []
        commands.append(os.path.join(TauDEMUtils.mpiexecPath(), 'mpiexec'))

        processNum = ProcessingConfig.getSetting(TauDEMUtils.MPI_PROCESSES)
        if processNum <= 0:
            raise GeoAlgorithmExecutionException(
                self.tr('Wrong number of MPI processes used. Please set '
                        'correct number before running TauDEM algorithms.'))

        commands.append('-n')
        commands.append(str(processNum))
        commands.append(os.path.join(TauDEMUtils.taudemPath(), self.cmdName))
        commands.append('-ad8')
        commands.append(self.getParameterValue(self.D8_CONTRIB_AREA_GRID))
        commands.append('-p')
        commands.append(self.getParameterValue(self.D8_FLOW_DIR_GRID))
        commands.append('-fel')
        commands.append(self.getParameterValue(self.PIT_FILLED_GRID))
        commands.append('-ssa')
        commands.append(self.getParameterValue(self.ACCUM_STREAM_SOURCE_GRID))
        commands.append('-o')
        commands.append(self.getParameterValue(self.OUTLETS_SHAPE))
        commands.append('-par')
        commands.append(str(self.getParameterValue(self.MIN_TRESHOLD)))
        commands.append(str(self.getParameterValue(self.MAX_THRESHOLD)))
        commands.append(str(self.getParameterValue(self.TRESHOLD_NUM)))
        commands.append(str(self.getParameterValue(self.STEP_TYPE)))
        commands.append('-drp')
        commands.append(self.getOutputValue(self.DROP_ANALYSIS_FILE))

        TauDEMUtils.executeTauDEM(commands, progress)
Example #21
    def processAlgorithm(self, progress):
        commands = []
        commands.append(os.path.join(TauDEMUtils.mpiexecPath(), 'mpiexec'))

        processNum = ProcessingConfig.getSetting(TauDEMUtils.MPI_PROCESSES)
        if processNum <= 0:
            raise GeoAlgorithmExecutionException(
                self.tr('Wrong number of MPI processes used. Please set '
                        'correct number before running TauDEM algorithms.'))

        commands.append('-n')
        commands.append(str(processNum))
        commands.append(os.path.join(TauDEMUtils.taudemPath(), self.cmdName))
        commands.append('-ang')
        commands.append(self.getParameterValue(self.DINF_FLOW_DIR_GRID))
        commands.append('-fel')
        commands.append(self.getParameterValue(self.PIT_FILLED_GRID))
        commands.append('-m')
        commands.append(str(self.STAT_DICT[self.getParameterValue(
            self.STAT_METHOD)]))
        commands.append(str(self.DIST_DICT[self.getParameterValue(
            self.DIST_METHOD)]))
        commands.append('-thresh')
        commands.append(str(self.getParameterValue(self.THRESHOLD)))
        if not self.getParameterValue(self.EDGE_CONTAM):
            commands.append('-nc')
        commands.append('-du')
        commands.append(self.getOutputValue(self.DIST_UP_GRID))

        TauDEMUtils.executeTauDEM(commands, progress)
Example #22
    def processAlgorithm(self, progress):
        if (LAStoolsUtils.hasWine()):
            commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lasinfo.exe")]
        else:
            commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lasinfo")]
        self.addParametersVerboseCommands(commands)
        self.addParametersPointInputCommands(commands)
        if self.getParameterValue(lasinfo.COMPUTE_DENSITY):
            commands.append("-cd")
        if self.getParameterValue(lasinfo.REPAIR_BB):
            commands.append("-repair_bb")
        if self.getParameterValue(lasinfo.REPAIR_COUNTERS):
            commands.append("-repair_counters")
        histo = self.getParameterValue(lasinfo.HISTO1)
        if histo != 0:
            commands.append("-histo")
            commands.append(lasinfo.HISTOGRAM[histo])
            commands.append(str(self.getParameterValue(lasinfo.HISTO1_BIN)))
        histo = self.getParameterValue(lasinfo.HISTO2)
        if histo != 0:
            commands.append("-histo")
            commands.append(lasinfo.HISTOGRAM[histo])
            commands.append(str(self.getParameterValue(lasinfo.HISTO2_BIN)))
        histo = self.getParameterValue(lasinfo.HISTO3)
        if histo != 0:
            commands.append("-histo")
            commands.append(lasinfo.HISTOGRAM[histo])
            commands.append(str(self.getParameterValue(lasinfo.HISTO3_BIN)))
        commands.append("-o")
        commands.append(self.getOutputValue(lasinfo.OUTPUT))
        self.addParametersAdditionalCommands(commands)

        LAStoolsUtils.runLAStools(commands, progress)
Example #23
    def showFileSelectionDialog(self):
        settings = QSettings()
        text = str(self.text.text())
        if os.path.isdir(text):
            path = text
        elif os.path.isdir(os.path.dirname(text)):
            path = os.path.dirname(text)
        elif settings.contains('/Processing/LastInputPath'):
            path = str(settings.value('/Processing/LastInputPath'))
        else:
            path = ''

        ret, selected_filter = QFileDialog.getOpenFileNames(self, self.tr('Open file'), path,
                                                            self.tr('All files(*.*);;') + self.param.getFileFilter())
        if ret:
            files = list(ret)
            settings.setValue('/Processing/LastInputPath',
                              os.path.dirname(str(files[0])))
            for i, filename in enumerate(files):
                files[i] = dataobjects.getRasterSublayer(filename, self.param)
            if len(files) == 1:
                self.text.setText(files[0])
                self.textEditingFinished()
            else:
                if isinstance(self.param, ParameterMultipleInput):
                    self.text.setText(';'.join(str(f) for f in files))
                else:
                    rowdif = len(files) - (self._table().rowCount() - self.row)
                    for i in range(rowdif):
                        self._panel().addRow()
                    for i, f in enumerate(files):
                        self._table().cellWidget(i + self.row,
                                                 self.col).setValue(f)
Example #24
    def regularMatrix(self, inLayer, inField, targetLayer, targetField,
                      nPoints, progress):
        index = vector.spatialindex(targetLayer)

        inIdx = inLayer.fields().lookupField(inField)

        distArea = QgsDistanceArea()

        first = True
        features = vector.features(inLayer)
        total = 100.0 / len(features)
        for current, inFeat in enumerate(features):
            inGeom = inFeat.geometry()
            inID = str(inFeat.attributes()[inIdx])
            featList = index.nearestNeighbor(inGeom.asPoint(), nPoints)
            if first:
                first = False
                data = ['ID']
                for i in range(len(featList)):
                    data.append('DIST_{0}'.format(i + 1))
                self.writer.addRecord(data)

            data = [inID]
            for i in featList:
                request = QgsFeatureRequest().setFilterFid(i)
                outFeat = next(targetLayer.getFeatures(request))
                outGeom = outFeat.geometry()
                dist = distArea.measureLine(inGeom.asPoint(),
                                            outGeom.asPoint())
                data.append(str(float(dist)))
            self.writer.addRecord(data)

            progress.setPercentage(int(current * total))
Example #25
    def grassPath():
        if not isWindows() and not isMac():
            return ''

        folder = ProcessingConfig.getSetting(GrassUtils.GRASS_FOLDER) or ''
        if not os.path.exists(folder):
            folder = None
        if folder is None:
            if isWindows():
                if "OSGEO4W_ROOT" in os.environ:
                    testfolder = os.path.join(str(os.environ['OSGEO4W_ROOT']), "apps")
                else:
                    testfolder = str(QgsApplication.prefixPath())
                testfolder = os.path.join(testfolder, 'grass')
                if os.path.isdir(testfolder):
                    for subfolder in os.listdir(testfolder):
                        if subfolder.startswith('grass-6'):
                            folder = os.path.join(testfolder, subfolder)
                            break
            else:
                folder = os.path.join(QgsApplication.prefixPath(), 'grass')
                if not os.path.isdir(folder):
                    folder = '/Applications/GRASS-6.4.app/Contents/MacOS'

        if folder:
            ProcessingConfig.setSettingValue(GrassUtils.GRASS_FOLDER, folder)
        return folder or ''
Example #26
File: Cover.py Project: Gustry/QGIS
 def processAlgorithm(self, feedback):
     commands = [os.path.join(FusionUtils.FusionPath(), "Cover.exe")]
     commands.append("/verbose")
     self.addAdvancedModifiersToCommand(commands)
     ground = self.getParameterValue(self.GROUND)
     if str(ground).strip() != "":
         commands.append("/ground:" + str(ground))
     outFile = self.getOutputValue(self.OUTPUT) + ".dtm"
     commands.append(outFile)
     commands.append(str(self.getParameterValue(self.HEIGHTBREAK)))
     commands.append(str(self.getParameterValue(self.CELLSIZE)))
     commands.append(self.UNITS[self.getParameterValue(self.XYUNITS)][0])
     commands.append(self.UNITS[self.getParameterValue(self.ZUNITS)][0])
     commands.append("0")
     commands.append("0")
     commands.append("0")
     commands.append("0")
     files = self.getParameterValue(self.INPUT).split(";")
     if len(files) == 1:
         commands.append(self.getParameterValue(self.INPUT))
     else:
         FusionUtils.createFileList(files)
         commands.append(FusionUtils.tempFileListFilepath())
     FusionUtils.runFusion(commands, feedback)
     commands = [os.path.join(FusionUtils.FusionPath(), "DTM2ASCII.exe")]
     commands.append(outFile)
     commands.append(self.getOutputValue(self.OUTPUT))
     p = subprocess.Popen(commands, shell=True)
     p.wait()
Example #27
File: ClipData.py Project: grizonnetm/QGIS
 def processAlgorithm(self, progress):
     commands = [os.path.join(FusionUtils.FusionPath(), 'ClipData.exe')]
     commands.append('/verbose')
     self.addAdvancedModifiersToCommand(commands)
     commands.append('/shape:' + str(self.getParameterValue(self.SHAPE)))
     dtm = self.getParameterValue(self.DTM)
     if dtm:
         commands.append('/dtm:' + str(dtm))
     height = self.getParameterValue(self.HEIGHT)
     if height:
         commands.append('/height')
     files = self.getParameterValue(self.INPUT).split(';')
     if len(files) == 1:
         commands.append(self.getParameterValue(self.INPUT))
     else:
         FusionUtils.createFileList(files)
         commands.append(FusionUtils.tempFileListFilepath())
     outFile = self.getOutputValue(self.OUTPUT)
     commands.append(outFile)
     extent = str(self.getParameterValue(self.EXTENT)).split(',')
     commands.append(extent[0])
     commands.append(extent[2])
     commands.append(extent[1])
     commands.append(extent[3])
     FusionUtils.runFusion(commands, progress)
Example #28
def parseParameters(command):
    """
    Parse alg string to grab parameters value.
    Can handle quotes and comma.
    """
    pos = 0
    exp = re.compile(r"""(['"]?)(.*?)\1(,|$)""")
    while True:
        m = exp.search(command, pos)
        result = m.group(2)
        separator = m.group(3)

        # Handle special values:
        if result == 'None':
            result = None
        elif result.lower() == str(True).lower():
            result = True
        elif result.lower() == str(False).lower():
            result = False

        yield result

        if not separator:
            break

        pos = m.end(0)
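For example, quoted fields may contain commas, and the literals None/True/False are converted to Python values:

print(list(parseParameters('1,"a,b",None,True')))
# -> ['1', 'a,b', None, True]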
Example #29
File: lasthinPro.py Project: naihil/QGIS
    def processAlgorithm(self, progress):
        commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lasthin")]
        self.addParametersVerboseCommands(commands)
        self.addParametersPointInputFolderCommands(commands)
        self.addParametersIgnoreClass1Commands(commands)
        self.addParametersIgnoreClass2Commands(commands)
        step = self.getParameterValue(lasthinPro.THIN_STEP)
        if step != 0.0:
            commands.append("-step")
            commands.append(str(step))
        operation = self.getParameterValue(lasthinPro.OPERATION)
        if (operation != 0):
            commands.append("-" + self.OPERATIONS[operation])
        if (operation >= 4):
            commands.append(str(self.getParameterValue(lasthinPro.THRESHOLD_OR_INTERVAL)))
        if self.getParameterValue(lasthinPro.WITHHELD):
            commands.append("-withheld")
        if self.getParameterValue(lasthinPro.CLASSIFY_AS):
            commands.append("-classify_as")
            commands.append(str(self.getParameterValue(lasthinPro.CLASSIFY_AS_CLASS)))
        self.addParametersOutputDirectoryCommands(commands)
        self.addParametersOutputAppendixCommands(commands)
        self.addParametersPointOutputFormatCommands(commands)
        self.addParametersAdditionalCommands(commands)
        self.addParametersCoresCommands(commands)

        LAStoolsUtils.runLAStools(commands, progress)
Example #30
File: GridAverage.py Project: exlimit/QGIS
    def getConsoleCommands(self, parameters, context, feedback):
        arguments = ['-l']
        arguments.append(
            os.path.basename(os.path.splitext(
                str(self.getParameterValue(self.INPUT)))[0]))

        fieldName = self.getParameterValue(self.Z_FIELD)
        if fieldName is not None and fieldName != '':
            arguments.append('-zfield')
            arguments.append(fieldName)

        params = 'average'
        params += ':radius1=%s' % self.getParameterValue(self.RADIUS_1)
        params += ':radius2=%s' % self.getParameterValue(self.RADIUS_2)
        params += ':angle=%s' % self.getParameterValue(self.ANGLE)
        params += ':min_points=%s' % self.getParameterValue(self.MIN_POINTS)
        params += ':nodata=%s' % self.getParameterValue(self.NODATA)

        arguments.append('-a')
        arguments.append(params)
        arguments.append('-ot')
        arguments.append(self.TYPE[self.getParameterValue(self.RTYPE)])
        arguments.append(str(self.getParameterValue(self.INPUT)))
        arguments.append(str(self.getOutputValue(self.OUTPUT)))

        return ['gdal_grid', GdalUtils.escapeAndJoin(arguments)]
Example #31
 def test_pts_compressed(self):
     public_key = PublicKey(key["public_key"])
     address = Address(address=None, pubkey=repr(public_key))
     self.assertEqual(str(key["Compressed_PTS"]), (format(address.derive256address_with_version(56), "STM")))
Example #32
def parallel_dot_product(A, b, block_size=20000, dview=None, transpose=False):
    ''' Chunked matrix product between a matrix and column vectors.
    A: memory-mapped ndarray
        pixels x time
    b: time x comps
    '''

    import pickle
    pars = []
    d1, d2 = np.shape(A)
    b = pickle.dumps(b)
    print('parallel dot product block size: ' + str(block_size))

    if block_size < d1:

        for idx in range(0, d1 - block_size, block_size):

            idx_to_pass = list(range(idx, idx + block_size))
            pars.append([A.filename, idx_to_pass, b, transpose])

        if (idx + block_size) < d1:

            idx_to_pass = list(range(idx + block_size, d1))
            pars.append([A.filename, idx_to_pass, b, transpose])

    else:

        idx_to_pass = list(range(d1))
        pars.append([A.filename, idx_to_pass, b, transpose])

    print('Start product')

    if dview is None:

        #        results = list(map(dot_place_holder,pars))
        if transpose:
            b = pickle.loads(b)
            print('Transposing')
            output = np.zeros((d2, np.shape(b)[-1]))
            for counts, pr in enumerate(pars):

                #                print(counts)
                iddx, rs = dot_place_holder(pr)
                output = output + rs

        else:
            b = pickle.loads(b)
            output = np.zeros((d1, np.shape(b)[-1]))
            for counts, pr in enumerate(pars):

                #                print(counts)
                iddx, rs = dot_place_holder(pr)
                output[iddx] = rs

    else:

        #        results = dview.map_sync(dot_place_holder,pars)
        results = dview.map_sync(dot_place_holder, pars)

        b = pickle.loads(b)
        if transpose:
            print('Transposing')
            output = np.zeros((d2, np.shape(b)[-1]))
            for res in results:
                output = output + res[1]

        else:
            print('Filling')
            output = np.zeros((d1, np.shape(b)[-1]))
            for res in results:
                output[res[0]] = res[1]

    return output
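The function splits the row index into blocks so that only one block of the memory-mapped matrix is materialized at a time. A minimal self-contained sketch of the same block-row idea (plain NumPy, without the pickled workers used above):

import numpy as np

def chunked_dot(A, b, block_size):
    # multiply block rows of A (pixels x time) by b (time x comps)
    out = np.empty((A.shape[0], b.shape[1]))
    for start in range(0, A.shape[0], block_size):
        out[start:start + block_size] = A[start:start + block_size].dot(b)
    return out

A = np.random.rand(100, 8)
b = np.random.rand(8, 3)
assert np.allclose(chunked_dot(A, b, block_size=30), A.dot(b))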
Example #33
def save_memmap_chunks(filename,
                       base_name='Yr',
                       resize_fact=(1, 1, 1),
                       remove_init=0,
                       idx_xy=None,
                       order='F',
                       xy_shifts=None,
                       is_3D=False,
                       add_to_movie=0,
                       border_to_0=0,
                       n_chunks=1):
    """ Saves efficiently a list of tif files into a memory mappable file
    Parameters
    ----------
        filenames: list
            list of tif files
        base_name: str
            the base used to build the file name. IT MUST NOT CONTAIN "_"    
        resize_fact: tuple
            x,y, and z downampling factors (0.5 means downsampled by a factor 2) 
        remove_init: int
            number of frames to remove at the begining of each tif file (used for resonant scanning images if laser in rutned on trial by trial)
        idx_xy: tuple size 2 [or 3 for 3D data]
            for selecting slices of the original FOV, for instance idx_xy=(slice(150,350,None),slice(150,350,None))
        order: string
            whether to save the file in 'C' or 'F' order     
        xy_shifts: list 
            x and y shifts computed by a motion correction algorithm to be applied before memory mapping    

        is_3D: boolean
            whether it is 3D data
    Return
    -------
        fname_new: the name of the mapped file, the format is such that the name will contain the frame dimensions and the number of f

    """

    #TODO: can be done online
    print(filename)

    Yr = cm.load(filename, fr=1)

    T, dims = Yr.shape[0], Yr.shape[1:]
    step = int(old_div(T, n_chunks))  # np.int alias was removed in NumPy 1.24
    bins = []

    for i in range(0, T, step):
        bins.append(i)
    bins.append(T)

    for j in range(0, len(bins) - 1):
        tmp = np.array(Yr[bins[j]:bins[j + 1], :, :])
        if xy_shifts is not None:
            tmp = tmp.apply_shifts(xy_shifts,
                                   interpolation='cubic',
                                   remove_blanks=False)

        if idx_xy is None:
            if remove_init > 0:
                tmp = np.array(tmp)[remove_init:]
        elif len(idx_xy) == 2:
            tmp = np.array(tmp)[remove_init:, idx_xy[0], idx_xy[1]]
        else:
            raise Exception('You need to set is_3D=True for 3D data')

        if border_to_0 > 0:
            min_mov = np.nanmin(tmp)
            tmp[:, :border_to_0, :] = min_mov
            tmp[:, :, :border_to_0] = min_mov
            tmp[:, :, -border_to_0:] = min_mov
            tmp[:, -border_to_0:, :] = min_mov

        fx, fy, fz = resize_fact
        if fx != 1 or fy != 1 or fz != 1:

            tmp = cm.movie(tmp, fr=1)
            tmp = tmp.resize(fx=fx, fy=fy, fz=fz)

        Tc, dimsc = tmp.shape[0], tmp.shape[1:]
        tmp = np.transpose(tmp, list(range(1, len(dimsc) + 1)) + [0])
        tmp = np.reshape(tmp, (np.prod(dimsc), Tc), order='F')

        if j == 0:
            fname_tot = base_name + '_d1_' + str(dims[0]) + '_d2_' + str(
                dims[1]) + '_d3_' + str(
                    1 if len(dims) == 2 else dims[2]) + '_order_' + str(order)
            fname_tot = os.path.join(os.path.split(filename)[0], fname_tot)
            big_mov = np.memmap(fname_tot,
                                mode='w+',
                                dtype=np.float32,
                                shape=(np.prod(dims), T),
                                order=order)
        else:
            big_mov = np.memmap(fname_tot,
                                dtype=np.float32,
                                mode='r+',
                                shape=(np.prod(dims), T),
                                order=order)
        #    np.save(fname[:-3]+'npy',np.asarray(Yr))

        big_mov[:, bins[j]:bins[j + 1]] = np.asarray(
            tmp, dtype=np.float32) + 1e-10 + add_to_movie
        big_mov.flush()
        del big_mov

#    if ref+step+1<d:
#        print 'running on remaining pixels:' + str(ref+step-d)
#        pars.append([fname_tot,d,tot_frames,mmap_fnames,ref+step,d])

    fname_new = fname_tot + '_frames_' + str(T) + '_.mmap'
    os.rename(fname_tot, fname_new)

    return fname_new
Example #34
def save_memmap(filenames,
                base_name='Yr',
                resize_fact=(1, 1, 1),
                remove_init=0,
                idx_xy=None,
                order='F',
                xy_shifts=None,
                is_3D=False,
                add_to_movie=0,
                border_to_0=0,
                save_dir=None):
    """ Saves efficiently a list of tif files into a memory mappable file
    Parameters
    ----------
        filenames: list
            list of tif files
        base_name: str
            the base used to build the file name. IT MUST NOT CONTAIN "_"    
        resize_fact: tuple
            x,y, and z downampling factors (0.5 means downsampled by a factor 2) 
        remove_init: int
            number of frames to remove at the begining of each tif file (used for resonant scanning images if laser in rutned on trial by trial)
        idx_xy: tuple size 2 [or 3 for 3D data]
            for selecting slices of the original FOV, for instance idx_xy=(slice(150,350,None),slice(150,350,None))
        order: string
            whether to save the file in 'C' or 'F' order     
        xy_shifts: list 
            x and y shifts computed by a motion correction algorithm to be applied before memory mapping    

        is_3D: boolean
            whether it is 3D data
    Return
    -------
        fname_new: the name of the mapped file, the format is such that the name will contain the frame dimensions and the number of f

    """

    #TODO: can be done online
    Ttot = 0
    for idx, f in enumerate(filenames):
        print(f)

        if is_3D:
            import tifffile
            #            print("Using tifffile library instead of skimage because of  3D")

            if idx_xy is None:
                Yr = tifffile.imread(f)[remove_init:]
            elif len(idx_xy) == 2:
                Yr = tifffile.imread(f)[remove_init:, idx_xy[0], idx_xy[1]]
            else:
                Yr = tifffile.imread(f)[remove_init:, idx_xy[0], idx_xy[1],
                                        idx_xy[2]]


#        elif :
#
#            if xy_shifts is not None:
#                raise Exception('Calblitz not installed, you cannot motion correct')
#
#            if idx_xy is None:
#                Yr = imread(f)[remove_init:]
#            elif len(idx_xy) == 2:
#                Yr = imread(f)[remove_init:, idx_xy[0], idx_xy[1]]
#            else:
#                raise Exception('You need to set is_3D=True for 3D data)')

        else:

            Yr = cm.load(f, fr=1, in_memory=True)
            if xy_shifts is not None:
                Yr = Yr.apply_shifts(xy_shifts,
                                     interpolation='cubic',
                                     remove_blanks=False)

            if idx_xy is None:
                if remove_init > 0:
                    Yr = np.array(Yr)[remove_init:]
            elif len(idx_xy) == 2:
                Yr = np.array(Yr)[remove_init:, idx_xy[0], idx_xy[1]]
            else:
                raise Exception('You need to set is_3D=True for 3D data')

        if border_to_0 > 0:

            min_mov = Yr.calc_min()
            Yr[:, :border_to_0, :] = min_mov
            Yr[:, :, :border_to_0] = min_mov
            Yr[:, :, -border_to_0:] = min_mov
            Yr[:, -border_to_0:, :] = min_mov

        fx, fy, fz = resize_fact
        if fx != 1 or fy != 1 or fz != 1:

            if 'movie' not in str(type(Yr)):
                Yr = cm.movie(Yr, fr=1)

            Yr = Yr.resize(fx=fx, fy=fy, fz=fz)

        T, dims = Yr.shape[0], Yr.shape[1:]
        Yr = np.transpose(Yr, list(range(1, len(dims) + 1)) + [0])
        Yr = np.reshape(Yr, (np.prod(dims), T), order='F')

        if idx == 0:
            fname_tot = base_name + '_d1_' + str(dims[0]) + '_d2_' + str(
                dims[1]) + '_d3_' + str(
                    1 if len(dims) == 2 else dims[2]) + '_order_' + str(order)

            if save_dir is None:
                fname_tot = os.path.join(os.path.split(f)[0], fname_tot)
            else:
                fname_tot = os.path.join(save_dir, fname_tot)

            big_mov = np.memmap(fname_tot,
                                mode='w+',
                                dtype=np.float32,
                                shape=(np.prod(dims), T),
                                order=order)
        else:
            big_mov = np.memmap(fname_tot,
                                dtype=np.float32,
                                mode='r+',
                                shape=(np.prod(dims), Ttot + T),
                                order=order)
        #    np.save(fname[:-3]+'npy',np.asarray(Yr))

        big_mov[:, Ttot:Ttot +
                T] = np.asarray(Yr, dtype=np.float32) + 1e-10 + add_to_movie
        big_mov.flush()
        del big_mov
        Ttot = Ttot + T

    fname_new = fname_tot + '_frames_' + str(Ttot) + '_.mmap'
    os.rename(fname_tot, fname_new)

    return fname_new
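A hypothetical call (file names are placeholders): concatenate two movies into a single float32 memmap, dropping the first 10 frames of each; the returned name encodes the shape needed to re-open the file:

fname = save_memmap(['movie_a.tif', 'movie_b.tif'], base_name='Yr',
                    remove_init=10, order='C')
# e.g. 'movie_dir/Yr_d1_512_d2_512_d3_1_order_C_frames_1980_.mmap'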
Example #35
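    # NOTE (assumption): fragment of a memmap-join routine in the style of
    # CaImAn's save_memmap_join; mmap_fnames, tot_frames (initialized to 0),
    # base_name, order and load_memmap are expected from the enclosing code.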
    for f in mmap_fnames:

        Yr, dims, T = load_memmap(f)

        print((f, T))

        tot_frames += T

        del Yr

    d = np.prod(dims)

    if base_name is None:

        base_name = mmap_fnames[0]
        base_name = base_name[:base_name.find('_d1_')] + '-#-' + str(
            len(mmap_fnames))

    fname_tot = base_name + '_d1_' + str(dims[0]) + '_d2_' + str(
        dims[1]) + '_d3_' + str(
            1 if len(dims) == 2 else dims[2]) + '_order_' + str(
                order) + '_frames_' + str(tot_frames) + '_.mmap'
    fname_tot = os.path.join(os.path.split(mmap_fnames[0])[0], fname_tot)

    print(fname_tot)

    big_mov = np.memmap(fname_tot,
                        mode='w+',
                        dtype=np.float32,
                        shape=(d, tot_frames),
                        order='C')
Example #36
File: _base.py Project: JingtaiL/psychopy
    def writeParamUpdate(self, buff, compName, paramName, val, updateType,
                         params=None, target="PsychoPy"):
        """Writes an update string for a single parameter.
        This should not need overriding for different components - try to keep
        constant
        """
        if params is None:
            params = self.params
        # first work out the name for the set____() function call
        if paramName == 'advancedParams':
            return  # advancedParams is not really a parameter itself
        elif paramName == 'letterHeight':
            paramCaps = 'Height'  # setHeight for TextStim
        elif paramName == 'image' and self.getType() == 'PatchComponent':
            paramCaps = 'Tex'  # setTex for PatchStim
        elif paramName == 'sf':
            paramCaps = 'SF'  # setSF, not SetSf
        elif paramName == 'coherence':
            paramCaps = 'FieldCoherence'
        elif paramName == 'fieldPos':
            paramCaps = 'FieldPos'
        else:
            paramCaps = paramName[0].capitalize() + paramName[1:]

        # code conversions for PsychoJS
        if target == 'PsychoJS':
            endStr = ';'
            # convert (0,0.5) to [0,0.5] but don't convert "rand()" to "rand[]"
            valStr = str(val).strip()
            if valStr.startswith("(") and valStr.endswith(")"):
                valStr = valStr.replace("(", "[", 1)
                valStr = valStr[::-1].replace(")", "]", 1)[
                         ::-1]  # replace from right
            # filenames (e.g. for image) need to be loaded from resources
            if paramName in ["sound"]:
                valStr = (
                    "psychoJS.resourceManager.getResource({})".format(valStr))
        else:
            endStr = ''

        # then write the line
        if updateType == 'set every frame' and target == 'PsychoPy':
            loggingStr = ', log=False'
        else:
            loggingStr = ''

        if target == 'PsychoPy':
            if paramName == 'color':
                buff.writeIndented("%s.setColor(%s, colorSpace=%s" %
                                   (compName, params['color'],
                                    params['colorSpace']))
                buff.write("%s)%s\n" % (loggingStr, endStr))
            elif paramName == 'sound':
                stopVal = params['stopVal'].val
                if stopVal in ['', None, -1, 'None']:
                    stopVal = '-1'
                buff.writeIndented("%s.setSound(%s, secs=%s)%s\n" %
                                   (compName, params['sound'], stopVal, endStr))
            else:
                buff.writeIndented("%s.set%s(%s%s)%s\n" %
                                   (compName, paramCaps, val, loggingStr,
                                    endStr))
        elif target == 'PsychoJS':
            # write the line
            if paramName == 'color':
                buff.writeIndented("%s.setColor(new util.Color(%s)" % (
                    compName, params['color']))
                buff.write("%s)%s\n" % (loggingStr, endStr))
            elif paramName == 'fillColor':
                buff.writeIndented("%s.setFillColor(new util.Color(%s)" % (compName, params['fillColor']))
                buff.write("%s)%s\n" % (loggingStr, endStr))
            elif paramName == 'sound':
                stopVal = params['stopVal']
                if stopVal in ['', None, -1, 'None']:
                    stopVal = '-1'
                buff.writeIndented("%s.setSound(%s, secs=%s)%s\n" %
                                   (compName, params['sound'], stopVal, endStr))
            else:
                buff.writeIndented("%s.set%s(%s%s)%s\n" %
                                   (compName, paramCaps, val, loggingStr,
                                    endStr))
Example #37
    import sys
    import argparse
    parser = argparse.ArgumentParser()

    parser.add_argument('-c','--correction',required=True)
#    parser.add_argument('-G','--genomesize',type=float)
    parser.add_argument('-m','--max',type=int,default=1000000)
    parser.add_argument('-d','--debug',action="store_true")
    parser.add_argument('-p','--progress',action="store_true")

    args = parser.parse_args()
    if args.debug:
        args.progress = True

    if args.progress:
        print("#", str(args))
    sys.stdout.flush()
    correction={}
    G=0.0
    if args.correction:
        f = open(args.correction)
        while True:
            l = f.readline()
            if not l: break
            if l[0] == "#": continue

            c = l.strip().split()
            correction[int(c[0])] = float(c[1])

        f.close()
Example #38
 def test_calc_pub_key(self):
     private_key = PrivateKey(key["private_key"])
     public_key = private_key.pubkey
     self.assertEqual(key["bts_address"], str(public_key.address))
Example #39
 def test_import_export(self):
     public_key = PublicKey(key["public_key"])
     self.assertEqual(key["public_key"], str(public_key))
Example #40
 def test_btc_uncompressed(self):
     public_key = PublicKey(key["public_key"])
     address = Address(address=None, pubkey=public_key.unCompressed())
     self.assertEqual(str(key["Uncompressed_BTC"]), (format(address.derive256address_with_version(0), "STM")))
Example #41
 def test_public_from_private(self):
     private_key = PrivateKey(key["private_key"])
     public_key = private_key.get_public_key()
     self.assertEqual(key["public_key"], str(public_key))
Example #42
 def test_to_wif(self):
     private_key = PrivateKey(key["private_key"])
     self.assertEqual(key["private_key_WIF_format"], str(private_key))
Example #43
 def createAlgsList(self):
     algs = []
     folder = Grass7Utils.grassDescriptionPath()
     for descriptionFile in os.listdir(folder):
         if descriptionFile.endswith('txt'):
             try:
                 alg = Grass7Algorithm(os.path.join(folder, descriptionFile))
                 if alg.name().strip() != '':
                     algs.append(alg)
                 else:
                     QgsMessageLog.logMessage(self.tr('Could not open GRASS GIS 7 algorithm: {0}').format(descriptionFile), self.tr('Processing'), QgsMessageLog.CRITICAL)
             except Exception as e:
                 QgsMessageLog.logMessage(
                     self.tr('Could not open GRASS GIS 7 algorithm: {0}\n{1}').format(descriptionFile, str(e)), self.tr('Processing'), QgsMessageLog.CRITICAL)
     #algs.append(nviz7())
     return algs
Example #44
 def test_short_address(self):
     public_key = PublicKey(key["public_key"])
     self.assertEqual(key["bts_address"], str(public_key.address))
Example #45
def overlapping_plot(old_results_filename,
                     new_results,
                     boxplot_filename,
                     visualize=True):
    """
    Plot the overlaping results of 14 old appraoch and the proposed appraoch
    :param old_results_filename: Old results stored in .mat format file
    :param new_results: Dictionary that contains the new results
    :return:
    """

    if old_results_filename is not None:
        old_results = sio.loadmat(old_results_filename)
    else:
        old_results = None

    # combine old names with proposed method
    compound_names = []

    if old_results is not None:
        for item in old_results['direc_name']:
            compound_names.append(str(item[0])[3:-2])

    compound_names.append('Proposed')

    # new results may only have a subset of the results
    if old_results is not None:
        old_results_selected = old_results['results'][
            new_results['ind'], :]  # select the desired rows

        # combine data
        compound_results = np.concatenate(
            (old_results_selected, np.array(
                new_results['mean_target_overlap']).reshape(-1, 1)),
            axis=1)
    else:
        compound_results = np.array(
            new_results['mean_target_overlap']).reshape(-1, 1)

    # create a figure instance
    fig = plt.figure(1, figsize=(8, 6))

    # create an axes instance
    ax = fig.add_subplot(111)

    # set axis tick
    ax.set_axisbelow(True)
    ax.yaxis.grid(True,
                  linestyle='-',
                  which='major',
                  color='lightgrey',
                  alpha=0.5)
    ax.yaxis.set_tick_params(left=True, direction='in', width=1)
    ax.yaxis.set_tick_params(right=True, direction='in', width=1)
    ax.xaxis.set_tick_params(top=False, direction='in', width=1)
    ax.xaxis.set_tick_params(bottom=False, direction='in', width=1)

    # create the boxplot
    bp = plt.boxplot(compound_results,
                     vert=True,
                     whis=1.5,
                     meanline=True,
                     widths=0.16,
                     showfliers=True,
                     showcaps=False,
                     patch_artist=True,
                     labels=compound_names)

    # rotate x labels
    for tick in ax.get_xticklabels():
        tick.set_rotation(90)

    # set properties of boxes, medians, whiskers, fliers
    plt.setp(bp['medians'], color='orange')
    plt.setp(bp['boxes'], color='blue')
    # plt.setp(bp['caps'], color='b')
    plt.setp(bp['whiskers'], linestyle='-', color='blue')
    plt.setp(bp['fliers'], marker='o', markersize=5, markeredgecolor='blue')

    # matplotlib.rcParams['ytick.direction'] = 'in'
    # matplotlib.rcParams['xtick.direction'] = 'inout'

    # setup font
    font = {'family': 'normal', 'weight': 'semibold', 'size': 10}
    matplotlib.rc('font', **font)

    # set the line width of the figure
    for axis in ['top', 'bottom', 'left', 'right']:
        ax.spines[axis].set_linewidth(2)

    # set the range of the overlapping rate
    plt.ylim([0, 1.0])

    # add two lines to represent the lower quartile and upper quartile
    lower_quartile = np.percentile(compound_results[:, -1], 25)
    upper_quartile = np.percentile(compound_results[:, -1], 75)
    ax.axhline(lower_quartile, ls='-', color='r', linewidth=1)
    ax.axhline(upper_quartile, ls='-', color='r', linewidth=1)

    # set the target box to red color
    bp['boxes'][-1].set(color='red')
    bp['boxes'][-1].set(facecolor='red')
    bp['whiskers'][-1].set(color='red')
    bp['whiskers'][-2].set(color='red')
    bp['fliers'][-1].set(color='red', markeredgecolor='red')

    # save figure
    if boxplot_filename is not None:
        print('Saving boxplot to : ' + boxplot_filename)
        plt.savefig(boxplot_filename, dpi=1000, bbox_inches='tight')

    # show figure
    if visualize:
        plt.show()
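A minimal call sketch, assuming `new_results` holds the two keys the function reads (`ind` and `mean_target_overlap`); the filename is a placeholder:

import numpy as np

# hypothetical overlap scores for ten registration cases
new_results = {
    'ind': np.arange(10),
    'mean_target_overlap': np.random.uniform(0.6, 0.9, 10),
}

# passing None for old_results_filename plots only the proposed method
overlapping_plot(None, new_results, 'overlap_boxplot.pdf', visualize=False)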
Example #46
 def test_enumgroup_unicode(self):
     name = 'Yes / No'
     e = EnumGroup.objects.create(name=name)
     self.assertEqual(str(e), name)
Example #47
def train_rcnn(network, dataset, image_set, root_path, dataset_path,
               frequent, kvstore, work_load_list, no_flip, no_shuffle, resume,
               ctx, pretrained, epoch, prefix, begin_epoch, end_epoch,
               train_shared, lr, lr_step, proposal):
    # set up logger
    logging.basicConfig()
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    # set up config
    config.TRAIN.BATCH_IMAGES = 2
    config.TRAIN.BATCH_ROIS = 128
    if proposal == 'ss':
        config.TRAIN.BG_THRESH_LO = 0.1  # reproduce Fast R-CNN

    # load symbol
    sym = eval('get_' + network + '_rcnn')(num_classes=config.NUM_CLASSES)

    # setup multi-gpu
    batch_size = len(ctx)
    input_batch_size = config.TRAIN.BATCH_IMAGES * batch_size

    # print config
    pprint.pprint(config)

    # load dataset and prepare imdb for training
    image_sets = image_set.split('+')
    roidbs = [load_proposal_roidb(dataset, image_set, root_path, dataset_path,
                                  proposal=proposal, append_gt=True, flip=not no_flip)
              for image_set in image_sets]
    roidb = merge_roidb(roidbs)
    roidb = filter_roidb(roidb)
    means, stds = add_bbox_regression_targets(roidb)

    # load training data
    train_data = ROIIter(roidb, batch_size=input_batch_size, shuffle=not no_shuffle,
                         ctx=ctx, work_load_list=work_load_list, aspect_grouping=config.TRAIN.ASPECT_GROUPING)

    # infer max shape
    max_data_shape = [('data', (input_batch_size, 3, max([v[0] for v in config.SCALES]), max([v[1] for v in config.SCALES])))]

    # infer shape
    data_shape_dict = dict(train_data.provide_data + train_data.provide_label)
    arg_shape, out_shape, aux_shape = sym.infer_shape(**data_shape_dict)
    arg_shape_dict = dict(list(zip(sym.list_arguments(), arg_shape)))
    out_shape_dict = dict(list(zip(sym.list_outputs(), out_shape)))
    aux_shape_dict = dict(list(zip(sym.list_auxiliary_states(), aux_shape)))
    print('output shape')
    pprint.pprint(out_shape_dict)

    # load and initialize params
    if resume:
        arg_params, aux_params = load_param(prefix, begin_epoch, convert=True)
    else:
        arg_params, aux_params = load_param(pretrained, epoch, convert=True)
        arg_params['cls_score_weight'] = mx.random.normal(0, 0.01, shape=arg_shape_dict['cls_score_weight'])
        arg_params['cls_score_bias'] = mx.nd.zeros(shape=arg_shape_dict['cls_score_bias'])
        arg_params['bbox_pred_weight'] = mx.random.normal(0, 0.001, shape=arg_shape_dict['bbox_pred_weight'])
        arg_params['bbox_pred_bias'] = mx.nd.zeros(shape=arg_shape_dict['bbox_pred_bias'])

    # check parameter shapes
    for k in sym.list_arguments():
        if k in data_shape_dict:
            continue
        assert k in arg_params, k + ' not initialized'
        assert arg_params[k].shape == arg_shape_dict[k], \
            'shape inconsistent for ' + k + ' inferred ' + str(arg_shape_dict[k]) + ' provided ' + str(arg_params[k].shape)
    for k in sym.list_auxiliary_states():
        assert k in aux_params, k + ' not initialized'
        assert aux_params[k].shape == aux_shape_dict[k], \
            'shape inconsistent for ' + k + ' inferred ' + str(aux_shape_dict[k]) + ' provided ' + str(aux_params[k].shape)

    # prepare training
    # create solver
    data_names = [k[0] for k in train_data.provide_data]
    label_names = [k[0] for k in train_data.provide_label]
    if train_shared:
        fixed_param_prefix = config.FIXED_PARAMS_SHARED
    else:
        fixed_param_prefix = config.FIXED_PARAMS
    mod = MutableModule(sym, data_names=data_names, label_names=label_names,
                        logger=logger, context=ctx, work_load_list=work_load_list,
                        max_data_shapes=max_data_shape, fixed_param_prefix=fixed_param_prefix)

    # decide training params
    # metric
    eval_metric = metric.RCNNAccMetric()
    cls_metric = metric.RCNNLogLossMetric()
    bbox_metric = metric.RCNNL1LossMetric()
    eval_metrics = mx.metric.CompositeEvalMetric()
    for child_metric in [eval_metric, cls_metric, bbox_metric]:
        eval_metrics.add(child_metric)
    # callback
    batch_end_callback = callback.Speedometer(train_data.batch_size, frequent=frequent)
    epoch_end_callback = callback.do_checkpoint(prefix, means, stds)
    # decide learning rate
    base_lr = lr
    lr_factor = 0.1
    lr_epoch = [int(epoch) for epoch in lr_step.split(',')]
    lr_epoch_diff = [epoch - begin_epoch for epoch in lr_epoch if epoch > begin_epoch]
    lr = base_lr * (lr_factor ** (len(lr_epoch) - len(lr_epoch_diff)))
    lr_iters = [int(epoch * len(roidb) / batch_size) for epoch in lr_epoch_diff]
    print('lr', lr, 'lr_epoch_diff', lr_epoch_diff, 'lr_iters', lr_iters)
    lr_scheduler = mx.lr_scheduler.MultiFactorScheduler(lr_iters, lr_factor)
    # optimizer
    optimizer_params = {'momentum': 0.9,
                        'wd': 0.0005,
                        'learning_rate': lr,
                        'lr_scheduler': lr_scheduler,
                        'rescale_grad': (old_div(1.0, batch_size)),
                        'clip_gradient': 5}

    # train
    mod.fit(train_data, eval_metric=eval_metrics, epoch_end_callback=epoch_end_callback,
            batch_end_callback=batch_end_callback, kvstore=kvstore,
            optimizer='sgd', optimizer_params=optimizer_params,
            arg_params=arg_params, aux_params=aux_params, begin_epoch=begin_epoch, num_epoch=end_epoch)
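To make the learning-rate bookkeeping concrete, the same arithmetic on hypothetical values (base lr 0.001, lr_step='7,10', resuming at begin_epoch=8, 1000 ROIs across 2 GPUs) works out as follows:

base_lr, lr_factor, begin_epoch = 0.001, 0.1, 8
lr_epoch = [int(e) for e in '7,10'.split(',')]                          # [7, 10]
lr_epoch_diff = [e - begin_epoch for e in lr_epoch if e > begin_epoch]  # [2]
lr = base_lr * lr_factor ** (len(lr_epoch) - len(lr_epoch_diff))        # 0.0001
lr_iters = [int(e * 1000 / 2) for e in lr_epoch_diff]                   # [1000]
# the epoch-7 decay already happened before the resume, so training restarts
# at the decayed rate and schedules the remaining decay 1000 iterations in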
Example #48
 def getValue(self):
     if str(self.leText.text()).strip() != '':
         return str(self.leText.text())
     else:
         return None
Example #49
def chart_html(driver_name, chart_type, source, chart_options=None, fields='*', refresh_interval=0,
               refresh_data=None, control_defaults=None, control_ids=None, schema=None):
  """ Return HTML for a chart.

  Args:
    driver_name: the name of the chart driver. Currently we support 'plotly' or 'gcharts'.
    chart_type: string specifying type of chart.
    source: the data source for the chart. Can be actual data (e.g. list) or the name of
        a data source (e.g. the name of a query module).
    chart_options: a dictionary of options for the chart. Can contain a 'controls' entry
        specifying controls. Other entries are passed as JSON to Google Charts.
    fields: the fields to chart. Can be '*' for all fields (only sensible if the columns are
        ordered; e.g. a Query or list of lists, but not a list of dictionaries); otherwise a
        string containing a comma-separated list of field names.
    refresh_interval: a time in seconds after which the chart data will be refreshed. 0 if the
        chart should not be refreshed (i.e. the data is static).
    refresh_data: if the source is a list or other raw data, this is a YAML string containing
        metadata needed to support calls to refresh (get_chart_data).
    control_defaults: the default variable values for controls that are shared across charts
        including this one.
    control_ids: the DIV IDs for controls that are shared across charts including this one.
    schema: an optional schema for the data; if not supplied one will be inferred.

  Returns:
    A string containing the HTML for the chart.

  """
  div_id = _html.Html.next_id()
  controls_html = ''
  if control_defaults is None:
    control_defaults = {}
  if control_ids is None:
    control_ids = []
  if chart_options is not None and 'variables' in chart_options:
    controls = chart_options['variables']
    del chart_options['variables']  # Just to make sure GCharts doesn't see them.
    controls_html, defaults, ids = parse_control_options(controls)
    # We augment what we are passed so that in principle we can have controls that are
    # shared by charts as well as controls that are specific to a chart.
    control_defaults.update(defaults)
    control_ids.extend(ids)

  _HTML_TEMPLATE = """
    <div class="bqgc-container">
      {controls}
      <div class="bqgc {extra_class}" id="{id}">
      </div>
    </div>
    <script>

      require.config({{
        paths: {{
          d3: '//cdnjs.cloudflare.com/ajax/libs/d3/3.4.13/d3',
          plotly: 'https://cdn.plot.ly/plotly-1.5.1.min.js?noext',
          jquery: '//ajax.googleapis.com/ajax/libs/jquery/2.0.0/jquery.min'
        }},
        map: {{
          '*': {{
            datalab: 'nbextensions/gcpdatalab'
          }}
        }},
        shim: {{
          plotly: {{
            deps: ['d3', 'jquery'],
            exports: 'plotly'
          }}
        }}
      }});

      require(['datalab/charting',
               'datalab/element!{id}',
               'base/js/events',
               'datalab/style!/nbextensions/gcpdatalab/charting.css'
              ],
        function(charts, dom, events) {{
          charts.render(
              '{driver}',
              dom,
              events,
              '{chart_type}',
              {control_ids},
              {data},
              {options},
              {refresh_data},
              {refresh_interval},
              {total_rows});
          }}
        );
    </script>
  """
  count = 25 if chart_type == 'paged_table' else -1
  data, total_count = get_data(source, fields, control_defaults, 0, count, schema)
  if refresh_data is None:
    if isinstance(source, basestring):
      source_index = get_data_source_index(source)
      refresh_data = {'source_index': source_index, 'name': source_index}
    else:
      refresh_data = {'name': 'raw data'}
  refresh_data['fields'] = fields

  # TODO(gram): check if we need to augment env with user_ns
  return _HTML_TEMPLATE \
      .format(driver=driver_name,
              controls=controls_html,
              id=div_id,
              chart_type=chart_type,
              extra_class=" bqgc-controlled" if len(controls_html) else '',
              data=json.dumps(data, cls=google.datalab.utils.JSONEncoder),
              options=json.dumps(chart_options, cls=google.datalab.utils.JSONEncoder),
              refresh_data=json.dumps(refresh_data, cls=google.datalab.utils.JSONEncoder),
              refresh_interval=refresh_interval,
              control_ids=str(control_ids),
              total_rows=total_count)
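A usage sketch under the docstring's contract; the driver and chart-type strings here are assumptions, and passing raw list data means the refresh metadata falls back to 'raw data':

# hypothetical column-ordered rows
data = [['Jan', 5], ['Feb', 9], ['Mar', 7]]
html = chart_html('gcharts', 'columns', data,
                  chart_options={'height': 300},
                  fields='month,sales')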
Example #50
        mean_result, single_results = calculate_image_overlap(
            validation_datasets[validation_dataset_name],
            current_map_filename,
            current_source_labelmap_filename,
            current_target_labelmap_filename,
            current_warped_labelmap_filename,
            source_id,
            target_id,
            use_sym_links=not args.do_not_use_symlinks)

        validation_results['source_id'].append(source_id)
        validation_results['target_id'].append(target_id)
        validation_results['mean_target_overlap'].append(mean_result)
        validation_results['single_results'].append(single_results)

        print('mean label overlap for ' + str(source_id) + ' -> ' +
              str(target_id) + ': ' + str(mean_result))
        if save_results:
            res_file.write(
                str(source_id) + ', ' + str(target_id) + ', ' +
                str(mean_result) + '\n')

    mean_target_overlap_results = np.array(
        validation_results['mean_target_overlap'])
    print('\nOverall results:')
    print('min = ' + str(mean_target_overlap_results.min()))
    print('max = ' + str(mean_target_overlap_results.max()))
    print('mean = ' + str(mean_target_overlap_results.mean()))
    print('median = ' + str(np.percentile(mean_target_overlap_results, 50)))
    print('\n')
Example #51
    def processAlgorithm(self, context, feedback):
        layer = self.getParameterValue(self.INPUT_LAYER)
        mapping = self.getParameterValue(self.FIELDS_MAPPING)
        output = self.getOutputFromName(self.OUTPUT_LAYER)

        layer = dataobjects.getLayerFromString(layer)
        fields = []
        expressions = []

        da = QgsDistanceArea()
        da.setSourceCrs(layer.crs())
        da.setEllipsoid(QgsProject.instance().ellipsoid())

        exp_context = layer.createExpressionContext()

        for field_def in mapping:
            fields.append(
                QgsField(name=field_def['name'],
                         type=field_def['type'],
                         len=field_def['length'],
                         prec=field_def['precision']))

            expression = QgsExpression(field_def['expression'])
            expression.setGeomCalculator(da)
            expression.setDistanceUnits(QgsProject.instance().distanceUnits())
            expression.setAreaUnits(QgsProject.instance().areaUnits())
            expression.prepare(exp_context)
            if expression.hasParserError():
                raise GeoAlgorithmExecutionException(
                    self.tr(u'Parser error in expression "{}": {}').format(
                        str(expression.expression()),
                        str(expression.parserErrorString())))
            expressions.append(expression)

        writer = output.getVectorWriter(fields, layer.wkbType(), layer.crs(),
                                        context)

        # Create output vector layer with new attributes
        error_exp = None
        inFeat = QgsFeature()
        outFeat = QgsFeature()
        features = QgsProcessingUtils.getFeatures(layer, context)
        count = QgsProcessingUtils.featureCount(layer, context)
        if count > 0:
            total = 100.0 / count
            for current, inFeat in enumerate(features):
                rownum = current + 1

                geometry = inFeat.geometry()
                outFeat.setGeometry(geometry)

                attrs = []
                for i in range(0, len(mapping)):
                    field_def = mapping[i]
                    expression = expressions[i]
                    exp_context.setFeature(inFeat)
                    exp_context.lastScope().setVariable("row_number", rownum)
                    value = expression.evaluate(exp_context)
                    if expression.hasEvalError():
                        error_exp = expression
                        break

                    attrs.append(value)
                outFeat.setAttributes(attrs)

                writer.addFeature(outFeat)

                feedback.setProgress(int(current * total))
        else:
            feedback.setProgress(100)

        del writer

        if error_exp is not None:
            raise GeoAlgorithmExecutionException(
                self.tr(u'Evaluation error in expression "{}": {}').format(
                    str(error_exp.expression()),
                    str(error_exp.evalErrorString())))
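The FIELDS_MAPPING value consumed above is a list of dicts carrying `name`, `type`, `length`, `precision`, and `expression` keys; a hypothetical two-field mapping:

from qgis.PyQt.QtCore import QVariant

mapping = [
    {'name': 'area_km2', 'type': QVariant.Double, 'length': 20,
     'precision': 3, 'expression': '$area / 1000000'},
    {'name': 'row', 'type': QVariant.Int, 'length': 10,
     'precision': 0, 'expression': '@row_number'},
]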
Example #52
File: mixins.py  Project: einarfelix/xl
 def mother_tongues(self, ar):
     self.load_language_knowledge()
     return ' '.join([str(lng) for lng in self._mother_tongues])
Example #53
    def download(self, engine=None, debug=False):
        Script.download(self, engine, debug)

        engine = self.engine
        csv_files = []
        request_src = "http://www.data-retriever.org/"
        base_url = "http://www.usanpn.org/npn_portal/observations/getObservations.xml?start_date={startYear}&end_date={endYear_date}&request_src={request_src}"
        header_values = ["observation_id",
                         "update_datetime",
                         "site_id",
                         "latitude",
                         "longitude",
                         "elevation_in_meters",
                         "state",
                         "species_id",
                         "genus",
                         "species",
                         "common_name",
                         "kingdom",
                         "individual_id",
                         "phenophase_id",
                         "phenophase_description",
                         "observation_date",
                         "day_of_year",
                         "phenophase_status",
                         "intensity_category_id",
                         "intensity_value",
                         "abundance_value"
                         ]

        columns = [("record_id", ("pk-auto",)),
                   ("observation_id", ("int",)),  # subsequently refered to as "status record"
                   ("update_datetime", ("char",)),
                   ("site_id", ("int",)),
                   ("latitude", ("double",)),
                   ("longitude", ("double",)),
                   ("elevation_in_meters", ("char",)),
                   ("state", ("char",)),
                   ("species_id", ("int",)),
                   ("genus", ("char",)),
                   ("species", ("char",)),
                   ("common_name", ("char",)),
                   ("kingdom", ("char",)),  # skip kingdom
                   ("individual_id", ("char",)),
                   ("phenophase_id", ("int",)),
                   ("phenophase_description", ("char",)),
                   ("observation_date", ("char",)),
                   ("day_of_year", ("char",)),
                   ("phenophase_status", ("char",)),
                   ("intensity_category_id", ("char",)),
                   ("intensity_value", ("char",)),
                   ("abundance_value", ("char",))
                   ]

        start_date = datetime.date(2009, 1, 1)
        end_date = datetime.date.today()

        while start_date < end_date:
            to_date = start_date + datetime.timedelta(90)
            if to_date >= end_date:
                data_url = base_url.format(startYear=str(start_date), endYear_date=str(end_date),
                                           request_src=request_src)
            else:
                data_url = base_url.format(startYear=str(start_date), endYear_date=str(to_date),
                                           request_src=request_src)

            xml_file_name = '{}'.format(start_date) + ".xml"
            engine.download_file(data_url, xml_file_name)

            # Create csv files for 3 months
            csv_observation = '{}'.format(start_date) + ".csv"
            csv_files.append(csv_observation)
            csv_buff = open_fw(engine.format_filename(csv_observation))
            csv_writer = open_csvw(csv_buff)

            csv_writer.writerow(header_values)

            # Parse xml to read data
            file_read = ""
            fname = DATA_WRITE_PATH.strip('{dataset}') + 'NPN/' + xml_file_name
            with open(fname, 'r') as fp1:
                file_read = fp1.read()

            root = ET.fromstring(file_read)

            # Map each attribute name to its column position once per window
            index_map = {val: i for i, val in enumerate(header_values)}
            for elements in root:
                diction = sorted(elements.attrib.items(), key=lambda pair: index_map[pair[0]])
                csv_writer.writerow([x[1] for x in diction])

            csv_buff.close()
            start_date = to_date + datetime.timedelta(1)

        # Create table
        table = Table('observations', delimiter=',', pk='record_id', contains_pk=True)
        table.columns = columns
        engine.table = table
        engine.create_table()
        for data_file in csv_files:
            engine.insert_data_from_file(engine.find_file(data_file))
        return engine
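The request loop pages the API in 90-day windows and clamps the final window to today's date; stripped of the I/O, the boundary handling is:

import datetime

start = datetime.date(2009, 1, 1)
end = datetime.date(2009, 7, 1)           # hypothetical, instead of date.today()
while start < end:
    to_date = start + datetime.timedelta(90)
    window_end = end if to_date >= end else to_date
    print(start, '->', window_end)        # one getObservations request per window
    start = to_date + datetime.timedelta(1)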
Example #54
 def getValueAsCommandLineParameter(self):
     return '"' + str(self.value) + '"'
Example #55
    def __initMachineStats(self, pathCpuInfo=None):
        """Updates static machine information during initialization"""
        self.__renderHost.name = self.getHostname()
        self.__renderHost.boot_time = self.getBootTime()
        self.__renderHost.facility = rqd.rqconstants.DEFAULT_FACILITY
        self.__renderHost.attributes['SP_OS'] = rqd.rqconstants.SP_OS

        self.updateMachineStats()

        __numProcs = __totalCores = 0
        hyperthreadingMultiplier = 1  # overwritten below when SMT is detected
        if platform.system() == "Linux" or pathCpuInfo is not None:
            # Reads static information for mcp
            mcpStat = os.statvfs(self.getTempPath())
            self.__renderHost.total_mcp = mcpStat.f_blocks * mcpStat.f_frsize // KILOBYTE

            # Reads static information from /proc/cpuinfo
            with open(pathCpuInfo or rqd.rqconstants.PATH_CPUINFO,
                      "r") as cpuinfoFile:
                singleCore = {}
                procsFound = []
                for line in cpuinfoFile:
                    lineList = line.strip().replace("\t", "").split(": ")
                    # A normal entry added to the singleCore dictionary
                    if len(lineList) >= 2:
                        singleCore[lineList[0]] = lineList[1]
                    # The end of a processor block
                    elif lineList == ['']:
                        # Check for hyper-threading
                        hyperthreadingMultiplier = (
                            int(singleCore.get('siblings', '1')) //
                            int(singleCore.get('cpu cores', '1')))

                        __totalCores += rqd.rqconstants.CORE_VALUE
                        if "core id" in singleCore \
                           and "physical id" in singleCore \
                           and not singleCore["physical id"] in procsFound:
                            procsFound.append(singleCore["physical id"])
                            __numProcs += 1
                        elif "core id" not in singleCore:
                            __numProcs += 1
                        singleCore = {}
                    # An entry without data
                    elif len(lineList) == 1:
                        singleCore[lineList[0]] = ""
        else:
            hyperthreadingMultiplier = 1

        if platform.system() == 'Windows':
            # Windows memory information
            stat = self.getWindowsMemory()
            TEMP_DEFAULT = 1048576
            self.__renderHost.total_mcp = TEMP_DEFAULT
            self.__renderHost.total_mem = int(stat.ullTotalPhys / 1024)
            self.__renderHost.total_swap = int(stat.ullTotalPageFile / 1024)

            # Windows CPU information
            import psutil
            logical_core_count = psutil.cpu_count(logical=True)
            actual_core_count = psutil.cpu_count(logical=False)
            hyperthreadingMultiplier = logical_core_count // actual_core_count

            __totalCores = logical_core_count * rqd.rqconstants.CORE_VALUE
            __numProcs = 1  # TODO: figure out how to count sockets in Python

        # All other systems will just have one proc/core
        if not __numProcs or not __totalCores:
            __numProcs = 1
            __totalCores = rqd.rqconstants.CORE_VALUE

        if rqd.rqconstants.OVERRIDE_MEMORY is not None:
            log.warning("Manually overriding the total memory")
            self.__renderHost.total_mem = rqd.rqconstants.OVERRIDE_MEMORY

        if rqd.rqconstants.OVERRIDE_CORES is not None:
            log.warning("Manually overriding the number of reported cores")
            __totalCores = rqd.rqconstants.OVERRIDE_CORES * rqd.rqconstants.CORE_VALUE

        if rqd.rqconstants.OVERRIDE_PROCS is not None:
            log.warning("Manually overriding the number of reported procs")
            __numProcs = rqd.rqconstants.OVERRIDE_PROCS

        # Don't report/reserve cores added due to hyperthreading
        __totalCores = __totalCores // hyperthreadingMultiplier

        self.__coreInfo.idle_cores = __totalCores
        self.__coreInfo.total_cores = __totalCores
        self.__renderHost.num_procs = __numProcs
        self.__renderHost.cores_per_proc = __totalCores // __numProcs

        if hyperthreadingMultiplier > 1:
            self.__renderHost.attributes['hyperthreadingMultiplier'] = str(
                hyperthreadingMultiplier)
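The whole core accounting pivots on one ratio from /proc/cpuinfo: `siblings` (logical CPUs per package) over `cpu cores` (physical cores per package). A worked sketch with hypothetical values for a two-socket, hyper-threaded host; the CORE_VALUE scale factor of 100 is an assumption:

CORE_VALUE = 100                          # assumed scale factor
siblings, cpu_cores = 16, 8               # from one /proc/cpuinfo block
ht_multiplier = siblings // cpu_cores     # 2

logical_cpus, sockets = 32, 2
total_cores = logical_cpus * CORE_VALUE // ht_multiplier   # 1600, physical only
cores_per_proc = total_cores // sockets                    # 800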
Example #56
    def getConsoleCommands(self):
        inLayer = self.getParameterValue(self.INPUT)
        ogrLayer = ogrConnectionString(inLayer)[1:-1]
        noData = self.getParameterValue(self.NO_DATA)
        if noData is not None:
            noData = str(noData)
        jpegcompression = str(self.getParameterValue(self.JPEGCOMPRESSION))
        predictor = str(self.getParameterValue(self.PREDICTOR))
        zlevel = str(self.getParameterValue(self.ZLEVEL))
        tiled = str(self.getParameterValue(self.TILED))
        compress = self.COMPRESSTYPE[self.getParameterValue(self.COMPRESS)]
        bigtiff = self.BIGTIFFTYPE[self.getParameterValue(self.BIGTIFF)]
        tfw = str(self.getParameterValue(self.TFW))
        out = self.getOutputValue(self.OUTPUT)
        extra = self.getParameterValue(self.EXTRA)
        if extra is not None:
            extra = str(extra)
        rastext = str(self.getParameterValue(self.RAST_EXT))

        arguments = []
        arguments.append('-a')
        arguments.append(str(self.getParameterValue(self.FIELD)))

        arguments.append('-ot')
        arguments.append(self.TYPE[self.getParameterValue(self.RTYPE)])
        dimType = self.getParameterValue(self.DIMENSIONS)
        arguments.append('-of')
        arguments.append(GdalUtils.getFormatShortNameFromFilename(out))

        regionCoords = rastext.split(',')
        try:
            rastext = []
            rastext.append('-te')
            rastext.append(regionCoords[0])
            rastext.append(regionCoords[2])
            rastext.append(regionCoords[1])
            rastext.append(regionCoords[3])
        except IndexError:
            rastext = []
        if rastext:
            arguments.extend(rastext)

        if dimType == 0:
            # size in pixels
            arguments.append('-ts')
            arguments.append(str(self.getParameterValue(self.WIDTH)))
            arguments.append(str(self.getParameterValue(self.HEIGHT)))
        else:
            # resolution in map units per pixel
            arguments.append('-tr')
            arguments.append(str(self.getParameterValue(self.WIDTH)))
            arguments.append(str(self.getParameterValue(self.HEIGHT)))

        if noData and len(noData) > 0:
            arguments.append('-a_nodata')
            arguments.append(noData)

        if (GdalUtils.getFormatShortNameFromFilename(out) == "GTiff"):
            arguments.append("-co COMPRESS=" + compress)
            if compress == 'JPEG':
                arguments.append("-co JPEG_QUALITY=" + jpegcompression)
            elif (compress == 'LZW') or (compress == 'DEFLATE'):
                arguments.append("-co PREDICTOR=" + predictor)
            if compress == 'DEFLATE':
                arguments.append("-co ZLEVEL=" + zlevel)
            if tiled == "True":
                arguments.append("-co TILED=YES")
            if tfw == "True":
                arguments.append("-co TFW=YES")
            if len(bigtiff) > 0:
                arguments.append("-co BIGTIFF=" + bigtiff)
        if extra and len(extra) > 0:
            arguments.append(extra)
        arguments.append('-l')

        arguments.append(ogrLayerName(inLayer))
        arguments.append(ogrLayer)

        arguments.append(str(self.getOutputValue(self.OUTPUT)))
        return ['gdal_rasterize', GdalUtils.escapeAndJoin(arguments)]
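For concreteness, with a hypothetical burn field DN, a 1000x1000 pixel GTiff output, and DEFLATE compression, the returned pair would resemble:

['gdal_rasterize',
 '-a DN -ot Float32 -of GTiff -ts 1000 1000 '
 '-a_nodata 0 -co COMPRESS=DEFLATE -co PREDICTOR=1 -co ZLEVEL=6 '
 '-l parcels /data/parcels.shp /data/parcels.tif']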
Example #57
def lineno():
    """Returns a violation message tagged with the caller's line number."""
    return ' - Violation - line number: ' + str(inspect.currentframe().f_back.f_lineno)
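A hypothetical use inside a style or compliance check:

import inspect

def check_port(port):
    if port < 1024:
        # e.g. " - Violation - line number: 5"
        print('privileged port' + lineno())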
Example #58
File: ConvexHull.py  Project: zgustaf/QGIS
    def processAlgorithm(self, context, feedback):
        layer = dataobjects.getLayerFromString(
            self.getParameterValue(self.INPUT))
        useField = self.getParameterValue(self.METHOD) == 1
        fieldName = self.getParameterValue(self.FIELD)

        f = QgsField('value', QVariant.String, '', 255)
        if useField:
            index = layer.fields().lookupField(fieldName)
            fType = layer.fields()[index].type()
            if fType in [
                    QVariant.Int, QVariant.UInt, QVariant.LongLong,
                    QVariant.ULongLong
            ]:
                f.setType(fType)
                f.setLength(20)
            elif fType == QVariant.Double:
                f.setType(QVariant.Double)
                f.setLength(20)
                f.setPrecision(6)
            else:
                f.setType(QVariant.String)
                f.setLength(255)

        fields = [
            QgsField('id', QVariant.Int, '', 20), f,
            QgsField('area', QVariant.Double, '', 20, 6),
            QgsField('perim', QVariant.Double, '', 20, 6)
        ]

        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            fields, QgsWkbTypes.Polygon, layer.crs(), context)

        outFeat = QgsFeature()
        inGeom = QgsGeometry()
        outGeom = QgsGeometry()

        fid = 0
        val = None
        features = QgsProcessingUtils.getFeatures(layer, context)
        if useField:
            unique = layer.uniqueValues(index)
            current = 0
            total = 100.0 / (QgsProcessingUtils.featureCount(layer, context) *
                             len(unique))
            for i in unique:
                first = True
                hull = []
                features = QgsProcessingUtils.getFeatures(layer, context)
                for f in features:
                    idVar = f[fieldName]
                    if str(idVar).strip() == str(i).strip():
                        if first:
                            val = idVar
                            first = False

                        inGeom = f.geometry()
                        points = vector.extractPoints(inGeom)
                        hull.extend(points)
                    current += 1
                    feedback.setProgress(int(current * total))

                if len(hull) >= 3:
                    tmpGeom = QgsGeometry(outGeom.fromMultiPoint(hull))
                    try:
                        outGeom = tmpGeom.convexHull()
                        (area, perim) = vector.simpleMeasure(outGeom)
                        outFeat.setGeometry(outGeom)
                        outFeat.setAttributes([fid, val, area, perim])
                        writer.addFeature(outFeat)
                    except:
                        raise GeoAlgorithmExecutionException(
                            self.tr('Exception while computing convex hull'))
                fid += 1
        else:
            hull = []
            total = 100.0 / layer.featureCount()
            features = QgsProcessingUtils.getFeatures(layer, context)
            for current, f in enumerate(features):
                inGeom = f.geometry()
                points = vector.extractPoints(inGeom)
                hull.extend(points)
                feedback.setProgress(int(current * total))

            tmpGeom = QgsGeometry(outGeom.fromMultiPoint(hull))
            try:
                outGeom = tmpGeom.convexHull()
                (area, perim) = vector.simpleMeasure(outGeom)
                outFeat.setGeometry(outGeom)
                outFeat.setAttributes([0, 'all', area, perim])
                writer.addFeature(outFeat)
            except:
                raise GeoAlgorithmExecutionException(
                    self.tr('Exception while computing convex hull'))

        del writer
Example #59
    def getInstalledPlugin(self, key, path, readOnly, testLoad=True):
        """ get the metadata of an installed plugin """
        def metadataParser(fct):
            """ plugin metadata parser reimplemented from qgis.utils
                for better control over which module is examined
                in case an installed plugin is masking a core one """
            global errorDetails
            cp = configparser.ConfigParser()
            try:
                with codecs.open(metadataFile, "r", "utf8") as f:
                    cp.read_file(f)
                return cp.get('general', fct)
            except Exception as e:
                if not errorDetails:
                    errorDetails = e.args[0]  # set to the first problem
                return ""

        def pluginMetadata(fct):
            """ calls metadataParser for the current l10n.
                If that fails, falls back to the standard metadata """
            locale = QLocale.system().name()
            if locale and fct in translatableAttributes:
                value = metadataParser("%s[%s]" % (fct, locale))
                if value:
                    return value
                value = metadataParser("%s[%s]" % (fct, locale.split("_")[0]))
                if value:
                    return value
            return metadataParser(fct)

        if not QDir(path).exists():
            return

        global errorDetails  # to communicate with the metadataParser fn
        plugin = dict()
        error = ""
        errorDetails = ""
        version = None

        metadataFile = os.path.join(path, 'metadata.txt')
        if os.path.exists(metadataFile):
            version = normalizeVersion(pluginMetadata("version"))

        if version:
            qgisMinimumVersion = pluginMetadata("qgisMinimumVersion").strip()
            if not qgisMinimumVersion:
                qgisMinimumVersion = "0"
            qgisMaximumVersion = pluginMetadata("qgisMaximumVersion").strip()
            if not qgisMaximumVersion:
                qgisMaximumVersion = qgisMinimumVersion[0] + ".99"
            # if compatible, add the plugin to the list
            if not isCompatible(Qgis.QGIS_VERSION, qgisMinimumVersion, qgisMaximumVersion):
                error = "incompatible"
                errorDetails = "%s - %s" % (qgisMinimumVersion, qgisMaximumVersion)
            elif testLoad:
                # only testLoad if compatible version
                try:
                    pkg = __import__(key)
                    reload(pkg)
                    pkg.classFactory(iface)
                except Exception as e:
                    error = "broken"
                    errorDetails = str(e.args[0])
                except SystemExit as e:
                    error = "broken"
                    errorDetails = QCoreApplication.translate("QgsPluginInstaller", "The plugin exited with error status: {0}").format(e.args[0])
                except:
                    error = "broken"
                    errorDetails = QCoreApplication.translate("QgsPluginInstaller", "Unknown error")
        elif not os.path.exists(metadataFile):
            error = "broken"
            errorDetails = QCoreApplication.translate("QgsPluginInstaller", "Missing metadata file")
        else:
            error = "broken"
            e = errorDetails
            errorDetails = QCoreApplication.translate("QgsPluginInstaller", u"Error reading metadata")
            if e:
                errorDetails += ": " + e

        if not version:
            version = "?"

        if error[:16] == "No module named ":
            mona = error.replace("No module named ", "")
            if mona != key:
                error = "dependent"
                errorDetails = mona

        icon = pluginMetadata("icon")
        if QFileInfo(icon).isRelative():
            icon = path + "/" + icon

        plugin = {
            "id": key,
            "plugin_id": None,
            "name": pluginMetadata("name") or key,
            "description": pluginMetadata("description"),
            "about": pluginMetadata("about"),
            "icon": icon,
            "category": pluginMetadata("category"),
            "tags": pluginMetadata("tags"),
            "changelog": pluginMetadata("changelog"),
            "author_name": pluginMetadata("author_name") or pluginMetadata("author"),
            "author_email": pluginMetadata("email"),
            "homepage": pluginMetadata("homepage"),
            "tracker": pluginMetadata("tracker"),
            "code_repository": pluginMetadata("repository"),
            "version_installed": version,
            "library": path,
            "pythonic": True,
            "experimental": pluginMetadata("experimental").strip().upper() in ["TRUE", "YES"],
            "deprecated": pluginMetadata("deprecated").strip().upper() in ["TRUE", "YES"],
            "trusted": False,
            "version_available": "",
            "zip_repository": "",
            "download_url": path,      # warning: local path as url!
            "filename": "",
            "downloads": "",
            "average_vote": "",
            "rating_votes": "",
            "available": False,     # Will be overwritten, if any available version found.
            "installed": True,
            "status": "orphan",  # Will be overwritten, if any available version found.
            "error": error,
            "error_details": errorDetails,
            "readonly": readOnly}
        return plugin
Example #60
    def rssUpdate(self, frames):
        """Updates the rss and maxrss for all running frames"""
        if platform.system() != 'Linux':
            return

        pids = {}
        for pid in os.listdir("/proc"):
            if pid.isdigit():
                try:
                    with open("/proc/%s/stat" % pid, "r") as statFile:
                        statFields = statFile.read().split()

                    # See "man proc"
                    pids[pid] = {
                        "session": statFields[5],
                        "vsize": statFields[22],
                        "rss": statFields[23],
                        # These are needed to compute the cpu used
                        "utime": statFields[13],
                        "stime": statFields[14],
                        "cutime": statFields[15],
                        "cstime": statFields[16],
                        # The time in jiffies the process started
                        # after system boot.
                        "start_time": statFields[21],
                    }

                except Exception as e:
                    log.exception('failed to read stat file for pid %s' % pid)

        try:
            now = int(time.time())
            pidData = {"time": now}
            bootTime = self.getBootTime()

            values = list(frames.values())

            for frame in values:
                if frame.pid > 0:
                    session = str(frame.pid)
                    rss = 0
                    vsize = 0
                    pcpu = 0
                    for pid, data in pids.items():
                        if data["session"] == session:
                            try:
                                rss += int(data["rss"])
                                vsize += int(data["vsize"])

                                # jiffies used by this process; the last two
                                # terms count exited children (cutime/cstime)
                                totalTime = int(data["utime"]) + \
                                            int(data["stime"]) + \
                                            int(data["cutime"]) + \
                                            int(data["cstime"])

                                # Seconds of process life, boot time is already in seconds
                                seconds = now - bootTime - \
                                          float(data["start_time"]) / rqd.rqconstants.SYS_HERTZ
                                if seconds:
                                    if pid in self.__pidHistory:
                                        # Percent cpu using decaying average, 50% from 10 seconds ago, 50% from last 10 seconds:
                                        oldTotalTime, oldSeconds, oldPidPcpu = self.__pidHistory[
                                            pid]
                                        # only update if wall time has elapsed
                                        # since the last sample
                                        if seconds != oldSeconds:
                                            pidPcpu = (totalTime - oldTotalTime
                                                       ) / float(seconds -
                                                                 oldSeconds)
                                            pcpu += (oldPidPcpu +
                                                     pidPcpu) / 2  # %cpu
                                            pidData[
                                                pid] = totalTime, seconds, pidPcpu
                                    else:
                                        pidPcpu = totalTime / seconds
                                        pcpu += pidPcpu
                                        pidData[
                                            pid] = totalTime, seconds, pidPcpu

                            except Exception as e:
                                log.warning('Failure with pid rss update due to: %s at %s' % \
                                            (e, traceback.extract_tb(sys.exc_info()[2])))

                    rss = (rss * resource.getpagesize()) // 1024
                    vsize = int(vsize / 1024)

                    frame.rss = rss
                    frame.maxRss = max(rss, frame.maxRss)

                    if os.path.exists(frame.runFrame.log_dir_file):
                        stat = os.stat(frame.runFrame.log_dir_file).st_mtime
                        frame.lluTime = int(stat)

                    frame.vsize = vsize
                    frame.maxVsize = max(vsize, frame.maxVsize)

                    frame.runFrame.attributes["pcpu"] = str(pcpu)

            # Store the current data for the next check
            self.__pidHistory = pidData

        except Exception as e:
            log.exception('Failure with rss update due to: {0}'.format(e))
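Isolated from the I/O, the per-pid %CPU update above is a 50/50 decaying average of jiffies consumed per second of wall time; a minimal sketch, assuming the same (totalTime, seconds, pcpu) history triples:

def decayed_pcpu(total_time, seconds, history, pid):
    """Return the smoothed %cpu for one pid and record the new sample."""
    if pid in history:
        old_total, old_seconds, old_pcpu = history[pid]
        if seconds == old_seconds:
            # the original adds nothing here; we reuse the last estimate
            return old_pcpu
        delta = (total_time - old_total) / float(seconds - old_seconds)
        pcpu = (old_pcpu + delta) / 2             # 50% old, 50% new
    else:
        pcpu = total_time / float(seconds)        # first sample: lifetime average
    history[pid] = (total_time, seconds, pcpu)
    return pcpu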