Code Example #1
File: commands.py  Project: jkeifer/pyHytemporal
def plot_points(multidateraster, pointfile, startdoy, doyinterval):
    """

    """
    import os
    from utils import unique_name
    from plotting import PixelPlot
    from core import pixel as pixelObject
    from vectorFunctions import get_px_coords_from_shapefile
    from imageFunctions import openImage

    outpath = unique_name(os.path.dirname(multidateraster), "plots", ext=".pdf", usetime=True)

    coords = get_px_coords_from_shapefile(multidateraster, pointfile)

    plot = PixelPlot(os.path.dirname(outpath), os.path.basename(outpath))
    raster = openImage(multidateraster)

    for coord in coords:
        pixel = pixelObject(coord[0], coord[1])
        pixel.get_pixel_values(raster, startdoy, doyinterval)
        plot.add_pixel(pixel, closefigure=True)

    plot.close_plot()
    raster = None
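
Each of these examples calls a project-specific unique_name helper, and the call signature varies between projects (a full path, a directory plus base name and extension, a candidate name plus a set of names already in use, and so on). As a rough, hypothetical sketch only, and not pyHytemporal's actual implementation, a helper compatible with the call in Code Example #1, unique_name(directory, basename, ext=..., usetime=True), might look like this:

import os
import time

def unique_name(directory, basename, ext="", usetime=False):
    """Hypothetical sketch: build a non-colliding path under `directory`."""
    if usetime:
        # Append a timestamp so repeated runs produce distinct file names.
        basename = "{0}_{1}".format(basename, time.strftime("%Y%m%d%H%M%S"))
    candidate = os.path.join(directory, basename + ext)
    counter = 1
    while os.path.exists(candidate):
        # Fall back to a numeric suffix if the name is already taken.
        candidate = os.path.join(directory, "{0}_{1}{2}".format(basename, counter, ext))
        counter += 1
    return candidate

The real helpers in these projects differ (Code Example #4 appends an MD5 hash, Code Example #9 picks a name that avoids a set of user-defined names), so treat this only as an illustration of the general pattern.
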
Code Example #2
File: Filer.py  Project: reece/docdepot
    def refile_error(self, src):
        # TODO: errors should be a Filer
        # and moved elsewhere. Perhaps FilerMaster should
        # override refile_error and use FilerError to refile
        root, ext = os.path.splitext(src)
        dst = os.path.join(self.files_path, self.err_dir,
                           os.path.basename(src))
        if os.path.exists(dst):
            dst = utils.unique_name(dst)
        try:
            self.refile(src, dsts=[dst], op='mv')
        except Exception as e:
            self.logger.error(e)
Code Example #3
File: interface.py  Project: yasusii/shaling
 def show_binary(self, data, mimetype):
     import tempfile
     prog = config.MIME_HELPER.get(mimetype)
     if not prog:
         raise Interface.Aborted("Cannot display to a terminal: %s" %
                                 mimetype)
     fp = tempfile.NamedTemporaryFile(prefix=unique_name('view'),
                                      dir=config.TMP_DIR)
     fp.write(data)
     fp.flush()
     cmdline = prog % fp.name
     status = os.WEXITSTATUS(os.system(cmdline))
     fp.close()
     if status:
         raise Interface.Aborted("Viewer aborted: (%04x) %r" %
                                 (status, cmdline))
     return
Code Example #4
    def __rename_with_md5(self, path):
        new_path = utils.unique_name(path, self.custom_config.md5_len,
                                     self.custom_config.md5_concat_by)
        if new_path is None:
            return None
        (parent_path, new_file_name) = os.path.split(new_path)

        if new_file_name.startswith('basic'):
            logger.debug('%s %s %s' % (path, parent_path, new_file_name))

        #TODO
        #[x] Rename
        if self.custom_config.delete_source:
            os.rename(path, new_path)
        else:
            shutil.copy(path, new_path)
        return new_file_name
Code Example #5
File: commands.py  Project: jkeifer/pyHytemporal
def plot_sigs(signaturedirectory, outputdirectory, name, signaturename):
    """

    """
    import os
    from utils import find_files, unique_name
    from core import signatureCollection
    from plotting import SignaturePlot

    if not outputdirectory:
        outputdirectory = signaturedirectory

    sigs = find_files(signaturedirectory, "mean.ref")

    if not sigs:
        raise click.BadParameter("Did not find any signature files in the specified directory.")

    if signaturename:
        filteredsigs = []
        for searchstring in signaturename:
            for sig in sigs:
                if searchstring.upper() in sig.upper():
                    filteredsigs.append(sig)
        sigs = filteredsigs

    signatures = signatureCollection()

    for sig in sigs:
        try:
            signatures.add(sig)
        except Exception as e:
            print(e)

        #TODO Fix core temporalSignature to use exceptions so they can be properly handled here

    name, ext = os.path.splitext(name)
    path = unique_name(outputdirectory, name, ext=ext)

    print("Outputting to {0}".format(path))

    plot = SignaturePlot(outputdirectory, os.path.basename(path))
    plot.plot_collection(signatures)
Code Example #6
    def __init__(self,
                 dataset_path,
                 dataset_where_sql=None,
                 view_name=None,
                 force_nonspatial=False):
        """Initialize instance.

        Args:
            dataset_path (str): Path of the dataset.
            dataset_where_sql (str): SQL where-clause for dataset
                subselection.
            view_name (str): Name of the view to create.
            force_nonspatial (bool): Flag that forces a nonspatial view.

        """
        self.name = view_name if view_name else unique_name('view')
        self.dataset_path = dataset_path
        self.dataset_meta = dataset_metadata(dataset_path)
        self.is_spatial = all(
            (self.dataset_meta['is_spatial'], not force_nonspatial))
        self._where_sql = dataset_where_sql
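
Given the constructor arguments documented above, a hypothetical instantiation could look like the following; the dataset path and where-clause are placeholders, not values taken from the source project:

# Hypothetical usage; the path and where-clause below are placeholders.
parcels_view = DatasetView(
    'C:/data/city.gdb/parcels',
    dataset_where_sql="STATUS = 'ACTIVE'",
    view_name=None,            # None falls back to unique_name('view')
    force_nonspatial=False,    # keep the view spatial when the dataset is
)
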
Code Example #7
File: interface.py  Project: yasusii/shaling
    def edit_text(self, kernel, data, original=None):
        import stat

        def modtime(fname):
            return os.stat(fname)[stat.ST_MTIME]

        fname = os.path.join(config.TMP_DIR, unique_name('edit'))
        fp = open(fname, 'wb')
        fp.write(self.to_terminal(data))
        fp.close()
        t0 = modtime(fname)
        cmdline = config.EDITOR % fp.name
        status = os.WEXITSTATUS(os.system(cmdline))
        if config.CHECK_EDITOR_STATUS and status:
            raise Interface.Aborted("Editor aborted: (%04x) %r" %
                                    (status, cmdline))
        if modtime(fname) <= t0:
            raise Interface.Cancelled("Edit cancelled.")
        fp = open(fname, 'rb')
        data = self.from_terminal(fp.read())
        fp.close()
        kernel.submit_message(data, original)
        return
Code Example #8
File: commands.py  Project: jkeifer/pyHytemporal
def extract_signatures(image, shapefiledirectory, startdoy, doyinterval, outputdir, filelabel, plotsigs):
    """
    Extracts temporal signatures for a set of point geometry shapefiles in a specified directory and outputs them to a
    set of .ref files in an output directory.
    """
    import os
    from plotting import SignaturePlot
    from utils import find_files, create_output_dir, unique_name
    from signatureFunctions import get_sigs_in_dir, get_reference_curves

    if outputdir is None:
        outputdir = create_output_dir(os.path.dirname(image), "signatures", usetime=True)

    shapefiles = find_files(shapefiledirectory, ".shp", recursive=False)

    #TODO: Need a method to find only valid shapefiles in the directory

    get_reference_curves(image, shapefiles, startdoy, doyinterval, outdir=outputdir, filepostfix=filelabel)

    if plotsigs:
        path = unique_name(outputdir, "signaturePlot", ext=".pdf")
        sigs = get_sigs_in_dir(outputdir)
        plot = SignaturePlot(outputdir, os.path.basename(path))
        plot.plot_collection(sigs)
Code Example #9
File: function.py  Project: hrittich/firedrake
    def _interpolate_c_kernel(self, expression, to_pts, to_element, fs, coords):
        """Produce a :class:`PyOP2.Kernel` from the c expression provided."""

        coords_space = coords.function_space()
        coords_element = coords_space.fiat_element

        names = {v[0] for v in expression._user_args}

        X = coords_element.tabulate(0, to_pts).values()[0]

        # Produce C array notation of X.
        X_str = "{{"+"},\n{".join([",".join(map(str, x)) for x in X.T])+"}}"

        A = utils.unique_name("A", names)
        X = utils.unique_name("X", names)
        x_ = utils.unique_name("x_", names)
        k = utils.unique_name("k", names)
        d = utils.unique_name("d", names)
        i_ = utils.unique_name("i", names)
        # x is a reserved name.
        x = "x"
        if "x" in names:
            raise ValueError("cannot use 'x' as a user-defined Expression variable")
        ass_exp = [ast.Assign(ast.Symbol(A, (k,), ((len(expression.code), i),)),
                              ast.FlatBlock("%s" % code))
                   for i, code in enumerate(expression.code)]
        vals = {
            "X": X,
            "x": x,
            "x_": x_,
            "k": k,
            "d": d,
            "i": i_,
            "x_array": X_str,
            "dim": coords_space.dim,
            "xndof": coords_element.space_dimension(),
            # FS will always either be a functionspace or
            # vectorfunctionspace, so just accessing dim here is safe
            # (we don't need to go through ufl_element.value_shape())
            "nfdof": to_element.space_dimension() * np.prod(fs.dim, dtype=int),
            "ndof": to_element.space_dimension(),
            "assign_dim": np.prod(expression.value_shape(), dtype=int)
        }
        init = ast.FlatBlock("""
const double %(X)s[%(ndof)d][%(xndof)d] = %(x_array)s;

double %(x)s[%(dim)d];
const double pi = 3.141592653589793;

""" % vals)
        block = ast.FlatBlock("""
for (unsigned int %(d)s=0; %(d)s < %(dim)d; %(d)s++) {
  %(x)s[%(d)s] = 0;
  for (unsigned int %(i)s=0; %(i)s < %(xndof)d; %(i)s++) {
        %(x)s[%(d)s] += %(X)s[%(k)s][%(i)s] * %(x_)s[%(i)s][%(d)s];
  };
};

""" % vals)
        loop = ast.c_for(k, "%(ndof)d" % vals, ast.Block([block] + ass_exp,
                                                         open_scope=True))
        user_args = []
        user_init = []
        for _, arg in expression._user_args:
            if arg.shape == (1, ):
                user_args.append(ast.Decl("double *", "%s_" % arg.name))
                user_init.append(ast.FlatBlock("const double %s = *%s_;" %
                                               (arg.name, arg.name)))
            else:
                user_args.append(ast.Decl("double *", arg.name))
        kernel_code = ast.FunDecl("void", "expression_kernel",
                                  [ast.Decl("double", ast.Symbol(A, (int("%(nfdof)d" % vals),))),
                                   ast.Decl("double**", x_)] + user_args,
                                  ast.Block(user_init + [init, loop],
                                            open_scope=False))
        return op2.Kernel(kernel_code, "expression_kernel")
Code Example #10
def insert_features_from_path(dataset_path,
                              insert_dataset_path,
                              field_names=None,
                              **kwargs):
    """Insert features into dataset from another dataset.

    Args:
        dataset_path (str): Path of the dataset.
        insert_dataset_path (str): Path of dataset to insert features from.
        field_names (iter): Collection of field names to insert. Listed fields must
            be present in both datasets. If field_names is None, all fields will be
            inserted.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        insert_where_sql (str): SQL where-clause for insert-dataset subselection.
        use_edit_session (bool): Flag to perform updates in an edit session. Default is
            False.
        log_level (str): Level to log the function at. Defaults to 'info'.

    Returns:
        collections.Counter: Count of features inserted (under the 'inserted' key).

    """
    kwargs.setdefault('insert_where_sql')
    kwargs.setdefault('use_edit_session', False)
    log = leveled_logger(LOG, kwargs.get('log_level', 'info'))
    log("Start: Insert features into %s from %s.", dataset_path,
        insert_dataset_path)
    meta = {
        'dataset': dataset_metadata(dataset_path),
        'insert': dataset_metadata(insert_dataset_path)
    }
    if field_names is None:
        keys = set.intersection(*(set(
            name.lower() for name in _meta['field_names_tokenized'])
                                  for _meta in meta.values()))
    else:
        keys = set(name.lower() for name in contain(field_names))
    # OIDs & area/length "fields" have no business being part of an update.
    # Geometry itself is handled separately in append function.
    for _meta in meta.values():
        for key in chain(*_meta['field_token'].items()):
            keys.discard(key)
    append_kwargs = {
        'inputs': unique_name('view'),
        'target': dataset_path,
        'schema_type': 'no_test',
        'field_mapping': arcpy.FieldMappings()
    }
    # Create field maps.
    # ArcGIS Pro's no-test append is case-sensitive (verified 1.0-1.1.1).
    # Avoid this problem by using field mapping.
    # BUG-000090970 - ArcGIS Pro 'No test' field mapping in Append tool does
    # not auto-map to the same field name if naming convention differs.
    for key in keys:
        field_map = arcpy.FieldMap()
        field_map.addInputField(insert_dataset_path, key)
        append_kwargs['field_mapping'].addFieldMap(field_map)
    view = DatasetView(
        insert_dataset_path,
        kwargs['insert_where_sql'],
        view_name=append_kwargs['inputs'],
        # Must be nonspatial to append to nonspatial table.
        force_nonspatial=(not meta['dataset']['is_spatial']))
    session = Editor(meta['dataset']['workspace_path'],
                     kwargs['use_edit_session'])
    with view, session:
        arcpy.management.Append(**append_kwargs)
        feature_count = Counter({'inserted': view.count})
    log("%s features inserted.", feature_count['inserted'])
    log("End: Insert.")
    return feature_count
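
Based on the signature and keyword arguments documented in the docstring, a hypothetical call could look like the following; the paths, field names, and where-clause are placeholders:

# Hypothetical usage; paths, field names, and the where-clause are placeholders.
inserted_counts = insert_features_from_path(
    'C:/data/city.gdb/parcels',
    'C:/data/updates.gdb/new_parcels',
    field_names=['parcel_id', 'owner_name'],
    insert_where_sql="LOAD_FLAG = 1",
    use_edit_session=True,
)
print(inserted_counts['inserted'])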