Example #1
def place_record(document_path, delete_original=False):
    """Place deed or record document in RLID repository."""
    doc_id, ext = os.path.splitext(
        os.path.basename(document_path).replace("-", "_"))
    place_path = rlid_record_path(doc_id, ext)
    if place_path:
        # Create bin (if necessary).
        path.create_directory(os.path.dirname(place_path),
                              exist_ok=True,
                              create_parents=True)
        try:
            shutil.copy2(document_path, place_path)
        except IOError:
            placed = False
        else:
            os.chmod(place_path, stat.S_IWRITE)
            placed = True
    else:
        place_path = "{unknown path}"
        placed = False
    LOG.log(
        (logging.INFO if placed else logging.WARNING),
        ("%r placed at %r." if placed else "%r failed to place at %r."),
        os.path.basename(document_path),
        place_path,
    )
    if placed and delete_original:
        os.remove(document_path)
    return "placed" if placed else "failed to place"
Example #2
def send_metadata_to_dataset(record):
    """Send metadata record to the related dataset.

    Args:
        record (MetadataRecord): Metadata record to sync.

    Returns:
        str: Message describing any problems that arose; None if there were no problems.

    """
    message = None
    # Create metadata XML file.
    xml_path = staging_xml_path(record)
    path.create_directory(os.path.dirname(xml_path),
                          exist_ok=True,
                          create_parents=True)
    with NamedTemporaryFile(suffix=".xml", delete=False) as xmlfile:
        try:
            tree = ET.ElementTree(ET.fromstring(record.xml.encode("utf-8")))
        except ET.ParseError as error:
            message = "Bad XML: {}".format(error.message)
            LOG.warning("%s.", message)
        else:
            tree.write(xmlfile, encoding="utf-8", xml_declaration=True)
    # Remove the temporary file whether or not the XML parsed.
    if message is not None:
        os.remove(xmlfile.name)
    else:
        if os.path.exists(xml_path):
            os.remove(xml_path)
        arcpy.conversion.XSLTransform(source=xmlfile.name,
                                      xslt=PATH["xslt"],
                                      output=xml_path)
        os.remove(xmlfile.name)
        LOG.info("Created staging-send XML file.")
        # DBF files (likely an old-style image catalog) cannot receive metadata
        # applied via Arc: just place a copy of the XML file adjacent instead.
        if os.path.splitext(record.dataset_path)[-1].lower() == ".dbf":
            shutil.copyfile(xml_path,
                            os.path.splitext(record.dataset_path)[0] + ".xml")
            LOG.info(
                "Dataset type not syncable: Placed copy of XML file adjacent.")
        else:
            # Push metadata onto dataset.
            try:
                arcpy.conversion.MetadataImporter(source=xml_path,
                                                  target=record.dataset_path)
            except arcpy.ExecuteError:
                message = ("Failed to write metadata to dataset" +
                           " (likely process user has no write-access)")
                LOG.warning("%s. Dataset path: %s", message,
                            record.dataset_path)
    if message is None:
        try:
            arcpy.conversion.UpgradeMetadata(record.dataset_path,
                                             Upgrade_Type="fgdc_to_arcgis")
        except arcpy.ExecuteError:
            message = "Failed to upgrade metadata on record"
            LOG.warning("%s.", message)
    return message
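A hedged usage sketch: sync a batch of records and collect any problem messages (the `records` iterable of MetadataRecord objects is an assumption):

# Hypothetical batch sync; collects problems keyed by dataset path.
problems = {}
for record in records:
    message = send_metadata_to_dataset(record)
    if message:
        problems[record.dataset_path] = message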
Example #3
def geodatabase_backup_schema_etl():
    """Run ETL for geodatabase schema backup."""
    LOG.info("Starting backup of geodatabase schema.")
    for gdb in randomized(database.GISRV106_DATABASES):
        if not gdb.back_up_gdb_schema:
            continue

        # Ensure backup directory is present.
        backup_path = os.path.join(path.SDE_DATA_BACKUP_SHARE, gdb.name,
                                   "Schema")
        path.create_directory(backup_path, exist_ok=True, create_parents=True)
        xml_path = os.path.join(backup_path,
                                "{}_{}.xml".format(gdb.name, timestamp()))
        arcetl.workspace.create_geodatabase_xml_backup(
            geodatabase_path=gdb.path,
            output_path=xml_path,
            include_data=False,
            include_metadata=True,
        )
    LOG.info("Geodatabase schema backup complete.")
Example #4
def geodatabase_backup_build_etl():
    """Run ETL for geodatabase build SQL backup."""
    LOG.info("Starting backup of geodatabase SQL build scripts.")
    for gdb in randomized(database.GISRV106_DATABASES):
        if not gdb.back_up_build_sql:
            continue

        # Ensure backup directory is present.
        backup_path = os.path.join(path.SDE_DATA_BACKUP_SHARE, gdb.name,
                                   "Build_SQL")
        path.create_directory(backup_path, exist_ok=True, create_parents=True)
        sql_path = os.path.join(backup_path,
                                "{}_{}.sql".format(gdb.name, timestamp()))
        LOG.info("Start: Generate build SQL for %s", gdb.name)
        subprocess.check_call(
            "powershell.exe -ExecutionPolicy Bypass"
            " -File {} -instance {} -database {} -output {}".format(
                path.GENERATE_BUILD_SQL, "gisrv106", gdb.name, sql_path))
        LOG.info("Generated at %s", sql_path)
        LOG.info("End: Generate")
    LOG.info("Geodatabase SQL build scripts backup complete.")
Example #5
def return_metadata_to_geoportal(record):
    """Return metadata record to geoportal from related dataset.

    Args:
        record (MetadataRecord): Metadata record to sync.

    Returns:
        str: Message describing any problems that arose; None if there were no problems.

    """
    message = None
    # Create metadata XML file.
    xml_path = staging_xml_path(record)
    path.create_directory(os.path.dirname(xml_path),
                          exist_ok=True,
                          create_parents=True)
    arcpy.conversion.ExportMetadata(
        Source_Metadata=record.dataset_path,
        Output_File=xml_path,
        Translator=PATH["metadata_translator"],
    )
    # Prep XML for posting to Geoportal.
    with io.open(xml_path, mode="r", encoding="utf-8-sig") as xmlfile:
        # Dataset strips the XML declaration & reverts character entities: fix both.
        post_xml = apply_character_entities(
            """<?xml version="1.0" encoding="UTF-8"?>\n""" + xmlfile.read())
    # Update return-file for posterity.
    with io.open(xml_path, mode="w", encoding="utf-8-sig") as xmlfile:
        xmlfile.write(post_xml)
    LOG.info("Created staging-return XML file.")
    # Post to record on Geoportal.
    try:
        record.xml = post_xml
    except pyodbc.OperationalError as error:
        message = "ODBC operational error: {}".format(error)
        LOG.warning("%s.", message)
    else:
        LOG.info("Metadata record synced back to %s.",
                 record.geoportal.database.name)
    return message
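`apply_character_entities` is a project helper not shown here; judging from the comment about reverted character entities, it presumably re-encodes non-ASCII characters as numeric character references. A sketch under that assumption (not the original implementation):

def apply_character_entities(xml_text):
    """Replace non-ASCII characters with numeric character references.

    Assumed behavior, inferred from the comment in
    return_metadata_to_geoportal; the real helper is not shown.
    """
    return xml_text.encode("ascii", errors="xmlcharrefreplace").decode("ascii")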
Example #6
def extract_records(archive_path, archive_original=False):
    """Extract deeds & records archives."""
    extracted = path.extract_archive(archive_path, PATH["staging"])
    LOG.log(
        (logging.INFO if extracted else logging.WARNING),
        ("%r extracted." if extracted else "%r failed to extract."),
        os.path.basename(archive_path),
    )
    if archive_original:
        move_path = os.path.join(
            PATH["staging"],
            ("Extracted_Archives" if extracted else "Invalid_Archives"),
            os.path.basename(archive_path),
        )
        path.create_directory(os.path.dirname(move_path),
                              exist_ok=True,
                              create_parents=True)
        shutil.move(archive_path, move_path)
        LOG.log(
            (logging.INFO if extracted else logging.WARNING),
            ("%r archived at %r." if extracted
             else "%r failed to archive at %r."),
            os.path.basename(move_path),
            move_path,
        )
    return "extracted" if extracted else "failed to extract"
Example #7
def geodatabase_backup_datasets_etl():
    """Run ETL for geodatabase datasets backup."""
    LOG.info("Starting backup of geodatabase datasets.")
    ignore_dataset_names = [
        name.lower() for name in IGNORE_DATASETS_DATA_BACKUP
    ]
    for gdb in randomized(database.GISRV106_DATABASES):
        if not gdb.back_up_gdb_data:
            continue

        # Ensure backup directory is present.
        backup_path = os.path.join(path.SDE_DATA_BACKUP_SHARE, gdb.name,
                                   "Data")
        path.create_directory(backup_path, exist_ok=True, create_parents=True)
        gdb_path = os.path.join(backup_path,
                                "{}_{}.gdb".format(gdb.name, timestamp()))
        arcetl.workspace.create_file_geodatabase(gdb_path)
        for name in arcetl.workspace.dataset_names(gdb.path):
            # Skip databases/datasets on the ignore list: they cause problems,
            # and generally are unimportant or can be restored from SQL backup.
            if name.lower() in ignore_dataset_names:
                LOG.warning("%s listed in ignore-datasets: skipping", name)
                continue

            # Certain patterns indicate datasets that don't need to be backed up (e.g.
            # views of other data).
            if any(pattern.lower() in name.lower()
                   for pattern in IGNORE_PATTERNS_DATA_BACKUP):
                LOG.warning("%s matches ignore-pattern: skipping.", name)
                continue

            source_path = os.path.join(gdb.path, name)
            copy_path = os.path.join(gdb_path, name.split(".")[-1])
            arcetl.dataset.copy(source_path, copy_path)
        arcetl.workspace.compress(gdb_path)
    LOG.info("Geodatabase datasets backup complete.")