def main():
    """Create walk-distance isochrones for the transit stops."""

    args = sys.argv[1:]
    opts = process_options(args)

    start_time = datetime.now().strftime('%I:%M %p')
    print('4) Creating isochrones with walk distance of {0} feet, start '
          'time is: {1}, run time is: ~1.25 minutes...\n'.format(
              opts.walk_distance, start_time))

    # configure arcpy settings
    env.overwriteOutput = True
    checkout_arcgis_extension('Network')

    # Prep stop data
    add_name_field()
    assign_max_zones()
    add_inception_year()

    create_isochrone_fc()
    generate_isochrones(MAX_STOPS, opts.walk_distance)
    add_iso_attributes()

    CheckInExtension('Network')
Example #2
def process():
    # TODO: Add User Error Reporting alerting user of issue with accessing their bucket.
    # Begin Script
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            bucket_url = 'https://s3.{0}.amazonaws.com/{1}/'.format(region, bucket_name)
            f_list = []
            fp_list = []
            '''
            conn = client('s3')  # again assumes boto.cfg setup, assume AWS S3
            
            for key in conn.list_objects(Bucket=bucket_name)['Contents']:
                if not key['Key'].endswith('/') and list_folders is False:
                    f_list.append(key['Key'])
                    fp_list.append(bucket_url + key['Key'])
                if list_folders is True:
                    f_list.append(key['Key'])
                    fp_list.append(bucket_url + key['Key'])
            '''
            s3r = resource('s3')
            bucket_list = [item.key for item in list(s3r.Bucket(bucket_name).objects.all())]
            for key in bucket_list:
                if not key.endswith('/') and list_folders is False:
                    f_list.append(key)
                    fp_list.append(bucket_url + key)
                if list_folders is True:
                    f_list.append(key)
                    fp_list.append(bucket_url + key)


            # Create a Pandas dataframe from the data.
            df = pd.DataFrame({'bucket_url': bucket_url, 'key': f_list, 'full_path': fp_list})

            with pd.ExcelWriter(out_spreadsheet) as writer:
                df.to_excel(writer)
        except NoCredentialsError:
            err_str = 'Detected Boto3 Credentials are not set. see the following instructions ' \
                      'https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html#configuration'
            AddError(err_str)
            raise ValueError(err_str)
        except s3_client.exceptions.NoSuchBucket:
            AddError('AWS bucket %s does not exist' % bucket_name)
            raise ValueError('AWS bucket %s does not exist' % bucket_name)
        # Check back in Image Analyst license
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("ImageAnalyst license is unavailable")
        print("ImageAnalyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
Example #3
def calculate_network_locations(points, network):
    """
    Computes the locations of |points| in |network|.

    |points|: a feature class (points or polygons).
    |network|: a network dataset.
    """
    CheckOutExtension("Network")
    CalculateLocations_na(in_point_features=points,
                          in_network_dataset=network,
                          search_tolerance="5000 Meters",
                          search_criteria=("%s SHAPE; %s SHAPE;" %
                                           network_features(network)),
                          exclude_restricted_elements="INCLUDE")
    CheckInExtension("Network")
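
def locate_stops_example():
    # Hypothetical usage sketch, not one of the original examples: the feature
    # class and network dataset paths below are placeholder assumptions, shown
    # only to illustrate how calculate_network_locations() above might be
    # called before running a network analysis solver.
    stops = r"C:\data\transit.gdb\stops"          # assumed point feature class
    network = r"C:\data\transit.gdb\streets_ND"   # assumed network dataset
    calculate_network_locations(stops, network)
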
def roadsNonMonoCheck():
    try:
        # Check out license
        print('The result of CheckExtension("Highways") is ' + str(CheckExtension("Highways")) + '.')
        if CheckExtension("Highways") == 'Available':
            CheckOutExtension("Highways")
            
            # Do the license check before the deletion, so that you don't
            # remove data and then not put it back in the case that the
            # license is not available.
            from arcpy import DetectNonMonotonicRoutes_locref
            
            if Exists(nonMonotonicOutputGDB):
                try:
                    Delete_management(nonMonotonicOutputGDB)
                except:
                    pass
            else:
                pass
            
            nonMonotonicOutputGDBName = returnGDBOrSDEName(nonMonotonicOutputGDB)
            
            CreateFileGDB_management(mainFolder, nonMonotonicOutputGDBName)
            time.sleep(1)
            
            DetectNonMonotonicRoutes_locref(networkToReview, nonMonotonicOutputFC, "Any", "F_Date", "T_Date", "SourceRouteId")
            
            print("The Roads & Highways Non-Monotonic routes check for " + str(networkToReview) + " has completed.\n")
            
        else:
            print('The Roads & Highways extension is not currently available.')
            print('Skipping R&H Non-Monotonicity check.')
        
    except Exception as Exception1:
        # If an error occurred, print line number and error message
        import traceback, sys
        tb = sys.exc_info()[2]
        print("Line %i" % tb.tb_lineno)
        print(str(Exception1))
        try:
            del Exception1
        except:
            pass
    finally:
        try:
            # Check the license back in
            CheckInExtension("Highways")
        except:
            pass
Example #5
def arcpy_schema_compare(self, base, output_folder_location, test):
    self.queue.put("Importing ArcPy\n\n")
    from arcpy import CheckOutExtension
    from arcpy import CheckInExtension
    from arcpy import env
    CheckOutExtension('Datareviewer')
    self.queue.put("Checking Out Datareviewer Extension\n\n")
    from arcpy import GeodatabaseSchemaCompare_Reviewer
    env.overwriteOutput = True
    self.queue.put('Arcpy and Datareviewer checked out\n\n')
    result = GeodatabaseSchemaCompare_Reviewer(
        base, test, output_folder_location
    )  # THIS IS CAUSING PYTHON TO CRASH ON RE_RUN
    self.queue.put(result.getMessages())
    CheckInExtension('Datareviewer')
    del result
    self.queue.put("\n\nChecking In Datareviewer Extension\n\n")
Example #6
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        subset_image_for_texture(in_image, in_polygon, area, out_image)
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        create_mask(in_raster, in_polygon, out_raster)
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddError

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            from PIL import Image
        except ModuleNotFoundError:
            from arcpy import AddError
            AddError(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            print(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            exit()
        i_list, extent = get_images_and_stats(
            in_mosaic
        )  # Obtain image statistics and info from mosaic for processing
        for i in i_list:  # Check that output folder is not the path of i
            if out_folder == path.dirname(i[0]):
                AddError(
                    "outFolder cannot be the same folder/directory as images referenced in the mosaic dataset"
                )
                exit()
        if not path.exists(out_folder):
            makedirs(out_folder)
        texture_images(i_list, extent, in_texture, in_polygon, out_folder,
                       method, blur_distance)  # Generate Texture-Masked tiles

        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("Image Analyst license is unavailable")
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
def main():
    from arcpy import ExecuteError, GetMessages, CheckOutExtension, CheckExtension, CheckInExtension

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        seamless_texture(inImg, outImg)
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
def las_tiles_to_numpy_pandas(in_lidar_folder, sr, lidar_format, returns,
                              class_codes, format_for_library):
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("3D") == "Available":
            CheckOutExtension("3D")
        else:
            # raise a custom exception
            raise LicenseError

        if not lidar_format.startswith(
                "."):  # Ensure lidar format starts with a period
            lidar_format = ".{}".format(lidar_format)
        supported_lidar_formats = [".las", ".zlas"]
        assert lidar_format in supported_lidar_formats, \
            "LiDAR format {0} unsupported. Ensure LiDAR format is in {1}".format(lidar_format, supported_lidar_formats)

        lidar_tiles = [
            f for f in listdir(in_lidar_folder)
            if f.endswith("{}".format(lidar_format))
        ]
        if len(lidar_tiles) < 1:
            AddError("No LiDAR tiles detected in input directory")
        count = 0
        for tile in lidar_tiles:
            AddMessage("processing lidar tile {0} of {1} : {2}".format(
                count + 1, len(lidar_tiles), tile))
            lidar_tile = join(in_lidar_folder, tile)
            las_tile_to_numpy_pandas(lidar_tile, sr, returns, class_codes,
                                     format_for_library)
            count += 1
        AddMessage("processing {} lidar tiles complete".format(count))

        # Check back in 3D Analyst license
        CheckInExtension("3D")
    except LicenseError:
        AddError("3D Analyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
Example #11
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddMessage
    from arcpy.management import BuildPyramids

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            from PIL import Image
        except ModuleNotFoundError:
            from arcpy import AddError
            AddError(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            print(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            exit()

        mask_image(in_image, in_mask, in_texture, out_image, method,
                   blur_distance)
        AddMessage("Building Pyramids")
        BuildPyramids(out_image, -1, "NONE", "NEAREST", "DEFAULT", 75,
                      "OVERWRITE")
        CheckInExtension("ImageAnalyst")

    except LicenseError:
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddError

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        batch_create_tiled_ortho_mosaics(in_folder, image_format, num_bands,
                                         pixel_depth, product_definition,
                                         product_band_definitions, pixel_size,
                                         out_folder)

        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("Image Analyst license is unavailable")
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))
Example #13
def process():
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        # System Parameters
        tile_name = "FileName"

        # Begin Script
        temp_fc = join("in_memory", "temp_fc")
        CopyFeatures(in_fc, temp_fc)
        for f in file_names:
            AddField(temp_fc, f, "TEXT")

        df = pd.read_excel(in_xlsx, index_col=0)

        def attribute_tile(in_feature_class,
                           in_tile_name,
                           in_df,
                           in_name,
                           xlsx_row_name=xlsx_row_name):
            with da.UpdateCursor(in_feature_class,
                                 [in_tile_name, in_name]) as cursor:
                for fc_r in cursor:
                    for df_i, df_r in in_df.iterrows():
                        url = df_r[xlsx_row_name]
                        n = Path(url).stem
                        t_name = fc_r[0]
                        t_n = Path(t_name).stem
                        if n.startswith(in_name) and t_n in n:
                            fc_r[1] = url
                    cursor.updateRow(fc_r)

        # Attribute the LiDAR Derivatives
        for n in file_names:
            attribute_tile(temp_fc, tile_name, df, n)

        def attribute_tile_lidar(in_feature_class,
                                 in_tile_name,
                                 in_df,
                                 in_name,
                                 xlsx_row_name=xlsx_row_name):
            with da.UpdateCursor(in_feature_class,
                                 [in_tile_name, in_name]) as cursor:
                for fc_r in cursor:
                    for df_i, df_r in in_df.iterrows():
                        url = df_r[xlsx_row_name]
                        n = split(url)[1]
                        t_name = fc_r[0]
                        if n == t_name:
                            fc_r[1] = url
                    cursor.updateRow(fc_r)

        # Attribute the LiDAR tile now
        AddField(temp_fc, in_lidar_format, "TEXT")
        attribute_tile_lidar(temp_fc,
                             tile_name,
                             df,
                             in_lidar_format,
                             xlsx_row_name=xlsx_row_name)
        '''
        # Print Fields for debugging/assessing results of above operations
        file_names.append(in_lidar_format)
        print(file_names)
        with da.SearchCursor(temp_fc, file_names) as cursor:
            for fc_r in cursor:
                print(fc_r)
        '''

        # Delete Pandas Dataframe from Memory
        del df

        # Copy in_memory temporary feature class to output location
        CopyFeatures(temp_fc, out_fc)

        # Delete Temporary Feature Class
        Delete(temp_fc)

        # Check back in Image Analyst license
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("ImageAnalyst license is unavailable")
        print("ImageAnalyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
Example #14
def reviewData():
    try:
        print("Starting the Data Reviewer batch job at:\n" +
              str(reviewerBatchJob) + ".")
        print("For the data located in:\n" + str(workspaceToReview) + ".")
        print(
            "If one of the feature classes, Routes or CalPts, does not exist in the place that the"
        )
        print(
            "data reviewer batch job looks for it, then you will get an 'Unknown Error'."
        )
        print(
            "This can be remedied by updating the data reviewer batch job's workspace settings."
        )
        # Test the data reviewer part:
        if CheckExtension("datareviewer") == 'Available':
            print("Extension availability check complete.")
            CheckOutExtension("datareviewer")

            # Checking to see if the output already exists.
            # If so, remove it so that it can be recreated. -- For the errors, might need a better process, so that
            # it's possible to track where the errors were at the start and how things progressed.
            if Exists(reviewerSessionGDB):
                Delete_management(reviewerSessionGDB)
            else:
                pass

            # Create new geodatabase
            # Replace with returnGDBOrSDEPath(reviewerSessionGDB), returnGDBOrSDEName(reviewerSessionGDB)
            # or similar functions
            CreateFileGDB_management(reviewerSessionGDBFolder,
                                     returnGDBOrSDEName(reviewerSessionGDB))

            # Execute EnableDataReviewer
            EnableDataReviewer_Reviewer(reviewerSessionGDB, "#", "#",
                                        "DEFAULTS")

            # Create a new Reviewer session
            ##CreateReviewerSession_Reviewer (reviewer_workspace, session_name, {session_template}, {duplicate_checking}, {store_geometry}, {username}, {version})
            CreateReviewerSession_Reviewer(reviewerSessionGDB, reviewerSession,
                                           "", "NONE", "STORE_GEOMETRY")

            # execute the batch job
            batchJobResult = ExecuteReviewerBatchJob_Reviewer(
                reviewerSessionGDB, sessionReviewerSession, reviewerBatchJob,
                workspaceToReview)

            print("Data Reviewer batch job complete.")

            # get the output table view from the result object
            outputTable = batchJobResult.getOutput(0)

            print("The output table is called " + str(outputTable.name) + "."
                  )  # prints REVBATCHRUNTABLE

            CheckInExtension("datareviewer")

        else:
            print(
                "The 'datareviewer' extension is not available. Skipping checks."
            )

    except Exception as Exception1:
        # If an error occurred, print line number and error message
        import traceback, sys
        tb = sys.exc_info()[2]
        print("Line %i" % tb.tb_lineno)
        print(str(Exception1))
        try:
            del Exception1
        except:
            pass
    finally:
        CheckInExtension("datareviewer")
Example #15
def process():
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("3D") == "Available":
            CheckOutExtension("3D")
        else:
            # raise a custom exception
            raise LicenseError

        # Constants - DO NOT MODIFY
        split_area = "split_area"
        orig_area = "orig_area"

        def calc_area(in_fc, field_name):
            AddField(in_fc, field_name, "DOUBLE")
            with da.UpdateCursor(in_fc, [field_name, "SHAPE@AREA"]) as cursor1:
                for r1 in cursor1:
                    r1[0] = r1[1]
                    cursor1.updateRow(r1)

        def field_exists(in_fc, in_field):
            from arcpy import ListFields
            if in_field in [f.name for f in ListFields(in_fc)]:
                return True
            else:
                return False

        def delete_field_if_exists(in_fc, in_field):
            if field_exists(in_fc, in_field):
                DeleteField(in_fc, in_field)

        assert field_exists(in_buildings, building_fid), \
            "no attribute named {} in feature class".format(building_fid)

        for field in [tile_fid, file_name]:
            delete_field_if_exists(in_buildings, field)

        temp_fp = join("in_memory", "mp_fp")
        ddd.MultiPatchFootprint(in_buildings, temp_fp, "bldg_fid")

        calc_area(in_fc=temp_fp, field_name=orig_area)

        temp_isect = join("in_memory", "temp_isect")
        Intersect(r"{0} #;{1} #".format(temp_fp, in_tiles), temp_isect, "ALL",
                  None, "INPUT")

        # Delete Temporary Multipatch Footprint
        Delete(temp_fp)

        calc_area(in_fc=temp_isect, field_name=split_area)

        temp_isect_asc = join("in_memory", "temp_isect_asc")
        Sort(temp_isect, temp_isect_asc, [[building_fid, "ASCENDING"]])
        # Delete Temporary Intersect Feature Class
        Delete(temp_isect)

        fields = [building_fid, tile_fid, file_name, orig_area, split_area]

        # Generate a list of duplicates
        bldg_list = []
        with da.SearchCursor(temp_isect_asc, building_fid) as cursor2:
            for row in cursor2:
                bldg_list.append(row[0])

        duplicates = [
            item for item, count in Counter(bldg_list).items() if count > 1
        ]

        duplicates_list = []
        for i in duplicates:
            duplicates_list.append([i, bldg_list.count(i)])

        # TODO: Resolve why tile_fid is not showing up below when BuildingFID and TileFID are OID fields. "In_memory" issue
        '''
        # \\ Begin Debug print code
        from arcpy import AddMessage
        fds = [f.name for f in arcpy.ListFields(temp_isect_asc) if f.name in fields]
        AddMessage(fds)
        nfds = [f.name for f in arcpy.ListFields(temp_isect_asc) if f.name not in fields]
        AddMessage(nfds)
        # End Debug print code //
        '''
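        # Resolve buildings that intersect more than one tile: non-duplicated
        # buildings pass straight through, while for each duplicated building
        # ID only the row with the largest (orig_area - split_area) value is
        # kept in final_list.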
        final_list = []
        with da.SearchCursor(temp_isect_asc, fields) as cursor3:
            prev_area = -1
            prev_item_list = []
            item_count = 0
            fcound = 0
            for row in cursor3:
                if row[0] not in duplicates:
                    final_list.append([row[0], row[1], row[2]])
                else:
                    area = row[3] - row[4]
                    index = duplicates.index(row[0])
                    total_items = duplicates_list[index][1]
                    if row[0] == duplicates[
                            0] and item_count == 0:  # Deal with first item differently
                        item_count += 1
                        prev_area = area
                        prev_item_list = [row[0], row[1], row[2]]
                    elif item_count + 1 == total_items:  # Deal with last item in list
                        if prev_area <= area:
                            prev_area = area
                            prev_item_list = [row[0], row[1], row[2]]
                        final_list.append(prev_item_list)
                        item_count = 0
                        prev_area = -1
                        prev_item_list = []
                    elif item_count + 1 != total_items:
                        if prev_area <= area:
                            prev_area = area
                            prev_item_list = [row[0], row[1], row[2]]
                        item_count += 1
        # Append results back to Input Feature Class
        AddField(in_buildings, tile_fid, "LONG")
        AddField(in_buildings, file_name, "TEXT")
        with da.UpdateCursor(in_buildings,
                             [building_fid, tile_fid, file_name]) as cursor:
            for r in cursor:
                for i in final_list:
                    if r[0] == i[0]:
                        r[1] = int(i[1])
                        r[2] = str(i[2])
                cursor.updateRow(r)

        Delete(temp_isect_asc)  # the unsorted intersect was already deleted above
        del bldg_list
        del duplicates_list
        del duplicates

        # Check back in 3D Analyst license
        CheckInExtension("3D")
    except LicenseError:
        AddError("3D Analyst license is unavailable")
        print("3D Analyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
def process():
    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError

        # Constants - DO NOT MODIFY
        supported_folder_extensions = [
            'gdb'
        ]  # Currently not using this.... for future needs...

        def ensure_dir(file_path):
            directory = os.path.dirname(file_path)
            if not os.path.exists(directory):
                os.makedirs(directory)

        def zipper(in_list, out_file_path):
            out_file = '{0}.zip'.format(out_file_path)
            ensure_dir(out_file)
            with zipfile.ZipFile(out_file, 'w') as zipMe:
                for f in in_list:
                    zipMe.write(f, compress_type=zipfile.ZIP_DEFLATED)

        def zipper_gdb(in_gdb, out_file_name):
            assert in_gdb.endswith(
                '.gdb'
            ), "Error: file extension {0} not detected in in_folder".format(
                ".gdb")
            root_dir = os.path.dirname(in_gdb)
            gdb_name = os.path.basename(in_gdb)
            myzip = zipfile.ZipFile(os.path.join(root_dir, out_file_name), 'w',
                                    zipfile.ZIP_DEFLATED)
            for folder, subfolder, file in os.walk(
                    os.path.join(root_dir, gdb_name)):
                for each in subfolder + file:
                    source = os.path.join(folder, each)
                    if not source.endswith(".lock"):
                        # remove the absolute path to compose arcname
                        # also handles the remaining leading path separator with lstrip
                        arcname = source[len(root_dir):].lstrip(os.sep)
                        # write the file under a different name in the archive
                        myzip.write(source, arcname=arcname)
            myzip.close()

        def zip_folder(in_folder, out_file_name):
            myzip = zipfile.ZipFile(os.path.join(in_folder, out_file_name),
                                    'w', zipfile.ZIP_DEFLATED)
            for folder, subfolder, file in os.walk(in_folder):
                for each in subfolder + file:
                    source = os.path.join(folder, each)
                    # remove the absolute path to compose arcname
                    # also handles the remaining leading path separator with lstrip
                    arcname = source[len(in_folder):].lstrip(os.sep)
                    # write the file under a different name in the archive
                    myzip.write(source, arcname=arcname)
            myzip.close()

        # TODO: do something with folder-based file structures. ex: GDB .... user the zipper_folder_structure() function above.
        from arcpy import AddMessage

        files_in_dir = []
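        # The loops below walk in_directory and batch files that share a base
        # filename (the text before the first '.') into zip archives written
        # under out_directory via zipper() above.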
        for root, dirs, files in os.walk(in_directory):
            for filename in files:
                files_in_dir.append([root, filename])

        file_name_list = []
        files_to_zip = []
        for f in files_in_dir:
            root = f[0]
            filename = f[1]
            file = os.path.join(root, filename)
            file_partitioned = filename.partition('.')[0]
            if file_partitioned not in file_name_list:
                if len(files_to_zip) > 1:
                    out_file_path = files_to_zip[0].replace(
                        in_directory, out_directory).partition('.')[0]
                    zipper(files_to_zip, out_file_path)
                    AddMessage(files_to_zip)
                    files_to_zip = []
                file_name_list.append(file_partitioned)
            else:
                files_to_zip.append(file)
                # If last file in directory for processing
                if root == files_in_dir[-1][0] and filename == files_in_dir[
                        -1][1]:
                    out_file_path = files_to_zip[0].replace(
                        in_directory, out_directory).partition('.')[0]
                    zipper(files_to_zip, out_file_path)

        # Check back in Image Analyst license
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("ImageAnalyst license is unavailable")
        print("ImageAnalyst license is unavailable")
    except ExecuteError:
        AddError(GetMessages(2))
        print(GetMessages(2))
Example #17
def main():
    from arcpy import CheckExtension, CheckOutExtension, CheckInExtension, ExecuteError, GetMessages, AddError,\
        ListDatasets, env, SetProgressor, SetProgressorLabel, SetProgressorPosition, ResetProgressor, Exists
    from arcpy.management import CreateFileGDB, CreateMosaicDataset, AddRastersToMosaicDataset
    from arcpy import Describe
    from os.path import join, exists
    from os import mkdir, makedirs

    class LicenseError(Exception):
        pass

    try:
        if CheckExtension("ImageAnalyst") == "Available":
            CheckOutExtension("ImageAnalyst")
        else:
            # raise a custom exception
            raise LicenseError
        try:
            from PIL import Image
        except ModuleNotFoundError:
            AddError(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            print(
                "PILLOW Library Not Detected. Install using Python Manager in ArcGIS Pro"
            )
            exit()

        env.workspace = in_mosaic_gdb
        mosaics = ListDatasets("*", "Mosaic")
        file_count = len(mosaics)
        count = 0
        SetProgressor("step", "Begin Processing Files...", 0, file_count, 1)
        if not exists(out_folder):
            makedirs(out_folder)
        fileGDB = join(out_folder, "ortho_mosaics.gdb")
        if not Exists(fileGDB):
            CreateFileGDB(out_folder, "ortho_mosaics.gdb")
        for mosaic in mosaics:
            print("processing mosaic {0} of {1}".format(count + 1, file_count))
            in_mosaic = join(in_mosaic_gdb, mosaic)
            i_list, extent = get_images_and_stats(
                in_mosaic
            )  # Obtain image statistics and info from mosaic for processing
            for i in i_list:  # Check that output folder is not the path of i
                if out_folder == path.dirname(i[0]):
                    AddError(
                        "outFolder cannot be the same folder/directory as images referenced in the mosaic dataset"
                    )
                    exit()
            if not path.exists(out_folder):
                makedirs(out_folder)
            out_tile_folder = join(out_folder, "tiles{}".format(count))
            mkdir(out_tile_folder)
            SetProgressorLabel("Texturing Mosaic {0}...".format(count))
            texture_images(i_list, extent, in_texture, in_polygon,
                           out_tile_folder, method,
                           blur_distance)  # Generate Texture-Masked tiles

            mosaic_name = "tiles{}_".format(count)
            mosaic_dataset = join(fileGDB, mosaic_name)
            SetProgressorLabel(
                "Creating Mosaic Dataset for Tiles of {0}...".format(mosaic))
            sr = Describe(in_mosaic).spatialReference
            CreateMosaicDataset(fileGDB, mosaic_name, sr, num_bands,
                                pixel_depth, product_definition,
                                product_band_definitions)
            SetProgressorLabel(
                "Adding of {0} to Mosaic Dataset...".format(mosaic))
            AddRastersToMosaicDataset(mosaic_dataset, "Raster Dataset",
                                      out_tile_folder, "UPDATE_CELL_SIZES",
                                      "UPDATE_BOUNDARY", "NO_OVERVIEWS", None,
                                      0, 1500, None, '', "SUBFOLDERS",
                                      "ALLOW_DUPLICATES", "NO_PYRAMIDS",
                                      "NO_STATISTICS", "NO_THUMBNAILS", '',
                                      "NO_FORCE_SPATIAL_REFERENCE",
                                      "NO_STATISTICS", None, "NO_PIXEL_CACHE")
            SetProgressorPosition()
            count += 1
        ResetProgressor()
        CheckInExtension("ImageAnalyst")
    except LicenseError:
        AddError("Image Analyst license is unavailable")
        print("Image Analyst license is unavailable")
    except ExecuteError:
        print(GetMessages(2))