Example No. 1
def create_and_append(feature_list=None,
                      token=None,
                      portal_url=None,
                      service_url=None,
                      matching=None):

    # Fall back to the active ArcGIS Pro session if no credentials were passed.
    if token is None:
        token = arcpy.GetSigninToken()
    if portal_url is None:
        portal_url = arcpy.GetActivePortalURL()
    gis = GIS(portal_url, token=token['token'])
    layer = FeatureLayer(service_url)

    features_to_append = []

    for feature in feature_list:
        new_feature = {'attributes': {}, 'geometry': feature['geometry']}

        # Find each matched field and copy its value
        for field in matching:
            new_feature['attributes'][field[0]] = [
                x[1] for x in feature['attributes'].items() if x[0] == field[1]
            ][0]

        features_to_append.append(new_feature.copy())

        # Flush in batches so each edit request stays small.
        if len(features_to_append) > 500:
            layer.edit_features(adds=features_to_append)
            features_to_append = []

    if features_to_append:
        layer.edit_features(adds=features_to_append)
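A hedged usage sketch for create_and_append: the service URL and field names below are placeholders, and an active ArcGIS Pro sign-in session is assumed so the token and portal defaults resolve.

# Hypothetical call (placeholder URL and field names).
feature_list = [{'attributes': {'SRC_NAME': 'Plot 1'},
                 'geometry': {'x': -122.5, 'y': 45.5}}]
matching = [('NAME', 'SRC_NAME')]  # (target_field, source_field) pairs
create_and_append(feature_list=feature_list,
                  service_url='https://example.com/arcgis/rest/services/Demo/FeatureServer/0',
                  matching=matching)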
Example No. 2
def thanks(df=None):

    portal_url = current_app.config['PORTAL_URL']
    portal_user = current_app.config['PORTAL_USER']
    portal_password = current_app.config['PORTAL_PASSWORD']

    cases_url = current_app.config['COVID_CASES_URL']
    ppe_url = current_app.config['PPE_INVENTORY_URL']

    # Show the data that was just entered.
    portal = GIS(portal_url, portal_user, portal_password)
    if df == 'cases':
        # Generate new CSV files while we're at it.
        # In production they will be written to the "capacity" webserver
        # In a test environment they end up in the local folder.
        results_df = FeatureLayer(cases_url).query(
            where="editor='EMD'",
            order_by_fields="utc_date DESC",
            return_all_records=False,
            result_record_count=1,
            return_geometry=False).sdf
    elif df == 'ppe':
        results_df = FeatureLayer(ppe_url).query(
            where="facility='Clatsop'",
            order_by_fields="utc_date DESC",
            return_all_records=False,
            result_record_count=1,
            return_geometry=False).sdf
    else:
        results_df = pd.DataFrame()
    return render_template('thanks.html', df=results_df)
Example No. 3
 def __init__(self, service_url=default_service_url):
     self.layer = FeatureLayer(service_url)
     self.postcode_field = 'Postcode'
     self.date_field = 'Date'
     self.date_code_field = 'DateCode'
     self.tests_field = 'Tests'
     self.total_tests_field = 'TotalTests'
     self.date_code_format = '%Y%m%d'
Example No. 4
def delete_rows(service_url, where_clause):
    lyr = FeatureLayer(service_url)
    query_result = lyr.query(where=where_clause, return_ids_only=True)
    deletes = query_result['objectIds']

    if deletes:
        # edit_features expects the object IDs as a comma-separated string.
        return lyr.edit_features(deletes=",".join(str(oid) for oid in deletes))
    return None
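A hedged usage sketch; the URL and where clause are placeholders, not from the original repo.

# Hypothetical call: delete every row edited by a given user.
result = delete_rows(
    'https://example.com/arcgis/rest/services/Demo/FeatureServer/0',
    "editor = 'EMD'")
if result:
    print(len(result['deleteResults']), 'rows deleted')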
Example No. 5
 def __init__(self, service_url=default_service_url):
     self.layer = FeatureLayer(service_url)
     self.lga_code_field = 'LGA_CODE'
     self.lga_name_field = 'LGA_NAME'
     self.lga_version_field = 'LGA_Version'
     self.date_field = 'Date'
     self.date_code_field = 'DateCode'
     self.cases_field = 'Cases'
     self.total_cases_field = 'TotalCases'
     self.date_code_format = '%Y%m%d'
Example No. 6
  def run(self):
    layer = FeatureLayer("https://services-eu1.arcgis.com/CZ1GXX3MIjSRSHoC/ArcGIS/rest/services/Covid19_Impfmeldungen_%c3%96ffentlich/FeatureServer/0")

    start = time.time()
    data = layer.query(order_by_fields='Einrichtung, Meldedatum, Impfungen_proTyp')
    print('> Queried data in %.1fs' % (time.time() - start))

    if len(data) == 0:
      raise Exception('Queried data is empty')

    features_filtered = list(filter_duplicate_days(data.features))
    apply_manual_fixes(features_filtered)
    check_cumulative_plausability(features_filtered)

    rows_nach_einrichtung = list(map(map_nach_einrichtung, features_filtered))
    rows_nach_einrichtung_file = os.path.join('corona-impfungen', 'arcgisImpfungenNachEinrichtung.csv')
    rows_nach_einrichtung_diff = self.get_csv_diff(rows_nach_einrichtung_file, rows_nach_einrichtung)
    if len(rows_nach_einrichtung_diff) > 0:
      self.write_csv_rows(rows_nach_einrichtung_file, rows_nach_einrichtung)

      if self.telegram_bot is not None and self.telegram_chat_id is not None:
        data = ''.join(rows_nach_einrichtung_diff)
        self.telegram_bot.send_message(
          self.telegram_chat_id,
          '```\n' + (data[:4080] if len(data) > 4080 else data) + '```',
          parse_mode = "Markdown"
        )

    rows_nach_geschlecht = list(itertools.chain.from_iterable(map(map_nach_geschlecht, features_filtered)))
    rows_nach_geschlecht_file = os.path.join('corona-impfungen', 'arcgisImpfungenNachGeschlecht.csv')
    rows_nach_geschlecht_diff = self.get_csv_diff(rows_nach_geschlecht_file, rows_nach_geschlecht)
    if len(rows_nach_geschlecht_diff) > 0:
      self.write_csv_rows(rows_nach_geschlecht_file, rows_nach_geschlecht)

      if self.telegram_bot is not None and self.telegram_chat_id is not None:
        data = ''.join(rows_nach_geschlecht_diff)
        self.telegram_bot.send_message(
          self.telegram_chat_id,
          '```\n' + (data[:4080] if len(data) > 4080 else data) + '```',
          parse_mode = "Markdown"
        )

    rows_nach_alter = list(itertools.chain.from_iterable(map(map_nach_alter, features_filtered)))
    rows_nach_alter_file = os.path.join('corona-impfungen', 'arcgisImpfungenNachAlter.csv')
    rows_nach_alter_diff = self.get_csv_diff(rows_nach_alter_file, rows_nach_alter)
    if len(rows_nach_alter_diff) > 0:
      self.write_csv_rows(rows_nach_alter_file, rows_nach_alter)

      if self.telegram_bot is not None and self.telegram_chat_id is not None:
        data = ''.join(rows_nach_alter_diff)
        self.telegram_bot.send_message(
          self.telegram_chat_id,
          '```\n' + (data[:4080] if len(data) > 4080 else data) + '```',
          parse_mode = "Markdown"
        )
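The three blocks above repeat one diff/write/notify pattern. A minimal refactor sketch, assuming only the methods and fields already used in the example (get_csv_diff, write_csv_rows, telegram_bot, telegram_chat_id):

def _sync_csv_and_notify(self, filename, rows):
    # Write the CSV and send a Telegram diff only when something changed.
    diff = self.get_csv_diff(filename, rows)
    if not diff:
        return
    self.write_csv_rows(filename, rows)
    if self.telegram_bot is not None and self.telegram_chat_id is not None:
        data = ''.join(diff)
        self.telegram_bot.send_message(
            self.telegram_chat_id,
            '```\n' + data[:4080] + '```',
            parse_mode="Markdown")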
Example No. 7
 def __init__(self,
              service_url: str,
              postcode_field: str = 'PostCode',
              total_cases_field: str = 'TotalCases',
              date_of_last_case_field: str = 'DateOfLastCase',
              days_since_last_case_field: str = 'DaysSinceLastCase'):
     self.layer = FeatureLayer(service_url)
     self.postcode_field = postcode_field
     self.totalCases_field = total_cases_field
     self.dateOfLastCase_field = date_of_last_case_field
     self.daysSinceLastCase_field = days_since_last_case_field
Example No. 8
    def run(self):
        csv_filename = os.path.join('corona-fallzahlen',
                                    'arcgisInzidenzGemeinden.csv')
        current_rows = self.read_csv_rows(csv_filename)

        layer = FeatureLayer(
            "https://services-eu1.arcgis.com/CZ1GXX3MIjSRSHoC/ArcGIS/rest/services/EBE_Gemeinden_Inzidenztabelle_3/FeatureServer/0"
        )

        start = time.time()
        data = layer.query(order_by_fields='Ort, Datum_Meldung')
        print('> Queried data in %.1fs' % (time.time() - start))

        if len(data) == 0:
            raise Exception('Queried data is empty')

        if len(data) < len(current_rows) * (1 / 1.5):
            raise Exception(
                'Queried data has far fewer items (%d) than current data (%d)'
                % (len(data), len(current_rows)))

        if len(data) > len(current_rows) * 1.5:
            raise Exception(
                'Queried data has far more items (%d) than current data (%d)'
                % (len(data), len(current_rows)))

        rows = list(
            map(
                lambda x: {
                    'datum': datetime.utcfromtimestamp(
                        x.attributes['Datum_Meldung'] / 1000).strftime('%Y-%m-%d'),
                    'ort': x.attributes['Ort'],
                    'neuPositiv': str(x.attributes['positiv_neu']),
                    'inzidenz7tage': str(round(x.attributes['inzidenz_letzte7Tage'], 2)),
                }, data.features))

        csv_diff = self.get_csv_diff(csv_filename, rows)

        if len(csv_diff) == 0:
            return

        if self.telegram_bot is not None and self.telegram_chat_id is not None:
            data = ''.join(csv_diff)
            self.telegram_bot.send_message(
                self.telegram_chat_id,
                '```\n' + (data[:4080] if len(data) > 4080 else data) + '```',
                parse_mode="Markdown")

        self.write_csv_rows(csv_filename, rows)
Example No. 9
 def __init__(self,
              service_url: str,
              date_string_field='DateString',
              postcode_field='PostCode',
              report_date_field='ReportDate',
              report_age_field='ReportAge',
              oid_field='OBJECTID'):
     self.layer = FeatureLayer(service_url)
     self.oid_field = oid_field
     self.dateString_field = date_string_field
     self.postcode_field = postcode_field
     self.reportAge_field = report_age_field
     self.reportDate_field = report_date_field
Example No. 10
def main(arguments):
    # initialize logger
    logger = initialize_logging(arguments.log_file)
    # Create the GIS
    logger.info("Authenticating...")
    # First step is to get authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)
    if not gis.properties.isPortal:
        logger.error("This script only works with ArcGIS Enterprise")
        sys.exit(0)

    logger.info("Getting location tracking service")
    try:
        tracks_layer = gis.admin.location_tracking.tracks_layer
    except Exception as e:
        logger.info(e)
        logger.info(
            "Getting location tracking service failed - check that you are an admin and that location tracking is enabled for your organization"
        )
        sys.exit(0)

    logger.info("Getting polygon layer")
    try:
        layer = FeatureLayer(url=arguments.layer_url, gis=gis)
        _ = layer._lyr_json
    except Exception as e:
        logger.info(e)
        logger.info(
            "Layer could not be found based on given input. Please check your parameters again. Exiting the script"
        )
        sys.exit(0)

    features = layer.query(where=arguments.where, out_sr=3857).features
    if len(features) > 0:
        geometries = [feature.geometry for feature in features]
        logger.info("Unifying geometry data")
        union_geometry = geometry.union(spatial_ref=3857,
                                        geometries=geometries,
                                        gis=gis)
        if arguments.symmetric_difference:
            union_geometry['rings'] = form_donut(union_geometry['rings'])
        intersect_filter = geometry.filters.intersects(union_geometry, sr=3857)
        logger.info("Querying features")
        x = tracks_layer.delete_features(geometry_filter=intersect_filter)
        logger.info("Deleting features")
        logger.info("Deleted: " + str(len(x['deleteResults'])) + " tracks")
        logger.info("Completed!")
Example No. 11
def main(arguments):
    # initialize logger
    logger = initialize_logging(arguments.log_file)
    # Create the GIS
    logger.info("Authenticating...")
    # First step is to get authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the feature layer
    item = gis.content.get(arguments.item_id)
    if item:
        logger.info("Getting feature layer")
        mirror_layer = item.layers[0]
        if arguments.lkl_layer_url:
            lkl_layer = FeatureLayer(url=arguments.lkl_layer_url)
        else:
            logger.info("Please pass an LKL layer url!")
            sys.exit(0)

        # Query LKL and mirror layer
        lkl_fset = lkl_layer.query('1=1', out_sr=3857)
        if len(lkl_fset) == 0:
            logger.info("No LKLs in your layer yet!")
            sys.exit(0)
        mirror_fset = mirror_layer.query('1=1', out_sr=3857)

        add_features = []
        update_features = []
        logger.info("Iterating through current LKL data")
        for feature in lkl_fset:
            for mirror_feature in mirror_fset:
                # use "in" instead of == comparison due to the potential for brackets to be in the GUID field
                if mirror_feature.attributes[return_field_name(
                        mirror_layer, "global_id")].lower(
                        ) in feature.attributes["globalid"].lower():
                    update_features.append(feature)
                    break
            else:
                add_features.append(feature)

        logger.info("Posting updated data to mirrored layer")
        mirror_layer.edit_features(adds=add_features,
                                   updates=update_features,
                                   use_global_ids=True)
        logger.info("Completed!")
    else:
        logger.info("Item not found")
Example No. 12
def temporal_accuracy(c_features, curr_url, output_workspace, output_features,
                      years, curr_gis):

    import zipfile
    import arcpy

    fl = FeatureLayer(url=curr_url, gis=curr_gis)

    item = curr_gis.content.get(fl.properties.serviceItemId)

    export_item = item.export(export_format='File Geodatabase',
                              title='CURRENCY')

    result = export_item.download(save_path=output_workspace)

    folder = os.path.dirname(result)

    with zipfile.ZipFile(result, "r") as zip_ref:
        zip_ref.extractall(folder)
        # Collect the .gdb directory names while the archive is still open.
        gdbs = [os.path.split(name)[0] for name in zip_ref.namelist()]

    gdb = os.path.join(folder, most_common(gdbs))

    arcpy.env.workspace = gdb
    fc = arcpy.ListFeatureClasses()

    feature_class = os.path.join(gdb, fc[0])

    temp_acc_calc = ta.TemporalAccuracy(c_features, feature_class,
                                        output_features, years)

    temp_acc_calc.create_temporal_accuracy()
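most_common comes from outside the snippet; it presumably picks the .gdb directory name shared by most archive members. A plausible helper, assuming that behavior:

from collections import Counter

def most_common(items):
    # Return the value that occurs most often in the list.
    return Counter(items).most_common(1)[0][0]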
Example No. 13
def get_features_from_feature_server(url, query):
    """
    Given a url to a Feature Server, return a list
    of Features (for example, parking lots that are not full)

    :param url: url for Feature Server
    :param query: query to select features
                  example: {'where': '1=1', 'out_sr': '4326'}
    :return: list of all features returned from the query
    """
    features = []
    f = FeatureLayer(url=url)
    feature_set = f.query(**query)
    for feature in feature_set:
        features.append(feature.as_dict)
    return features
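A hedged usage sketch matching the docstring's query shape (placeholder URL):

# Hypothetical call using the keyword-style query from the docstring.
features = get_features_from_feature_server(
    'https://example.com/arcgis/rest/services/Lots/FeatureServer/0',
    {'where': 'Spaces > 0', 'out_sr': '4326'})
print(len(features), 'features returned')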
Example No. 14
def row_in_feature_layer(row: pd.Series, feature_layer: FeatureLayer) -> bool:
    # Null check
    if pd.isna(row['pin_longitude']) or pd.isna(row['pin_latitude']):
        return False
    # Construct a point at the row's coordinates
    pin = Point({"x": row['pin_longitude'], "y": row['pin_latitude']})
    # construct a geometry filter to check if each point is in a disputed area
    pin_filter = intersects(pin)

    continue_query = True
    retries = 0
    MAX_RETRIES = 9
    # Default to setting in_disputed_area = True to ensure we never show pins in disputed area
    in_disputed_area = True
    # Query whether the pin falls in a disputed area.
    # If the query times out, retry with exponential backoff.
    while continue_query:
        try:
            in_disputed_area = len(feature_layer.query(geometry_filter=pin_filter).features) > 0
            continue_query = False
        except Exception as e:
            # send slack message if we exceed retry count
            if retries > MAX_RETRIES:
                body = f'Unable to check if the record with ID {row["source_id"]} is in a disputed region.'
                send_slack_message(body, channel='#dev-logging-etl')
                continue_query = False
            else:
                sleep(1.5**(retries))
                retries += 1

    return in_disputed_area
Example No. 15
 def layer(self):
     """returns the Parcel Layer for the service"""
     if "controllerDatasetLayers" in self._flc.properties and \
        "parcelLayerId" in self._flc.properties.controllerDatasetLayers:
         url = "%s/%s" % (self._flc.url,
                          self._flc.properties.controllerDatasetLayers.parcelLayerId)
         return FeatureLayer(url=url, gis=self._gis)
     return None
Example No. 16
def update_database(current, last):
    """ 
Update the big database from the little one if the little one has new data for the big one.

current = todays data
last = the last row from the big database

Returns True if the database was updated
    """

    # If these all match then the latest data
    # has already been written to the database

    if (last.new_cases == current.new_cases
            and last.total_cases == current.total_cases
            and last.last_update == current.date):
        print("Database is current.")

        # If you want to test the update, comment this out
        # and then go in and manually delete the extra row(s).
        return False

    print("Appending the new record.")

    #return False

    attributes = {
        "utc_date": datetime.utcnow(),
        "last_update": current.date,
        "editor": os.environ.get('USERNAME') or os.environ.get('USER'),
        "source": "CC",
        "new_cases" : current.new_cases,
        "total_cases" : current.total_cases,
        "new_deaths" : current.new_deaths,
        "total_deaths" : current.total_deaths,
        "name": "Clatsop"
    }

    gis = GIS(Config.PORTAL_URL, Config.PORTAL_USER, Config.PORTAL_PASSWORD)
    layer = FeatureLayer(Config.COVID_CASES_URL, gis)
    f = Feature.from_dict({"attributes": attributes})
    fs = FeatureSet(features=[f])
    results = layer.edit_features(adds=fs)

    return True
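current and last are row-like objects whose attributes mirror the fields read above; a minimal sketch of the expected input shape, assuming plain namespaces (the call itself still hits the configured portal):

from types import SimpleNamespace

# Hypothetical inputs; attribute names mirror those read by update_database.
current = SimpleNamespace(date='2020-11-01', new_cases=3, total_cases=120,
                          new_deaths=0, total_deaths=2)
last = SimpleNamespace(last_update='2020-10-31', new_cases=5, total_cases=117)
updated = update_database(current, last)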
Example No. 17
def get_features_from_feature_server(url, query):
    """
    Given a url to a City of Boston Feature Server, return a list
    of Features (for example, parking lots that are not full)

    :param url: url for Feature Server
    :param query: a JSON object (example: { 'where': '1=1', 'out_sr': '4326' })
    :return: list of all features returned from the query
    """

    logger.debug('url received: ' + url + ', query received: ' + str(query))

    features = []
    f = FeatureLayer(url=url)
    feature_set = f.query(**query)
    for feature in feature_set:
        features.append(feature.as_dict)
    return features
Example No. 18
def get_features_from_feature_server(url, query):
    """
    Given a url to a City of Boston Feature Server, return a list
    of Features (for example, parking lots that are not full)
    
    :param url: url for Feature Server
    :param query: query to select features (example: "Spaces > 0")
    :return: list of all features returned from the query
    """

    logger.debug('url received: ' + url + ', query received: ' + query)

    features = []
    f = FeatureLayer(url=url)
    feature_set = f.query(where=query)
    for feature in feature_set:
        features.append(feature.as_dict)
    return features
Example No. 19
def replace_in_ago(**kwargs):
    from arcgis.features import FeatureLayer

    gis = GIS("https://detroitmi.maps.arcgis.com", Variable.get('AGO_USER'),
              Variable.get('AGO_PASS'))
    item = gis.content.get(kwargs['id'])
    layer_url = f"{item.url}/0"

    layer = FeatureLayer(layer_url)

    if 'conn_id' in kwargs.keys():
        hook = PostgresHook(kwargs['conn_id'])
    else:
        hook = PostgresHook('etl_postgres')

    # Here's a query to make an ArcJSON item from each row in the table
    # Note: only works for point geometries right now.
    query = f"""
    SELECT jsonb_build_object(
        'geometry', jsonb_build_object(
          'x', ST_X(geom),
          'y', ST_Y(geom)
        ),
        'attributes', to_jsonb(row) - 'gid' - 'geom'
    ) FROM (SELECT * FROM {kwargs['table']}) row
  """

    res = hook.get_records(query)

    payload = [r[0] for r in res]

    # clear out all the rows in the table
    layer.manager.truncate()

    # write all the rows in `res`
    chunk_size = 1000
    print(
        f"Sending up {len(payload)} features with a batch size of {chunk_size}"
    )
    for i in range(0, len(payload), chunk_size):
        try:
            layer.edit_features(adds=payload[i:i + chunk_size])
        except Exception:
            print(f"Errored on {i} - splitting into 2 batches")

            start = i
            middle = int(i + (chunk_size / 2))
            end = i + chunk_size

            print(f"start: {start} middle: {middle} end: {end}")

            layer.edit_features(adds=payload[start:middle])
            layer.edit_features(adds=payload[middle:end])
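The handler above splits a failed batch only once; if a half fails again, the error propagates. A recursive variant (a sketch, not from the original DAG) would keep halving until the offending row is isolated:

def upload_with_split(layer, rows):
    # Recursively halve a failing batch until the bad record is isolated.
    try:
        layer.edit_features(adds=rows)
    except Exception:
        if len(rows) == 1:
            print(f"Skipping unpostable row: {rows[0]}")
            return
        mid = len(rows) // 2
        upload_with_split(layer, rows[:mid])
        upload_with_split(layer, rows[mid:])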
Example No. 20
def main(arguments):
    # initialize logger
    logger = initialize_logging(arguments.log_file)
    # Create the GIS
    logger.info("Authenticating...")
    # First step is to get authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the feature layer
    logger.info("Getting feature layer")
    layer = FeatureLayer(arguments.layer_url)
    logger.info("Getting tracks layer")
    if arguments.tracks_layer_url:
        tracks_layer = FeatureLayer(url=arguments.tracks_layer_url)
    else:
        try:
            tracks_layer = gis.admin.location_tracking.tracks_layer
        except Exception as e:
            logger.info(e)
            logger.info(
                "Getting location tracking service failed - check that you are an admin and that location tracking is enabled for your organization"
            )
            sys.exit(0)

    # Return invalid work orders
    workers = arguments.workers.replace(" ", "").split(",")
    invalid_work_orders = get_invalid_work_orders(layer, arguments.field_name,
                                                  arguments.time_tolerance,
                                                  arguments.distance_tolerance,
                                                  arguments.min_accuracy,
                                                  workers, tracks_layer,
                                                  logger)
    if len(invalid_work_orders) == 0:
        logger.info("No features found that match the criteria you've set")
    else:
        for work_order in invalid_work_orders:
            logger.info(
                f"The user {work_order[0]} who last edited the feature with OBJECTID {work_order[1]} was potentially "
                f"not within the distance tolerance when updating the field {arguments.field_name}"
            )
Example No. 21
def read_local_cases_df():
    gis = GIS(Config.PORTAL_URL, Config.PORTAL_USER, Config.PORTAL_PASSWORD)
    layer = FeatureLayer(Config.COVID_CASES_URL, gis)
    # Note: server-side 'where' clauses proved unreliable on this layer,
    # so read the whole layer and filter with pandas instead.
    #sdf = layer.query(where="name=='Clatsop'", out_fields="*").sdf
    sdf = pd.DataFrame.spatial.from_layer(layer)
    df = sdf[(sdf['name'] == 'Clatsop') & (sdf['source'] != 'worldometer') &
             (sdf['source'] != 'OHA')]
    #print(df)
    return df
Example No. 22
def compute_pin_info(df: pd.DataFrame, geo_dfs: dict) -> pd.DataFrame:
    # Get record coordinates
    df['pin_latitude'], df['pin_longitude'] = \
        zip(*df.apply(lambda record: get_record_coordinates(record, geo_dfs), axis=1))
    # Populate in_disputed_area col
    WHO_FL_URL = "https://services.arcgis.com/5T5nSi527N4F7luB/arcgis/rest/services/DISPUTED_AREAS_mask/FeatureServer/0"
    # Create feature layer object
    disputed_areas_fl = FeatureLayer(WHO_FL_URL)
    # apply row_in_disputed_area across the whole df
    df['in_disputed_area'] = df.apply(lambda row: row_in_feature_layer(row, disputed_areas_fl), axis=1)
    return df
Example No. 23
 def __all_events(self):
     """
     Fetches all events for particular hub.
     """
     events = []
     _events_layer = self._gis.content.search(query="typekeywords:hubEventsLayer", max_items=5000)[0]
     _events_layer_url = _events_layer.url + '/0'
     _events_data = FeatureLayer(_events_layer_url).query().features
     for event in _events_data:
         events.append(Event(self._gis, event))
     return events
Example No. 24
 def __init__(self, service_url: str,
              date_field: str = 'Date',
              region_field: str = 'Region',
              new_cases_field: str = 'NewCases',
              new_deaths_field: str = 'NewDeaths',
              new_recoveries_field: str = 'NewRecoveries',
              total_cases_field: str = 'TotalCases',
              total_deaths_field: str = 'TotalDeaths',
              total_recoveries_field: str = 'TotalRecoveries',
              active_cases_field: str = 'ActiveCases'):
     self.layer = FeatureLayer(service_url)
     self.region_field = region_field
     self.date_field = date_field
     self.new_cases_field = new_cases_field
     self.new_deaths_field = new_deaths_field
     self.new_recoveries_field = new_recoveries_field
     self.total_cases_field = total_cases_field
     self.total_deaths_field = total_deaths_field
     self.total_recoveries_field = total_recoveries_field
     self.active_cases_field = active_cases_field
Example No. 25
    def run(self):
        csv_filename = os.path.join('corona-impfungen', 'arcgisImpfungen.csv')
        current_rows = self.read_csv_rows(csv_filename)

        layer = FeatureLayer(
            "https://services-eu1.arcgis.com/CZ1GXX3MIjSRSHoC/ArcGIS/rest/services/EBE_Gesamtsummen_Impfmeldungen_Öffentlich/FeatureServer/0"
        )

        start = time.time()
        data = layer.query(order_by_fields='Meldedatum')
        print('> Queried data in %.1fs' % (time.time() - start))

        if len(data) == 0:
            raise Exception('Queried data is empty')

        if len(data) < len(current_rows) * (1 / 1.5):
            raise Exception(
                'Queried data has far fewer items (%d) than current data (%d)'
                % (len(data), len(current_rows)))

        if len(data) > len(current_rows) * 1.5:
            raise Exception(
                'Queried data has far more items (%d) than current data (%d)'
                % (len(data), len(current_rows)))

        rows = list(map(feature_to_row, data.features))

        csv_diff = self.get_csv_diff(csv_filename, rows)

        if len(csv_diff) == 0:
            return

        if self.telegram_bot is not None and self.telegram_chat_id is not None:
            data = ''.join(csv_diff)
            self.telegram_bot.send_message(
                self.telegram_chat_id,
                '```\n' + (data[:4080] if len(data) > 4080 else data) + '```',
                parse_mode="Markdown")

        self.write_csv_rows(csv_filename, rows)
Example No. 26
def append_geo_json(geo_json):
    gis = GIS("https://www.arcgis.com", username="", password="")
    crime_properties = {
        'title': 'Crime data',
        'tags': 'crimes, open data, devlabs',
        'type': 'GeoJson'
    }

    search_result = gis.content.search(query="", item_type="Feature Layer")
    crime_data_item = search_result[0]
    crime_data_feature_layer = FeatureLayer.fromitem(crime_data_item,
                                                     layer_id=0)
    new_crime_set = FeatureSet.from_geojson(geo_json)
    crime_data_feature_layer.edit_features(adds=new_crime_set)
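A hedged usage sketch with a minimal GeoJSON payload; credentials in append_geo_json are left blank in the original and would need to be filled in.

# Hypothetical single-point GeoJSON payload.
geo_json = {
    "type": "FeatureCollection",
    "features": [{
        "type": "Feature",
        "geometry": {"type": "Point", "coordinates": [-83.05, 42.33]},
        "properties": {"offense": "theft"}
    }]
}
append_geo_json(geo_json)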
Example No. 27
def update(config):
    lyr = FeatureLayer(url=config['FeatureLyr'])
    lyr_fields = lyr.properties._mapping['fields']
    entity_type = config['EntityType']
    uid_field = get_uid_field(entity_type)
    # Initialize so the check below cannot hit an unbound variable.
    entity_uid_field_dict = None
    for field_dict in lyr_fields:
        if uid_field == field_dict['name'].upper():
            entity_uid_field_dict = field_dict
    if entity_uid_field_dict is None:
        return "Configured EntityUid field is not in provided Feature Layer"
    query(config, lyr, entity_uid_field_dict)
Example No. 28
    def execute(self, parameters, messages):
        """The source code of the tool."""

        arcpy.SetProgressor("default",
                            message="Accessing the destination resource")

        # Accessing output data
        token = arcpy.GetSigninToken()
        portal_url = arcpy.GetActivePortalURL()
        gis = GIS(portal_url, token=token['token'])
        layer = FeatureLayer(parameters[0].valueAsText)

        arcpy.SetProgressorLabel("Preparing input data")

        feature_set = arcpy.FeatureSet(parameters[1].valueAsText)
        feature_set_dict = json.loads(feature_set.JSON)

        # Matching parameter
        matching = parameters[2].value

        # Split features by number of threads
        list_of_lists = chunkIt(feature_set_dict['features'],
                                parameters[3].value)

        # List of threads
        threads = []

        arcpy.SetProgressorLabel("Starting threads")

        # Starting threads
        for feature_list in list_of_lists:
            threads.append(
                Thread(target=create_and_append,
                       args=[
                           feature_list,
                           arcpy.GetSigninToken(), portal_url,
                           parameters[0].valueAsText, matching
                       ]))
            threads[-1].start()

        # Joining all threads

        arcpy.SetProgressorLabel("Appending features")

        for thread in threads:
            thread.join()

        return
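chunkIt is referenced but not shown; it evidently splits the feature list into roughly equal slices, one per worker thread. A common sketch of that helper, offered as an assumption:

def chunkIt(seq, num):
    # Split seq into num roughly equal consecutive chunks.
    avg = len(seq) / float(num)
    chunks = []
    last = 0.0
    while last < len(seq):
        chunks.append(seq[int(last):int(last + avg)])
        last += avg
    return chunks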
Example No. 29
def main(arguments):
    # initialize logger
    logger = initialize_logging(arguments.log_file)

    # Create the GIS
    logger.info("Authenticating...")

    # First step is to get authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    if arguments.item_id is None and arguments.layer_url is None:
        raise Exception("Must pass either item id or layer url")
    if arguments.item_id:
        item = gis.content.get(arguments.item_id)
        if item:
            layers = item.layers + item.tables
        else:
            raise Exception("Bad item id, please check again")
    else:
        layers = [FeatureLayer(url=arguments.layer_url, gis=gis)]

    logger.info("Downloading attachments")

    for layer in layers:
        try:
            found_attach = layer.attachments.search(arguments.where)
            for attach in found_attach:
                try:
                    layer.attachments.download(attachment_id=attach['ID'], oid=attach['PARENTOBJECTID'], save_path=arguments.out_folder)
                except Exception:
                    try:
                        print(f"Failed to download attachment {attach['NAME']} from object id {attach['PARENTOBJECTID']}")
                    except Exception as e:
                        print(e)
        except Exception:
            pass
    logger.info("Completed successfully!")
Example No. 30
def process_by_metadata(gis):
    return_all_records = False

    look_back_days = config.look_back_days

    dates = csl.get_dates_in_range(look_back_days)
    where_clause = csl.form_query_string(dates)

    grid_fl = FeatureLayer(url=config.grid_url)
    grid_sdf = grid_fl.query(return_all_records=return_all_records,
                             where=where_clause).df

    geometry = grid_sdf.geometry
    sr = {'wkid': 4326}
    sp_rel = "esriSpatialRelIntersects"

    for idx, row in grid_sdf.iterrows():
        geom = row.SHAPE

        new_geom = Geometry({
            "rings":
            [[[geom.extent.upperRight.X - .1, geom.extent.lowerLeft.Y + .1],
              [geom.extent.lowerLeft.X + .1, geom.extent.lowerLeft.Y + .1],
              [geom.extent.lowerLeft.X + .1, geom.extent.upperRight.Y - .1],
              [geom.extent.upperRight.X - .1, geom.extent.upperRight.Y - .1],
              [geom.extent.upperRight.X - .1, geom.extent.lowerLeft.Y + .1]]],
            "spatialReference": {
                "wkid": 4326
            }
        })

        grid_filter = filters._filter(new_geom, sr, sp_rel)
        sp_filter = filters._filter(geom, sr, sp_rel)

        data_fl = FeatureLayer(url=config.features_url)
        #out_fields=in_fields,
        data_sdf = data_fl.query(geometry_filter=sp_filter,
                                 return_geometry=True,
                                 return_all_records=return_all_records).df

        print('Processing Completeness')
        #bounding_box = '(37.708132, -122.513617, 37.832132, -122.349607)'
        bounding_box = '(' + \
                    str(geom.extent.lowerLeft.Y) + ',' + \
                    str(geom.extent.lowerLeft.X) + ',' + \
                    str(geom.extent.upperRight.Y) + ',' + \
                    str(geom.extent.upperRight.X) + ')'

        osm_sdf = runner.gen_osm_sdf('line',
                                     bounding_box,
                                     osm_tag='highway',
                                     present=True)
        completeness_sdf, completeness_fl = comp.completeness(
            gis, osm_sdf, data_sdf, config.completeness_url, grid_filter, geom)
        print(completeness_sdf)
        #update_features(them_acc_sdf, them_acc_fl)
        print('Completeness Updated')

        print('Processing Logical Consistency')
        lc_sdf, lc_fl = lc.logical_consisitency(
            gis, config.template_fc, config.template_gdb,
            config.attr_check_file, config.attr_check_tab, data_sdf,
            config.features_url, config.logical_consistency_url, grid_filter,
            geom, config.attr_error_field_count, config.attr_error_field_def)
        print(lc_sdf)
        update_features(lc_sdf, lc_fl)
        print('Logical Consistency Updated.')

        print('Processing temporal currency')
        tc_sdf, tc_fl = tc.temporal_currency(gis, data_sdf,
                                             config.currency_url, grid_filter,
                                             geom, config.currency_field)
        print(tc_sdf)
        #update_features(tc_sdf, tc_fl)
        print('Temporal Currency Updated')

        print('Processing source lineage')
        sl_sdf, sl_fl = sl.source_lineage(gis, data_sdf,
                                          config.source_lineage_url,
                                          grid_filter, geom,
                                          config.search_field,
                                          config.value_field)
        print(sl_sdf)
        #update_features(sl_sdf, sl_fl)
        print('Source Lineage Updated')

        print('Processing Positional Accuracy')
        pa_sdf, pa_fl = pa.positional_accuracy(gis, data_sdf,
                                               config.positional_acc_url,
                                               grid_filter, geom,
                                               config.positional_acc_field)
        print(pa_sdf)
        #update_features(pa_sdf, pa_fl)
        print('Positional Accuracy Updated')

        print('Processing Thematic Accuracy')
        them_acc_sdf, them_acc_fl = them_acc.thematic_accuracy(
            gis, data_sdf, config.thematic_url, grid_filter, geom,
            config.thematic_acc_field)
        print(them_acc_sdf)
        #update_features(them_acc_sdf, them_acc_fl)
        print('Thematic Accuracy Updated')

    return