Example #1
def add_features_to_featurelayer(featurelayer, nexrad_data):
    """add each record retrieved from the database as a new Feature"""
    # much faster to project all points at once
    geometries = project_all_geometries(nexrad_data)

    new_features = []
    for station, geom in zip(nexrad_data, geometries):
        logging.info(f"adding station {station[1]}...")

        new_features.append(
            Feature(geometry=geom,
                    attributes={
                        "STATION_ID": station[0],
                        "STATION_NAME": station[1],
                        "STATE": station[2],
                        "BEGIN_DATE": station[3],
                        "END_DATE": station[4],
                        "COUNTY": station[5],
                        "COUNTRY": station[6],
                        "LATITUDE": station[7],
                        "LONGITUDE": station[8],
                        "ELEVATION": station[9],
                        "OBJECTID": station[10]
                    }))

    # commit changes to the hosted FeatureLayer
    result = featurelayer.edit_features(adds=new_features)
    # count only successful adds; addResults has one entry per submitted
    # feature, including failures
    added_count = sum(1 for r in result['addResults'] if r['success'])
    logging.info(f"added {added_count} stations")
    if added_count != len(nexrad_data):
        raise RuntimeError(
            "Number of records added to FeatureLayer does not equal the record count from database"
        )
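The project_all_geometries() helper used above is not shown. A minimal sketch of what it might look like, assuming the batch arcgis.geometry.project() call and the LATITUDE/LONGITUDE column positions used in the attribute mapping:

def project_all_geometries(nexrad_data, out_wkid=3857):
    # hypothetical helper: build one Point per station and project them all
    # in a single request, which is why batching beats per-point projection
    from arcgis.geometry import Point, project

    points = [
        Point({"x": station[8], "y": station[7],  # LONGITUDE, LATITUDE
               "spatialReference": {"wkid": 4326}})
        for station in nexrad_data
    ]
    return project(points, in_sr=4326, out_sr=out_wkid)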
Example #2
def add_request_point(gis, item_id, address_json, ip_address, user_agent,
                      request_time):
    # get feature layer to edit
    layer_item = gis.content.get(item_id)
    feature_layer = layer_item.layers[0]

    # compose a Point object
    pt = Point({
        'x': address_json['longitude'],
        'y': address_json['latitude'],
        'spatialReference': {
            'wkid': 4326
        }
    })

    # compose a Feature object
    request_attributes = {
        'ip_address': ip_address,
        'user_agent': user_agent,
        'request_address':
        f"{address_json['city']}, {address_json['region_name']}, {address_json['country_name']}, {address_json['zip']}",
        'request_time2': request_time.timestamp() * 1000
    }

    ft = Feature(geometry=pt, attributes=request_attributes)

    # Edit the feature layer
    edit_result = feature_layer.edit_features(adds=[ft])
    return edit_result
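edit_features() returns a dict with one entry per submitted feature, so a caller can verify the add before trusting it. A hedged usage sketch (variable names are illustrative):

result = add_request_point(gis, item_id, address_json, ip_address,
                           user_agent, request_time)
if not all(r["success"] for r in result["addResults"]):
    raise RuntimeError("request point was not added to the feature layer")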
Example #3
    def process_summaries(self,
                          processed_dir,
                          processed_file_details,
                          make_historical_csv=False):

        if self.verbose:
            print("Starting load of summary table...")

        summary_filename = self.agol.layers['summary_table'][
            'original_file_name']

        summary_df = pd.DataFrame()
        for f in processed_file_details:
            fname = f["processed_filename"]
            size = os.path.getsize(os.path.join(processed_dir, fname))
            if size > 0:
                df = load_csv_to_df(os.path.join(processed_dir, fname))
                table_row = create_summary_table_row(df, f["source_datetime"],
                                                     f["filename"])
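                # note: DataFrame.append was removed in pandas 2.0; on newer
                # pandas this line would need pd.concat instead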
                summary_df = summary_df.append(table_row, ignore_index=True)
            else:
                print(f"{fname} has a filesize of {size}, not processing.")

        if make_historical_csv:
            out_csv_file = os.path.join(processed_dir, summary_filename)
            summary_df.to_csv(out_csv_file, index=False, header=True)
            if self.verbose:
                print(
                    "Finished creation of historical summary table CSV, returning."
                )
            return

        layer_conf = self.agol.layers['summary_table']

        table = self.agol.gis.content.get(layer_conf['id'])
        t = table.tables[0]

        new_col_names = {}
        for name in t.properties.fields:
            new_col_names[name["alias"]] = name["name"]
        summary_df = summary_df.rename(columns=new_col_names)
        df_as_dict = summary_df.to_dict(orient='records')

        features = []
        for r in df_as_dict:
            ft = Feature(attributes=r)
            features.append(ft)
        # It's okay if features is empty; status will reflect arcgis telling
        # us that, but it won't stop the processing.
        if self.dry_run:
            if self.verbose:
                print("Dry run set, not editing features.")
        else:
            status = t.edit_features(adds=features)
            if self.verbose:
                print(status)
        if self.verbose:
            print("Finished load of summary table")
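load_csv_to_df() and create_summary_table_row() are project helpers that aren't shown. Minimal sketches, under the assumption that a summary row aggregates one processed CSV:

import pandas as pd

def load_csv_to_df(path):
    # thin wrapper; the real helper may set dtypes or parse dates
    return pd.read_csv(path)

def create_summary_table_row(df, source_datetime, source_filename):
    # hypothetical: one summary record per processed file
    return {
        "row_count": len(df),
        "source_datetime": source_datetime,
        "source_filename": source_filename,
    }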
Example #4
def update_date_field(row: Feature, field_name: str, new_value: datetime.date):
    current_date_value = datetime_utils.to_date(row.attributes[field_name])

    if current_date_value == new_value:
        # if the values are the same, return False.
        return False

    # The values are different, Update the row.
    row.attributes[field_name] = new_value
    return True
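datetime_utils.to_date() is project-specific and not shown. A plausible sketch, assuming the field stores an ArcGIS epoch value in milliseconds:

import datetime

def to_date(esri_value):
    # hypothetical: ArcGIS date fields hold milliseconds since the Unix epoch
    if esri_value is None:
        return None
    return datetime.date.fromtimestamp(esri_value / 1000)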
Example #5
    def update_str_field(self, row: Feature, field_name: str, new_value: str = None):
        current_value = row.attributes[field_name]

        # if both are equal (str=str or None=None) return False.
        if current_value == new_value:
            return False

        # if (str and None) or (None and str)
        if not (current_value and new_value):
            row.attributes[field_name] = new_value
            return True

        # both values are non-identical strings.
        # if the test is not case sensitive and both UC strings match, no update needed
        if not self._case_sensitive and current_value.upper() == new_value.upper():
            return False

        # the strings are non-equivalent.  Update.
        row.attributes[field_name] = new_value
        return True
Example #6
    def __init__(self, project=None, feature_layer=None, feature=None):
        """ Creates a new instance that is owned by the project.
            :param project: The project that owns this model.
            :type project: Project
        """
        self._schema = None  # Implement in subclasses
        self.project = project
        self.feature_layer = feature_layer
        self._feature = feature
        if self._feature is None:
            self._feature = Feature(attributes={})
Example #7
def get_geo_location_info_by_OD_matrix(lon, lat):
    origin_coords = [
        '50.448069, 30.5194453', '50.448616, 30.5116673',
        '50.913788, 34.7828343'
    ]
    # origin_coords = ['-117.187807, 33.939479', '-117.117401, 34.029346']

    origin_features = []

    for origin in origin_coords:
        # the origin strings above are "lat, lon", so index 1 is x (longitude)
        # and index 0 is y (latitude)
        reverse_geocode = geocoding.reverse_geocode({
            "x": origin.split(',')[1],
            "y": origin.split(',')[0]
        })

        origin_feature = Feature(geometry=reverse_geocode['location'],
                                 attributes=reverse_geocode['address'])
        origin_features.append(origin_feature)

    origin_fset = FeatureSet(origin_features,
                             geometry_type='esriGeometryPoint',
                             spatial_reference={'latestWkid': 4326})

    destinations_address = r"data/destinations_address.csv"
    destinations_df = pd.read_csv(destinations_address)
    destinations_sdf = pd.DataFrame.spatial.from_df(destinations_df, "Address")

    destinations_fset = destinations_sdf.spatial.to_featureset()

    try:
        results = generate_origin_destination_cost_matrix(
            origins=origin_fset,  # origins_fc_latlong,
            destinations=destinations_fset,  # destinations_fs_address,
            cutoff=200,
            origin_destination_line_shape='Straight Line')
        od_df = results.output_origin_destination_lines.sdf

        # filter only the required columns
        od_df2 = od_df[[
            'DestinationOID', 'OriginOID', 'Total_Distance', 'Total_Time'
        ]]

        # user pivot_table
        od_pivot = od_df2.pivot_table(index='OriginOID',
                                      columns='DestinationOID')
        return od_pivot
    except Exception as exp:
        print(exp)

    return None
Example #8
def writeToIndex(logVals):
    # function used to write the published service information to a hosted table
    writeValList = logVals.split(",")[1:]
    indxVal = int(logVals.split(",")[0])

    fldNames = ['mapname','itemID','title','geonodeSRC']

    logRow = {k:v for k,v in zip(fldNames,writeValList)}

    logRow['indxCol'] = indxVal
    logRow['geometryType'] = "NA"

    logRowAdd = Feature(attributes=logRow)

    indexTable.edit_features(adds=[logRowAdd])
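indexTable is a module-level Table this snippet assumes already exists; it would presumably be obtained along these lines (the item id is a placeholder):

from arcgis.gis import GIS

gis = GIS("https://www.arcgis.com", "user", "password")
indexTable = gis.content.get("<hosted-table-item-id>").tables[0]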
Example #9
    def update_float_field(self, row: Feature, field_name: str, new_value: float = None):
        current_value = row.attributes[field_name]
        if current_value:
            current_value = round(current_value, self._rounding)  # round non zero, non-null values.

        if new_value:
            test_value = round(new_value, self._rounding)  # round non zero, non-null values.
        else:
            test_value = new_value

        if current_value == test_value:
            return False

        row.attributes[field_name] = new_value
        return True
Example #10
    def to_featureset(self):
        """
        Return a feature set
        """
        bin_features = []
        for bin_entry in self.bins():
            bin_feature = Feature(
                geometry=bin_entry['geometry'],
                attributes={'hitCount': bin_entry['hitCount']})
            bin_features.append(bin_feature)

        bin_fields = ['hitCount']
        return FeatureSet(bin_features,
                          bin_fields,
                          geometry_type='esriGeometryPolygon',
                          spatial_reference=self._wkid)
Example #11
def update_database(current, last):
    """
    Update the big database from the little one if the little one has
    new data for the big one.

    current = today's data
    last = the last row from the big database

    Returns True if the database was updated.
    """

    # If these all match then the latest data
    # has already been written to the database

    if ((last.new_cases == current.new_cases)
            and (last.total_cases == current.total_cases)
            and (last.last_update == current.date)):
        print("Database is current.")

        # If you want to test the update, comment this out
        # and then go in and manually delete the extra row(s).
        return False

    print("Appending the new record.")

    #return False

    attributes = {
        "utc_date": datetime.utcnow(),
        "last_update": current.date,
        "editor": os.environ.get('USERNAME') or os.environ.get('USER'),
        "source": "CC",
        "new_cases" : current.new_cases,
        "total_cases" : current.total_cases,
        "new_deaths" : current.new_deaths,
        "total_deaths" : current.total_deaths,
        "name": "Clatsop"
    }

    gis = GIS(Config.PORTAL_URL, Config.PORTAL_USER, Config.PORTAL_PASSWORD)
    layer = FeatureLayer(Config.COVID_CASES_URL, gis)
    f = Feature.from_dict({"attributes": attributes})
    fs = FeatureSet(features=[f])
    results = layer.edit_features(adds=fs)

    return True
Example #12
    def update_int_field(row: Feature, field_name: str, new_value: int = None):
        if row.attributes[field_name] != new_value:
            row.attributes[field_name] = new_value
            return True

        return False
Example #13
def main(arguments):  # noqa: C901
    # Initialize logging
    logger = initialize_logging(arguments.log_file)

    # Create the GIS
    logger.info("Authenticating...")

    # First step is to get authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the workforce project
    item = gis.content.get(arguments.project_id)
    project = workforce.Project(item)
    try:
        if project._is_v2_project:
            raise Exception("This is already a v2 project. This script migrates v1 projects only.")
    except AttributeError:
        raise Exception(
            "Cannot find the attribute `_is_v2_project`. "
            "Are you sure you have API version 1.8.3 or greater installed? Check with `arcgis.__version__` in your Python console"
        )
    logger.info(project)
    logger.info("Creating base v2 project...")

    # Create WF Project w given title
    if arguments.title:
        if arguments.title != project.title:
            title = arguments.title
        else:
            raise Exception(
                "Cannot name your project the same as the old one. Please provide a unique name"
            )
    else:
        title = project.title + " Updated"
    v2_project = workforce.create_project(title=title,
                                          summary=project.summary,
                                          major_version=2)

    # Update thumbnail
    with tempfile.TemporaryDirectory() as dirpath:
        try:
            thumbnail = item.download_thumbnail(save_folder=dirpath)
            v2_project._item.update(thumbnail=thumbnail)
            gis.content.get(
                v2_project.worker_web_map_id).update(thumbnail=thumbnail)
            gis.content.get(
                v2_project.dispatcher_web_map_id).update(thumbnail=thumbnail)
        except Exception:
            logger.info("Thumbnail not migrated successfully")

    # Migrate Assignment Types
    logger.info("Migrating assignment types...")
    existing_assignment_types = project.assignment_types.search()
    at_to_add = []

    for assignment_type in existing_assignment_types:
        if assignment_type.name:
            at_to_add.append(
                workforce.AssignmentType(project=v2_project,
                                         name=assignment_type.name))
        else:
            logger.info(
                "Assignment Type migration skipped - does not have a name")

    # Get Assignment Types in migrated project before you potentially add a bad worker / dispatcher
    v2_project.assignment_types.batch_add(at_to_add)
    new_assignment_types = v2_project.assignment_types.search()
    if len(existing_assignment_types) == len(new_assignment_types):
        logger.info("Assignment Types successfully migrated")
    else:
        cleanup_project(gis, title)
        raise Exception(
            "Assignment Types not successfully migrated. Cleaning up new project"
        )

    # Migrate Dispatchers
    if not arguments.skip_dispatchers:
        logger.info("Migrating dispatchers...")
        dispatcher_ghost = False

        # Get Existing Dispatchers
        existing_dispatchers = project.dispatchers.search()
        dispatchers_to_add = []
        layer = v2_project.dispatchers_layer

        # Get Custom Dispatcher Fields and Templates
        custom_fields = add_custom_fields(project.dispatchers_layer, layer)

        # Prepare Dispatchers to be added
        for dispatcher in existing_dispatchers:

            # Validate that there is a user id populated and that the user id isn't yourself (since that was added during project creation).
            # Otherwise, skip adding the dispatcher
            if dispatcher.user_id and dispatcher.user_id != arguments.username:

                # Validate a name exists, otherwise populate with an empty string
                dispatcher_name = dispatcher.user_id if dispatcher.name is None else dispatcher.name

                attributes = {
                    v2_project._dispatcher_schema.name: dispatcher_name,
                    v2_project._dispatcher_schema.contact_number:
                    dispatcher.contact_number,
                    v2_project._dispatcher_schema.user_id: dispatcher.user_id,
                    v2_project._dispatcher_schema.global_id:
                    dispatcher.global_id
                }

                # Add Custom Field Values
                for field in custom_fields:
                    attributes[field["name"]] = dispatcher._feature.attributes[
                        field["name"]]
                feature = Feature(geometry=dispatcher.geometry,
                                  attributes=attributes)
                dispatchers_to_add.append(feature)
            else:
                if not dispatcher.user_id:
                    logger.info(
                        "Dispatcher skipped during migration: either the dispatcher does not have a valid user_id "
                        "in the layer, or the dispatcher was already added. Please check the original dispatchers layer."
                    )
                    dispatcher_ghost = True
                else:
                    # update info for owner dispatcher
                    v2_dispatcher = v2_project.dispatchers.search()[0]
                    v2_dispatcher.update(
                        contact_number=dispatcher.contact_number,
                        name=dispatcher.name)

        # Add Dispatchers
        layer.edit_features(adds=FeatureSet(dispatchers_to_add),
                            use_global_ids=True)
        # add dispatcher named users to the project's group.
        max_add_per_call = 25
        for i in range(0,
                       math.ceil(len(dispatchers_to_add) / max_add_per_call)):
            v2_project.group.add_users([
                d.attributes[v2_project._dispatcher_schema.user_id]
                for d in dispatchers_to_add[i * max_add_per_call:(
                    i * max_add_per_call) + max_add_per_call]
            ])
        new_dispatchers = v2_project.dispatchers_layer.query(
            "1=1", return_all_records=True).features
        if len(existing_dispatchers) == len(
                new_dispatchers) or dispatcher_ghost:
            logger.info("Dispatchers successfully migrated")
        else:
            raise Exception("Dispatchers not migrated successfully")

    # Migrate Workers
    logger.info("Migrating workers...")
    worker_ghost = False

    # Get Existing Workers
    existing_workers = project.workers_layer.query(
        "1=1", return_all_records=True).features
    workers_to_add = []
    layer = v2_project.workers_layer

    # Get Custom Worker Fields
    custom_fields = add_custom_fields(project.workers_layer, layer)
    # Prepare Workers to be added
    for worker in existing_workers:
        if worker.attributes[project._worker_schema.user_id]:
            worker_name = worker.attributes[project._worker_schema.user_id] if worker.attributes[project._worker_schema.name] is None else \
                worker.attributes[project._worker_schema.name]
            worker_status = 0 if worker.attributes[
                project._worker_schema.status] is None else worker.attributes[
                    project._worker_schema.status]
            attributes = {
                v2_project._worker_schema.name:
                worker_name,
                v2_project._worker_schema.contact_number:
                worker.attributes[project._worker_schema.contact_number],
                v2_project._worker_schema.notes:
                worker.attributes[project._worker_schema.notes],
                v2_project._worker_schema.status:
                worker_status,
                v2_project._worker_schema.title:
                worker.attributes[project._worker_schema.title],
                v2_project._worker_schema.user_id:
                worker.attributes[project._worker_schema.user_id],
                v2_project._worker_schema.global_id:
                worker.attributes[project._worker_schema.global_id]
            }

            # Add Custom Field Values
            for field in custom_fields:
                attributes[field["name"]] = worker.attributes[field["name"]]
            feature = Feature(geometry=worker.geometry, attributes=attributes)
            workers_to_add.append(feature)
        else:
            worker_ghost = True
            logger.info("Worker migration skipped - does not have a user id")

    # Add Workers
    layer.edit_features(adds=FeatureSet(workers_to_add), use_global_ids=True)
    # add worker named users to the project's group.
    max_add_per_call = 25
    for i in range(0, math.ceil(len(workers_to_add) / max_add_per_call)):
        v2_project.group.add_users([
            w.attributes[v2_project._worker_schema.user_id]
            for w in workers_to_add[i *
                                    max_add_per_call:(i * max_add_per_call) +
                                    max_add_per_call]
        ])
    new_workers = v2_project.workers_layer.query(
        "1=1", return_all_records=True).features
    if (len(existing_workers) == len(new_workers)) or worker_ghost:
        logger.info("Workers successfully migrated")
    else:
        cleanup_project(gis, title)
        raise Exception(
            "Workers not migrated successfully. Cleaning up new project")

    # Migrate Assignments
    logger.info("Migrating assignments")
    assignment_ghost = False

    # Get Existing Assignments
    existing_assignments = project.assignments_layer.query(
        "1=1", return_all_records=True).features
    assignments_to_add = []
    layer = v2_project.assignments_layer

    # Set Custom Fields for Assignments and Templates
    custom_fields = add_custom_fields(project.assignments_layer, layer)

    # Prepare Assignments to be Added
    for assignment in existing_assignments:
        if assignment.attributes[project._assignment_schema.assignment_type]:

            # set attributes in case they are empty
            assignment_location = (str(assignment.geometry["x"]) + " " + str(assignment.geometry["y"])) if \
                assignment.attributes[project._assignment_schema.location] is None else assignment.attributes[project._assignment_schema.location]
            assignment_status = 0 if assignment.attributes[project._assignment_schema.status] is None else \
                assignment.attributes[project._assignment_schema.status]
            assignment_priority = 0 if assignment.attributes[project._assignment_schema.priority] is None else \
                assignment.attributes[project._assignment_schema.priority]

            assignment_type_name = ""
            for at in existing_assignment_types:
                if at.code == assignment.attributes[
                        project._assignment_schema.assignment_type]:
                    assignment_type_name = at.name
                    break
            attributes = {
                v2_project._assignment_schema.status:
                assignment_status,
                v2_project._assignment_schema.notes:
                assignment.attributes[project._assignment_schema.notes],
                v2_project._assignment_schema.priority:
                assignment_priority,
                v2_project._assignment_schema.assignment_type:
                get_assignment_type_global_id(new_assignment_types,
                                              assignment_type_name),
                v2_project._assignment_schema.work_order_id:
                assignment.attributes[
                    project._assignment_schema.work_order_id],
                v2_project._assignment_schema.due_date:
                assignment.attributes[project._assignment_schema.due_date],
                v2_project._assignment_schema.description:
                assignment.attributes[project._assignment_schema.description],
                v2_project._assignment_schema.worker_id:
                get_worker_global_id(
                    project.workers.search(), assignment.attributes[
                        project._assignment_schema.worker_id]),
                v2_project._assignment_schema.location:
                assignment_location,
                v2_project._assignment_schema.declined_comment:
                assignment.attributes[
                    project._assignment_schema.declined_comment],
                v2_project._assignment_schema.assigned_date:
                assignment.attributes[
                    project._assignment_schema.assigned_date],
                v2_project._assignment_schema.in_progress_date:
                assignment.attributes[
                    project._assignment_schema.in_progress_date],
                v2_project._assignment_schema.completed_date:
                assignment.attributes[
                    project._assignment_schema.completed_date],
                v2_project._assignment_schema.declined_date:
                assignment.attributes[
                    project._assignment_schema.declined_date],
                v2_project._assignment_schema.paused_date:
                assignment.attributes[project._assignment_schema.paused_date],
                v2_project._assignment_schema.dispatcher_id:
                get_dispatcher_global_id(
                    arguments.skip_dispatchers, project.dispatchers.search(),
                    assignment.attributes[
                        project._assignment_schema.dispatcher_id]),
                v2_project._assignment_schema.global_id:
                assignment.attributes[project._assignment_schema.global_id],
                v2_project._assignment_schema.object_id:
                assignment.attributes[project._assignment_schema.object_id]
            }

            # Add Custom Field Values
            for field in custom_fields:
                attributes[field["name"]] = assignment.attributes[
                    field["name"]]
            feature = Feature(geometry=assignment.geometry,
                              attributes=attributes)
            assignments_to_add.append(feature)
        else:
            logger.info(
                "One assignment's migration skipped - does not have an assignment type"
            )
            assignment_ghost = True

    # Add Assignments
    layer.edit_features(adds=FeatureSet(assignments_to_add),
                        use_global_ids=True)
    new_assignments = v2_project.assignments_layer.query(
        "1=1", return_all_records=True).features
    if (len(new_assignments) == len(existing_assignments)) or assignment_ghost:
        logger.info("Assignments successfully migrated")
    else:
        cleanup_project(gis, title)
        raise Exception(
            "Assignments not migrated successfully. Cleaning up new project")

    # Migrate Attachments
    logger.info("Migrating Attachments")
    for assignment in existing_assignments:
        object_id = assignment.attributes[project._assignment_schema.object_id]
        new_assignment_object_id = v2_project.assignments.get(
            global_id=assignment.attributes[
                project._assignment_schema.global_id]).object_id
        if len(project.assignments_layer.attachments.get_list(object_id)) > 0:
            try:
                with tempfile.TemporaryDirectory() as dirpath:
                    paths = project.assignments_layer.attachments.download(
                        oid=object_id, save_path=dirpath)
                    for path in paths:
                        v2_project.assignments_layer.attachments.add(
                            oid=new_assignment_object_id, file_path=path)
            except Exception as e:
                logger.info(e)
                logger.info(
                    "Skipping migration of this attachment. It did not download successfully"
                )
    if len(project.assignments_layer.attachments.search("1=1")) == len(
            v2_project.assignments_layer.attachments.search("1=1")):
        logger.info("Attachments successfully migrated")
    else:
        logger.info(
            "Not all of your attachments migrated successfully. Continuing with migration"
        )

    # Migrate Integrations
    logger.info("Migrating Integrations")
    v2_project.integrations.batch_delete(
        [v2_project.integrations.get("arcgis-navigator")[0]])
    previous_integrations = project.integrations.search()

    # Replacing AT Code with GUID
    for integration in previous_integrations:
        if "assignmentTypes" in integration:
            types = integration["assignmentTypes"]
            key_list = list(sorted(types.keys()))
            for key in key_list:
                at_name = project.assignment_types.get(code=int(key)).name
                guid = get_assignment_type_global_id(new_assignment_types,
                                                     at_name)
                v2_project.integrations.add(
                    integration_id=integration["id"],
                    prompt=integration["prompt"],
                    url_template=types[key]["urlTemplate"],
                    assignment_types=guid)
        else:
            # default id changed
            if integration["id"] == "default-navigator":
                integration["id"] = "arcgis-navigator"
            v2_project.integrations.add(
                integration_id=integration["id"],
                prompt=integration["prompt"],
                url_template=integration["urlTemplate"])
    logger.info("Integrations migrated successfully")

    # Get rid of old URL patterns
    integrations = v2_project.integrations.search()
    generate_universal_links(integrations)

    # Migrate Webmaps - Retain non-WF layers
    logger.info("Migrating Webmaps")
    upgrade_webmaps(project.worker_webmap, v2_project.worker_webmap)
    upgrade_webmaps(project.dispatcher_webmap, v2_project.dispatcher_webmap)
    logger.info("Script Completed")
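Helpers such as get_assignment_type_global_id() belong to the surrounding migration script and are not shown. A minimal sketch of the lookup it appears to perform:

def get_assignment_type_global_id(assignment_types, name):
    # hypothetical: map an assignment type name to its GlobalID in the v2 project
    for assignment_type in assignment_types:
        if assignment_type.name == name:
            return assignment_type.global_id
    return None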
Example #14
'''sample value (truncated; the full record appears in Example #15):
        "wind_angle": "",
        "gust_strength": "",
        "gust_angle": "",
        "wind_timeutc": ""
        '''

for measure in measuresDict["values"]:
    attr = dict()
    attr["id"] = measure["_id"]
    attr["altitude"] = measure["altitude"]
    attr["temperature"] = measure["temperature"]
    attr["humidity"] = measure["humidity"]
    attr["pressure"] = measure["pressure"]
    attr["rain_60min"] = measure["rain_60min"]
    attr["rain_24h"] = measure["rain_24h"]
    attr["rain_live"] = measure["rain_live"]
    attr["rain_timeutc"] = measure["rain_timeutc"]
    attr["wind_strength"] = measure["wind_strength"]
    attr["wind_angle"] = measure["wind_angle"]
    attr["gust_strength"] = measure["gust_strength"]
    attr["gust_angle"] = measure["gust_angle"]
    attr["wind_timeutc"] = measure["wind_timeutc"]
    lat = measure["Y"]
    lon = measure["X"]
    pt = Point({"x": lon, "y": lat, "spatialReference": {"wkid": 4326}})
    feature = Feature(pt, attr)
    featuresToAdd.append(feature)
#add all the points
#test
#netAtmoFl.manager.truncate()
netAtmoFl.edit_features(adds=featuresToAdd)
Example #15
def main(mytimer: func.TimerRequest) -> None:
    utc_timestamp = datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc).isoformat()

    if mytimer.past_due:
        logging.info('The timer is past due!')

    # This function parses the data from Netatmo and collects the records in measuresDict.
    # First we need to authenticate; the response gives back an access_token used by the later requests.
    payload = {'grant_type': 'refresh_token',
            'client_id': privatepass.getClientId(),
            'client_secret': privatepass.getClientSecret(),
            'refresh_token' : privatepass.getRefreshToken(),
            'scope': 'read_station'}
    try:
        response = requests.post("https://api.netatmo.com/oauth2/token", data=payload)
        response.raise_for_status()
        access_token=response.json()["access_token"]
        refresh_token=response.json()["refresh_token"]
        scope=response.json()["scope"]
        
    except requests.exceptions.HTTPError as error:
        print(error.response.status_code, error.response.text)



    '''
    Netatmo data is dependent on the extent queried: the more you zoom in,
    the more stations you get.

    https://dev.netatmo.com/en-US/resources/technical/guides/ratelimits
    Per-user limits:
    50 requests every 10 seconds
    > So: one global request, plus multiple little ones on specific sub-areas,
      while staying under the API limit
    '''

    # first global request

    payload = {'access_token': access_token,
            'lat_ne':52.677040100097656,
            'lon_ne': 13.662185668945312,
            'lat_sw' : 52.374916076660156,
            'lon_sw':13.194580078125
                # filter weird/wrong data
                ,'filter': 'true'
            }
    try:
        response = requests.post("https://api.netatmo.com/api/getpublicdata", data=payload)
        response.raise_for_status()
        resultJson=response.json()
        parseData(resultJson)
        
    except requests.exceptions.HTTPError as error:
        print(error.response.status_code, error.response.text)



    base_lat_ne = 52.677040100097656
    base_lon_ne = 13.662185668945312
    base_lat_sw = 52.374916076660156
    base_lon_sw = 13.194580078125


    # calc each subextent size
    lon_step = (base_lon_ne - base_lon_sw)/4
    lat_step = (base_lat_ne - base_lat_sw)/4

    currentStep=0

    # we cut the extent in x/x and go through each sub-extent
    lat_sw = base_lat_sw
    while(lat_sw < base_lat_ne):
        lat_ne = lat_sw + lat_step
        #reset the lon_sw
        lon_sw = base_lon_sw
        while(lon_sw < base_lon_ne):
            lon_ne = lon_sw + lon_step
            payload = {'access_token': access_token,
                'lat_sw' : lat_sw,
                'lon_sw':lon_sw,
                'lat_ne':lat_ne,
                'lon_ne': lon_ne,
                    # filter weird/wrong data
                    'filter': 'true'
                }
            try:
                currentStep=currentStep+1
                #print(str(lat_ne)  + "   " + str(lon_ne))
                response = requests.post("https://api.netatmo.com/api/getpublicdata", data=payload)
                response.raise_for_status()
                resultJson=response.json()
                # parse the data
                parseData(resultJson)
            except requests.exceptions.HTTPError as error:
                print(error.response.status_code, error.response.text)
            lon_sw = lon_ne
        lat_sw = lat_ne



    # last part - json can be dumped in a file for test purpose or geoevent server integration
    #with open('dataNetAtmo.json', 'w') as outfile:  
    #    json.dump(measuresDict, outfile)

    # or we can get each object and push it as a feature !

    # connect to to the gis
    # get the feature layer
    gis = GIS("https://esrich.maps.arcgis.com", "cede_esrich", privatepass.getPass())       
    netAtmoFl =  gis.content.get('0078c29282174460b57ce7ca72262549').layers[0]        

    featuresToAdd = []
    ''' sample value:
            "_id": "70:ee:50:3f:4d:26",
            "X": 13.5000311,
            "Y": 52.5020974,
            "altitude": 37,
            "temperature": 10.4,
            "humidity": 71,
            "pressure": 1018.1,
            "rain_60min": "",
            "rain_24h": "",
            "rain_live": "",
            "rain_timeutc": "",
            "wind_strength": "",
            "wind_angle": "",
            "gust_strength": "",
            "gust_angle": "",
            "wind_timeutc": ""
            '''

    for measure in measuresDict["values"]:
        attr = dict()
        attr["id"] = measure["_id"]
        attr["altitude"] = measure["altitude"]
        attr["temperature"] = measure["temperature"]
        attr["humidity"] = measure["humidity"]
        attr["pressure"] = measure["pressure"]
        attr["rain_60min"] = measure["rain_60min"]
        attr["rain_24h"] = measure["rain_24h"]
        attr["rain_live"] = measure["rain_live"]
        attr["rain_timeutc"] = measure["rain_timeutc"]
        attr["wind_strength"] = measure["wind_strength"]
        attr["wind_angle"] = measure["wind_angle"]
        attr["gust_strength"] = measure["gust_strength"]
        attr["gust_angle"] = measure["gust_angle"]
        attr["wind_timeutc"] = measure["wind_timeutc"]
        lat = measure["Y"]
        lon = measure["X"]
        pt = Point({"x" : lon, "y" : lat, "spatialReference" : {"wkid" : 4326}})
        feature = Feature(pt,attr)
        featuresToAdd.append(feature)
    #add all the points  
    #test

    netAtmoFl.edit_features(adds=featuresToAdd)


    logging.info('Python timer trigger function ran at %s', utc_timestamp)
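parseData() and the privatepass module are local code that isn't shown. A sketch of the record shape parseData() presumably appends, based on how measuresDict is consumed above; the real parser would also unpack each station's per-module "measures" payload into the temperature, humidity, rain and wind keys:

measuresDict = {"values": []}

def parseData(resultJson):
    # hypothetical: flatten each public station returned by getpublicdata
    for station in resultJson.get("body", []):
        place = station.get("place", {})
        lon, lat = place.get("location", [None, None])
        measuresDict["values"].append({
            "_id": station.get("_id"),
            "X": lon,
            "Y": lat,
            "altitude": place.get("altitude"),
        })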
Example #16
def main(arguments):  # noqa: C901
    # Initialize logging
    logger = initialize_logging(arguments.log_file)

    # Create the GIS
    logger.info("Authenticating...")

    # First step is to get authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the old workforce project
    item = gis.content.get(arguments.classic_project_id)
    project = workforce.Project(item)
    try:
        if project._is_v2_project:
            raise Exception(
                "The first project provided is a v2 project. Please migrate assignment data from v1 projects"
            )
    except AttributeError:
        raise Exception(
            "Cannot find the attribute `_is_v2_project`. Are you sure you have API version 1.8.3 or greater installed? "
            "Check with `arcgis.__version__` in your Python console")

    # Get new workforce project
    v2_project = workforce.Project(gis.content.get(arguments.new_project_id))
    if not v2_project._is_v2_project:
        raise Exception(
            "The second project provided is a v1 project. Please migrate assignment data to v2 projects"
        )

    # validate correct assignment types are present
    existing_assignment_types = project.assignment_types.search()
    for assignment_type in existing_assignment_types:
        if not v2_project.assignment_types.get(name=assignment_type.name):
            raise Exception(
                "One of your assignment types in your classic project is not in your offline project"
            )

    # validate correct workers are present
    for worker in project.workers.search():
        if not v2_project.workers.get(user_id=worker.user_id):
            raise Exception(
                "One of your workers in your classic project is not in your offline project"
            )

    # Migrate Assignments
    logger.info("Migrating assignments")
    assignment_ghost = False

    # Get Existing Assignments
    existing_assignments = project.assignments_layer.query(
        where=arguments.where, return_all_records=True).features
    assignments_to_add = []
    layer = v2_project.assignments_layer

    # Set Custom Fields for Assignments and Templates
    custom_fields = add_custom_fields(project.assignments_layer, layer)

    # Prepare Assignments to be Added
    for assignment in existing_assignments:
        if assignment.attributes[project._assignment_schema.assignment_type]:

            # set attributes in case they are empty
            assignment_location = (str(assignment.geometry["x"]) + " " + str(assignment.geometry["y"])) if \
                assignment.attributes[project._assignment_schema.location] is None else \
                assignment.attributes[project._assignment_schema.location]
            assignment_status = 0 if assignment.attributes[project._assignment_schema.status] is None else \
                assignment.attributes[project._assignment_schema.status]
            assignment_priority = 0 if assignment.attributes[project._assignment_schema.priority] is None else \
                assignment.attributes[project._assignment_schema.priority]

            # get AT name based on code stored
            assignment_type_name = ""
            for at in existing_assignment_types:
                if at.code == assignment.attributes[
                        project._assignment_schema.assignment_type]:
                    assignment_type_name = at.name
                    break

            # Set attributes
            attributes = {
                v2_project._assignment_schema.status:
                assignment_status,
                v2_project._assignment_schema.notes:
                assignment.attributes[project._assignment_schema.notes],
                v2_project._assignment_schema.priority:
                assignment_priority,
                v2_project._assignment_schema.assignment_type:
                get_assignment_type_global_id(
                    v2_project.assignment_types.search(),
                    assignment_type_name),
                v2_project._assignment_schema.work_order_id:
                assignment.attributes[
                    project._assignment_schema.work_order_id],
                v2_project._assignment_schema.due_date:
                assignment.attributes[project._assignment_schema.due_date],
                v2_project._assignment_schema.description:
                assignment.attributes[project._assignment_schema.description],
                v2_project._assignment_schema.worker_id:
                get_worker_global_id(
                    project.workers.search(), v2_project.workers,
                    assignment.attributes[
                        project._assignment_schema.worker_id]),
                v2_project._assignment_schema.location:
                assignment_location,
                v2_project._assignment_schema.declined_comment:
                assignment.attributes[
                    project._assignment_schema.declined_comment],
                v2_project._assignment_schema.assigned_date:
                assignment.attributes[
                    project._assignment_schema.assigned_date],
                v2_project._assignment_schema.in_progress_date:
                assignment.attributes[
                    project._assignment_schema.in_progress_date],
                v2_project._assignment_schema.completed_date:
                assignment.attributes[
                    project._assignment_schema.completed_date],
                v2_project._assignment_schema.declined_date:
                assignment.attributes[
                    project._assignment_schema.declined_date],
                v2_project._assignment_schema.paused_date:
                assignment.attributes[project._assignment_schema.paused_date],
                v2_project._assignment_schema.dispatcher_id:
                get_dispatcher_global_id(
                    project.dispatchers.search(), v2_project.dispatchers,
                    assignment.attributes[
                        project._assignment_schema.dispatcher_id]),
                v2_project._assignment_schema.global_id:
                assignment.attributes[project._assignment_schema.global_id],
                v2_project._assignment_schema.object_id:
                assignment.attributes[project._assignment_schema.object_id]
            }

            # Add Custom Field Values
            for field in custom_fields:
                attributes[field["name"]] = assignment.attributes[
                    field["name"]]
            feature = Feature(geometry=assignment.geometry,
                              attributes=attributes)
            assignments_to_add.append(feature)
        else:
            logger.info(
                "One assignment's migration skipped - does not have an assignment type"
            )
            assignment_ghost = True

    # Add Assignments
    layer.edit_features(adds=FeatureSet(assignments_to_add),
                        use_global_ids=True)
    new_assignments = v2_project.assignments_layer.query(
        "1=1", return_all_records=True).features
    # skip validation if there's a ghost
    if (len(new_assignments) == len(existing_assignments)) or assignment_ghost:
        logger.info("Assignments successfully migrated")
    else:
        raise Exception("Assignments not migrated successfully. Unknown error")

    # Migrate Attachments
    logger.info("Migrating Attachments")
    for assignment in existing_assignments:
        object_id = assignment.attributes[project._assignment_schema.object_id]
        new_assignment_object_id = v2_project.assignments.get(
            global_id=assignment.attributes[
                project._assignment_schema.global_id]).object_id
        if len(project.assignments_layer.attachments.get_list(object_id)) > 0:
            try:
                with tempfile.TemporaryDirectory() as dirpath:
                    paths = project.assignments_layer.attachments.download(
                        oid=object_id, save_path=dirpath)
                    for path in paths:
                        v2_project.assignments_layer.attachments.add(
                            oid=new_assignment_object_id, file_path=path)
            except Exception as e:
                logger.info(e)
                logger.info(
                    "Skipping migration of this attachment. It did not download successfully"
                )
    if len(project.assignments_layer.attachments.search("1=1")) == len(
            v2_project.assignments_layer.attachments.search("1=1")):
        logger.info("Attachments successfully migrated")
    else:
        logger.info(
            "Not all of your attachments migrated successfully. Continuing with migration"
        )
    logger.info("Script Completed")
Example #17
            classes = visual_recognition.classify(
                images_file,
                parameters=json.dumps({
                    'classifier_ids': ['##########'],
                    'threshold': 0.876
                }))

        #print(json.dumps(classes, indent=2))
        data = json.loads(json.dumps(classes, indent=2))
        if len(data['images'][0]['classifiers'][0]['classes']) != 0:

            print(json.dumps(classes, indent=2))

            # use a context manager so the coordinate file is closed, and a
            # distinct name so the handle isn't clobbered by the Feature below
            coor_path = "./images/" + filename.replace("photo", "coor").replace(
                "jpg", "txt")
            with open(coor_path, "r") as coor_file:
                d = float(coor_file.read()) * 0.000000301

            gis = GIS(username="******", password="******")
            p = Point({"x": -73.471977 + d, "y": 40.703342})
            a = {"pothole_layer": "pothole"}
            f = Feature(p, a)
            fs = FeatureSet([f])
            lyr = FeatureLayer(
                "https://services8.arcgis.com/x660UqfqVJlbWB0Y/arcgis/rest/services/pothole_layers/FeatureServer/0",
                gis=gis)
            lyr.edit_features(adds=fs)

        #delete photo and txt
Example #18
    def __getPointFeatureSet(self, points: List[Point]) -> FeatureSet:
        featureList = [Feature(point) for point in points]

        return FeatureSet(featureList,
                          geometry_type="esriGeometryMultipoint",
                          spatial_reference={'wkid': 4326})
Example #19
    def process_historical_hos(self,
                               processed_dir,
                               processed_file_details,
                               make_historical_csv=False):

        if self.verbose:
            print("Starting load of historical HOS table...")

        layer_conf = self.agol.layers['full_historical_table']
        original_data_file_name = layer_conf['original_file_name']

        table = self.agol.gis.content.get(layer_conf['id'])
        t = table.layers[0]

        # iterate all csvs and collect the information from each one.
        # normalize header names at the same time
        hist_csv_rows = []
        for f in processed_file_details:
            fname = f["processed_filename"]
            print(f"    working on {fname}..")
            size = os.path.getsize(os.path.join(processed_dir, fname))
            if size > 0:
                processed_time = datetime.utcnow().isoformat()
                with open(os.path.join(processed_dir, fname),
                          newline='') as csvfile:
                    reader = csv.DictReader(csvfile)
                    for row in reader:

                        row["Source_Data_Timestamp"] = f[
                            "source_datetime"].isoformat()
                        row["Processed_At"] = processed_time
                        row["Source_Filename"] = f["filename"]
                        hist_csv_rows.append(row)

            else:
                print(f"{fname} has a filesize of {size}, not processing.")

        # historical for generating a new source CSV
        if make_historical_csv and len(hist_csv_rows) > 0:
            agol_fieldnames = [n["name"] for n in t.properties.fields]
            headers = set(agol_fieldnames + list(hist_csv_rows[0].keys()))
            with open(os.path.join(processed_dir, original_data_file_name),
                      "w",
                      newline="") as csvfile:
                writer = csv.DictWriter(csvfile, fieldnames=headers)
                writer.writeheader()
                writer.writerows(hist_csv_rows)

        # It's okay if features is empty; status will reflect arcgis telling us that,
        # but it won't stop the processing.
        features = [Feature(attributes=row) for row in hist_csv_rows]
        if self.dry_run:
            if self.verbose:
                print("Dry run set, not editing features.")
        else:
            fc = len(features)
            chunksize = 1000
            feature_batchs = chunks(features, chunksize)
            fb_list = list(feature_batchs)
            fbc = len(fb_list)
            if self.verbose:
                print(
                    f"Adding {fc} features to the historical table in {fbc} batches."
                )
            for batch in fb_list:
                status = t.edit_features(adds=batch)
                b_len = len(batch)
                num_success = len([
                    x for x in status["addResults"] if x["success"]
                ])
                fails = b_len - num_success
                if fails != 0:
                    print(f"Not all updates succeeded; {fails} failures")
                    print("XXX do something about this failure!")
                else:
                    print(
                        f"All {num_success} features successfully updated in this batch."
                    )

        if self.verbose:
            print("Finished load of historical HOS table")
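The chunks() helper used above isn't shown; a minimal sketch that yields successive fixed-size batches (the last batch may be smaller):

def chunks(seq, size):
    # yield size-length slices of seq until it is exhausted
    for i in range(0, len(seq), size):
        yield seq[i:i + size]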