def main(arguments):
    """Add workers to a Workforce project from rows of a CSV file.

    :param arguments: parsed command-line arguments supplying the org url,
        credentials, project id, CSV path, and the CSV column names to read.
    """
    # Initialize logging
    # CONSISTENCY FIX: was `initialize_logger`, but every other entry point
    # in this file defines/uses `initialize_logging`
    logger = initialize_logging(arguments.log_file)

    # First step is to authenticate and get a valid token
    logger.info("Authenticating...")
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the workforce project
    item = gis.content.get(arguments.project_id)
    project = workforce.Project(item)

    # Read the CSV file and loop through the workers information contained within this file
    logger.info("Parsing CSV...")
    with open(os.path.abspath(arguments.csv_file), 'r') as file:
        reader = csv.DictReader(file)
        # List of workers to add
        workers = []
        for row in reader:
            # Create a worker using the required fields
            worker = workforce.Worker(project,
                                      name=row[arguments.name_field],
                                      status=row[arguments.status_field],
                                      user_id=row[arguments.user_id_field])
            # These fields are optional, and are added separately
            if arguments.contact_number_field:
                worker.contact_number = row.get(arguments.contact_number_field)
            if arguments.title_field:
                worker.title = row.get(arguments.title_field)
            workers.append(worker)
        # Batch add workers in a single request
        logger.info("Adding Workers...")
        project.workers.batch_add(workers)
    logger.info("Completed")
def main(arguments):
    """Copy invalid assignments from a Workforce project to a target feature layer."""
    # Set up logging
    logger = initialize_logging(arguments.log_file)

    # Authenticate against the org / portal to obtain a token
    logger.info("Authenticating...")
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Fetch the workforce project
    project = workforce.Project(gis.content.get(arguments.project_id))

    # Determine which assignments fail the validation criteria
    invalid_assignments = get_invalid_assignments(project,
                                                  arguments.time_tolerance,
                                                  arguments.distance_tolerance,
                                                  arguments.min_accuracy,
                                                  arguments.workers)

    # Load the field mappings used when copying features
    with open(arguments.config_file, 'r') as f:
        field_mappings = json.load(f)

    # Verify the target layer is reachable before copying anything
    target_fl = arcgis.features.FeatureLayer(arguments.target_fl, gis)
    try:
        target_fl.properties
    except Exception as e:
        logger.info(e)
        logger.info(
            "Layer could not be found based on given input. Please check your parameters again. Exiting the script"
        )
        sys.exit(0)

    copy_assignments(project, invalid_assignments, target_fl, field_mappings)
def define_project(gis, project_id):
    """Load a workforce project and index its members for quick lookup.

    :param gis: GIS instance authenticated with an AGOL or Portal account
    :param project_id: workforce project item id
    :return: tuple of (project, assignment types keyed by name,
        dispatchers keyed by user_id, workers keyed by user_id)
    """
    workforce_project = workforce.Project(gis.content.get(project_id))

    # Index assignment types by name
    assignment_type_dict = {
        at.name: at for at in workforce_project.assignment_types.search()
    }

    # Index dispatchers by their user id
    dispatchers_dict = {
        d.user_id: d for d in workforce_project.dispatchers.search()
    }

    # Index workers by their user id
    workers_dict = {
        w.user_id: w for w in workforce_project.workers.search()
    }

    return workforce_project, assignment_type_dict, dispatchers_dict, workers_dict
# Example #4
# 0
def main(arguments):
    """Create an Operations Dashboard for a Workforce project by cloning an example."""
    # Set up logging
    logger = initialize_logging(arguments.log_file)

    # Authenticate with the org to obtain a token
    logger.info("Authenticating...")
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    logger.info("Getting workforce project")

    # Look up the workforce project; bail out early on a bad id
    item = gis.content.get(arguments.project_id)
    try:
        project = workforce.Project(item)
    except Exception as e:
        logger.info(e)
        logger.info("Invalid project id")
        sys.exit(0)

    # Either reuse the dispatcher webmap or save a dedicated copy for the dashboard
    if arguments.use_dispatcher_webmap:
        map_id = project.dispatcher_web_map_id
    else:
        logger.info("Saving copy of dispatcher webmap")
        map_id = project.dispatcher_webmap.save(item_properties={"title": project.title + " Dashboard Map", "tags": [], "snippet": "Dashboard Map"}).id

    # Fetch the example dashboard and map its source items onto this project
    logger.info("Getting example dashboard")
    if arguments.light_mode:
        item = gis.content.get('1cbac058ce1b4a008a6baa0f3cfd506a')
        item_mapping = {
            '2249c41dcec34b91b3990074ed8c8ffc': project.assignments_item.id,
            '6afe245f9f3f48e8884dc7e691841973': project.workers_item.id,
            'e605c140ecf14cccaf1e7b3bcb4b1710': map_id,
        }
    else:
        item = gis.content.get("af7cd356c21a4ded87d8cdd452fd8be3")
        item_mapping = {
            '377b2b2014f24b0ab9b053d9b2fed113': project.assignments_item.id,
            'e1904f5c56484163a021155f447adf34': project.workers_item.id,
            'bb7d2b495ecc4ea7810b28f16ef71cba': map_id,
        }

    # Clone the dashboard into the org using the mapped items
    logger.info("Creating dashboard")
    cloned_items = gis.content.clone_items([item], item_mapping=item_mapping, search_existing_items=False)
    if len(cloned_items) == 0:
        logger.info("You have already cloned a dashboard of this name! Check your item content and if necessary, set a title")
        sys.exit(0)

    # Rename the clone and share it with the project group
    logger.info("Updating title and sharing to project group")
    new_title = arguments.title if arguments.title else project.title + " Dashboard"
    dashboard = cloned_items[0]
    dashboard.update(item_properties={"title": new_title})
    dashboard.share(groups=[project.group])
    logger.info("Completed")
def main(arguments):
    """Reset stale workers' status to "not_working".

    The cutoff may be given either as a relative number of minutes, or as a
    "MM/DD/YYYY hh:mm:ss" date string interpreted in ``arguments.timezone``.
    Workers not edited since the (UTC-normalized) cutoff are updated.
    """
    # Initialize logging
    logger = initialize_logging(arguments.log_file)

    # Create the GIS
    logger.info("Authenticating...")

    # First step is to authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    logger.info("Getting workforce project")

    # Get the workforce project; bail out early on a bad id
    item = gis.content.get(arguments.project_id)
    try:
        project = workforce.Project(item)
    except Exception as e:
        logger.info(e)
        logger.info("Invalid project id")
        sys.exit(0)

    # First check if relative date
    logger.info("Formatting date")
    try:
        # BUG FIX: was `args.cutoff_date` — `args` is undefined here, so the
        # NameError was swallowed by the broad except and the relative-minutes
        # path never worked. Use the `arguments` parameter.
        delta = int(arguments.cutoff_date)
        utc_dt = pendulum.now().subtract(minutes=delta).in_tz('UTC')
    except Exception:
        # If not relative date, then attempt to convert date and attach timezone to naive date value
        try:
            # BUG FIX: was `args.timezone` (undefined name) — use `arguments`
            local_cutoff_date = pendulum.from_format(arguments.cutoff_date,
                                                     "MM/DD/YYYY hh:mm:ss",
                                                     tz=arguments.timezone,
                                                     formatter='alternative')
        except Exception as e:
            logger.info(e)
            logger.info(
                "Invalid date format. Please check documentation and try again"
            )
            sys.exit(0)
        utc_dt = local_cutoff_date.in_tz('UTC')
    formatted_date = utc_dt.strftime("%Y-%m-%d %H:%M:%S")

    # Query using UTC-formatted date and reset those workers
    logger.info("Querying workers")
    where = f"{project._worker_schema.edit_date} < TIMESTAMP '{formatted_date}'"
    workers = project.workers.search(where=where)
    for worker in workers:
        worker.status = "not_working"
    logger.info("Updating workers")
    project.workers.batch_update(workers)
    logger.info("Completed!")
# Example #6
# 0
def main():
    """Continuously poll a Workforce project and process completed assignments."""
    # Load configuration from the local ini file
    config = configparser.ConfigParser()
    config.read("config.ini")

    # Wire up logging, the local database, and a shutdown hook
    logger = initialize_logging(config["LOG"]["LOGFILE"])
    db_conn = initialize_db(config["DB"]["DATABASE"])
    atexit.register(close, db_conn)

    logger.info("Authenticating with ArcGIS Online...")
    gis = GIS(username=config["AGOL"]["USERNAME"],
              password=config["AGOL"]["PASSWORD"])

    logger.info("Getting project info...")
    project = workforce.Project(gis.content.get(
        config["WORKFORCE"]["PROJECT"]))

    # Poll forever, handling any newly completed assignments on each pass
    while True:
        logger.info("Querying assignments...")
        one_hour_ago = datetime.datetime.utcnow() - datetime.timedelta(hours=1)
        timestamp_last_hour = one_hour_ago.strftime("%Y-%m-%d %H:%M:%S")

        # If your Portal/Organization does not support Simplified Queries, this query may not work
        # In that event, you can use the subsequent one, but this will not stop trying old assignments that have failed
        query = "{} = 3 AND {} >= timestamp '{}' AND {} IS NOT NULL".format(
            project._assignment_schema.status,
            project._assignment_schema.completed_date, timestamp_last_hour,
            project._assignment_schema.work_order_id)
        # query = "{} = 3 AND {} IS NOT NULL".format(
        #     project._assignment_schema.status,
        #     project._assignment_schema.work_order_id
        # )
        logger.debug("Query is " + query)
        assignments = project.assignments.search(query)

        # Only handle assignments we have not already recorded in the db
        logger.info("Processing assignments...")
        for assignment in assignments:
            if not is_assignment_processed(db_conn, assignment):
                add_assignment_to_db(db_conn, assignment)
                # Do work here
                process_assignment(assignment, db_conn, config, logger)

        logger.info("Sleeping for 5 seconds...")
        time.sleep(5)
def main(arguments):
    """Delete assignments matching a where clause from a Workforce project."""
    # Configure the root logger: console at INFO, optional rotating file at DEBUG
    formatter = logging.Formatter(
        "[%(asctime)s] [%(filename)30s:%(lineno)4s - %(funcName)30s()]\
             [%(threadName)5s] [%(name)10.10s] [%(levelname)8s] %(message)s")
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)

    # Console handler prints INFO and above to stdout
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(formatter)
    console_handler.setLevel(logging.INFO)

    # Optional rotating file handler captures full DEBUG output
    if arguments.log_file:
        file_handler = logging.handlers.RotatingFileHandler(arguments.log_file,
                                                            mode='a',
                                                            maxBytes=10485760)
        file_handler.setFormatter(formatter)
        file_handler.setLevel(logging.DEBUG)
        root_logger.addHandler(file_handler)
    root_logger.addHandler(console_handler)

    # Authenticate and get a valid token
    root_logger.info("Authenticating...")
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Fetch the project
    project = workforce.Project(gis.content.get(arguments.project_id))

    # Call delete features directly on the assignments layer
    root_logger.info("Deleting assignments...")
    project.assignments_layer.delete_features(where=arguments.where)
    # Note: could also use the following if validation of assignments is important:
    # project.assignments.batch_delete(project.assignments.search(where=arguments.where))
    root_logger.info("Completed")
def main(arguments):
    """Delete every assignment type defined in a Workforce project."""
    # Set up logging
    logger = initialize_logging(arguments.log_file)

    # Authenticate and get a valid token
    logger.info("Authenticating...")
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Fetch the project
    project = workforce.Project(gis.content.get(arguments.project_id))

    # Look up all existing assignment types, then remove them in one batch call
    existing_types = project.assignment_types.search()
    logger.info("Deleting assignment types...")
    project.assignment_types.batch_delete(existing_types)
    logger.info("Completed")
def main(arguments):
    """Add assignment types listed in a CSV file to a Workforce project."""
    # Set up logging
    logger = initialize_logging(arguments.log_file)
    logger.info("Authenticating...")

    # Authenticate, then fetch the project
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)
    project = workforce.Project(gis.content.get(arguments.project_id))

    logger.info("Reading CSV...")
    # Build an AssignmentType for each name found in the CSV
    type_names = get_assignment_types_from_csv(arguments.csv_file)
    assignment_types_to_add = [
        workforce.AssignmentType(project, name=type_name)
        for type_name in type_names
    ]
    logger.info("Adding Assignment Types...")
    project.assignment_types.batch_add(assignment_types_to_add)
    logger.info("Completed")
def main(arguments):
    """Copy invalid assignments from a Workforce project to a target feature layer.

    :param arguments: parsed command-line arguments supplying org/auth info,
        project id, validation tolerances, config file path, and target layer url.
    """
    # initialize logger
    logger = initialize_logging(arguments.log_file)
    # Create the GIS
    logger.info("Authenticating...")
    # First step is to authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the project
    item = gis.content.get(arguments.project_id)
    project = workforce.Project(item)

    # Determine which assignments fail the validation criteria
    invalid_assignments = get_invalid_assignments(project,
                                                  arguments.time_tolerance,
                                                  arguments.distance_tolerance,
                                                  arguments.min_accuracy,
                                                  arguments.workers)

    # Load the field mappings used when copying features
    with open(arguments.config_file, 'r') as f:
        field_mappings = json.load(f)

    target_fl = arcgis.features.FeatureLayer(arguments.target_fl, gis)
    # ROBUSTNESS: verify the target layer exists before copying, consistent
    # with the other copy-assignments entry point in this file
    try:
        target_fl.properties
    except Exception as e:
        logger.info(e)
        logger.info(
            "Layer could not be found based on given input. Please check your parameters again. Exiting the script"
        )
        sys.exit(0)
    copy_assignments(project, invalid_assignments, target_fl, field_mappings)
# Example #11
# 0
def _format_assignment_date(timestamp, timezone, date_format):
    """Convert an assignment timestamp to a string in the given timezone/format."""
    return arrow.get(timestamp).to(timezone).strftime(date_format)


def main(arguments):
    """Export assignments matching a where clause to a CSV file.

    Date fields are converted to ``arguments.timezone`` and rendered with
    ``arguments.date_format``; all other fields are written as stored in
    the assignments layer.
    """
    # initialize logging
    logger = initialize_logging(arguments.log_file)

    # Set date params
    timezone = arguments.timezone
    date_format = arguments.date_format

    # Create the GIS
    logger.info("Authenticating...")
    # First step is to get authenticate
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the project and data
    item = gis.content.get(arguments.project_id)
    project = workforce.Project(item)

    # Query features
    logger.info("Querying features...")
    assignments = project.assignments.search(where=arguments.where)
    assignments_to_export = []
    # Take the assignment data, format it correctly if necessary, and assign it to the dict
    for assignment in assignments:
        assignment_to_export = {}
        # Preserve prior behavior: AssignedDate falls back to the raw stored
        # value when it is falsy and cannot be formatted
        assignment_to_export["AssignedDate"] = assignment.assigned_date
        # DRY: the same arrow conversion applied to every populated date field
        date_fields = [
            ("AssignedDate", assignment.assigned_date),
            ("DueDate", assignment.due_date),
            ("CreationDate", assignment.creation_date),
            ("DeclinedDate", assignment.declined_date),
            ("PausedDate", assignment.paused_date),
            ("CompletedDate", assignment.completed_date),
            ("EditDate", assignment.edit_date),
            ("InProgressDate", assignment.in_progress_date),
        ]
        for field_name, value in date_fields:
            if value:
                assignment_to_export[field_name] = _format_assignment_date(
                    value, timezone, date_format)
        # Non-date fields are copied straight through
        assignment_to_export["X"] = assignment.geometry["x"]
        assignment_to_export["Y"] = assignment.geometry["y"]
        assignment_to_export["DispatcherId"] = assignment.dispatcher_id
        assignment_to_export["WorkOrderId"] = assignment.work_order_id
        assignment_to_export["Status"] = assignment.status
        assignment_to_export["Description"] = assignment.description
        assignment_to_export["Notes"] = assignment.notes
        assignment_to_export["Priority"] = assignment.priority
        assignment_to_export["AssignmentType"] = assignment.assignment_type.name
        assignment_to_export["WorkerId"] = assignment.worker_id
        assignment_to_export["GlobalID"] = assignment.global_id
        assignment_to_export["Location"] = assignment.location
        assignment_to_export["Creator"] = assignment.creator
        assignment_to_export["Editor"] = assignment.editor
        assignment_to_export["DeclinedComment"] = assignment.declined_comment
        assignment_to_export["OBJECTID"] = assignment.object_id
        assignment_to_export["AssignmentRead"] = assignment.assignment_read
        # Append each field to the assignments to be exported
        assignments_to_export.append(assignment_to_export)

    logger.info("Writing to CSV...")
    # Create the CSV
    with open(arguments.csv_file, 'w', newline='',
              encoding='utf-8') as csv_file:
        fieldnames = [
            "OBJECTID", "X", "Y", "Description", "Status", "Notes", "Priority",
            "AssignmentType", "WorkOrderId", "DueDate", "WorkerId", "GlobalID",
            "Location", "DeclinedComment", "AssignedDate", "AssignmentRead",
            "InProgressDate", "CompletedDate", "DeclinedDate", "PausedDate",
            "DispatcherId", "CreationDate", "Creator", "EditDate", "Editor"
        ]
        writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(assignments_to_export)
    logger.info("Completed")
# Example #12
# 0
def main(arguments):
    """Archive assignments matching a where clause into a target feature layer.

    Only assignments whose GlobalID is not already present in the target layer
    are copied; attachments are optionally copied as well when the target
    layer supports them.
    """
    # initialize logging
    logger = initialize_logging(arguments.log_file)

    # Create the GIS
    logger.info("Authenticating...")

    # First step is to authenticate
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the target feature layer and verify it exists before doing any work
    target_fl = arcgis.features.FeatureLayer(arguments.target_fl, gis)
    try:
        # accessing properties raises if the layer url is invalid
        target_fl.properties
    except Exception as e:
        logger.info(e)
        logger.info(
            "Layer could not be found based on given input. Please check your parameters again. Exiting the script")
        sys.exit(0)

    # Get the project info
    item = gis.content.get(arguments.project_id)
    project = workforce.Project(item)

    # Open the field mappings config file
    # CONSISTENCY FIX: use the local `logger` rather than logging.getLogger()
    logger.info("Reading field mappings...")
    with open(arguments.config_file, 'r') as f:
        field_mappings = json.load(f)
    logger.info("Validating field mappings...")

    # Query the source to get the features specified by the query string
    logger.info("Querying source features...")
    current_assignments = project.assignments.search(where=arguments.where)

    # Query the archive layer for the GlobalIDs it already holds
    logger.info("Querying target features")
    archived_assignments = target_fl.query(out_fields=field_mappings[project._assignment_schema.global_id])

    # Use a set of GlobalIDs (unique) for O(1) membership tests below
    global_ids = {
        feature.attributes[field_mappings[project._assignment_schema.global_id]]
        for feature in archived_assignments.features
    }

    # Keep only the assignments that are not yet archived
    assignments_to_copy = [
        assignment for assignment in current_assignments
        if assignment.global_id not in global_ids
    ]

    # Re-map each assignment's attributes using the configured field names
    assignments_to_submit = []
    for assignment in assignments_to_copy:
        assignment_attributes = {}
        for key, value in field_mappings.items():
            assignment_attributes[value] = assignment.feature.attributes[key]
        # create the new feature object to send to server
        assignments_to_submit.append(
            arcgis.features.Feature(geometry=assignment.geometry, attributes=assignment_attributes))

    logger.info("Copying assignments...")
    response = target_fl.edit_features(adds=arcgis.features.FeatureSet(assignments_to_submit))
    logger.info(response)

    # Optionally copy attachments for each newly archived assignment
    if arguments.copy_attachments:
        if target_fl.properties.get("hasAttachments", None):
            logger.info("Copying Attachments...")
            for assignment in assignments_to_copy:
                with tempfile.TemporaryDirectory() as d:
                    attachments = assignment.attachments.download(out_folder=d)
                    if attachments:
                        feature = target_fl.query(where="{} = {}".format(field_mappings[project._assignment_schema.object_id], assignment.object_id)).features[0]
                        for attachment in attachments:
                            target_fl.attachments.add(feature.attributes[target_fl.properties["objectIdField"]], attachment)
        else:
            logger.warning("Attachments not supported on the target layer")
    logger.info("Completed")
def main(arguments):  # noqa: C901
    # Initialize logging
    logger = initialize_logging(arguments.log_file)

    # Create the GIS
    logger.info("Authenticating...")

    # First step is to get authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the workforce project
    item = gis.content.get(arguments.project_id)
    project = workforce.Project(item)
    try:
        if project._is_v2_project:
            raise Exception("This is a v2 project. Please migrate v1 projects")
    except AttributeError:
        raise Exception(
            "Cannot find the attribute is v2 project. "
            "Are you sure you have the API version 1.8.3 or greater installed? Check with `arcgis.__version__` in your Python console"
        )
    logger.info(project)
    logger.info("Creating base v2 project...")

    # Create WF Project w given title
    if arguments.title:
        if arguments.title != project.title:
            title = arguments.title
        else:
            raise Exception(
                "Cannot name your project the same as the old one. Please provide a unique name"
            )
    else:
        title = project.title + " Updated"
    v2_project = workforce.create_project(title=title,
                                          summary=project.summary,
                                          major_version=2)

    # Update thumbnail
    with tempfile.TemporaryDirectory() as dirpath:
        try:
            thumbnail = item.download_thumbnail(save_folder=dirpath)
            v2_project._item.update(thumbnail=thumbnail)
            gis.content.get(
                v2_project.worker_web_map_id).update(thumbnail=thumbnail)
            gis.content.get(
                v2_project.dispatcher_web_map_id).update(thumbnail=thumbnail)
        except Exception:
            logger.info("Thumbnail not migrated successfully")

    # Migrate Assignment Types
    logger.info("Migrating assignment types...")
    existing_assignment_types = project.assignment_types.search()
    at_to_add = []

    for assignment_type in existing_assignment_types:
        if assignment_type.name:
            at_to_add.append(
                workforce.AssignmentType(project=v2_project,
                                         name=assignment_type.name))
        else:
            logger.info(
                "Assignment Type migration skipped - does not have a name")

    # Get Assignment Types in migrated project before you potentially add a bad worker / dispatcher
    v2_project.assignment_types.batch_add(at_to_add)
    new_assignment_types = v2_project.assignment_types.search()
    if len(existing_assignment_types) == len(new_assignment_types):
        logger.info("Assignment Types successfully migrated")
    else:
        cleanup_project(gis, title)
        raise Exception(
            "Assignment Types not successfully migrated. Cleaning up new project"
        )

    # Migrate Dispatchers
    if not arguments.skip_dispatchers:
        logger.info("Migrating dispatchers...")
        dispatcher_ghost = False

        # Get Existing Dispatchers
        existing_dispatchers = project.dispatchers.search()
        dispatchers_to_add = []
        layer = v2_project.dispatchers_layer

        # Get Custom Dispatcher Fields and Templates
        custom_fields = add_custom_fields(project.dispatchers_layer, layer)

        # Prepare Dispatchers to be added
        for dispatcher in existing_dispatchers:

            # Validate that there is a user id populated and that the user id isn't yourself (since that was added during project creation).
            # Otherwise, skip adding the dispatcher
            if dispatcher.user_id and dispatcher.user_id != arguments.username:

                # Validate a name exists, otherwise populate with an empty string
                dispatcher_name = dispatcher.user_id if dispatcher.name is None else dispatcher.name

                attributes = {
                    v2_project._dispatcher_schema.name: dispatcher_name,
                    v2_project._dispatcher_schema.contact_number:
                    dispatcher.contact_number,
                    v2_project._dispatcher_schema.user_id: dispatcher.user_id,
                    v2_project._dispatcher_schema.global_id:
                    dispatcher.global_id
                }

                # Add Custom Field Values
                for field in custom_fields:
                    attributes[field["name"]] = dispatcher._feature.attributes[
                        field["name"]]
                feature = Feature(geometry=dispatcher.geometry,
                                  attributes=attributes)
                dispatchers_to_add.append(feature)
            else:
                if not dispatcher.user_id:
                    logger.info(
                        "Dispatcher was skipped from migrating. The dispatcher does not a valid user_id in the layer, or 2. "
                        "The dispatcher was already added. Please check the original dispatchers layer."
                    )
                    dispatcher_ghost = True
                else:
                    # update info for owner dispatcher
                    v2_dispatcher = v2_project.dispatchers.search()[0]
                    v2_dispatcher.update(
                        contact_number=dispatcher.contact_number,
                        name=dispatcher.name)

        # Add Dispatchers
        layer.edit_features(adds=FeatureSet(dispatchers_to_add),
                            use_global_ids=True)
        # add dispatcher named users to the project's group.
        max_add_per_call = 25
        for i in range(0,
                       math.ceil(len(dispatchers_to_add) / max_add_per_call)):
            v2_project.group.add_users([
                d.attributes[v2_project._dispatcher_schema.user_id]
                for d in dispatchers_to_add[i * max_add_per_call:(
                    i * max_add_per_call) + max_add_per_call]
            ])
        new_dispatchers = v2_project.dispatchers_layer.query(
            "1=1", return_all_records=True).features
        if len(existing_dispatchers) == len(
                new_dispatchers) or dispatcher_ghost:
            logger.info("Dispatchers successfully migrated")
        else:
            raise Exception("Dispatchers not migrated successfully")

    # Migrate Workers
    logger.info("Migrating workers...")
    worker_ghost = False

    # Get Existing Workers
    existing_workers = project.workers_layer.query(
        "1=1", return_all_records=True).features
    workers_to_add = []
    layer = v2_project.workers_layer

    # Get Custom Worker Fields
    custom_fields = add_custom_fields(project.workers_layer, layer)
    # Prepare Workers to be added
    for worker in existing_workers:
        if worker.attributes[project._worker_schema.user_id]:
            worker_name = worker.attributes[project._worker_schema.user_id] if worker.attributes[project._worker_schema.name] is None else \
                worker.attributes[project._worker_schema.name]
            worker_status = 0 if worker.attributes[
                project._worker_schema.status] is None else worker.attributes[
                    project._worker_schema.status]
            attributes = {
                v2_project._worker_schema.name:
                worker_name,
                v2_project._worker_schema.contact_number:
                worker.attributes[project._worker_schema.contact_number],
                v2_project._worker_schema.notes:
                worker.attributes[project._worker_schema.notes],
                v2_project._worker_schema.status:
                worker_status,
                v2_project._worker_schema.title:
                worker.attributes[project._worker_schema.title],
                v2_project._worker_schema.user_id:
                worker.attributes[project._worker_schema.user_id],
                v2_project._worker_schema.global_id:
                worker.attributes[project._worker_schema.global_id]
            }

            # Add Custom Field Values
            for field in custom_fields:
                attributes[field["name"]] = worker.attributes[field["name"]]
            feature = Feature(geometry=worker.geometry, attributes=attributes)
            workers_to_add.append(feature)
        else:
            worker_ghost = True
            logger.info("Worker migration skipped - does not have a user id")

    # Add Workers
    layer.edit_features(adds=FeatureSet(workers_to_add), use_global_ids=True)
    # add worker named users to the project's group.
    max_add_per_call = 25
    for i in range(0, math.ceil(len(workers_to_add) / max_add_per_call)):
        v2_project.group.add_users([
            w.attributes[v2_project._worker_schema.user_id]
            for w in workers_to_add[i *
                                    max_add_per_call:(i * max_add_per_call) +
                                    max_add_per_call]
        ])
    new_workers = v2_project.workers_layer.query(
        "1=1", return_all_records=True).features
    if (len(existing_workers) == len(new_workers)) or worker_ghost:
        logger.info("Workers successfully migrated")
    else:
        cleanup_project(gis, title)
        raise Exception(
            "Workers not migrated successfully. Cleaning up new project")

    # Migrate Assignments
    logger.info("Migrating assignments")
    assignment_ghost = False

    # Get Existing Assignments
    existing_assignments = project.assignments_layer.query(
        "1=1", return_all_records=True).features
    assignments_to_add = []
    layer = v2_project.assignments_layer

    # Set Custom Fields for Assignments and Templates
    custom_fields = add_custom_fields(project.assignments_layer, layer)

    # Prepare Assignments to be Added
    for assignment in existing_assignments:
        if assignment.attributes[project._assignment_schema.assignment_type]:

            # set attributes in case they are empty
            assignment_location = (str(assignment.geometry["x"]) + " " + str(assignment.geometry["y"])) if \
                assignment.attributes[project._assignment_schema.location] is None else assignment.attributes[project._assignment_schema.location]
            assignment_status = 0 if assignment.attributes[project._assignment_schema.status] is None else \
                assignment.attributes[project._assignment_schema.status]
            assignment_priority = 0 if assignment.attributes[project._assignment_schema.priority] is None else \
                assignment.attributes[project._assignment_schema.priority]

            assignment_type_name = ""
            for at in existing_assignment_types:
                if at.code == assignment.attributes[
                        project._assignment_schema.assignment_type]:
                    assignment_type_name = at.name
                    break
            attributes = {
                v2_project._assignment_schema.status:
                assignment_status,
                v2_project._assignment_schema.notes:
                assignment.attributes[project._assignment_schema.notes],
                v2_project._assignment_schema.priority:
                assignment_priority,
                v2_project._assignment_schema.assignment_type:
                get_assignment_type_global_id(new_assignment_types,
                                              assignment_type_name),
                v2_project._assignment_schema.work_order_id:
                assignment.attributes[
                    project._assignment_schema.work_order_id],
                v2_project._assignment_schema.due_date:
                assignment.attributes[project._assignment_schema.due_date],
                v2_project._assignment_schema.description:
                assignment.attributes[project._assignment_schema.description],
                v2_project._assignment_schema.worker_id:
                get_worker_global_id(
                    project.workers.search(), assignment.attributes[
                        project._assignment_schema.worker_id]),
                v2_project._assignment_schema.location:
                assignment_location,
                v2_project._assignment_schema.declined_comment:
                assignment.attributes[
                    project._assignment_schema.declined_comment],
                v2_project._assignment_schema.assigned_date:
                assignment.attributes[
                    project._assignment_schema.assigned_date],
                v2_project._assignment_schema.in_progress_date:
                assignment.attributes[
                    project._assignment_schema.in_progress_date],
                v2_project._assignment_schema.completed_date:
                assignment.attributes[
                    project._assignment_schema.completed_date],
                v2_project._assignment_schema.declined_date:
                assignment.attributes[
                    project._assignment_schema.declined_date],
                v2_project._assignment_schema.paused_date:
                assignment.attributes[project._assignment_schema.paused_date],
                v2_project._assignment_schema.dispatcher_id:
                get_dispatcher_global_id(
                    arguments.skip_dispatchers, project.dispatchers.search(),
                    assignment.attributes[
                        project._assignment_schema.dispatcher_id]),
                v2_project._assignment_schema.global_id:
                assignment.attributes[project._assignment_schema.global_id],
                v2_project._assignment_schema.object_id:
                assignment.attributes[project._assignment_schema.object_id]
            }

            # Add Custom Field Values
            for field in custom_fields:
                attributes[field["name"]] = assignment.attributes[
                    field["name"]]
            feature = Feature(geometry=assignment.geometry,
                              attributes=attributes)
            assignments_to_add.append(feature)
        else:
            logger.info(
                "One assignment's migration skipped - does not have an assignment type"
            )
            assignment_ghost = True

    # Add Assignments
    layer.edit_features(adds=FeatureSet(assignments_to_add),
                        use_global_ids=True)
    new_assignments = v2_project.assignments_layer.query(
        "1=1", return_all_records=True).features
    if (len(new_assignments) == len(existing_assignments)) or assignment_ghost:
        logger.info("Assignments successfully migrated")
    else:
        cleanup_project(gis, title)
        raise Exception(
            "Assignments not migrated successfully. Cleaning up new project")

    # Migrate Attachments
    logger.info("Migrating Attachments")
    for assignment in existing_assignments:
        object_id = assignment.attributes[project._assignment_schema.object_id]
        new_assignment_object_id = v2_project.assignments.get(
            global_id=assignment.attributes[
                project._assignment_schema.global_id]).object_id
        if len(project.assignments_layer.attachments.get_list(object_id)) > 0:
            try:
                with tempfile.TemporaryDirectory() as dirpath:
                    paths = project.assignments_layer.attachments.download(
                        oid=object_id, save_path=dirpath)
                    for path in paths:
                        v2_project.assignments_layer.attachments.add(
                            oid=new_assignment_object_id, file_path=path)
            except Exception as e:
                logger.info(e)
                logger.info(
                    "Skipping migration of this attachment. It did not download successfully"
                )
    if len(project.assignments_layer.attachments.search("1=1")) == len(
            v2_project.assignments_layer.attachments.search("1=1")):
        logger.info("Attachments successfully migrated")
    else:
        logger.info(
            "Not all of your attachments migrated successfully. Continuing with migration"
        )

    # Migrate Integrations
    logger.info("Migrating Integrations")
    v2_project.integrations.batch_delete(
        [v2_project.integrations.get("arcgis-navigator")[0]])
    previous_integrations = project.integrations.search()

    # Replacing AT Code with GUID
    for integration in previous_integrations:
        if "assignmentTypes" in integration:
            types = integration["assignmentTypes"]
            key_list = list(sorted(types.keys()))
            for key in key_list:
                at_name = project.assignment_types.get(code=int(key)).name
                guid = get_assignment_type_global_id(new_assignment_types,
                                                     at_name)
                v2_project.integrations.add(
                    integration_id=integration["id"],
                    prompt=integration["prompt"],
                    url_template=types[key]["urlTemplate"],
                    assignment_types=guid)
        else:
            # default id changed
            if integration["id"] == "default-navigator":
                integration["id"] = "arcgis-navigator"
            v2_project.integrations.add(
                integration_id=integration["id"],
                prompt=integration["prompt"],
                url_template=integration["urlTemplate"])
    logger.info("Integrations migrated successfully")

    # Get rid of old URL patterns
    integrations = v2_project.integrations.search()
    generate_universal_links(integrations)

    # Migrate Webmaps - Retain non-WF layers
    logger.info("Migrating Webmaps")
    upgrade_webmaps(project.worker_webmap, v2_project.worker_webmap)
    upgrade_webmaps(project.dispatcher_webmap, v2_project.dispatcher_webmap)
    logger.info("Script Completed")
Example #14
0
def main(arguments):
    """Flag (and optionally cancel) open Workforce assignments whose work
    order id already has a matching record in a survey/collector layer.

    Exactly one of ``arguments.survey_id`` / ``arguments.layer_url`` must be
    provided; the script exits otherwise. When ``arguments.cancel_assignments``
    is set, matched assignments are cancelled and batch-updated.
    """
    # Initialize logging
    logger = initialize_logging(arguments.log_file)

    # Create the GIS
    logger.info("Authenticating...")

    # First step is to authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    logger.info("Getting workforce project")

    # Get the workforce project; bail out cleanly on a bad project id
    item = gis.content.get(arguments.project_id)
    try:
        project = workforce.Project(item)
    except Exception as e:
        logger.info(e)
        logger.info("Invalid project id")
        sys.exit(0)

    # Get Survey or Collector feature layer -- exactly one source is allowed
    layer = None
    if arguments.survey_id and arguments.layer_url:
        logger.info("Please try again with either survey id or layer url provided, not both")
        sys.exit(0)
    elif arguments.survey_id:
        survey_item = gis.content.get(arguments.survey_id)
        if survey_item:
            layer = survey_item.layers[0]
    elif arguments.layer_url:
        layer = FeatureLayer(arguments.layer_url)
    else:
        logger.info("Please provide either a portal id for your survey feature layer or a feature service URL for your survey/collector layer")
        sys.exit(0)

    # Check if the layer exists; touching .properties forces a server request
    try:
        layer.properties
    except Exception as e:
        logger.info(e)
        logger.info("Layer could not be found based on given input. Please check your parameters again. Exiting the script")
        sys.exit(0)

    # Updating Assignments
    logger.info("Querying assignments")
    assignments = project.assignments.search()
    to_update = []
    # Only assignments that have not been started/completed are candidates
    open_statuses = ("unassigned", "assigned", "declined")
    for assignment in assignments:
        if assignment.work_order_id and assignment.status in open_statuses:
            where = f"{arguments.field_name} = '{assignment.work_order_id}'"
            if layer.query(where=where, return_count_only=True) > 0:
                logger.info(f"Potential Assignment to Cancel: {str(assignment)} with OBJECTID {assignment.object_id}")
                if gis.properties["isPortal"]:
                    portal_url = gis.properties['portalHostname']
                    logger.info(f"Assignment Link: {portal_url}/apps/workforce/#/projects/{arguments.project_id}/dispatch/assignments/{assignment.object_id}")
                else:
                    logger.info(f"Assignment Link: https://workforce.arcgis.com/projects/{arguments.project_id}/dispatch/assignments/{assignment.object_id}")
                if arguments.cancel_assignments:
                    logger.info("Canceling assignment")
                    assignment.update(status="canceled")
                    to_update.append(assignment)
    if arguments.cancel_assignments:
        project.assignments.batch_update(to_update)
    logger.info("Completed!")
    def main(arguments):
        """Add assignments from a CSV to a Workforce project, skipping any
        whose work order id was already uploaded within the last 30 days.

        Fixes vs. the previous revision:
        - the 30-day filter combined its bounds with OR (which matched
          essentially every assignment); it now uses AND,
        - undefined ``args`` references replaced with the ``arguments``
          parameter,
        - the loop variable ``dispatcher`` no longer clobbers the default
          dispatcher looked up for the authenticated user.
        """
        # initialize logging
        logger = initialize_logging(arguments.log_file)

        # Create the GIS
        logger.info("Authenticating...")

        # First step is to authenticate and get a valid token
        gis = GIS(arguments.org_url,
                  username=arguments.username,
                  password=arguments.password,
                  verify_cert=not arguments.skip_ssl_verification)

        # Get the project and the dispatcher record for the authenticated user
        item = gis.content.get(arguments.project_id)
        project = workforce.Project(item)
        dispatcher = project.dispatchers.search(
            where="{}='{}'".format(project._dispatcher_schema.user_id, arguments.username))
        if not dispatcher:
            log_critical_and_raise_exception("{} is not a dispatcher".format(arguments.username))

        # Read the csv file
        logger.info("Reading CSV file: {}...".format(arguments.csv_file))
        with open(arguments.csv_file, 'r') as file:
            assignments_in_csv = list(csv.DictReader(file))

        # Work order ids present in the CSV
        csv_work_order_ids = [row['Work Order Id'] for row in assignments_in_csv]

        # Query the work orders created in Workforce during the last 30 days.
        # Both bounds must hold simultaneously, hence AND.
        today = datetime.today()
        thirty_days_ago = today - timedelta(days=30)
        last_thirty_days = "{} <= '{}' AND {} >= '{}'".format(
            project._track_schema.creation_date, today.strftime("%m/%d/%Y %H:%M:%S"),
            project._track_schema.creation_date, thirty_days_ago.strftime("%m/%d/%Y %H:%M:%S"))
        current_orders = {a.work_order_id for a in project.assignments.search(where=last_thirty_days)}

        # CSV work orders that already exist in Workforce (set lookup is O(1))
        duplicates = [order for order in csv_work_order_ids if order in current_orders]

        # Drop the duplicates from the assignments read out of the CSV
        assignments_in_csv = [row for row in assignments_in_csv
                              if row['Work Order Id'] not in current_orders]

        # Upload the non-repeated rows, or report that all were repeated
        if len(assignments_in_csv) == 0:
            print("The assignments you are trying to upload were previously uploaded. This script "
                  "won't run to prevent duplicate work order IDs.")
        else:
            # Fetch assignment types keyed by name
            assignment_type_dict = {at.name: at for at in project.assignment_types.search()}

            # Fetch dispatchers keyed by user id (separate loop variable so
            # the default `dispatcher` above is not overwritten)
            dispatchers_dict = {d.user_id: d for d in project.dispatchers.search()}

            # Fetch the workers keyed by user id
            workers_dict = {w.user_id: w for w in project.workers.search()}

            # Build the list of assignments to add
            assignments_to_add = []
            for assignment in assignments_in_csv:
                assignment_to_add = workforce.Assignment(
                    project,
                    assignment_type=assignment_type_dict[assignment[arguments.assignment_type_field]])

                # Create the geometry
                geometry = dict(x=float(assignment[arguments.x_field]),
                                y=float(assignment[arguments.y_field]),
                                spatialReference=dict(
                                    wkid=int(arguments.wkid)))
                assignment_to_add.geometry = geometry

                # Determine the assignment due date, and if no time is provided, make the due date all day
                if arguments.due_date_field and assignment[arguments.due_date_field]:
                    d = arrow.Arrow.strptime(assignment[arguments.due_date_field], arguments.date_format).replace(
                        tzinfo=dateutil.tz.gettz(arguments.timezone))
                    if d.datetime.second == 0 and d.datetime.hour == 0 and d.datetime.minute == 0:
                        d = d.replace(hour=23, minute=59, second=59)
                    # Convert date to UTC time
                    assignment_to_add.due_date = d.to('utc').datetime

                # Set the location
                assignment_to_add.location = assignment[arguments.location_field]

                # Set the dispatcher; fall back to the authenticated user's
                # dispatcher record when the CSV does not name one
                if arguments.dispatcher_field and assignment[arguments.dispatcher_field]:
                    assignment_to_add.dispatcher = dispatchers_dict[assignment[arguments.dispatcher_field]]
                else:
                    assignment_to_add.dispatcher = dispatcher

                # Assign the worker to the assignment, if one was supplied
                if arguments.worker_field and assignment[arguments.worker_field]:
                    assignment_to_add.worker = workers_dict[assignment[arguments.worker_field]]
                    assignment_to_add.assigned_date = arrow.now().to('utc').datetime
                    assignment_to_add.status = "assigned"
                else:
                    assignment_to_add.status = "unassigned"

                # Set the priority
                if arguments.priority_field and assignment[arguments.priority_field]:
                    assignment_to_add.priority = assignment[arguments.priority_field]

                # Set the description
                if arguments.description_field and assignment[arguments.description_field]:
                    assignment_to_add.description = assignment[arguments.description_field]

                # Set the work order id
                if arguments.work_order_id_field and assignment[arguments.work_order_id_field]:
                    assignment_to_add.work_order_id = assignment[arguments.work_order_id_field]

                # Set attachment (dead SimpleNamespace placeholder removed --
                # it was immediately overwritten)
                if arguments.attachment_file_field and assignment[arguments.attachment_file_field]:
                    assignment_to_add.attachment_file = assignment[arguments.attachment_file_field]

                # Add all assignments to the list created
                assignments_to_add.append(assignment_to_add)

            # Batch add all assignments to the project
            logger.info("Adding Assignments...")
            assignments = project.assignments.batch_add(assignments_to_add)
            logger.info("Adding Attachments...")
            for assignment in assignments:
                if hasattr(assignment, "attachment_file"):
                    assignment.attachments.add(assignment.attachment_file)
            logger.info("Completed")

            if len(duplicates) == 0:
                print("SUCCESS!!")
            else:
                print("These work orders were previously uploaded and they were ignored to prevent duplication:")
                print(duplicates)
                print("The rest of the work orders have been uploaded successfully.")
Example #16
0
    # parse the config file
    config = configparser.ConfigParser()
    config.read("my_config.ini")

    # Logging destination and the local sqlite database both come from the config
    logger = initialize_logging(config["LOG"]["LOGFILE"])
    initialize_db(config["DB"]["DATABASE"])

    # Authenticate and get data
    # NOTE(review): verify_cert=False disables SSL certificate checking --
    # confirm this is intentional for this deployment
    logger.info("Authenticating with ArcGIS Online...")
    gis = GIS(config["AGOL"]["ORG"],
              username=config["AGOL"]["USERNAME"],
              password=config["AGOL"]["PASSWORD"],
              verify_cert=False)

    logger.info("Getting project info...")
    project = workforce.Project(gis.content.get(
        config["WORKFORCE"]["PROJECT"]))

    logger.info("Querying assignments...")
    # Despite the variable name, this timestamp is 60 minutes ago, not one minute
    timestamp_last_minute = (
        datetime.datetime.utcnow() -
        datetime.timedelta(minutes=60)).strftime("%Y-%m-%d %H:%M:%S")
    # Assignments with status = 0 created since that timestamp
    assignments = project.assignments.search(
        "{} = 0 AND {} >= timestamp '{}'".format(
            project._assignment_schema.status,
            project._assignment_schema.creation_date, timestamp_last_minute))
    logger.info("Processing assignments...")
    # Hand each not-yet-processed assignment to a worker; the sqlite database
    # tracks which assignments have already been handled
    for assignment in assignments:
        if not is_assignment_processed(config["DB"]["DATABASE"], assignment):
            logger.info("Assigning new assignment...")
            assign_worker(assignment)
            logger.info("Adding new assignment to sqlite database...")
# In[1]:

# Notebook-style setup: connect and load the Workforce project plus the
# lookup dictionaries used by later (unseen) cells.
import arcgis
import arrow
import dateutil
import datetime
from arcgis.apps import workforce
from arcgis.gis import GIS
gis = GIS()  # FOUO -- anonymous connection, no credentials supplied

# In[2]:

# Source feature layer item and the Workforce project item (hard-coded ids)
source = gis.content.get('d443d3ad15cf442884aa1d980b0958ef')
projectid = gis.content.get('53a74e9905e64688b40944b339cd6988')
project = workforce.Project(projectid)

# In[3]:

# Build user_id -> dispatcher and user_id -> worker lookup tables
dispatchers = project.dispatchers.search()
dispatchers_dict = {}
for dispatcher in dispatchers:
    dispatchers_dict[dispatcher.user_id] = dispatcher
assignment_types = project.assignment_types.search()
workers = project.workers.search()
workers_dict = {}
for worker in workers:
    workers_dict[worker.user_id] = worker
sourcelayer = source.layers[0]

# In[9]:
Example #18
0
def main(arguments):  # noqa: C901
    """Migrate assignments and their attachments from a classic (v1)
    Workforce project to an offline-enabled (v2) project.

    Assignments without an assignment type are skipped ("ghosts"), in which
    case the migrated-count validation is bypassed.
    """
    # Initialize logging
    logger = initialize_logging(arguments.log_file)

    # Create the GIS
    logger.info("Authenticating...")

    # First step is to authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the old workforce project
    item = gis.content.get(arguments.classic_project_id)
    project = workforce.Project(item)
    try:
        if project._is_v2_project:
            raise Exception(
                "The first project provided is a v2 project. Please migrate assignment data from v1 projects"
            )
    except AttributeError:
        raise Exception(
            "Cannot find the attribute is v2 project. Are you sure you have the API version 1.8.3 or greater installed? "
            "Check with `arcgis.__version__` in your Python console")

    # Get new workforce project
    v2_project = workforce.Project(gis.content.get(arguments.new_project_id))
    if not v2_project._is_v2_project:
        raise Exception(
            "The second project provided is a v1 project. Please migrate assignment data to v2 projects"
        )

    # validate correct assignment types are present
    existing_assignment_types = project.assignment_types.search()
    for assignment_type in existing_assignment_types:
        if not v2_project.assignment_types.get(name=assignment_type.name):
            raise Exception(
                "One of your assignment types in your classic project is not in your offline project"
            )

    # validate correct workers are present
    for worker in project.workers.search():
        if not v2_project.workers.get(user_id=worker.user_id):
            raise Exception(
                "One of your workers in your classic project is not in your offline project"
            )

    # Migrate Assignments
    logger.info("Migrating assignments")
    assignment_ghost = False

    # Get Existing Assignments
    existing_assignments = project.assignments_layer.query(
        where=arguments.where, return_all_records=True).features
    assignments_to_add = []
    layer = v2_project.assignments_layer

    # Set Custom Fields for Assignments and Templates
    custom_fields = add_custom_fields(project.assignments_layer, layer)

    # Hoist the searches out of the per-assignment loop: each call is a
    # service round-trip and the results do not change during migration.
    new_assignment_types = v2_project.assignment_types.search()
    classic_workers = project.workers.search()
    classic_dispatchers = project.dispatchers.search()

    # Prepare Assignments to be Added
    for assignment in existing_assignments:
        if assignment.attributes[project._assignment_schema.assignment_type]:

            # set attributes in case they are empty
            assignment_location = (str(assignment.geometry["x"]) + " " + str(assignment.geometry["y"])) if \
                assignment.attributes[project._assignment_schema.location] is None else \
                assignment.attributes[project._assignment_schema.location]
            assignment_status = 0 if assignment.attributes[project._assignment_schema.status] is None else \
                assignment.attributes[project._assignment_schema.status]
            assignment_priority = 0 if assignment.attributes[project._assignment_schema.priority] is None else \
                assignment.attributes[project._assignment_schema.priority]

            # get AT name based on code stored
            assignment_type_name = ""
            for at in existing_assignment_types:
                if at.code == assignment.attributes[
                        project._assignment_schema.assignment_type]:
                    assignment_type_name = at.name
                    break

            # Map every v1 schema field onto its v2 counterpart
            attributes = {
                v2_project._assignment_schema.status:
                assignment_status,
                v2_project._assignment_schema.notes:
                assignment.attributes[project._assignment_schema.notes],
                v2_project._assignment_schema.priority:
                assignment_priority,
                v2_project._assignment_schema.assignment_type:
                get_assignment_type_global_id(new_assignment_types,
                                              assignment_type_name),
                v2_project._assignment_schema.work_order_id:
                assignment.attributes[
                    project._assignment_schema.work_order_id],
                v2_project._assignment_schema.due_date:
                assignment.attributes[project._assignment_schema.due_date],
                v2_project._assignment_schema.description:
                assignment.attributes[project._assignment_schema.description],
                v2_project._assignment_schema.worker_id:
                get_worker_global_id(
                    classic_workers, v2_project.workers,
                    assignment.attributes[
                        project._assignment_schema.worker_id]),
                v2_project._assignment_schema.location:
                assignment_location,
                v2_project._assignment_schema.declined_comment:
                assignment.attributes[
                    project._assignment_schema.declined_comment],
                v2_project._assignment_schema.assigned_date:
                assignment.attributes[
                    project._assignment_schema.assigned_date],
                v2_project._assignment_schema.in_progress_date:
                assignment.attributes[
                    project._assignment_schema.in_progress_date],
                v2_project._assignment_schema.completed_date:
                assignment.attributes[
                    project._assignment_schema.completed_date],
                v2_project._assignment_schema.declined_date:
                assignment.attributes[
                    project._assignment_schema.declined_date],
                v2_project._assignment_schema.paused_date:
                assignment.attributes[project._assignment_schema.paused_date],
                v2_project._assignment_schema.dispatcher_id:
                get_dispatcher_global_id(
                    classic_dispatchers, v2_project.dispatchers,
                    assignment.attributes[
                        project._assignment_schema.dispatcher_id]),
                v2_project._assignment_schema.global_id:
                assignment.attributes[project._assignment_schema.global_id],
                v2_project._assignment_schema.object_id:
                assignment.attributes[project._assignment_schema.object_id]
            }

            # Add Custom Field Values
            for field in custom_fields:
                attributes[field["name"]] = assignment.attributes[
                    field["name"]]
            feature = Feature(geometry=assignment.geometry,
                              attributes=attributes)
            assignments_to_add.append(feature)
        else:
            logger.info(
                "One assignment's migration skipped - does not have an assignment type"
            )
            assignment_ghost = True

    # Add Assignments (global ids preserved so attachments can be re-linked)
    layer.edit_features(adds=FeatureSet(assignments_to_add),
                        use_global_ids=True)
    new_assignments = v2_project.assignments_layer.query(
        "1=1", return_all_records=True).features
    # skip validation if there's a ghost
    if (len(new_assignments) == len(existing_assignments)) or assignment_ghost:
        logger.info("Assignments successfully migrated")
    else:
        raise Exception("Assignments not migrated successfully. Unknown error")

    # Migrate Attachments: download each v1 attachment to a temp dir, then
    # re-upload it against the matching v2 assignment (looked up by global id)
    logger.info("Migrating Attachments")
    for assignment in existing_assignments:
        object_id = assignment.attributes[project._assignment_schema.object_id]
        new_assignment_object_id = v2_project.assignments.get(
            global_id=assignment.attributes[
                project._assignment_schema.global_id]).object_id
        if len(project.assignments_layer.attachments.get_list(object_id)) > 0:
            try:
                with tempfile.TemporaryDirectory() as dirpath:
                    paths = project.assignments_layer.attachments.download(
                        oid=object_id, save_path=dirpath)
                    for path in paths:
                        v2_project.assignments_layer.attachments.add(
                            oid=new_assignment_object_id, file_path=path)
            except Exception as e:
                logger.info(e)
                logger.info(
                    "Skipping migration of this attachment. It did not download successfully"
                )
    if len(project.assignments_layer.attachments.search("1=1")) == len(
            v2_project.assignments_layer.attachments.search("1=1")):
        logger.info("Attachments successfully migrated")
    else:
        logger.info(
            "Not all of your attachments migrated successfully. Continuing with migration"
        )
    logger.info("Script Completed")
Example #19
0
def main(arguments):  # noqa: C901
    """Import assignments from a CSV file into a Workforce project.

    Authenticates against the org, reads the CSV, optionally batch-geocodes
    the assignment locations (when no x/y columns are supplied), builds
    ``workforce.Assignment`` objects and batch-adds them — plus any
    attachments — to the project.

    :param arguments: parsed command-line namespace; expected to carry
        org_url, username, password, skip_ssl_verification, project_id,
        csv_file, log_file, wkid, timezone, date_format, custom_geocoder
        and the various ``*_field`` CSV column names.
    :raises Exception: via ``log_critical_and_raise_exception`` when the
        authenticated user is not a dispatcher of the project.
    """
    # initialize logging
    logger = initialize_logging(arguments.log_file)

    # Create the GIS
    logger.info("Authenticating...")

    # First step is to authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the project and data
    item = gis.content.get(arguments.project_id)
    project = workforce.Project(item)
    # NOTE(review): search() presumably returns a list of matching
    # dispatchers — confirm against the workforce API.
    dispatcher_matches = project.dispatchers.search(where="{}='{}'".format(
        project._dispatcher_schema.user_id, arguments.username))
    if not dispatcher_matches:
        log_critical_and_raise_exception("{} is not a dispatcher".format(
            arguments.username))
    # Default dispatcher for rows that don't name one in the CSV.
    # (In the original, the dispatcher-cache loop below shadowed this
    # variable, so the fallback used an arbitrary dispatcher instead.)
    default_dispatcher = dispatcher_matches[0]

    # Read the csv file
    logger.info("Reading CSV file: {}...".format(arguments.csv_file))
    assignments_in_csv = []
    locations = []
    with open(arguments.csv_file, 'r') as file:
        reader = csv.DictReader(file)
        for row in reader:
            locations.append(row[arguments.location_field])
            assignments_in_csv.append(row)

    # Cache assignment types by name for quick lookup
    assignment_types = project.assignment_types.search()
    assignment_type_dict = {}
    for assignment_type in assignment_types:
        assignment_type_dict[assignment_type.name] = assignment_type

    # Cache dispatchers by user id (loop variable renamed so it no longer
    # clobbers the default dispatcher resolved above)
    dispatchers = project.dispatchers.search()
    dispatchers_dict = {}
    for project_dispatcher in dispatchers:
        dispatchers_dict[project_dispatcher.user_id] = project_dispatcher

    # Cache the workers by user id
    workers = project.workers.search()
    workers_dict = {}
    for worker in workers:
        workers_dict[worker.user_id] = worker

    # Geocode every location in one batch when no explicit x/y columns exist
    if not (arguments.x_field and arguments.y_field):
        geocoder = None
        if arguments.custom_geocoder:
            geocoder = Geocoder.fromitem(
                gis.content.get(arguments.custom_geocoder))
        addresses = batch_geocode(locations,
                                  geocoder=geocoder,
                                  out_sr=arguments.wkid)
    assignments_to_add = []
    for i, assignment in enumerate(assignments_in_csv):
        assignment_to_add = workforce.Assignment(
            project,
            assignment_type=assignment_type_dict[assignment[
                arguments.assignment_type_field]],
            status="unassigned")

        # Create the geometry, either from explicit x/y columns or from the
        # batch geocode result for this row
        if arguments.x_field and arguments.y_field:
            geometry = dict(x=float(assignment[arguments.x_field]),
                            y=float(assignment[arguments.y_field]),
                            spatialReference=dict(wkid=int(arguments.wkid)))
        else:
            try:
                location_geometry = addresses[i]['location']
            except Exception as e:
                logger.info(e)
                logger.info(
                    "Geocoding did not work for the assignment with location {}. "
                    "Please check your addresses again".format(
                        assignment[arguments.location_field]))
                logger.info("Continuing on to the next assignment")
                continue
            location_geometry['spatialReference'] = dict(
                wkid=int(arguments.wkid))
            geometry = location_geometry
        assignment_to_add.geometry = geometry

        # Determine the assignment due date, and if no time is provided,
        # make the due date the end of the day (all-day assignment)
        if arguments.due_date_field and assignment[arguments.due_date_field]:
            d = datetime.datetime.strptime(
                assignment[arguments.due_date_field], arguments.date_format)
            p_date = pendulum.instance(d, tz=arguments.timezone)
            if p_date.second == 0 and p_date.hour == 0 and p_date.minute == 0:
                p_date = p_date.at(hour=23, minute=59, second=59)
            # Convert date to UTC time
            assignment_to_add.due_date = datetime.datetime.fromtimestamp(
                p_date.in_tz('UTC').timestamp())

        # Set the location
        assignment_to_add.location = assignment[arguments.location_field]

        # Set the dispatcher; fall back to the authenticated user's own
        # dispatcher when the CSV row doesn't name one
        if arguments.dispatcher_field and assignment[
                arguments.dispatcher_field]:
            assignment_to_add.dispatcher = dispatchers_dict[assignment[
                arguments.dispatcher_field]]
        else:
            assignment_to_add.dispatcher = default_dispatcher

        # Assign the worker (if supplied) and mark the assignment accordingly
        if arguments.worker_field and assignment[arguments.worker_field]:
            assignment_to_add.worker = workers_dict[assignment[
                arguments.worker_field]]
            assignment_to_add.assigned_date = datetime.datetime.fromtimestamp(
                pendulum.now('UTC').timestamp())
            assignment_to_add.status = "assigned"
        else:
            assignment_to_add.status = "unassigned"

        # Set the priority
        if arguments.priority_field and assignment[arguments.priority_field]:
            assignment_to_add.priority = assignment[arguments.priority_field]

        # Set the description
        if arguments.description_field and assignment[
                arguments.description_field]:
            assignment_to_add.description = assignment[
                arguments.description_field]

        # Set the work order id
        if arguments.work_order_id_field and assignment[
                arguments.work_order_id_field]:
            assignment_to_add.work_order_id = assignment[
                arguments.work_order_id_field]

        # Stash the attachment path on the object so it can be uploaded after
        # the batch add (dropped the redundant SimpleNamespace assignment the
        # original overwrote on the very next statement)
        if arguments.attachment_file_field and assignment[
                arguments.attachment_file_field]:
            assignment_to_add.attachment_file = assignment[
                arguments.attachment_file_field]

        # Add all assignments to the list created
        assignments_to_add.append(assignment_to_add)

    # Batch add all assignments to the project
    logger.info("Adding Assignments...")
    assignments = project.assignments.batch_add(assignments_to_add)
    logger.info("Adding Attachments...")
    for assignment in assignments:
        if hasattr(assignment, "attachment_file"):
            assignment.attachments.add(assignment.attachment_file)
    logger.info("Completed")
def main(arguments):
    """Import assignments from a CSV file into a Workforce project.

    Variant that requires explicit x/y columns in the CSV (no geocoding)
    and uses arrow/dateutil for due-date handling.

    :param arguments: parsed command-line namespace; expected to carry
        org_url, username, password, skip_ssl_verification, project_id,
        csv_file, log_file, wkid, timezone, date_format and the various
        ``*_field`` CSV column names.
    :raises Exception: via ``log_critical_and_raise_exception`` when the
        authenticated user is not a dispatcher of the project.
    """
    # initialize logging
    logger = initialize_logging(arguments.log_file)
    # Create the GIS
    logger.info("Authenticating...")
    # First step is to authenticate and get a valid token
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)
    # Get the project and data
    item = gis.content.get(arguments.project_id)
    project = workforce.Project(item)
    # NOTE(review): search() presumably returns a list of matching
    # dispatchers — confirm against the workforce API.
    dispatcher_matches = project.dispatchers.search(where="{}='{}'".format(
        project._dispatcher_schema.user_id, arguments.username))
    if not dispatcher_matches:
        log_critical_and_raise_exception("{} is not a dispatcher".format(
            arguments.username))
    # Default dispatcher for rows that don't name one in the CSV.
    # (In the original, the dispatcher-cache loop below shadowed this
    # variable, so the fallback used an arbitrary dispatcher instead.)
    default_dispatcher = dispatcher_matches[0]
    # Read the csv file
    logger.info("Reading CSV file: {}...".format(arguments.csv_file))
    assignments_in_csv = []
    with open(arguments.csv_file, 'r') as file:
        reader = csv.DictReader(file)
        for row in reader:
            assignments_in_csv.append(row)

    # Cache assignment types by name for quick lookup
    assignment_types = project.assignment_types.search()
    assignment_type_dict = {}
    for assignment_type in assignment_types:
        assignment_type_dict[assignment_type.name] = assignment_type

    # Cache dispatchers by user id (loop variable renamed so it no longer
    # clobbers the default dispatcher resolved above)
    dispatchers = project.dispatchers.search()
    dispatchers_dict = {}
    for project_dispatcher in dispatchers:
        dispatchers_dict[project_dispatcher.user_id] = project_dispatcher

    # Cache the workers by user id
    workers = project.workers.search()
    workers_dict = {}
    for worker in workers:
        workers_dict[worker.user_id] = worker

    assignments_to_add = []
    for assignment in assignments_in_csv:
        assignment_to_add = workforce.Assignment(
            project,
            assignment_type=assignment_type_dict[assignment[
                arguments.assignment_type_field]],
        )

        # Create the geometry from the explicit x/y columns
        geometry = dict(x=float(assignment[arguments.x_field]),
                        y=float(assignment[arguments.y_field]),
                        spatialReference=dict(wkid=int(arguments.wkid)))
        assignment_to_add.geometry = geometry

        # Determine the assignment due date, and if no time is provided,
        # make the due date the end of the day (all-day assignment)
        if arguments.due_date_field and assignment[arguments.due_date_field]:
            d = arrow.Arrow.strptime(
                assignment[arguments.due_date_field],
                arguments.date_format).replace(
                    tzinfo=dateutil.tz.gettz(arguments.timezone))
            if d.datetime.second == 0 and d.datetime.hour == 0 and d.datetime.minute == 0:
                d = d.replace(hour=23, minute=59, second=59)
            # Convert date to UTC time
            assignment_to_add.due_date = d.to('utc').datetime

        # Set the location
        assignment_to_add.location = assignment[arguments.location_field]

        # Set the dispatcher; fall back to the authenticated user's own
        # dispatcher when the CSV row doesn't name one
        if arguments.dispatcher_field and assignment[
                arguments.dispatcher_field]:
            assignment_to_add.dispatcher = dispatchers_dict[assignment[
                arguments.dispatcher_field]]
        else:
            assignment_to_add.dispatcher = default_dispatcher

        # Assign the worker (if supplied) and mark the assignment accordingly
        if arguments.worker_field and assignment[arguments.worker_field]:
            assignment_to_add.worker = workers_dict[assignment[
                arguments.worker_field]]
            assignment_to_add.assigned_date = arrow.now().to('utc').datetime
            assignment_to_add.status = "assigned"
        else:
            assignment_to_add.status = "unassigned"

        # Set the priority
        if arguments.priority_field and assignment[arguments.priority_field]:
            assignment_to_add.priority = assignment[arguments.priority_field]

        # Set the description
        if arguments.description_field and assignment[
                arguments.description_field]:
            assignment_to_add.description = assignment[
                arguments.description_field]

        # Set the work order id
        if arguments.work_order_id_field and assignment[
                arguments.work_order_id_field]:
            assignment_to_add.work_order_id = assignment[
                arguments.work_order_id_field]

        # Stash the attachment path on the object so it can be uploaded after
        # the batch add (dropped the redundant SimpleNamespace assignment the
        # original overwrote on the very next statement)
        if arguments.attachment_file_field and assignment[
                arguments.attachment_file_field]:
            assignment_to_add.attachment_file = assignment[
                arguments.attachment_file_field]

        # Add all assignments to the list created
        assignments_to_add.append(assignment_to_add)

    # Batch add all assignments to the project
    logger.info("Adding Assignments...")
    assignments = project.assignments.batch_add(assignments_to_add)
    logger.info("Adding Attachments...")
    for assignment in assignments:
        if hasattr(assignment, "attachment_file"):
            assignment.attachments.add(assignment.attachment_file)
    logger.info("Completed")
# --- Example #21 (score: 0) ---
def main(arguments):
    """Archive Workforce assignments into a target feature layer.

    Queries assignments matching ``arguments.where``, maps their fields via
    the JSON config file, and copies to the target layer only those whose
    GlobalID is not already present there.

    :param arguments: parsed command-line namespace; expected to carry
        org_url, username, password, skip_ssl_verification, project_id,
        target_fl, config_file, where and log_file.
    """
    # initialize logging
    logger = initialize_logging(arguments.log_file)

    # Create the GIS
    logger.info("Authenticating...")

    # First step is to authenticate
    gis = GIS(arguments.org_url,
              username=arguments.username,
              password=arguments.password,
              verify_cert=not arguments.skip_ssl_verification)

    # Get the target feature layer
    target_fl = arcgis.features.FeatureLayer(arguments.target_fl, gis)

    # Get the project info
    item = gis.content.get(arguments.project_id)
    project = workforce.Project(item)

    # Open the field mappings config file
    # (use the configured logger consistently instead of the original's
    # ad-hoc logging.getLogger() calls)
    logger.info("Reading field mappings...")
    with open(arguments.config_file, 'r') as f:
        field_mappings = json.load(f)
    # NOTE(review): no validation is actually performed here — the message
    # is kept for output compatibility; confirm whether a check was intended.
    logger.info("Validating field mappings...")

    # Query the source to get the features specified by the query string
    logger.info("Querying source features...")
    current_assignments = project.assignments.search(where=arguments.where)

    # Query the archived assignments to get all of the currently archived ones
    logger.info("Querying target features")
    archived_assignments = target_fl.query(
        out_fields=field_mappings[project._assignment_schema.global_id])

    # Collect the GlobalIDs already archived - these should be unique
    global_ids = [
        feature.attributes[field_mappings[
            project._assignment_schema.global_id]]
        for feature in archived_assignments.features
    ]

    # Only copy assignments whose GlobalID is not already in the target layer
    assignments_to_copy = [
        assignment for assignment in current_assignments
        if assignment.global_id not in global_ids
    ]

    # Build the features to submit, remapping field names per the config
    assignments_to_submit = []
    for assignment in assignments_to_copy:
        assignment_attributes = {}
        for key, value in field_mappings.items():
            assignment_attributes[value] = assignment.feature.attributes[key]
        # create the new feature object to send to server
        assignments_to_submit.append(
            arcgis.features.Feature(geometry=assignment.geometry,
                                    attributes=assignment_attributes))
    logger.info("Copying assignments...")
    response = target_fl.edit_features(
        adds=arcgis.features.FeatureSet(assignments_to_submit))
    logger.info(response)
    logger.info("Completed")