示例#1
0
def update_planning_info(*, db_session, job_in: JobPlanningInfoUpdate) -> Job:
    """Overwrite the planning fields of an existing job and broadcast the change.

    Looks the job up by ``job_in.code``, replaces its planning status, schedule
    and worker assignments with the values from ``job_in``, commits, writes an
    audit event and posts the updated job to Kafka.

    Args:
        db_session: active SQLAlchemy session.
        job_in: planning payload; ``code`` identifies the job to update.

    Returns:
        The updated, committed Job instance.
    """
    existing_job = get_by_code(db_session=db_session, code=job_in.code)

    existing_job.planning_status = job_in.planning_status
    existing_job.scheduled_start_datetime = job_in.scheduled_start_datetime
    existing_job.scheduled_duration_minutes = job_in.scheduled_duration_minutes
    existing_job.scheduled_primary_worker = worker_service.get_by_code(
        db_session=db_session, code=job_in.scheduled_primary_worker_code)

    # Guard against a missing list: iterating None would raise TypeError.
    # An empty/absent list clears the secondary-worker assignment.
    existing_job.scheduled_secondary_workers = [
        worker_service.get_by_code(db_session=db_session, code=_c)
        for _c in (job_in.scheduled_secondary_worker_codes or [])
    ]

    db_session.add(existing_job)
    db_session.commit()
    event_service.log(
        db_session=db_session,
        source=job_in.update_source,
        description=
        f"Job ({job_in.code}) is changed to different plan: {job_in}",  # .scheduled_start_datetime
        job_id=existing_job.id,
    )
    post_job_to_kafka(job=existing_job,
                      message_type=KafkaMessageType.UPDATE_JOB,
                      db_session=db_session)

    return existing_job
示例#2
0
def update_planning_info(*, db_session, job_in: JobPlanningInfoUpdate) -> Job:
    """Apply a partial planning update to an existing job.

    Only truthy fields on ``job_in`` overwrite the stored job, so callers may
    send a sparse payload.  After committing, the change is audit-logged,
    posted to Kafka and announced via Zulip when the job's organisation has a
    configured client.

    Args:
        db_session: active SQLAlchemy session.
        job_in: planning payload; ``code`` identifies the job to update.

    Returns:
        The updated, committed Job instance.
    """
    existing_job = get_by_code(db_session=db_session, code=job_in.code)

    existing_job.planning_status = job_in.planning_status
    # Partial update: falsy fields leave the stored value untouched.
    if job_in.scheduled_start_datetime:
        existing_job.scheduled_start_datetime = job_in.scheduled_start_datetime
    if job_in.scheduled_duration_minutes:
        existing_job.scheduled_duration_minutes = job_in.scheduled_duration_minutes
    if job_in.scheduled_primary_worker_code:
        existing_job.scheduled_primary_worker = worker_service.get_by_code(
            db_session=db_session, code=job_in.scheduled_primary_worker_code)

    if job_in.scheduled_secondary_worker_codes:
        existing_job.scheduled_secondary_workers = [worker_service.get_by_code(
            db_session=db_session, code=_c) for _c in job_in.scheduled_secondary_worker_codes]

    db_session.add(existing_job)
    db_session.commit()

    # Compact summary tuple used in the audit-log description.
    new_plan = (
        job_in.planning_status.value,
        job_in.scheduled_primary_worker_code,
        job_in.scheduled_secondary_worker_codes,
        str(job_in.scheduled_start_datetime),
        job_in.scheduled_duration_minutes
    )
    event_service.log(
        db_session=db_session,
        source=job_in.update_source,
        description=f"changed planning:  {new_plan}",
        job_id=existing_job.id,
    )
    post_job_to_kafka(job=existing_job, message_type=KafkaMessageType.UPDATE_JOB,
                      db_session=db_session)

    # Announce the new plan via Zulip when the org has a configured client.
    zulip_dict = get_zulip_client_by_org_id(existing_job.org_id)
    if zulip_dict:
        zulip_core = zulip_dict['client']
        # NOTE(review): scheduled_primary_worker could be None here — confirm
        # update_job_send_message tolerates a None entry in the list.
        zulip_core.update_job_send_message(
            existing_job, [existing_job.scheduled_primary_worker] + existing_job.scheduled_secondary_workers)
    return existing_job
示例#3
0
def update(*, db_session, job: Job, job_in: JobUpdate) -> Job:
    """Update an existing job from a ``JobUpdate`` payload.

    Resolves tags and worker references to ORM objects, copies the remaining
    scalar fields from the payload, commits, audit-logs and posts the updated
    job to Kafka.

    Args:
        db_session: active SQLAlchemy session.
        job: the ORM job to mutate.
        job_in: update payload.

    Returns:
        The updated, committed Job instance.
    """
    tags = []
    for t in job_in.tags:
        tags.append(
            tag_service.get_or_create(db_session=db_session,
                                      tag_in=TagUpdate(**t)))

    scheduled_secondary_workers = []
    for w in job_in.scheduled_secondary_workers:
        # BUG FIX: previously this called worker_service.get(..., tag_in=...),
        # passing a tag keyword to a worker getter.  Resolve the worker by its
        # code, matching every other worker lookup in this module.
        scheduled_secondary_workers.append(
            worker_service.get_by_code(db_session=db_session, code=w["code"]))

    # Scalars are copied generically; relationship fields are handled above
    # and below, so exclude them from the dict copy.
    update_data = job_in.dict(
        skip_defaults=True,
        exclude={
            "tags",
            "scheduled_secondary_workers",
            "requested_primary_worker",
            "scheduled_primary_worker",
            "team",
            "location",
        },
    )

    for field in update_data.keys():
        setattr(job, field, update_data[field])

    job.scheduled_secondary_workers = scheduled_secondary_workers
    job.tags = tags
    if job_in.scheduled_primary_worker is not None:
        job.scheduled_primary_worker = worker_service.get_by_code(
            db_session=db_session, code=job_in.scheduled_primary_worker.code)
    if job_in.requested_primary_worker is not None:
        job.requested_primary_worker = worker_service.get_by_code(
            db_session=db_session, code=job_in.requested_primary_worker.code)

    db_session.add(job)
    db_session.commit()
    event_service.log(
        db_session=db_session,
        source="Dispatch Core App",
        description=f"Job ({job_in.code}) is updated {job_in.flex_form_data}",
        job_id=job.id,
    )
    post_job_to_kafka(job=job,
                      message_type=KafkaMessageType.UPDATE_JOB,
                      db_session=db_session)
    return job
示例#4
0
 def get(self, email, db_session=None):
     """Return the worker record for *email* as a dict.

     Falls back to a minimal stub ({"email", "fullname"}) when the lookup
     yields an object without a ``__dict__`` (e.g. None).
     """
     worker = worker_service.get_by_code(db_session=db_session, code=email)
     fallback = {
         "email": email,
         "fullname": email
     }
     return getattr(worker, "__dict__", fallback)
示例#5
0
def calc_historical_location_features_real_func(db_session=None):
    """Compute per-worker geographic service-history features and persist them.

    Loads all FINISHED jobs joined to their location and scheduled primary
    worker, aggregates per worker the mean/std/covariance of served job
    coordinates plus a job count, and stores the result on each
    ``Worker.job_history_feature_data``.

    NOTE(review): everything after the first top-level ``return`` below is
    unreachable legacy code (location-level features and an older worker
    aggregation), kept verbatim for reference.
    """
    # All finished jobs with their coordinates and primary worker code.
    job_loc_df = pd.read_sql(
        db_session.query(
            Job.scheduled_primary_worker_id,
            Worker.code.label("scheduled_primary_worker_code"),
            Job.id.label("job_id"),
            Job.scheduled_start_datetime,
            Job.scheduled_duration_minutes,
            Job.requested_start_datetime,
            Job.flex_form_data,
            Job.location_id,
            Location.geo_longitude,
            Location.geo_latitude,
        )
        .filter(Job.location_id == Location.id)
        .filter(Job.scheduled_primary_worker_id == Worker.id)
        .filter(Job.planning_status.in_((JobPlanningStatus.FINISHED,)))
        .statement,
        db_session.bind,
    )
    if job_loc_df.count().max() < 1:
        # raise ValueError("job_loc_df.count().max() < 1, no data to proceed")
        log.error(
            "calc_historical_location_features_real_func: job_loc_df.count().max() < 1, no data to proceed"
        )
        return

    def get_actual_primary_worker_code(x):
        # Prefer the worker recorded in flex_form_data over the scheduled one.
        # BUG FIX: the presence check is for "actual_primary_worker_code", but
        # the old code returned flex_form_data["actual_start_datetime"] — a
        # datetime, not a worker code.  Return the matching key instead.
        if "actual_primary_worker_code" in x["flex_form_data"]:
            return x["flex_form_data"]["actual_primary_worker_code"]
        else:
            return x["scheduled_primary_worker_code"]

    job_loc_df["actual_primary_worker_code"] = job_loc_df.apply(
        lambda x: get_actual_primary_worker_code(x),
        axis=1,
    )
    # actual_start_datetime
    # job_loc_df["days_delay"] = job_loc_df.apply(
    #     lambda x: (x["scheduled_start_datetime"] - x["requested_start_datetime"]).days, axis=1
    # )

    # Per-worker aggregate of served-location coordinates.
    worker_job_gmm_df = (
        job_loc_df.groupby(["actual_primary_worker_code"])
        .agg(
            # job_count=pd.NamedAgg(column='location_code', aggfunc='count')
            avg_geo_longitude=pd.NamedAgg(column="geo_longitude", aggfunc="mean"),
            avg_geo_latitude=pd.NamedAgg(column="geo_latitude", aggfunc="mean"),
            std_geo_longitude=pd.NamedAgg(column="geo_longitude", aggfunc="std"),
            std_geo_latitude=pd.NamedAgg(column="geo_latitude", aggfunc="std"),
            list_geo_longitude=pd.NamedAgg(column="geo_longitude", aggfunc=list),
            list_geo_latitude=pd.NamedAgg(column="geo_latitude", aggfunc=list),
            # cov_geo =pd.NamedAgg(column=("geo_longitude","geo_latitude"), aggfunc=np.cov),
            job_count=pd.NamedAgg(column="scheduled_primary_worker_id", aggfunc="count"),
        )
        .reset_index()
    )  # .sort_values(['location_code'], ascending=True)

    def get_cov(x):
        # 2x2 covariance of (longitude, latitude) as a JSON-serializable list.
        arr = np.array([x["list_geo_longitude"], x["list_geo_latitude"], ])
        arr_cov = np.cov(arr)
        return arr_cov.tolist()
    worker_job_gmm_df["cov_geo"] = worker_job_gmm_df.apply(
        lambda x: get_cov(x),
        axis=1,
    )
    # Require more than 2 jobs so std/cov are meaningful.
    worker_job_gmm_df = worker_job_gmm_df[worker_job_gmm_df["job_count"] > 2]

    worker_job_gmm_df["job_history_feature_data"] = worker_job_gmm_df.apply(
        lambda x: {
            "mean": {"longitude": x["avg_geo_longitude"], "latitude": x["avg_geo_latitude"]},
            "std": {"longitude": x["std_geo_longitude"], "latitude": x["std_geo_latitude"]},
            "cov": x["cov_geo"],
            "job_count": x["job_count"],
        },
        axis=1,
    )

    # Persist features one worker at a time (commit per row).
    for _, w in worker_job_gmm_df.iterrows():
        worker = worker_service.get_by_code(
            db_session=db_session,
            code=w["actual_primary_worker_code"])

        worker.job_history_feature_data = w["job_history_feature_data"]
        db_session.add(worker)
        db_session.commit()

        # a=worker.__dict__
        # # a["team"]=worker.team.__dict__
        # # a["team"]=TeamUpdate(**worker.team.__dict__)
        # # a["location"]=LocationUpdate(**worker.location.__dict__)
        # # a["job_history_feature_data"] = w["job_history_feature_data"]
        # # worker_in = WorkerUpdate(**a)
        # worker_in = worker
        # worker_in.job_history_feature_data = w["job_history_feature_data"]

        # new_worker = worker_service.update(
        #     db_session=db_session,
        #     worker=worker,
        #     worker_in=worker_in)
        # print(worker)
    print(f"{worker_job_gmm_df.count().max()} workers are updated.")
    return

    # ------------------------------------------------------------------
    # Unreachable legacy code below (kept verbatim; never executed).
    # ------------------------------------------------------------------
    loc_gmm_df = (
        job_loc_df.groupby(["location_id"])
        .agg(
            # job_count=pd.NamedAgg(column='location_code', aggfunc='count')
            avg_geo_longitude=pd.NamedAgg(column="geo_longitude", aggfunc="mean"),
            avg_geo_latitude=pd.NamedAgg(column="geo_latitude", aggfunc="mean"),
            std_geo_longitude=pd.NamedAgg(column="geo_longitude", aggfunc="std"),
            std_geo_latitude=pd.NamedAgg(column="geo_latitude", aggfunc="std"),
            cov_geo=pd.NamedAgg(column=["geo_longitude", "geo_latitude"], aggfunc=np.cov),
            job_count=pd.NamedAgg(column="scheduled_primary_worker_id", aggfunc="count"),
            list_scheduled_worker_code=pd.NamedAgg(
                column="scheduled_primary_worker_id", aggfunc=list
            ),
            avg_actual_start_minutes=pd.NamedAgg(column="actual_start_minutes", aggfunc="mean"),
            avg_actual_duration_minutes=pd.NamedAgg(
                column="scheduled_duration_minutes", aggfunc="mean"
            ),
            avg_days_delay=pd.NamedAgg(column="days_delay", aggfunc="mean"),
            stddev_days_delay=pd.NamedAgg(column="days_delay", aggfunc="std"),
        )
        .reset_index()
    )  # .sort_values(['location_code'], ascending=True)

    loc_gmm_df["job_historical_worker_service_dict"] = loc_gmm_df.apply(
        lambda x: Counter(x["list_scheduled_worker_code"]),
        axis=1,
    )
    # Then I should get "job_historical_worker_service_dict": {"19": 3, "18":1}

    loc_gmm_df["job_history_feature_data"] = loc_gmm_df.apply(
        lambda x: {
            "mean": {"longitude": x["avg_geo_longitude"], "latitude": x["avg_geo_latitude"]},
            "std": {"longitude": x["std_geo_longitude"], "latitude": x["std_geo_latitude"]},
            "job_count": x["job_count"],  # TODO same as out side, it should be here.!
            "job_historical_worker_service_dict": x["job_historical_worker_service_dict"],
        },
        axis=1,
    )

    loc_feature_df = loc_gmm_df[
        [
            "location_id",
            "job_history_feature_data",
            "job_count",
            "avg_actual_start_minutes",
            "avg_actual_duration_minutes",
            "avg_days_delay",
            "stddev_days_delay",
        ]
    ]
    loc_feature_df.rename(columns={"location_id": "id"}, inplace=True)

    loc_update_dict_list = json.loads(loc_feature_df.to_json(orient="records"))

    db_session.bulk_update_mappings(
        Location,
        loc_update_dict_list,
    )
    # db_session.flush()
    db_session.commit()

    log.debug(
        f"calc_historical_location_features - Finished Location features, now started worker.served_location_gmm. "
    )

    job_loc_df = job_loc_df[
        [
            "scheduled_primary_worker_id",
            "location_id",
            "geo_longitude",
            "geo_latitude",
        ]
    ]

    worker_loc_df = pd.read_sql(
        db_session.query(
            Worker.id.label("scheduled_primary_worker_id"),
            Worker.location_id,
            Location.geo_longitude,
            Location.geo_latitude,
        )
        .filter(Worker.location_id == Location.id)
        .statement,
        db_session.bind,
    )
    # worker_loc_df.rename(columns={"id": "scheduled_primary_worker_id"}, inplace=True)

    """
    job_loc_df = pd.read_sql(
        db_session.query(Job).filter(Job.planning_status != JobPlanningStatus.unplanned).statement,
        db_session.bind,
    )
    # TO attach home location for each worker.

    worker_loc_df = pd.read_sql(
        db_session.query(Worker)
        .filter(Job.planning_status != JobPlanningStatus.unplanned)
        .statement,
        db_session.bind,
    )
    worker_loc_df.rename(columns={"id": "scheduled_primary_worker_id"}, inplace=True)

    job_loc_with_worker_home = pd.concat(
        [
            visit[["actual_worker_code", "location_code", "geo_longitude", "geo_latitude"]],
            worker_df,
        ]
    ).copy()

    """
    # job_loc_with_worker_home = job_loc_df

    job_loc_with_worker_home = pd.concat(
        [
            job_loc_df,
            worker_loc_df,
        ]
    ).copy()
    log.debug(f"calc_historical_location_features - worker loaded from db ...")

    #

    worker_gmm_df = (
        job_loc_with_worker_home.groupby(["scheduled_primary_worker_id"])
        .agg(
            # job_count=pd.NamedAgg(column='location_code', aggfunc='count')
            avg_geo_longitude=pd.NamedAgg(column="geo_longitude", aggfunc="mean"),
            avg_geo_latitude=pd.NamedAgg(column="geo_latitude", aggfunc="mean"),
            std_geo_longitude=pd.NamedAgg(column="geo_longitude", aggfunc="std"),
            std_geo_latitude=pd.NamedAgg(column="geo_latitude", aggfunc="std"),
            job_count=pd.NamedAgg(column="scheduled_primary_worker_id", aggfunc="count"),
        )
        .reset_index()
    )  # .sort_values(['location_code'], ascending=True)

    # json.dumps(
    worker_gmm_df["job_history_feature_data"] = worker_gmm_df.apply(
        lambda x: {
            "mean": {"longitude": x["avg_geo_longitude"], "latitude": x["avg_geo_latitude"]},
            "std": {"longitude": x["std_geo_longitude"], "latitude": x["std_geo_latitude"]},
            "count": x["job_count"],
        },
        axis=1,
    )

    worker_gmm_df.rename(columns={"scheduled_primary_worker_id": "id"}, inplace=True)
    update_dict_list = json.loads(
        worker_gmm_df[["id", "job_history_feature_data"]].to_json(orient="records")
    )

    # worker_gmm_df.rename(columns={"job_history_feature_data": "notes"}, inplace=True)

    db_session.bulk_update_mappings(
        Worker,
        update_dict_list,
    )
    # db_session.flush()
    db_session.commit()

    log.debug(f"calc_historical_location_features finished ...")

    # NOTE(review): the nested method below is never called from this function
    # and references `self` — it looks like a paste artifact from a routing
    # solver class.  Kept verbatim; confirm whether it belongs here at all.
    def save_solution(self, manager, routing, solution):
        """Prints solution on console."""
        max_route_distance = 0
        for vehicle_id in range(len(self.worker_slots)):
            index = routing.Start(vehicle_id)
            # First one should be depot
            previous_index = index
            index = solution.Value(routing.NextVar(index))
            plan_output = "Route for vehicle {}:\n".format(vehicle_id)
            route_distance = 0
            # worker/vehicle starts at 0
            scheduled_worker_codes = [self.worker_slots[vehicle_id].worker_id]

            next_start_minutes = self.worker_slots[vehicle_id].start_minutes
            prev_location = self.worker_slots[vehicle_id].end_location

            while not routing.IsEnd(index):
                plan_output += " {} -> ".format(manager.IndexToNode(index))
                # job starts at 0
                job_code = self.cluster_list[index][0]  # - 1
                job = self.env.jobs_dict[job_code]
                one_job_action_dict = ActionDict(
                    job_code=job.job_code,
                    scheduled_worker_codes=scheduled_worker_codes,
                    scheduled_start_minutes=next_start_minutes,
                    scheduled_duration_minutes=job.requested_duration_minutes,
                    action_type=ActionType.FLOATING,
                    is_forced_action=False,
                )
                internal_result_info = self.env.mutate_update_job_by_action_dict(
                    a_dict=one_job_action_dict, post_changes_flag=True)
                if internal_result_info.status_code != ActionScoringResultType.OK:
                    print(
                        f"{one_job_action_dict.job_code}: Failed to commit change, error: {str(internal_result_info)} "
                    )
                else:
                    print(
                        f"job({one_job_action_dict.job_code}) is planned successfully ..."
                    )

                db_job = job_service.get_by_code(
                    db_session=self.env.kp_data_adapter.db_session,
                    code=job.job_code)
                db_worker = worker_service.get_by_code(
                    db_session=self.env.kp_data_adapter.db_session,
                    code=scheduled_worker_codes[0])

                db_job.requested_primary_worker = db_worker

                self.env.kp_data_adapter.db_session.add(db_job)
                self.env.kp_data_adapter.db_session.commit()

                print(
                    f"job({job.job_code}) is updated to new requested_primary_worker = {scheduled_worker_codes[0]} "
                )

                prev_location = job.location
                previous_index = index
                index = solution.Value(routing.NextVar(index))

                # not used.
                travel_time = self._get_travel_time_2locations(
                    prev_location, job.location)

                route_distance = routing.GetArcCostForVehicle(
                    previous_index, index, vehicle_id)
                next_start_minutes += job.requested_duration_minutes + route_distance
示例#7
0
def update(*, db_session, job: Job, job_in: JobUpdate, org_code: str) -> Job:
    """Update a job from ``job_in``; commit, audit-log, post to Kafka, notify Zulip.

    Args:
        db_session: active SQLAlchemy session.
        job: the ORM job to mutate.
        job_in: update payload.
        org_code: organisation code forwarded to the Kafka producer.

    Returns:
        The updated, committed Job instance.
    """
    tags = []
    for t in job_in.tags:
        tags.append(tag_service.get_or_create(db_session=db_session, tag_in=TagUpdate(**t)))

    scheduled_secondary_workers = []
    if job_in.scheduled_secondary_workers:
        for w in job_in.scheduled_secondary_workers:
            scheduled_secondary_workers.append(
                worker_service.get_by_code(db_session=db_session, code=w.code))
    # Re-point team / location only when the payload actually changes them.
    if job_in.team and job_in.team.code != job.team.code:
        team_obj = team_service.get_by_code(db_session=db_session, code=job_in.team.code)
        job.team = team_obj
    if job_in.location and job_in.location.location_code and job_in.location.location_code != job.location.location_code:
        location_obj = location_service.get_or_create_by_code(
            db_session=db_session, location_in=job_in.location)
        job.location = location_obj
    # Scalars are copied generically; relationship fields handled explicitly.
    update_data = job_in.dict(
        skip_defaults=True,
        exclude={
            "tags",
            "scheduled_secondary_workers",
            "requested_primary_worker",
            "scheduled_primary_worker",
            "team",
            "location",
        },
    )

    for field in update_data.keys():
        setattr(job, field, update_data[field])

    job.scheduled_secondary_workers = scheduled_secondary_workers
    job.tags = tags
    if job_in.scheduled_primary_worker is not None:
        job.scheduled_primary_worker = worker_service.get_by_code(
            db_session=db_session, code=job_in.scheduled_primary_worker.code)
    if job_in.requested_primary_worker is not None:
        job.requested_primary_worker = worker_service.get_by_code(
            db_session=db_session, code=job_in.requested_primary_worker.code)

    db_session.add(job)
    db_session.commit()
    event_service.log(
        db_session=db_session,
        source="Dispatch Core App",
        description=f"Job ({job_in.code}) is updated {job_in.flex_form_data}",
        job_id=job.id,
    )
    print(f"\033[37;46m\t1: job update succeed {job.code}\033[0m")
    post_job_to_kafka(job=job, message_type=KafkaMessageType.UPDATE_JOB,
                      db_session=db_session, org_code=org_code)

    # Typo fixed in the trace below ("psot" -> "post").
    print(f"\033[37;46m\t2: job post kafka in succeed {job.code}\033[0m")
    # zulip send message
    if job.planning_status != JobPlanningStatus.UNPLANNED:
        zulip_dict = get_zulip_client_by_org_id(job.org_id)
        if zulip_dict:
            zulip_core = zulip_dict['client']
            # NOTE(review): scheduled_primary_worker may be None — confirm
            # update_job_send_message tolerates a None entry in the list.
            zulip_core.update_job_send_message(
                job, [job.scheduled_primary_worker] + job.scheduled_secondary_workers)

    return job
示例#8
0
def create(
    *,
    db_session,
    # job_priority: str = None,
    # job_type: str,
    code: str,
    job_type: str = "visit",
    org_id: str = None,
    org_code: str = None,
    name: str = None,
    planning_status: str,
    tags: List[dict] = [],
    description: str = None,
    team: TeamCreate,
    location: LocationCreate,
    flex_form_data: dict = None,
    requested_primary_worker: WorkerCreate = None,
    requested_start_datetime: datetime = None,
    requested_duration_minutes: float = None,
    scheduled_primary_worker: WorkerCreate = None,
    scheduled_secondary_workers: List[WorkerCreate] = [],
    scheduled_start_datetime: datetime = None,
    scheduled_duration_minutes: float = None,
    auto_planning: bool = True,
    requested_skills: List[str] = [],
    requested_items: List[str] = [],
    life_cycle_status: JobLifeCycleUpdate = None
) -> Job:
    """Create a new job and all of its dependent records.

    Resolves tags, team, location (geocoding the address via the configured
    location-service plugin when coordinates are missing) and worker
    references, persists the Job, adjusts depot inventory for REPLENISH jobs,
    then audit-logs, posts to Kafka and (for planned jobs) notifies Zulip.

    NOTE(review): ``tags``, ``scheduled_secondary_workers``,
    ``requested_skills`` and ``requested_items`` use mutable default
    arguments.  They are never mutated in this body, so this is currently
    safe, but it is fragile — consider ``None`` defaults.

    Returns the committed Job instance.
    """

    # Resolve tag dicts to ORM tag objects, creating missing ones.
    tag_objs = []
    for t in tags:
        tag_objs.append(tag_service.get_or_create(db_session=db_session, tag_in=TagCreate(**t)))

    team_obj = team_service.get_by_code(db_session=db_session, code=team["code"])

    location_obj = location_service.get_by_location_code(
        db_session=db_session,
        location_code=location["location_code"])
    if location_obj is None:
        loc_to_create = LocationCreate(**location)
        loc_to_create.org_id = org_id
        # Only geocode when we have an address text but no latitude yet.
        if loc_to_create.geo_address_text and not loc_to_create.geo_latitude:
            try:
                nid = SHORTUUID.random(length=9)
                location_config = {
                    "url": config.LOCATION_SERVICE_URL,
                    "token": config.LOCATION_SERVICE_TOKEN,
                    "request_method": config.LOCATION_SERVICE_REQUEST_METHOD,
                }
                payload = {"address_id": nid,
                           "input_address": loc_to_create.geo_address_text
                           }
                # get location service
                location_plug = service_plugin_service.get_by_service_id_and_type(
                    db_session=db_session,
                    service_id=team_obj.service_id,
                    service_plugin_type=KandboxPlannerPluginType.kandbox_location_service,
                ).all()
                if location_plug:
                    location_plugin = plugins.get(location_plug[0].plugin.slug)
                    location_adapter_service = location_plugin(config=location_config)
                    status, _location_ret, msg = location_adapter_service.get_pldt_location(payload)

                    if status:
                        # A float latitude signals a usable geocoding result.
                        if isinstance(_location_ret['latitude'], float):
                            # Reject coordinates outside the team's service area.
                            if _check_location(_location_ret, team_obj.flex_form_data):
                                loc_to_create.geo_latitude = _location_ret['latitude']
                                loc_to_create.geo_longitude = _location_ret['longitude']
                                loc_to_create.id = _location_ret['location_id']
                                loc_to_create.location_code = _location_ret['location_code']
                            else:
                                logService.create(db_session=db_session, log_in=LogCreate(
                                    title='Location Response Data OutSide', category='Location', content=f"location outside,job code:{code},input_address:{payload['input_address']}, msg:{str(_location_ret)}", org_id=int(org_id), team_id=team_obj.id))
                                log.error(
                                    f"Location Response Data OutSide ,{msg} :{payload['input_address']}")
                        else:
                            logService.create(db_session=db_session, log_in=LogCreate(
                                title='Location Response Data NUll', category='Location', content=f"job code:{code},input_address:{payload['input_address']},msg:{str(_location_ret)}", org_id=int(org_id), team_id=team_obj.id))
                            log.error(
                                f"Location Response Data NUll ,{msg} :{payload['input_address']}")
                    else:
                        logService.create(db_session=db_session, log_in=LogCreate(
                            title=msg['type'], category='Location', content=f"job code:{code},input_address:{payload['input_address']},msg:{str(msg['msg'])}", org_id=int(org_id), team_id=team_obj.id))
                        log.error(
                            f"Location Response failed ,{msg} :{payload['input_address']}")
                else:
                    log.error(
                        f"not find location plug,service:{team_obj.service_id},{KandboxPlannerPluginType.kandbox_location_service}")
            except Exception as e:
                # Best-effort geocoding: on any failure the location is
                # created without coordinates.
                print(traceback.format_exc())
                log.error(f"address request error:{loc_to_create.geo_address_text},{ e} ")

        location_obj = location_service.get_or_create_by_code(
            db_session=db_session, location_in=loc_to_create)
        # if location_obj.geo_longitude < 1:
        #     location_obj.geo_longitude = loc_to_create.geo_longitude
        #     location_obj.geo_latitude = loc_to_create.geo_latitude
        db_session.add(location_obj)
    # location_obj = location_service.update(
    #     db_session=db_session, location=location_obj, location_in=LocationUpdate(**location)
    # )

    # Replace the incoming worker dicts with their ORM counterparts.
    if requested_primary_worker:
        requested_primary_worker = worker_service.get_by_code(
            db_session=db_session, code=requested_primary_worker["code"])
    if scheduled_primary_worker:
        scheduled_primary_worker = worker_service.get_by_code(
            db_session=db_session, code=scheduled_primary_worker["code"])
    scheduled_secondary_workers_list = []
    if scheduled_secondary_workers is not None:
        for w in scheduled_secondary_workers:
            scheduled_secondary_workers_list.append(
                worker_service.get_by_code(db_session=db_session, code=w['code']))
    # We create the job
    if requested_skills:
        flex_form_data['requested_skills'] = requested_skills
    if requested_items:
        flex_form_data['requested_items'] = requested_items
    job = Job(
        code=code,
        name=name,
        org_id=org_id,
        job_type=job_type,
        description=description,
        planning_status=planning_status,
        tags=tag_objs,
        flex_form_data=flex_form_data,
        location=location_obj,
        team=team_obj,
        requested_start_datetime=requested_start_datetime,
        requested_duration_minutes=requested_duration_minutes,
        requested_primary_worker=requested_primary_worker,
        scheduled_start_datetime=scheduled_start_datetime,
        scheduled_duration_minutes=scheduled_duration_minutes,
        scheduled_primary_worker=scheduled_primary_worker,
        scheduled_secondary_workers=scheduled_secondary_workers_list,
        auto_planning=auto_planning,
        requested_skills=requested_skills,
        requested_items=requested_items,
    )
    db_session.add(job)

    # Replenish jobs reserve the requested items from the depot inventory.
    if job.job_type == JobType.REPLENISH:
        depot_code = flex_form_data["depot_code"]
        depot = depot_service.get_by_code(db_session=db_session, code=depot_code)

        for item_str in flex_form_data["requested_items"]:
            item_list = parse_item_str(item_str)
            item = item_service.get_by_code(db_session=db_session, code=item_list[0])
            inv = inventory_service.get_by_item_depot(
                db_session=db_session,
                item_id=item.id,
                depot_id=depot.id,
                org_id=team_obj.org_id
            ).one_or_none()
            # NOTE(review): quantities are deducted BEFORE the negative check,
            # and `inv` is already session-attached, so on shortage the
            # deduction may still be flushed by the commit below even though
            # db_session.add(inv) is skipped — confirm this is intended.
            inv.curr_qty -= item_list[1]
            inv.allocated_qty += item_list[1]
            if inv.curr_qty < 0:
                log.error(
                    f" Not enough inventory for item: {item_list[0]}, depot: {depot_code}, org.id: {team_obj.org_id}")
                continue
            db_session.add(inv)
            inventory_event_service.log(
                db_session=db_session,
                source="Env_Replenish",
                description=f"Allocated {item_list[1]} {item_list[0]} from depot: {depot_code}",
                item_code=item_list[0],
                depot_code=depot_code,
                item_id=item.id,
                depot_id=depot.id
            )

    db_session.commit()

    print(f"\033[37;46m\t1:add job succeed,{code}\033[0m")
    log.info(f"1:add job succeed,{code}")
    event_service.log(
        db_session=db_session,
        source="Dispatch Core App",
        description=f"Job ({code}) is created, planning_status={planning_status}, requested_start_datetime={requested_start_datetime}",
        job_id=job.id,
    )

    post_job_to_kafka(job=job, message_type=KafkaMessageType.CREATE_JOB,
                      db_session=db_session, org_code=org_code)

    print(f"\033[37;46m\t2:job post kafka in succeed,{code}\033[0m")
    log.info(f"2:job post kafka in succeed,{code}")

    # zulip send message
    if job.planning_status != JobPlanningStatus.UNPLANNED:
        zulip_dict = get_zulip_client_by_org_id(job.org_id)
        if zulip_dict:
            zulip_core = zulip_dict['client']
            zulip_core.update_job_send_message(
                job, [job.scheduled_primary_worker] + job.scheduled_secondary_workers)

    return job
示例#9
0
def job_update_flow(user_email: str,
                    job_id: int,
                    previous_job: JobRead,
                    notify=True,
                    db_session=None):
    """Runs the job update flow.

    Loads the stored job and the acting worker, then records one audit
    event for every field that differs between *previous_job* (the
    pre-update snapshot) and the stored job.
    """
    # Current state of the job and the worker performing the update.
    current = job_service.get(db_session=db_session, job_id=job_id)
    actor = worker_service.get_by_code(db_session=db_session, code=user_email)

    def _log(description, details=None):
        # All audit entries in this flow share the same source and ids.
        kwargs = dict(
            db_session=db_session,
            source="Job Participant",
            description=description,
            job_id=current.id,
            worker_id=actor.id,
        )
        if details is not None:
            kwargs["details"] = details
        event_service.log(**kwargs)

    # NOTE(review): this flag is set but never read inside this function —
    # confirm whether downstream (truncated?) code consumes it.
    conversation_topic_change = False

    if previous_job.job_code != current.job_code:
        _log(f'{actor.name} changed the job title to "{current.job_code}"')

    if previous_job.description != current.description:
        _log(f"{actor.name} changed the job description",
             details={"description": current.description})

    if previous_job.job_type.name != current.job_type.name:
        conversation_topic_change = True
        _log(f"{actor.name} changed the job type to {current.job_type.name}")

    if previous_job.job_priority.name != current.job_priority.name:
        conversation_topic_change = True
        _log(
            f"{actor.name} changed the job priority to {current.job_priority.name}"
        )

    # NOTE(review): compares the snapshot's enum `.value` against the stored
    # job's raw status — presumably JobRead holds an enum while the model
    # stores a plain value; confirm against the model definitions.
    if previous_job.planning_status.value != current.planning_status:
        conversation_topic_change = True
        _log(f"{actor.name} marked the job as {current.planning_status}")
# ---- 示例 #10 (Example 10) ----
def create(
    *,
    db_session,
    code: str,
    name: str = None,
    planning_status: str,
    tags: List[dict],
    description: str = None,
    team: TeamCreate,
    location: LocationCreate,
    flex_form_data: dict = None,
    requested_primary_worker: WorkerCreate = None,
    requested_start_datetime: datetime = None,
    requested_duration_minutes: float = None,
    scheduled_primary_worker: WorkerCreate = None,
    scheduled_secondary_workers: List[WorkerCreate],
    scheduled_start_datetime: datetime = None,
    scheduled_duration_minutes: float = None,
    auto_planning: bool = True,
) -> Job:
    """Creates a new job and persists it.

    Resolves tag, team, location and worker payloads to persisted rows,
    commits the new ``Job``, logs a creation event and publishes the job
    to Kafka.

    Args:
        db_session: active database session.
        code: unique job code.
        name: optional human-readable name.
        planning_status: initial planning status value.
        tags: list of tag payload dicts, each valid for ``TagCreate``.
        description: optional free-text description.
        team: team payload; only ``team["code"]`` is used for lookup.
        location: location payload valid for ``LocationCreate``.
        flex_form_data: optional free-form extension data.
        requested_primary_worker: worker payload; ``["code"]`` used for lookup.
        requested_start_datetime: requested window start.
        requested_duration_minutes: requested duration.
        scheduled_primary_worker: worker payload; ``["code"]`` used for lookup.
        scheduled_secondary_workers: worker payloads (may be ``None``).
        scheduled_start_datetime: scheduled window start.
        scheduled_duration_minutes: scheduled duration.
        auto_planning: whether the planner may (re)schedule this job.

    Returns:
        The committed ``Job`` instance.
    """
    # Resolve tag payloads to persisted Tag rows (creating missing ones).
    tag_objs = [
        tag_service.get_or_create(db_session=db_session, tag_in=TagCreate(**t))
        for t in tags
    ]

    # NOTE(review): `team`/`location`/worker params are subscripted like
    # dicts despite the *Create annotations — confirm callers pass dicts.
    team_obj = team_service.get_by_code(db_session=db_session,
                                        code=team["code"])

    loc_to_create = LocationCreate(**location)
    location_obj = location_service.get_or_create_by_code(
        db_session=db_session, location_in=loc_to_create)
    db_session.add(location_obj)

    # Resolve worker payloads to persisted Worker rows.
    if requested_primary_worker:
        requested_primary_worker = worker_service.get_by_code(
            db_session=db_session, code=requested_primary_worker["code"])
    if scheduled_primary_worker:
        scheduled_primary_worker = worker_service.get_by_code(
            db_session=db_session, code=scheduled_primary_worker["code"])
    if scheduled_secondary_workers:
        # Fix: resolve secondary workers by code too, consistent with the
        # primary workers above (and with update_planning_info); previously
        # the raw payloads were attached to the relationship unresolved.
        scheduled_secondary_workers = [
            worker_service.get_by_code(db_session=db_session, code=w["code"])
            for w in scheduled_secondary_workers
        ]
    else:
        scheduled_secondary_workers = []

    # Create and commit the job itself.
    job = Job(
        code=code,
        name=name,
        description=description,
        planning_status=planning_status,
        tags=tag_objs,
        flex_form_data=flex_form_data,
        location=location_obj,
        team=team_obj,
        requested_start_datetime=requested_start_datetime,
        requested_duration_minutes=requested_duration_minutes,
        requested_primary_worker=requested_primary_worker,
        scheduled_start_datetime=scheduled_start_datetime,
        scheduled_duration_minutes=scheduled_duration_minutes,
        scheduled_primary_worker=scheduled_primary_worker,
        scheduled_secondary_workers=scheduled_secondary_workers,
        auto_planning=auto_planning,
    )
    db_session.add(job)
    db_session.commit()

    # Audit trail + downstream notification.
    event_service.log(
        db_session=db_session,
        source="Dispatch Core App",
        description=
        f"Job ({code}) is created, planning_status={planning_status}, scheduled_start_datetime={scheduled_start_datetime}",
        job_id=job.id,
    )

    post_job_to_kafka(job=job,
                      message_type=KafkaMessageType.CREATE_JOB,
                      db_session=db_session)

    return job