Example #1
def step1():

    example1 = circus.Circus(name="example1",
                             master_seed=12345,
                             start=pd.Timestamp("1 Jan 2017 00:00"),
                             step_duration=pd.Timedelta("1h"))

    person = example1.create_population(
        name="person",
        size=1000,
        ids_gen=SequencialGenerator(prefix="PERSON_"))

    hello_world = example1.create_story(
        name="hello_world",
        initiating_population=person,
        member_id_field="PERSON_ID",

        # after each story, reset the timer to 0, so that it will get
        # executed again at the next clock tick (next hour)
        timer_gen=ConstantDependentGenerator(value=0))

    hello_world.set_operations(
        ConstantGenerator(value="hello world").ops.generate(named_as="HELLO"),
        FieldLogger(log_id="hello"))

    example1.run(duration=pd.Timedelta("48h"),
                 log_output_folder="output/example1",
                 delete_existing_logs=True)

    with open("output/example1/hello.csv") as f:
        print("Logged {} lines".format(len(f.readlines()) - 1))
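With a 1-hour clock step, the ConstantDependentGenerator(value=0) timer re-arms the story at every tick, so each of the 1,000 persons produces one row per hour. The sketch below is only a sanity check of that arithmetic, assuming step1() and its imports are in scope.

# Rough volume check (sketch): 48 hourly ticks * 1000 persons = 48000 rows,
# plus one CSV header line.
if __name__ == "__main__":
    step1()   # should print roughly "Logged 48000 lines"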
Example #2
def step4():
    """
    Woah, this got drastically slower
    """

    example1 = circus.Circus(name="example1",
                             master_seed=12345,
                             start=pd.Timestamp("1 Jan 2017 00:00"),
                             step_duration=pd.Timedelta("1h"))

    person = example1.create_population(
        name="person",
        size=1000,
        ids_gen=SequencialGenerator(prefix="PERSON_"))

    person.create_attribute("NAME",
                            init_gen=FakerGenerator(method="name",
                                                    seed=next(
                                                        example1.seeder)))

    sites = SequencialGenerator(prefix="SITE_").generate(1000)
    random_site_gen = NumpyRandomGenerator(method="choice",
                                           a=sites,
                                           seed=next(example1.seeder))

    allowed_sites = person.create_relationship(name="sites")
    for i in range(5):
        allowed_sites \
            .add_relations(from_ids=person.ids,
                           to_ids=random_site_gen.generate(person.size))

    hello_world = example1.create_story(
        name="hello_world",
        initiating_population=person,
        member_id_field="PERSON_ID",

        # after each story, reset the timer to 0, so that it will get
        # executed again at the next clock tick (next hour)
        timer_gen=ConstantDependentGenerator(value=0))

    duration_gen = NumpyRandomGenerator(method="exponential",
                                        scale=60,
                                        seed=next(example1.seeder))

    hello_world.set_operations(
        person.ops.lookup(id_field="PERSON_ID", select={"NAME": "NAME"}),
        ConstantGenerator(value="hello world").ops.generate(named_as="HELLO"),
        duration_gen.ops.generate(named_as="DURATION"),
        allowed_sites.ops.select_one(from_field="PERSON_ID", named_as="SITE"),
        example1.clock.ops.timestamp(named_as="TIME"),
        FieldLogger(log_id="hello"))

    example1.run(duration=pd.Timedelta("48h"),
                 log_output_folder="output/example1",
                 delete_existing_logs=True)

    with open("output/example1/hello.csv") as f:
        print("Logged {} lines".format(len(f.readlines()) - 1))
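The DURATION field above is drawn from numpy's exponential distribution with scale=60, i.e. a mean of 60 in whatever unit the scenario assigns. A standalone sketch of the same draw using numpy directly (the seed here is illustrative; the example seeds its generator from example1.seeder):

import numpy as np

# Same distribution as the DURATION generator above: exponential, mean 60.
rng = np.random.RandomState(1234)    # illustrative fixed seed
durations = rng.exponential(scale=60, size=5)
print(durations)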
Example #3
def add_mobility_action(circus, params):

    logging.info(" creating customer mobility action")
    mov_prof = [
        1., 1., 1., 1., 1., 1., 1., 1., 5., 10., 5., 1., 1., 1., 1., 1., 1.,
        5., 10., 5., 1., 1., 1., 1.
    ]
    mobility_time_gen = CyclicTimerGenerator(
        clock=circus.clock,
        seed=next(circus.seeder),
        config=CyclicTimerProfile(
            profile=mov_prof,
            profile_time_steps="1H",
            start_date=pd.Timestamp("12 September 2016 00:00.00"),
        ))

    gaussian_activity = NumpyRandomGenerator(
        method="normal",
        loc=params["mean_daily_customer_mobility_activity"],
        scale=params["std_daily_customer_mobility_activity"],
        seed=next(circus.seeder))

    mobility_activity_gen = gaussian_activity.map(f=bound_value(lb=.5))

    mobility_action = circus.create_story(
        name="customer_mobility",
        initiating_actor=circus.actors["customers"],
        actorid_field="CUST_ID",
        timer_gen=mobility_time_gen,
        activity_gen=mobility_activity_gen)

    logging.info(" adding operations")

    mobility_action.set_operations(
        circus.actors["customers"].ops.lookup(
            id_field="CUST_ID",
            select={"CURRENT_SITE": "PREV_SITE"}),

        # selects a destination site (or maybe the same as the current one... ^^)

        circus.actors["customers"] \
            .get_relationship("POSSIBLE_SITES") \
            .ops.select_one(from_field="CUST_ID", named_as="NEW_SITE"),

        # update the SITE attribute of the customers accordingly
        circus.actors["customers"] \
            .get_attribute("CURRENT_SITE") \
            .ops.update(
                id_field="CUST_ID",
                copy_from_field="NEW_SITE"),

        circus.clock.ops.timestamp(named_as="TIME"),

        # create mobility logs
        FieldLogger(log_id="customer_mobility_logs",
                    cols=["TIME", "CUST_ID", "PREV_SITE",
                          "NEW_SITE"]),
    )
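bound_value(lb=.5) above keeps the sampled activity from dropping below 0.5. The helper below is only a sketch of that assumed clamping behaviour, not trumania's actual implementation:

# Sketch of the assumed behaviour of bound_value: returns a callable that
# clamps its input, suitable for use with Generator.map(f=...).
def bound_value_sketch(lb=None, ub=None):
    def _clamp(value):
        if lb is not None:
            value = max(value, lb)
        if ub is not None:
            value = min(value, ub)
        return value
    return _clamp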
Example #4
def add_mobility_action(circus, params):

    logging.info(" creating field agent mobility action")

    # Field agents move only during the work hours
    mobility_time_gen = WorkHoursTimerGenerator(clock=circus.clock,
                                                seed=next(circus.seeder))

    fa_mean_weekly_activity = mobility_time_gen.activity(
        n=params["mean_daily_fa_mobility_activity"], per=pd.Timedelta("1day"))

    fa_weekly_std = mobility_time_gen.activity(
        n=params["std_daily_fa_mobility_activity"], per=pd.Timedelta("1day"))

    gaussian_activity = NumpyRandomGenerator(method="normal",
                                             loc=fa_mean_weekly_activity,
                                             scale=fa_weekly_std,
                                             seed=next(circus.seeder))

    mobility_activity_gen = gaussian_activity.map(f=bound_value(lb=1))

    field_agents = circus.actors["field_agents"]

    mobility_action = circus.create_story(name="field_agent_mobility",
                                          initiating_actor=field_agents,
                                          actorid_field="FA_ID",
                                          timer_gen=mobility_time_gen,
                                          activity_gen=mobility_activity_gen)

    logging.info(" adding operations")

    mobility_action.set_operations(
        field_agents.ops.lookup(
            id_field="FA_ID",
            select={"CURRENT_SITE": "PREV_SITE"}),

        # selects a destination site (or maybe the same as the current one... ^^)

        field_agents \
            .get_relationship("POSSIBLE_SITES") \
            .ops.select_one(from_field="FA_ID", named_as="NEW_SITE"),

        # update the SITE attribute of the field agents accordingly
        field_agents \
            .get_attribute("CURRENT_SITE") \
            .ops.update(
                id_field="FA_ID",
                copy_from_field="NEW_SITE"),

        circus.clock.ops.timestamp(named_as="TIME"),

        # create mobility logs
        FieldLogger(log_id="field_agent_mobility_logs",
                    cols=["TIME", "FA_ID", "PREV_SITE",
                          "NEW_SITE"]),
    )
Example #5
def add_agent_stock_log_action(circus, params):
    """
    Adds one action per (agent type, product) pair, recording the daily stock
    level and value of pos, dist_l1 and dist_l2.

    All these actions contribute to the same log_id.
    """

    for agent_name in ["pos", "dist_l1", "dist_l2"]:

        agent = circus.actors[agent_name]

        for product in params["products"].keys():

            product_actor = circus.actors[product]
            stock_ratio = 1 / product_actor.size
            mean_price = np.mean(params["products"][product]["item_prices"])

            stock_levels_logs = circus.create_story(
                name="{}_{}_stock_log".format(agent_name, product),
                initiating_actor=agent,
                actorid_field="agent_id",
                timer_gen=ConstantDependentGenerator(
                    value=circus.clock.n_iterations(
                        duration=pd.Timedelta("24h")) - 1))

            stock_levels_logs.append_operations(
                circus.clock.ops.timestamp(named_as="TIME"),

                # We're supposed to report the stock level of every product id.
                # What we actually do is count the full stock across all
                # products, report it against one randomly chosen product id,
                # and scale down the stock volume accordingly.
                # It's ok if not every product id gets reported every day.
                product_actor.ops.select_one(named_as="product_id"),
                agent.get_relationship(product).ops.get_neighbourhood_size(
                    from_field="agent_id", named_as="full_stock_volume"),
                Apply(source_fields="full_stock_volume",
                      named_as="stock_volume",
                      f=lambda v: (v * stock_ratio).astype(int),
                      f_args="series"),

                # estimate stock value based on stock volume
                Apply(source_fields="stock_volume",
                      named_as="stock_value",
                      f=lambda v: v * mean_price,
                      f_args="series"),

                # The log_id (and therefore the resulting file name) is the same
                # for all these actions, so we simply merge the stock levels of
                # all populations as we go. I dare say that's pretty neat ^^
                FieldLogger(log_id="agent_stock_log",
                            cols=[
                                "TIME", "product_id", "agent_id",
                                "stock_volume", "stock_value"
                            ]))
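To make the scaling above concrete, here is the same arithmetic with made-up numbers (500 items in stock, 10 product ids, a mean item price of 2.0):

# Made-up numbers illustrating the scaling above: the full stock across all
# product ids is scaled down by stock_ratio and reported against a single
# randomly selected product_id.
full_stock_volume = 500
stock_ratio = 1 / 10                                    # 10 product ids
mean_price = 2.0                                        # illustrative price
stock_volume = int(full_stock_volume * stock_ratio)     # 50
stock_value = stock_volume * mean_price                 # 100.0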
Example #6
def add_survey_action(circus):

    logging.info(" creating field agent survey action")

    field_agents = circus.actors["field_agents"]

    # Surveys only happen during work hours
    survey_timer_gen = WorkHoursTimerGenerator(clock=circus.clock,
                                               seed=next(circus.seeder))

    min_activity = survey_timer_gen.activity(
        n=10,
        per=pd.Timedelta("7 days"),
    )
    max_activity = survey_timer_gen.activity(
        n=100,
        per=pd.Timedelta("7 days"),
    )

    survey_activity_gen = NumpyRandomGenerator(method="choice",
                                               a=np.arange(
                                                   min_activity, max_activity),
                                               seed=next(circus.seeder))

    survey_action = circus.create_story(name="pos_surveys",
                                        initiating_actor=field_agents,
                                        actorid_field="FA_ID",
                                        timer_gen=survey_timer_gen,
                                        activity_gen=survey_activity_gen)

    survey_action.set_operations(
        field_agents.ops.lookup(id_field="FA_ID",
                                select={"CURRENT_SITE": "SITE"}),

        # TODO: We should select a POS irrespective of the relationship weight
        circus.actors["sites"].get_relationship("POS").ops.select_one(
            from_field="SITE",
            named_as="POS_ID",

            # a field agent at a location without a POS won't log any survey
            discard_empty=True),
        circus.actors["pos"].ops.lookup(id_field="POS_ID",
                                        select={
                                            "LATITUDE": "POS_LATITUDE",
                                            "LONGITUDE": "POS_LONGITUDE",
                                            "AGENT_NAME": "POS_NAME",
                                        }),
        SequencialGenerator(prefix="TASK").ops.generate(named_as="TASK_ID"),
        ConstantGenerator(value="Done").ops.generate(named_as="STATUS"),
        circus.clock.ops.timestamp(named_as="TIME"),
        FieldLogger(log_id="pos_surveys",
                    cols=[
                        "TASK_ID", "FA_ID", "POS_ID", "POS_NAME",
                        "POS_LATITUDE", "POS_LONGITUDE", "TIME", "STATUS"
                    ]))
Example #7
def test_populations_during_working_hours():

    with path.tempdir() as log_parent_folder:
        log_folder = os.path.join(log_parent_folder, "logs")

        circus = Circus(name="tested_circus",
                        master_seed=1,
                        start=pd.Timestamp("8 June 2016"),
                        step_duration=pd.Timedelta("1h"))

        field_agents = circus.create_population(
            name="fa",
            size=100,
            ids_gen=SequencialGenerator(max_length=3, prefix="id_"))

        mobility_time_gen = WorkHoursTimerGenerator(clock=circus.clock,
                                                    seed=next(circus.seeder))

        five_per_day = mobility_time_gen.activity(n=5,
                                                  per=pd.Timedelta("1day"))

        std_per_day = mobility_time_gen.activity(n=.5,
                                                 per=pd.Timedelta("1day"))

        gaussian_activity = NumpyRandomGenerator(method="normal",
                                                 loc=five_per_day,
                                                 scale=std_per_day,
                                                 seed=1)
        mobility_activity_gen = gaussian_activity.map(bound_value(lb=1))

        # just a dummy operation to produce some logs
        story = circus.create_story(name="test_story",
                                    initiating_population=field_agents,
                                    member_id_field="some_id",
                                    timer_gen=mobility_time_gen,
                                    activity_gen=mobility_activity_gen)

        story.set_operations(circus.clock.ops.timestamp(named_as="TIME"),
                             FieldLogger(log_id="the_logs"))

        circus.run(duration=pd.Timedelta("30 days"),
                   log_output_folder=log_folder)

        logging.info("loading produced logs")
        logs = load_all_logs(log_folder)["the_logs"]

        logging.info("number of produced logs: {} logs".format(logs.shape[0]))

        # 30 days of simulation should produce 100 * 5 * 30 == 15k logs
        assert 14e3 <= logs.shape[0] <= 16e3
Example #8
def run_test_scenario_1(clock_step, simulation_duration, n_stories, per,
                        log_folder):

    circus = Circus(name="tested_circus",
                    master_seed=1,
                    start=pd.Timestamp("8 June 2016"),
                    step_duration=pd.Timedelta(clock_step))

    population = circus.create_population(name="a",
                                          size=1000,
                                          ids_gen=SequencialGenerator(
                                              max_length=3, prefix="id_"))

    daily_profile = CyclicTimerGenerator(
        clock=circus.clock,
        config=CyclicTimerProfile(profile=[1] * 24,
                                  profile_time_steps="1h",
                                  start_date=pd.Timestamp("8 June 2016")),
        seed=1234)

    # each of the 1000 population members has a constant activity of
    # n_stories stories per "per"
    activity_gen = ConstantGenerator(
        value=daily_profile.activity(n=n_stories, per=per))

    # just a dummy operation to produce some logs
    story = circus.create_story(name="test_story",
                                initiating_population=population,
                                member_id_field="some_id",
                                timer_gen=daily_profile,
                                activity_gen=activity_gen)

    story.set_operations(circus.clock.ops.timestamp(named_as="TIME"),
                         FieldLogger(log_id="the_logs"))

    circus.run(duration=pd.Timedelta(simulation_duration),
               log_output_folder=log_folder)
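A hypothetical invocation of run_test_scenario_1; the parameter values below are made up for illustration and assume pandas (pd) and the function above are in scope:

import tempfile

# Illustrative call only; all values are made up.
with tempfile.TemporaryDirectory() as log_folder:
    run_test_scenario_1(clock_step="15 min",
                        simulation_duration="5 days",
                        n_stories=12,
                        per=pd.Timedelta("1 day"),
                        log_folder=log_folder)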
Example #9
    person.create_attribute("age",
                            init_gen=NumpyRandomGenerator(
                                method="normal",
                                loc=3,
                                scale=5,
                                seed=next(example_circus.seeder)))

    return example_circus


example = create_circus_with_population()

hello_world = example.create_story(
    name="hello_world",
    initiating_population=example.populations["person"],
    member_id_field="PERSON_ID",
    timer_gen=ConstantDependentGenerator(value=1))

hello_world.set_operations(
    ConstantGenerator(value="hello world").ops.generate(named_as="MESSAGE"),
    FieldLogger(log_id="hello"))

example.run(duration=pd.Timedelta("48h"),
            log_output_folder="output/example3",
            delete_existing_logs=True)

with open("output/example3/hello.csv") as log:
    logging.info("some produced logs: \n\n" +
                 "".join(log.readlines(1000)[:10]))
Example #10
def add_telco_restock_actions(circus, params):
    """
    Add actions to "restock" the ERS and SIM products, i.e. create new items
    when needed (telcos do not buy products from anyone in this model,
    they just create them).

    :param circus:
    :param params:
    :return:
    """

    telcos = circus.actors["telcos"]
    pos_per_telco = circus.actors["pos"].size / telcos.size

    for product, description in params["products"].items():

        action_name = "telcos_{}_bulk_purchase".format(product)
        logging.info("creating {} action".format(action_name))

        bulk_purchases = circus.create_story(
            name=action_name,
            initiating_actor=telcos,
            actorid_field="TELCO",

            # no timer or activity: this is triggered by the bulk-purchases
            # of the level 1 dealers
        )

        # telcos do not "buy" products, they just create them
        product_id_gen = circus.generators["{}_id_gen".format(product)]
        bulk_gen = DependentBulkGenerator(product_id_gen)

        # bulk size distribution is a scaled version of POS bulk size distribution
        bulk_size_gen = patterns.scale_quantity_gen(
            stock_size_gen=circus.generators["pos_{}_bulk_size_gen".format(
                product)],
            scale_factor=pos_per_telco)

        bulk_purchases.set_operations(
            circus.clock.ops.timestamp(named_as="TIME"),

            bulk_size_gen.ops.generate(named_as="BULK_SIZE"),

            telcos.get_relationship(product).ops
                  .get_neighbourhood_size(from_field="TELCO",
                                          named_as="OLD_STOCK"),

            bulk_gen.ops.generate(named_as="ITEMS_BULK",
                                  observed_field="BULK_SIZE"),

            # and adding them to the buyer
            telcos.get_relationship(product).ops.add_grouped(
                from_field="TELCO",
                grouped_items_field="ITEMS_BULK"),

            telcos.get_relationship(product).ops \
                  .get_neighbourhood_size(
                        from_field="TELCO",
                        named_as="NEW_STOCK"),

            FieldLogger(log_id=action_name,
                        cols=["TIME", "TELCO", "OLD_STOCK",
                              "NEW_STOCK", "BULK_SIZE"]))
Example #11

the_circus = create_circus_with_population()

hello_world = the_circus.create_story(
    name="hello_world",
    initiating_population=the_circus.populations["person"],
    member_id_field="PERSON_ID",
    timer_gen=ConstantDependentGenerator(value=1))

hello_world.set_operations(

    # adding a random timestamp, within the current clock step
    the_circus.clock.ops.timestamp(named_as="TIME"),
    ConstantGenerator(value="hello world").ops.generate(named_as="MESSAGE"),

    # selecting a random "other person"
    the_circus.populations["person"].ops.select_one(named_as="OTHER_PERSON"),

    # specifying which fields to put in the log
    FieldLogger(log_id="hello",
                cols=["TIME", "PERSON_ID", "OTHER_PERSON", "MESSAGE"]))

the_circus.run(duration=pd.Timedelta("48h"),
               log_output_folder="output/example4",
               delete_existing_logs=True)

with open("output/example4/hello.csv") as log:
    logging.info("some produced logs: \n\n" +
                 "".join(log.readlines(1000)[:10]))
Example #12
    # selecting a random "other person"
    the_circus.populations["person"]
        .ops
        .select_one(named_as="OTHER_PERSON"),

    the_circus.populations["person"]
        .ops
        .lookup(id_field="PERSON_ID",
                select={"NAME": "EMITTER_NAME"}),

    the_circus.populations["person"]
        .ops
        .lookup(id_field="OTHER_PERSON",
                select={"NAME": "RECEIVER_NAME"}),

    # specifying which fields to put in the log
    FieldLogger(log_id="hello",
                cols=["TIME", "EMITTER_NAME", "RECEIVER_NAME", "MESSAGE"]
                )

)

the_circus.run(
    duration=pd.Timedelta("48h"),
    log_output_folder="output/example8",
    delete_existing_logs=True
)

with open("output/example8/hello.csv") as log:
    logging.info("some produced logs: \n\n" + "".join(log.readlines(1000)[:10]))
Example #13
def step7():

    example1 = circus.Circus(name="example1",
                             master_seed=12345,
                             start=pd.Timestamp("1 Jan 2017 00:00"),
                             step_duration=pd.Timedelta("1h"))

    person = example1.create_population(
        name="person",
        size=1000,
        ids_gen=SequencialGenerator(prefix="PERSON_"))

    person.create_attribute("NAME",
                            init_gen=FakerGenerator(method="name",
                                                    seed=next(
                                                        example1.seeder)))
    person.create_attribute("POPULARITY",
                            init_gen=NumpyRandomGenerator(
                                method="uniform",
                                low=0,
                                high=1,
                                seed=next(example1.seeder)))

    sites = SequencialGenerator(prefix="SITE_").generate(1000)
    random_site_gen = NumpyRandomGenerator(method="choice",
                                           a=sites,
                                           seed=next(example1.seeder))

    allowed_sites = person.create_relationship(name="sites")

    # SITES ------------------

    # Add HOME sites
    allowed_sites.add_relations(from_ids=person.ids,
                                to_ids=random_site_gen.generate(person.size),
                                weights=0.4)

    # Add WORK sites
    allowed_sites.add_relations(from_ids=person.ids,
                                to_ids=random_site_gen.generate(person.size),
                                weights=0.3)

    # Add OTHER sites
    for i in range(3):
        allowed_sites \
            .add_relations(from_ids=person.ids,
                           to_ids=random_site_gen.generate(person.size),
                           weights=0.1)

    # FRIENDS ------------------

    friends = person.create_relationship(name="friends")

    friends_df = pd.DataFrame.from_records(
        make_random_bipartite_data(
            person.ids,
            person.ids,
            p=0.005,  # probability for a node to be connected to another
                      # one: 5 friends on average = 5/1000
            seed=next(example1.seeder)),
        columns=["A", "B"])

    friends.add_relations(from_ids=friends_df["A"], to_ids=friends_df["B"])

    # PRICE ------------------

    def price(story_data):

        result = pd.DataFrame(index=story_data.index)

        result["PRICE"] = story_data["DURATION"] * 0.05
        result["CURRENCY"] = "EUR"

        return result

    # STORIES ------------------

    hello_world = example1.create_story(
        name="hello_world",
        initiating_population=person,
        member_id_field="PERSON_ID",

        # after each story, reset the timer to 0, so that it will get
        # executed again at the next clock tick (next hour)
        timer_gen=ConstantDependentGenerator(value=0))

    duration_gen = NumpyRandomGenerator(method="exponential",
                                        scale=60,
                                        seed=next(example1.seeder))

    hello_world.set_operations(
        person.ops.lookup(id_field="PERSON_ID", select={"NAME": "NAME"}),
        ConstantGenerator(value="hello world").ops.generate(named_as="HELLO"),
        duration_gen.ops.generate(named_as="DURATION"),
        friends.ops.select_one(
            from_field="PERSON_ID",
            named_as="COUNTERPART_ID",
            weight=person.get_attribute_values("POPULARITY"),
            # For people that do not have friends, it will try to find
            # the POPULARITY attribute of a None and crash miserably
            # Adding this flag will discard people that do not have friends
            discard_empty=True),
        person.ops.lookup(id_field="COUNTERPART_ID",
                          select={"NAME": "COUNTER_PART_NAME"}),
        allowed_sites.ops.select_one(from_field="PERSON_ID", named_as="SITE"),
        allowed_sites.ops.select_one(from_field="COUNTERPART_ID",
                                     named_as="COUNTERPART_SITE"),
        Apply(source_fields=["DURATION", "SITE", "COUNTERPART_SITE"],
              named_as=["PRICE", "CURRENCY"],
              f=price,
              f_args="dataframe"),
        example1.clock.ops.timestamp(named_as="TIME"),
        FieldLogger(log_id="hello"))

    example1.run(duration=pd.Timedelta("48h"),
                 log_output_folder="output/example1",
                 delete_existing_logs=True)

    with open("output/example1/hello.csv") as f:
        print("Logged {} lines".format(len(f.readlines()) - 1))
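For reference, the price() callback above charges 0.05 EUR per unit of DURATION. A self-contained illustration of the same arithmetic (re-implemented here because price() is local to step7):

import pandas as pd

# Same arithmetic as price() above: PRICE = DURATION * 0.05, CURRENCY = EUR.
story_data = pd.DataFrame({"DURATION": [60.0, 120.0]})
result = pd.DataFrame(index=story_data.index)
result["PRICE"] = story_data["DURATION"] * 0.05      # 3.0 and 6.0
result["CURRENCY"] = "EUR"
print(result)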
Example #14
def add_purchase_actions(circus, params):

    customers = circus.actors["customers"]
    pos = circus.actors["pos"]
    sites = circus.actors["sites"]

    for product, description in params["products"].items():

        logging.info("creating customer {} purchase action".format(product))
        purchase_timer_gen = DefaultDailyTimerGenerator(
            circus.clock, next(circus.seeder))

        max_activity = purchase_timer_gen.activity(
            n=1,
            per=pd.Timedelta(
                days=description["customer_purchase_min_period_days"]))

        min_activity = purchase_timer_gen.activity(
            n=1,
            per=pd.Timedelta(
                days=description["customer_purchase_max_period_days"]))

        purchase_activity_gen = NumpyRandomGenerator(
            method="uniform",
            low=1 / max_activity,
            high=1 / min_activity,
            seed=next(circus.seeder)).map(f=lambda per: 1 / per)

        low_stock_bulk_purchase_trigger = DependentTriggerGenerator(
            value_to_proba_mapper=bounded_sigmoid(
                x_min=1,
                x_max=description["max_pos_stock_triggering_pos_restock"],
                shape=description["restock_sigmoid_shape"],
                incrementing=False))

        item_price_gen = NumpyRandomGenerator(method="choice",
                                              a=description["item_prices"],
                                              seed=next(circus.seeder))

        action_name = "customer_{}_purchase".format(product)
        purchase_action = circus.create_story(
            name=action_name,
            initiating_actor=customers,
            actorid_field="CUST_ID",
            timer_gen=purchase_timer_gen,
            activity_gen=purchase_activity_gen)

        purchase_action.set_operations(
            customers.ops.lookup(id_field="CUST_ID",
                                 select={"CURRENT_SITE": "SITE"}),
            sites.get_relationship("POS").ops.select_one(
                from_field="SITE",
                named_as="POS",
                weight=pos.get_attribute_values("ATTRACTIVENESS"),

                # TODO: this means customers in a location without a POS do not
                # buy anything => we could add a retry mechanism here
                discard_empty=True),
            sites.get_relationship("CELLS").ops.select_one(from_field="SITE",
                                                           named_as="CELL_ID"),

            # injecting geo level 2 and distributor in purchase action:
            # this is only required for approximating targets of that
            # distributor
            sites.ops.lookup(id_field="SITE",
                             select={
                                 "GEO_LEVEL_2": "geo_level2_id",
                                 "{}__dist_l1".format(product):
                                 "distributor_l1"
                             }),
            pos.get_relationship(product).ops.select_one(
                from_field="POS",
                named_as="INSTANCE_ID",
                pop=True,
                discard_empty=False),
            circus.actors[product].ops.select_one(named_as="PRODUCT_ID"),
            Apply(source_fields="INSTANCE_ID",
                  named_as="FAILED_SALE_OUT_OF_STOCK",
                  f=pd.isnull,
                  f_args="series"),
            SequencialGenerator(
                prefix="TX_CUST_{}".format(product)).ops.generate(
                    named_as="TX_ID"),
            item_price_gen.ops.generate(named_as="VALUE"),
            circus.clock.ops.timestamp(named_as="TIME"),
            FieldLogger(log_id=action_name),
            patterns.trigger_action_if_low_stock(
                circus,
                stock_relationship=pos.get_relationship(product),
                actor_id_field="POS",
                restock_trigger=low_stock_bulk_purchase_trigger,
                triggered_action_name="pos_{}_bulk_purchase".format(product)),
        )
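The purchase_activity_gen above is easiest to read as sampling a purchase period and inverting it: a uniform draw between 1/max_activity and 1/min_activity yields a period, which the map(f=lambda per: 1 / per) step turns back into an activity. A sketch of that inversion with made-up bounds:

import numpy as np

# Sketch of the period/activity inversion used for purchase_activity_gen;
# the bounds here are made up, not taken from params.
max_activity, min_activity = 0.5, 0.05
rng = np.random.RandomState(0)
periods = rng.uniform(low=1 / max_activity, high=1 / min_activity, size=3)
activities = 1 / periods          # purchases per timer unit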
Example #15
    # after each story, reset the timer to 0, so that it will get
    # executed again at the next clock tick (next hour)
    timer_gen=story_timer_gen,
    activity_gen=activity_gen)

# 3600 seconds
duration_gen = NumpyRandomGenerator(method="exponential",
                                    scale=3600,
                                    seed=next(example1.seeder))

hello_world.set_operations(
    person.ops.lookup(id_field="PERSON_ID", select={"NAME": "NAME"}),
    duration_gen.ops.generate(named_as="DURATION"),
    ConstantGenerator(value=1).ops.generate(named_as="LOCATION"),
    example1.clock.ops.timestamp(named_as="TIME"), FieldLogger(log_id="dummy"))

example1.run(duration=pd.Timedelta(hrs),
             log_output_folder="output/example1",
             delete_existing_logs=True)

df = pd.read_csv("output/example1/dummy.csv")
print(df)
"""
with open("output/example1/dummy.csv") as f:
    print("Logged {} lines".format(len(f.readlines()) - 1))



# import pandas as pd
# import numpy as np
Example #16
def add_attractiveness_evolution_action(circus):

    pos = circus.actors["pos"]

    # once per day the attractiveness of each POS evolves according to the delta
    attractiveness_evolution = circus.create_story(
        name="attractiveness_evolution",
        initiating_actor=pos,
        actorid_field="POS_ID",

        # exactly one attractiveness evolution per day
        # caveat: all at the same time for now
        timer_gen=ConstantDependentGenerator(
            value=circus.clock.n_iterations(pd.Timedelta("1 day"))))

    def update_attract_base(df):
        base = df.apply(lambda row: row["ATTRACT_BASE"] + row["ATTRACT_DELTA"],
                        axis=1)
        base = base.mask(base > 50, 50).mask(base < -50, -50)
        return pd.DataFrame(base, columns=["result"])

    attractiveness_evolution.set_operations(
        pos.ops.lookup(id_field="POS_ID",
                       select={
                           "ATTRACT_BASE": "ATTRACT_BASE",
                           "ATTRACT_DELTA": "ATTRACT_DELTA",
                       }),
        Apply(
            source_fields=["ATTRACT_BASE", "ATTRACT_DELTA"],
            named_as="NEW_ATTRACT_BASE",
            f=update_attract_base,
        ),
        pos.get_attribute("ATTRACT_BASE").ops.update(
            id_field="POS_ID", copy_from_field="NEW_ATTRACT_BASE"),
        Apply(source_fields=["ATTRACT_BASE"],
              named_as="NEW_ATTRACTIVENESS",
              f=_attractiveness_sigmoid(),
              f_args="series"),
        pos.get_attribute("ATTRACTIVENESS").ops.update(
            id_field="POS_ID", copy_from_field="NEW_ATTRACTIVENESS"),

        # TODO: remove this (it is currently only there for debugging)
        circus.clock.ops.timestamp(named_as="TIME"),
        FieldLogger(log_id="att_updates"))

    delta_updater = NumpyRandomGenerator(method="choice",
                                         a=[-1, 0, 1],
                                         seed=next(circus.seeder))

    # random walk of the attractiveness delta, once per week
    attractiveness_delta_evolution = circus.create_story(
        name="attractiveness_delta_evolution",
        initiating_actor=pos,
        actorid_field="POS_ID",
        timer_gen=ConstantDependentGenerator(
            value=circus.clock.n_iterations(pd.Timedelta("7 days"))))

    attractiveness_delta_evolution.set_operations(
        pos.ops.lookup(id_field="POS_ID",
                       select={"ATTRACT_DELTA": "ATTRACT_DELTA"}),
        delta_updater.ops.generate(named_as="DELTA_UPDATE"),
        Apply(source_fields=["ATTRACT_DELTA", "DELTA_UPDATE"],
              named_as="NEW_ATTRACT_DELTA",
              f=np.add,
              f_args="series"),
        pos.get_attribute("ATTRACT_DELTA").ops.update(
            id_field="POS_ID", copy_from_field="NEW_ATTRACT_DELTA"),

        # TODO: remove this (it is currently only there for debugging)
        circus.clock.ops.timestamp(named_as="TIME"),
        FieldLogger(log_id="att_delta_updates"))
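The row-wise apply in update_attract_base above clamps ATTRACT_BASE + ATTRACT_DELTA to [-50, 50]. An equivalent vectorised formulation would look like the sketch below (not tested as a drop-in replacement for the story):

import pandas as pd

# Vectorised sketch equivalent to update_attract_base: same clamp to
# [-50, 50], without the row-wise apply.
def update_attract_base_vectorised(df):
    base = (df["ATTRACT_BASE"] + df["ATTRACT_DELTA"]).clip(-50, 50)
    return pd.DataFrame({"result": base})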