Example #1
def donate():
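    # Read the target provider and an optional uploaded activity file from the request.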
    app = request.args.get('app', 'strava')
    file = request.files.get('file')
    try:
        if file:
            extractor = ActivityExtractor.get_provider(provider_name=app,
                                                       file_stream=file)
        else:
            extractor = ActivityExtractor.get_provider(provider_name=app,
                                                       creds=request.args)
        activities = extractor.get_activities()
        donated_activity_ids = extractor.get_activity_ids(activities)
        logger.info(
            f'Extracted activities to be donated and processed: {donated_activity_ids}'
        )
    except ActivityExtractorException as e:
        return jsonify(success=False, error=e.message), e.status

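    # Hand the extracted activities to the producer; surface any failure to the caller.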
    try:
        producer.produce(activities)
    except Exception as e:
        logger.error('Producing activity records failed with: ' + str(e))
        return jsonify(success=False, error=str(e)), 500

    return jsonify(success=True, donated_activities=donated_activity_ids)
Example #2
def callback(ch, method, properties, body):
    print("callback run")
    # body arrives as bytes; decode it rather than trimming str(body)'s "b'...'" wrapper
    message = body.decode("utf-8")
    print(message)
    path = detect(message)  # run detection; returns a local output path
    url = upload(path)      # upload the artifact, returning its URL
    removeFile(path)        # remove the local file once uploaded
    produce(url)            # publish the URL for downstream consumers
    print("callback done")
Example #3
def generate_and_produce(generator, interval):
    logger.debug('Generate and produce activities..')
    provider = generator.PROVIDER_NAME
    while os.getenv(f'{provider}_generation') == 'running':
        logger.debug(f'{provider}: running - generating activity..')
        activities = [generator.generate_dummy_activity()]
        try:
            producer.produce(activities)
        except Exception as e:
            logger.warning(
                'Producing generated activity records failed with: ' + str(e))
        time.sleep(interval)
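A minimal sketch of how such a loop might be started and stopped through the environment flag it polls; the generator object and provider name are assumptions, not from the source:

import os
import threading

# `generator` is assumed to expose PROVIDER_NAME ('strava' here) and
# generate_dummy_activity(); the interval is in seconds.
os.environ['strava_generation'] = 'running'
t = threading.Thread(target=generate_and_produce, args=(generator, 5), daemon=True)
t.start()

# Later: flip the flag; the loop exits after its current sleep.
os.environ['strava_generation'] = 'stopped'
t.join()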
Example #4
def main():

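    # Map each component name to its topic/message fixtures.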
    component_map = {"pup": maps.PUP, "hbi": maps.HBI}
    logger.info("Starting Component Tester")
    config.log_config()

    if config.ROLE == "consume":
        data = component_map[config.COMPONENT]
        consumer.consume(data["consume_topic"])
    elif config.ROLE == "produce":
        data = component_map[config.COMPONENT]
        producer.produce(data["produce_topic"], data["msg"])
    else:
        logger.error("Role not recognized: %s", config.ROLE)
Example #5
def test_state(db_conn, wiki_updates):
    # Publish events to the event bus
    for update in wiki_updates:
        produce(TOPIC, update)

    # Wait for processing
    time.sleep(10)

    with db_conn as conn:
        cur = conn.cursor()
        cur.execute(
            "SELECT data -> 'meta' ->> 'domain' AS domain, COUNT(*) AS cnt "
            "FROM events "
            "GROUP BY domain "
            "ORDER BY cnt DESC;")
        res = dict(cur.fetchall())

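    # Expected per-domain counts for this fixture set.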
    assert res['www.wikidata.org'] == 34
    assert res['en.wikipedia.org'] == 10
    assert res['commons.wikimedia.org'] == 10
        "Sep": 9,
        "Oct": 10,
        "Nov": 11,
        "Dec": 12
    }

    datetime = str(arr[5]) + "-" + str(months[arr[1]]) + "-" + str(
        arr[2]) + " " + arr[3]
    return datetime


df = pd.read_csv("twcs.csv")

# Data preprocessing

df = df[df['response_tweet_id'].notna()
        & df['in_response_to_tweet_id'].notna()]
df = df.astype({"in_response_to_tweet_id": np.int64})
df['created_at'] = df['created_at'].apply(lambda x: convert(x))
df['created_at'] = pd.to_datetime(df['created_at'], format='%Y-%m-%d %H:%M:%S')
df = df.sort_values(by='created_at')

print(df.dtypes)
print(df.tail())

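# Stream the conversation records to the producer in fixed-size chunks.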
chunk_size = 35
# Note: len(df) is the row count; df.size counts rows * columns and would
# run the loop far past the last row.
for start in range(71, len(df), chunk_size):
    df_subset = df.iloc[start:start + chunk_size].copy()  # copy to avoid SettingWithCopyWarning
    df_subset['created_at'] = df_subset['created_at'].astype(str)
    produce(df_subset.to_dict('records'))
Example #7
def pipeline(binder):
  # Reconstructed signature: `inject.configure(pipeline)` below passes the
  # library's Binder to this function; the opening line was truncated.
  binder.install(consumer.queue_base)

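  # One shared multiprocessing queue backs both the event writer and the stream.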
  q = mp.Queue()
  binder.bind(mp.Queue, q)

  write_func = producer.queue_create_write(q)
  binder.bind(producer.EventWriter, producer.DefaultEventWriter(write_func))
  binder.bind(consumer.Stream, consumer.infinite_mpqueue_stream(q))


inject.configure(pipeline)


# Stream Processing
#
def worker(q):
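  # consumer.consume() resolves its Stream (backed by q) through the inject bindings above.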
  consumer.consume()


q = inject.instance(mp.Queue)

p = mp.Process(target=worker, args=(q,))
p.start()

producer.produce()

q.close()
q.join_thread()

p.join()
Example #8
def send_message(count=1):
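    # Read the message text from the query string and hand it to the producer.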
    message = request.args.get("message")
    produce(message, count=count)
    return message + " Sent"
def consume(broker_url='broker:9092'):
    try:
        route = 'run_business_rule'
        logging.info(f'Listening to topic: {route}')

        consumer = create_consumer(route)
        logging.debug('Consumer object created.')

        parts = consumer.partitions_for_topic(route)
        if parts is None:
            logging.warning(f'No partitions for topic `{route}`')
            logging.debug(f'Creating Topic: {route}')
            produce(route, {})
            print(f'Listening to topic `{route}`...')
            while parts is None:
                consumer = create_consumer(route)
                parts = consumer.partitions_for_topic(route)
                logging.warning("No partition. In while loop. Make it stop")

        partitions = [TopicPartition(route, p) for p in parts]
        consumer.assign(partitions)

        for message in consumer:
            data = message.value
            logging.info(f'Message: {data}')

            try:
                case_id = data['case_id']
                functions = data.pop('functions', None)
                tenant_id = data['tenant_id']
                start_rule_id = data.pop('start_rule_id', None)
                stage = data.get('stage', None)
                workflow = data.get('workflow', None)
                file_name = data.get('file_name', None)
                files = data.get('files', None)
                source = data.get('source', None)
                file_path = data.get('file_path', None)
                original_file_name = data.get('original_file_name', None)
            except Exception as e:
                logging.warning(f'Received unknown data. [{data}] [{e}]')
                consumer.commit()
                continue

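            # Per-tenant handles to the queue-state and Kafka message-flow databases.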
            db_config['tenant_id'] = tenant_id
            queue_db = DB('queues', **db_config)
            kafka_db = DB('kafka', **db_config)

            query = 'SELECT * FROM `button_functions` WHERE `route`=%s'
            function_info = queue_db.execute(query, params=[route])
            in_progress_message = list(function_info['in_progress_message'])[0]
            failure_message = list(function_info['failure_message'])[0]
            success_message = list(function_info['success_message'])[0]

            message_flow = kafka_db.get_all('grouped_message_flow')

            # Get which button (group in kafka table) this function was called from
            group = data.pop('group', None)
            if group:
                # Get message group functions
                group_messages = message_flow.loc[message_flow['message_group']
                                                  == group]

                # If it's the first function, update the progress count
                first_flow = group_messages.head(1)
                first_topic = first_flow.loc[first_flow['listen_to_topic'] ==
                                             route]

                query = 'UPDATE `process_queue` SET `status`=%s, `total_processes`=%s, `case_lock`=1 WHERE `case_id`=%s'

                if not first_topic.empty:
                    logging.debug(
                        f'`{route}` is the first topic in the group `{group}`.'
                    )
                    logging.debug(
                        f'Number of topics in group `{group}` is {len(group_messages)}'
                    )
                    if list(first_flow['send_to_topic'])[0] is None:
                        queue_db.execute(query,
                                         params=[
                                             in_progress_message,
                                             len(group_messages), case_id
                                         ])
                    else:
                        queue_db.execute(query,
                                         params=[
                                             in_progress_message,
                                             len(group_messages) + 1, case_id
                                         ])

            # Get the data for this function; it is passed on through the rest
            # of the chained functions.
            function_params = {}
            if functions:
                for function in functions:
                    if function['route'] == route:
                        function_params = function['parameters']
                        break
            else:
                function_params['stage'] = [stage]

            # Call the function
            try:
                logging.debug('Calling function `run_business_rule`')
                result = apply_business_rule(case_id, function_params,
                                             tenant_id, start_rule_id)
                logging.debug(f'Ran business rules with stage {stage}')
                logging.debug(f"result flag is {result.get('flag', '')}")
            except Exception:
                # Unlock the case.
                logging.exception(
                    'Something went wrong while saving changes. Check trace.')
                query = 'UPDATE `process_queue` SET `status`=%s, `case_lock`=0, `failure_status`=1 WHERE `case_id`=%s'
                queue_db.execute(query, params=[failure_message, case_id])
                consumer.commit()
                continue

            # Check if the function was successfully executed
            if group:
                if result['flag']:
                    # If there is only one function in the group, unlock the case.
                    if not first_topic.empty:
                        if list(first_flow['send_to_topic'])[0] is None:
                            # It is the last message. So update file status to completed.
                            query = 'UPDATE `process_queue` SET `status`=%s, `case_lock`=0, `completed_processes`=`completed_processes`+1 WHERE `case_id`=%s'
                            queue_db.execute(query,
                                             params=[success_message, case_id])
                            consumer.commit()
                            continue

                    last_topic = group_messages.tail(1).loc[
                        group_messages['send_to_topic'] == route]

                    # If it is not the last message, produce to the next function; otherwise just unlock the case.
                    if last_topic.empty:
                        # Get next function name
                        next_topic = list(group_messages.loc[
                            group_messages['listen_to_topic'] == route]
                                          ['send_to_topic'])[0]

                        if next_topic is not None:
                            logging.debug('Not the last topic of the group.')
                            produce(next_topic, data)

                        # Update the progress count by 1
                        query = 'UPDATE `process_queue` SET `status`=%s, `completed_processes`=`completed_processes`+1 WHERE `case_id`=%s'
                        queue_db.execute(query,
                                         params=[success_message, case_id])
                        consumer.commit()
                    else:
                        # It is the last message. So update file status to completed.
                        logging.debug('Last topic of the group.')
                        query = 'UPDATE `process_queue` SET `status`=%s, `case_lock`=0, `completed_processes`=`completed_processes`+1 WHERE `case_id`=%s'
                        queue_db.execute(query,
                                         params=[success_message, case_id])
                        consumer.commit()
                else:
                    # Unlock the case.
                    logging.debug(
                        'Flag false. Unlocking case with failure status 1.')
                    query = 'UPDATE `process_queue` SET `status`=%s, `case_lock`=0, `failure_status`=1 WHERE `case_id`=%s'
                    queue_db.execute(query, params=[failure_message, case_id])
                    consumer.commit()
            else:
                data = result['send_data'] if 'send_data' in result else {}
                data['case_id'] = case_id
                data['tenant_id'] = tenant_id
                data['workflow'] = workflow
                data['type'] = 'file_ingestion'
                data['file_name'] = file_name
                data['files'] = files
                data['source'] = source
                data['file_path'] = file_path
                data['original_file_name'] = original_file_name

                query = 'SELECT * FROM `message_flow` WHERE `listen_to_topic`=%s AND `workflow`=%s'
                logging.debug(f'topic - {route} , workflow - {workflow}')
                message_flow = kafka_db.execute(query,
                                                params=[route, workflow])

                if message_flow.empty:
                    logging.error(
                        'Message flow is not configured correctly in the `kafka` table.'
                    )
                    send_to_topic = None
                else:
                    send_to_topic = list(message_flow.send_to_topic)[0]

                if send_to_topic is not None:
                    logging.info(f'Producing to topic {send_to_topic}')
                    produce(send_to_topic, data)
                else:
                    logging.info('There is no topic to send to.')
                if 'send_to_topic' in result:
                    send_to_topic_bypassed = result['send_to_topic']
                    produce(send_to_topic_bypassed, {})
                else:
                    logging.error(
                        'Message not consumed. Some error must have occurred. Will try again!'
                    )
                consumer.commit()
    except Exception:
        logging.exception('Something went wrong in consumer. Check trace.')