Exemple #1
0
async def home(request):
    """Schedule background page production for a fixed set of forum ids,
    then serve the static home page."""
    from producer import Producer

    producer = Producer()
    # Kick off page-2 production for each forum before responding.
    for forum_id in ('40', '41', '43', '44', '45', '46', '47'):
        producer.produce(fid=forum_id, page=2)
    return await file('home.html')
Exemple #2
0
 async def get_page(self, fid, page=2, objs=None):
     """Return ``(objs, page)`` for forum *fid*, preferring the Redis cache.

     On a cache hit the cached payload is decoded and returned; on a miss the
     page list is fetched via ``get_page_list``. In both cases production of
     the next page is scheduled through ``Producer``.

     Note: *objs* is accepted for backward compatibility but is never read —
     it is always overwritten before use. The previous mutable default
     (``objs=[]``) was replaced with ``None`` to avoid the shared
     mutable-default-argument pitfall.
     """
     from producer import Producer
     pro = Producer()
     # Cache key layout: moxing_<fid>_<page>
     keys = 'moxing_' + fid + '_' + str(page)
     res = rds.get(keys)
     if res:
         objs, page = json.loads(res.decode('utf-8'))
         pro.produce(fid=fid, page=page + 1)
         return objs, page
     objs, page = await self.get_page_list(fid=fid,
                                           page=page,
                                           start_page=page,
                                           objs=[])
     pro.produce(fid=fid, page=page + 1)
     return objs, page
Exemple #3
0
from producer import  Producer
import logging
from pathlib import Path
import json

from confluent_kafka import avro

def import_or_install(package):
    """Import *package*, installing it with pip first if it is missing.

    Parameters
    ----------
    package : str
        Importable module name (also used as the pip package name).
    """
    import importlib
    import subprocess
    import sys

    try:
        importlib.import_module(package)
    except ImportError:
        # BUG FIX: the original called `pip.main(...)` without importing pip,
        # and `pip.main` was removed in pip 10. Invoking pip as a module via
        # the current interpreter is the supported programmatic install path.
        subprocess.check_call([sys.executable, '-m', 'pip', 'install', package])

import_or_install("pandas")

# Avro key schema lives next to this script under schemas/.
key_schema = avro.load(f"{Path(__file__).parents[0]}/schemas/arrival_key.json")

p = Producer("hello", key_schema, 1, 1)
p.close()

# NOTE(review): this re-import shadows the project-local Producer imported
# above with confluent_kafka's Producer for the remainder of the script.
from confluent_kafka import Producer
import socket

conf = {'bootstrap.servers': "host1:9092,host2:9092",
        'client.id': socket.gethostname()}

producer = Producer(conf)

producer.produce("dads", key="key", value="value")
# BUG FIX: the original line was truncated ("producer."). flush() is the
# standard completion here — it blocks until buffered messages are delivered
# so the script does not exit with messages still queued.
producer.flush()
Exemple #4
0
class BrainsphereModel:
    """Model of concentration spread over a brain connectivity graph.

    Combines a production term (``Producer``) and a diffusion term
    (``Diffusor``) integrated with explicit Euler steps, and exposes
    loss/gradient helpers for parameter fitting.
    """

    def __init__(self, functional_connectivity, patient_data, **kwargs):
        """Build the model from a connectivity matrix and patient data.

        Parameters
        ----------
        functional_connectivity : 2-D array-like
            Node-by-node connectivity matrix; its row count defines
            ``numNodes``.
        patient_data
            Observed data handed to the ``Loss`` object.
        **kwargs
            Optional overrides: ``nodeCoordinates``, ``optimizer``, ``loss``,
            ``euclideanAdjacency``, ``producer``, ``diffuser``, ``params``.
            Any other keyword raises ``TypeError``.

        NOTE(review): ``euclideanAdjacency`` is required in practice —
        ``reset()`` reads ``self.euclideanAdjacency`` unconditionally, so
        omitting the kwarg makes ``__init__`` raise ``AttributeError``.
        """
        # Production-rule variants forwarded to Producer.
        self.types = [
            'ConcentrationLinear', 'Constant', 'ConcentrationSigmoid',
            'WeightedDegreeLinear', 'WeightedDegreeSigmoid'
        ]
        self.producer = Producer(self.types)
        self.params = self.producer.params

        for key, value in kwargs.items():
            if key == "nodeCoordinates":
                self.nodeCoordinates = value
            elif key == "optimizer":
                self.optimizer = value
            elif key == "loss":
                # NOTE(review): this value is overwritten below by the
                # unconditional ``self.loss = Loss(...)`` — the kwarg appears
                # to have no lasting effect; confirm intent.
                self.loss = value
            elif key == "euclideanAdjacency":
                self.euclideanAdjacency = value
            elif key == "producer":
                # NOTE(review): likewise clobbered — reset() (called at the
                # end of __init__) rebuilds self.producer unconditionally.
                self.producer = value
            elif key == "diffuser":
                # NOTE(review): stored as ``self.diffuser`` but the rest of
                # the class reads ``self.diffusor`` (set in reset()), so this
                # kwarg is effectively ignored — probable spelling mismatch.
                self.diffuser = value
            elif key == "params":
                self.params.update(value)
            else:
                raise TypeError("Illegal Keyword '" + str(key) + "'")

        self.functionalConnectivity = functional_connectivity
        self.patientData = patient_data
        self.numNodes, _ = np.shape(functional_connectivity)
        # Unconditional default loss (see NOTE above about the kwarg).
        self.loss = Loss("mse", self.patientData)
        self.lastloss = 0

        self.reset()

    def reset(self):
        """Reinitialize concentration state, producer and diffusor so a
        fresh ``run()`` starts from the same initial conditions."""
        self.initializer = Initializer("braak1", self.numNodes, self.params)
        self.concentration = self.initializer.get()
        # History accumulates one row per timestep (np.append with axis=0).
        self.concentrationHistory = np.copy(self.concentration)

        self.producer = Producer(self.types)
        self.diffusor = Diffusor("euclidean",
                                 self.params,
                                 EuclideanAdjacency=self.euclideanAdjacency)

    def run(self):
        """Integrate the model forward and return the resulting loss.

        Performs one probe step with a tiny ``deltaT`` to gauge how fast
        total concentration grows, rescales ``deltaT`` so roughly
        ``timesteps`` steps reach ``stop_concentration``, then iterates
        until the total concentration cap is hit or growth stops.

        Returns
        -------
        float
            ``Loss`` evaluated on the concentration history, or the
            sentinel ``9999999`` when the probe step shows no growth.
        """
        stop_concentration = 1100
        timesteps = 2500

        self.reset()
        deltaT = 0.0001
        # Probe step: production + diffusion scaled by the tiny deltaT.
        self.concentration += deltaT * (
            self.producer.produce(params=self.params,
                                  concentration=self.concentration,
                                  connectivity=self.functionalConnectivity) +
            self.diffusor.diffuse(self.concentration))
        self.concentrationHistory = np.append(self.concentrationHistory,
                                              self.concentration,
                                              axis=0)
        # Growth measured between the first two history rows.
        deltaConc = np.sum(self.concentrationHistory[1, :]) - np.sum(
            self.concentrationHistory[0, :])
        if deltaConc <= 0.0:
            # No growth: report a large sentinel loss instead of looping.
            return 9999999
        else:
            # Rescale so ~timesteps steps of this growth reach the cap.
            deltaT *= stop_concentration / timesteps / deltaConc

        while (np.sum(self.concentration) <
               stop_concentration) and (np.sum(deltaConc) > 0):
            deltaConc = deltaT * (self.producer.produce(
                params=self.params,
                concentration=self.concentration,
                connectivity=self.functionalConnectivity) +
                                  self.diffusor.diffuse(self.concentration))
            self.concentration += deltaConc
            self.concentrationHistory = np.append(self.concentrationHistory,
                                                  self.concentration,
                                                  axis=0)
            # print(self.loss.get(self.concentrationHistory))

        self.lastloss = self.loss.get(self.concentrationHistory)

        return self.lastloss

    def gradient(self, loss=None):
        """Estimate a finite-difference gradient of the loss w.r.t. params.

        Perturbs every parameter simultaneously by a random ±0.01 step,
        re-runs the model once, and attributes the whole loss change to
        each parameter's own step (a simultaneous-perturbation estimate,
        not per-parameter partial derivatives).

        Parameters
        ----------
        loss : float, optional
            Baseline loss; computed via ``run()`` when omitted.
        """
        if loss is None:
            loss = self.run()

        params_new = {}
        params_old = self.params.copy()
        deltaX = {}
        for key, value in params_old.items():
            # Random sign keeps the perturbation direction unbiased.
            deltaX[key] = np.sign(np.random.randn()) * 0.01
            params_new[key] = value + deltaX[key]

        self.params = params_new
        new_loss = self.run()
        grad = {}
        # Restore the unperturbed parameters before returning.
        self.params = params_old

        for key, value in params_old.items():
            grad[key] = (new_loss - loss) / (deltaX[key])

        return grad

    def gradient4(self):
        """Average four independent ``gradient()`` estimates (computed in
        parallel with joblib) to reduce the variance of the random
        perturbation directions."""
        loss = self.run()
        gradients = Parallel(n_jobs=4)(delayed(self.gradient)(loss)
                                       for i in range(4))

        grad = {}

        for key in self.params:
            gradsum = 0
            count = 0
            for g in gradients:
                gradsum += g.get(key)
                count += 1.0
            grad[key] = gradsum / count
        return grad
Exemple #5
0
def ez_produce(name, queue, data, is_rpc=False, rpc_attempts=25):
    """Send *data* to a RabbitMQ queue.

    Parameters
    ----------
    name : str
        Logical service name; upper-cased and interpolated into the
        ``RABBITMQ_<NAME>_*`` environment variable names.
    queue : str
        Fallback queue name when ``RABBITMQ_<NAME>_QUEUE`` is unset.
    data : dict
        JSON-serializable payload.
    is_rpc : bool
        When true, wait for and return the (JSON-decoded) RPC response and
        re-raise unexpected errors instead of swallowing them.
    rpc_attempts : int
        Retry budget forwarded to ``Producer``.

    Returns
    -------
    ``None`` for invalid arguments; ``True``/``False`` for fire-and-forget
    success/failure; the decoded response (or ``None``) in RPC mode.
    """
    # Guard: all arguments present and data is a dict. This also rejects an
    # empty dict, so no separate `if not data` re-check is needed (the one
    # in the original was unreachable).
    if not (name and queue and data and isinstance(data, dict)):
        return

    name = name.upper()

    try:
        producer = Producer(host=os.getenv('RABBITMQ_HOST'),
                            port=os.getenv('RABBITMQ_PORT', 5672),
                            vhost=os.getenv('RABBITMQ_VHOST', '/'),
                            username=os.getenv('RABBITMQ_%s_USER' % name),
                            password=os.getenv('RABBITMQ_%s_PASS' % name),
                            is_rpc=is_rpc,
                            rpc_attempts=rpc_attempts)

        response = producer.produce(queue=os.getenv('RABBITMQ_%s_QUEUE' % name,
                                                    queue),
                                    value=json.dumps(data))

        # This MUST be ran through json.loads again...
        if response:
            response = json.loads(response)

    # Couldn't connect to rabbitmq, most likely
    except AttributeError as e:
        message = 'Your .env file is probably not set up correctly.'
        print({
            'MESSAGE': message,
            'NAME': name,
            'QUEUE': queue,
            'RABBITMQ_HOST': os.getenv('RABBITMQ_HOST'),
            'RABBITMQ_PORT': os.getenv('RABBITMQ_PORT', 5672),
            'RABBITMQ_VHOST': os.getenv('RABBITMQ_VHOST', '/'),
            # BUG FIX: the keys below were the literal templates
            # 'RABBITMQ_%s_USER'/'RABBITMQ_%s_PASS'; interpolate `name` so
            # the debug dump shows the variables actually looked up.
            'RABBITMQ_%s_USER' % name: os.getenv('RABBITMQ_%s_USER' % name),
            'RABBITMQ_%s_PASS' % name: os.getenv('RABBITMQ_%s_PASS' % name),
        })
        logger.write_log('%s_PRODUCER_ERROR_ATTR_ERROR' % name, message)
        return False

    # Type errors happen when data cannot be serialized to JSON
    except TypeError as e:
        logger.write_log('%s_PRODUCER_ERROR_TYPE_ERROR' % name, str(e))
        return False

    # Any other exception
    except Exception as e:
        if is_rpc:
            raise e
        else:
            logger.write_log('%s_PRODUCER_ERROR' % name, str(e))
            return False

    if not is_rpc:
        return True

    if response:
        return json.loads(response) if isinstance(response, str) else response
Exemple #6
0
from os import getenv
from socket import gethostname
import logging
from producer import Producer

logging.basicConfig(level=logging.INFO)

# Producer configuration comes from the environment; the hostname tags
# messages with the machine they originated from.
kafka_host = getenv("KAFKA_HOST")
kafka_topic = getenv("KAFKA_TOPIC")

myproducer = Producer(kafka_host=kafka_host,
                      kafka_topic=kafka_topic,
                      machine_identifier=gethostname())
myproducer.produce()