def func():
    """Parse the JSON request body, run the model, and return the response."""
    payload = request.get_json(force=True)
    print("Raw_data:", payload)
    # Translate the raw payload into model features, predict, then wrap the
    # prediction back into the response envelope expected by the caller.
    features = Handler.build_request(payload)
    prediction = modelClass.predict(features)
    return Handler.build_response(payload, prediction)
def __init__(self, config=None):
    """Create a new instance of the InfluxdbeHandler."""
    Handler.__init__(self, config)

    if not InfluxDBClient:
        self.log.error('influxdb.client.InfluxDBClient import failed. '
                       'Handler disabled')

    # Options ('ssl' arrives as the string "True"/"False")
    self.ssl = (self.config['ssl'] == "True")
    self.hostname = self.config['hostname']
    self.port = int(self.config['port'])
    self.username = self.config['username']
    self.password = self.config['password']
    self.database = self.config['database']
    self.batch_size = int(self.config['batch_size'])
    self.batch_count = 0
    self.time_precision = self.config['time_precision']

    # Data
    self.batch = {}
    self.influx = None

    # Connect
    self._connect()
def __init__(self, config=None):
    """Create a new instance of cloudwatchHandler class."""
    # Initialize Handler
    Handler.__init__(self, config)

    # Initialize Data
    self.connection = None

    # Initialize Options
    self.region = self.config['region']

    # BUG FIX: the original indexed ['instance-id'] unconditionally and
    # crashed with KeyError off-EC2; guard like the newer variant of this
    # handler elsewhere in this file.
    instance_metadata = boto.utils.get_instance_metadata()
    if 'instance-id' in instance_metadata:
        self.instance_id = instance_metadata['instance-id']
        self.log.debug("Setting InstanceID: " + self.instance_id)
    else:
        self.instance_id = None
        self.log.error('CloudWatch: Failed to load instance metadata')

    self.valid_config = ('region', 'collector', 'metric', 'namespace',
                         'name', 'unit')

    # One rule dict per configobj Section found in the handler config.
    self.rules = []
    for key_name, section in self.config.items():
        if section.__class__ is Section:
            rules = {}
            for key in section.keys():
                if key not in self.valid_config:
                    self.log.warning("invalid key %s in section %s",
                                     key, section.name)
                else:
                    rules[key] = section[key]
            self.rules.append(rules)

    # Create CloudWatch Connection
    self._bind()
def __init__(self):
    """Initialise the scoreboard handler: layout, timers and display modes."""
    Handler.__init__(self)

    # Vertical layout: one fixed-height row per team.
    self.teamHeight = 60
    self.maxOffsetY = len(Handler.contest.teamMap.items()) * self.teamHeight
    self.offsetY = 0
    self.pendingScroll = 0

    # Animation / input timers.
    self.animationTimer = Timer()
    self.animationSpeed = 5000.0
    self.keyTimer = Timer()
    self.moveMap = {}

    # Display modes.
    self.awardsMode = False
    self.lockTo = None

    # Attract-mode auto scrolling.
    self.attractTimer = Timer()
    self.attractSpeed = 1000

    # Periodic contest-data refresh.
    self.refreshTimer = Timer()
    self.refreshFreq = 5000

    # Fast-forward (reveal) mode.
    self.ffTimer = Timer()
    self.ffMode = False
    self.ffLength = 100000.0

    pygame.mouse.set_visible(False)
def __init__(self, config=None):
    """Create a new instance of the GraphiteHandler class."""
    Handler.__init__(self, config)

    # Connection state
    self.socket = None
    self.metrics = []
    self.last_connect_timestamp = -1

    # Transport options
    self.proto = self.config['proto'].lower().strip()
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.timeout = float(self.config['timeout'])
    self.keepalive = bool(self.config['keepalive'])
    self.keepaliveinterval = int(self.config['keepaliveinterval'])
    self.flow_info = self.config['flow_info']
    self.scope_id = self.config['scope_id']
    self.reconnect_interval = int(self.config['reconnect_interval'])

    # Batching / backlog options
    self.batch_size = int(self.config['batch'])
    self.max_backlog_multiplier = int(self.config['max_backlog_multiplier'])
    self.trim_backlog_multiplier = int(self.config['trim_backlog_multiplier'])

    # Connect
    self._connect()
def __init__(self, config=None):
    """Create a new instance of rmqHandler class."""
    Handler.__init__(self, config)

    # Connection state
    self.connection = None
    self.channel = None

    # Options (all optional, with defaults)
    cfg = self.config.get
    self.server = cfg('server', '127.0.0.1')
    self.port = int(cfg('port', 5672))
    self.topic_exchange = cfg('topic_exchange', 'diamond')
    self.vhost = cfg('vhost', '')
    self.user = cfg('user', 'guest')
    self.password = cfg('password', 'guest')
    self.routing_key = cfg('routing_key', 'metric')
    self.custom_routing_key = cfg('custom_routing_key', 'diamond')

    if not pika:
        self.log.error('pika import failed. Handler disabled')
        return

    # Create rabbitMQ topic exchange and bind
    try:
        self._bind()
    except pika.exceptions.AMQPConnectionError:
        self.log.error('Failed to bind to rabbitMQ topic exchange')
def __init__(self, config=None):
    """Create a new instance of rmqHandler class."""
    Handler.__init__(self, config)

    # Connection state
    self.connection = None
    self.channel = None

    # Options
    self.server = self.config['server']
    self.rmq_exchange = self.config['rmq_exchange']

    if not pika:
        self.log.error('pika import failed. Handler disabled')
        return

    # Create rabbitMQ pub socket and bind
    try:
        self._bind()
    except pika.exceptions.AMQPConnectionError:
        self.log.error('Failed to bind to rabbitMQ pub socket')
def __init__(self, config=None):
    """Create a new instance of the LibratoHandler class."""
    Handler.__init__(self, config)
    logging.debug("Initialized Librato handler.")

    if librato is None:
        logging.error("Failed to load librato module")
        return

    # Librato submission queue and flush thresholds.
    api = librato.connect(self.config['user'], self.config['apikey'])
    self.queue = api.new_queue()
    self.queue_max_size = int(self.config['queue_max_size'])
    self.queue_max_interval = int(self.config['queue_max_interval'])
    self.queue_max_timestamp = int(time.time() + self.queue_max_interval)
    self.current_n_measurements = 0

    # If a user leaves off the ending comma, cast to a array for them
    filters = self.config['include_filters']
    if isinstance(filters, basestring):
        filters = [filters]
    self.include_reg = re.compile(r'(?:%s)' % '|'.join(filters))
def __init__(self, config=None):
    """Create a new instance of the LibratoHandler class."""
    Handler.__init__(self, config)
    logging.debug("Initialized Librato handler.")

    if librato is None:
        logging.error("Failed to load librato module")
        return

    # Librato client plus local queue and its flush thresholds.
    self.api = librato.connect(self.config["user"], self.config["apikey"])
    self.queue = []
    self.queue_max_age = int(self.config["queue_max_age"])
    self.queue_max_size = int(self.config["queue_max_size"])
    self.queue_max_interval = int(self.config["queue_max_interval"])
    self.queue_max_timestamp = int(time.time() + self.queue_max_interval)
    self.disable_force_flush = bool(self.config["disable_force_flush"])

    # If a user leaves off the ending comma, cast to a array for them
    filters = self.config["include_filters"]
    if isinstance(filters, basestring):
        filters = [filters]
    self.include_reg = re.compile(r"(?:%s)" % "|".join(filters))
def __init__(self, config=None):
    """Create a new instance of the TSDBHandler class."""
    Handler.__init__(self, config)

    # Connection state
    self.socket = None

    # Options
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.timeout = int(self.config['timeout'])
    self.metric_format = str(self.config['format'])

    # 'tags' may be a single string or a list; normalise to one
    # space-prefixed string either way.
    raw_tags = self.config['tags']
    self.tags = ""
    if isinstance(raw_tags, basestring):
        self.tags = raw_tags
    elif isinstance(raw_tags, list):
        self.tags = "".join(" " + tag for tag in raw_tags)
    if self.tags != "" and not self.tags.startswith(' '):
        self.tags = " " + self.tags

    # OpenTSDB refuses tags with = in the value, so see whether we have
    # some of them in it..
    for tag in self.tags.split(" "):
        if tag.count('=') > 1:
            raise Exception("Invalid tag name " + tag)

    self.skipAggregates = self.config['skipAggregates']
    self.cleanMetrics = self.config['cleanMetrics']

    # Connect
    self._connect()
def __init__(self, config=None):
    """Create a new instance of the InfluxdbeHandler."""
    Handler.__init__(self, config)

    if not InfluxDBClient:
        self.log.error('influxdb.client.InfluxDBClient import failed. '
                       'Handler disabled')

    # Initialize Options
    # BUG FIX: the original stored the raw config value, a string — so
    # even ssl = "False" was truthy downstream. Compare against "True"
    # the way the other Influxdb handler variants in this file do.
    self.ssl = (self.config['ssl'] == "True")
    self.hostname = self.config['hostname']
    self.port = int(self.config['port'])
    self.username = self.config['username']
    self.password = self.config['password']
    self.database = self.config['database']
    self.batch_size = int(self.config['batch_size'])
    self.batch_count = 0
    self.time_precision = self.config['time_precision']

    # Initialize Data
    self.batch = {}
    self.influx = None

    # Connect
    self._connect()
def __init__(self, config=None):
    """Create a new instance of the StatsdHandler class."""
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    if not statsd:
        self.log.error('statsd import failed. Handler disabled')
        self.enabled = False
        return

    if not hasattr(statsd, 'StatsClient'):
        self.log.warn('python-statsd support is deprecated '
                      'and will be removed in the future. '
                      'Please use https://pypi.python.org/pypi/statsd/')

    # Options
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.batch_size = int(self.config['batch'])
    self.metrics = []
    self.old_values = {}

    # Connect
    self._connect()
def __init__(self, config=None):
    """Create a new instance of the InfluxdbeHandler."""
    Handler.__init__(self, config)

    if not InfluxDBClient:
        self.log.error('influxdb.client.InfluxDBClient import failed. '
                       'Handler disabled')

    # Options ('ssl' arrives as the string "True"/"False")
    self.ssl = (self.config['ssl'] == "True")
    self.hostname = self.config['hostname']
    self.port = int(self.config['port'])
    self.username = self.config['username']
    self.password = self.config['password']
    self.database = self.config['database']
    self.batch_size = int(self.config['batch_size'])
    self.metric_max_cache = int(self.config['cache_size'])
    self.batch_count = 0
    self.time_precision = self.config['time_precision']

    # Data
    self.batch = {}
    self.influx = None
    self.batch_timestamp = time.time()
    self.time_multiplier = 1

    # Connect
    self._connect()
def __init__(self, config=None):
    """Create a new instance of rmqHandler class."""
    Handler.__init__(self, config)

    # Connection state
    self.connection = None
    self.channel = None

    # Options (all optional, with sensible defaults)
    self.server = self.config.get('server', '127.0.0.1')
    self.port = int(self.config.get('port', 5672))
    self.topic_exchange = self.config.get('topic_exchange', 'diamond')
    self.vhost = self.config.get('vhost', '')
    self.user = self.config.get('user', 'guest')
    self.password = self.config.get('password', 'guest')
    self.routing_key = self.config.get('routing_key', 'metric')
    self.custom_routing_key = self.config.get('custom_routing_key',
                                              'diamond')

    if not pika:
        self.log.error('pika import failed. Handler disabled')
        return

    # Create rabbitMQ topic exchange and bind
    try:
        self._bind()
    except pika.exceptions.AMQPConnectionError:
        self.log.error('Failed to bind to rabbitMQ topic exchange')
def __init__(self, config=None):
    """Create a new instance of the SensuHandler class."""
    Handler.__init__(self, config)

    # Connection state
    self.socket = None
    self.metrics = []

    # Transport options
    self.proto = self.config['proto'].lower().strip()
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.timeout = int(self.config['timeout'])
    self.keepalive = bool(self.config['keepalive'])
    self.keepaliveinterval = int(self.config['keepaliveinterval'])

    # Batching / backlog options
    self.batch_size = int(self.config['batch'])
    self.max_backlog_multiplier = int(self.config['max_backlog_multiplier'])
    self.trim_backlog_multiplier = int(self.config['trim_backlog_multiplier'])

    # Connect
    self._connect()
def __init__(self, config=None):
    """Create a pubsub handler: read options and build the API client."""
    # Initialize Handler
    Handler.__init__(self, config)

    if discovery is None:
        logging.error("Failed to load apiclient.discovery")
        return
    elif GoogleCredentials is None:
        logging.error("Failed to load "
                      "oauth2client.client.GoogleCredentials")
        return

    # Initialize options
    self.topic = self.config['topic']
    self.scopes = self.config['scopes']
    self.retries = int(self.config['retries'])
    self.batch = self.config['batch']
    self.batch_size = int(self.config['batch_size'])
    self.metrics = []

    # Parse "key:value" tag entries.
    # BUG FIX: split on the first ':' only, so values that themselves
    # contain ':' no longer raise ValueError ("too many values to unpack").
    self.tags = {}
    for item in self.config['tags']:
        k, v = item.split(':', 1)
        self.tags[k] = v

    # Initialize client
    credentials = GoogleCredentials.get_application_default()
    if credentials.create_scoped_required():
        credentials = credentials.create_scoped(self.scopes)
    self.client = discovery.build('pubsub', 'v1', credentials=credentials)
def __init__(self, config=None):
    """Create a new instance of the ObservabilityHandler class."""
    Handler.__init__(self, config)
    logging.debug("Initialized Observability handler.")

    if oauth2 is None:
        logging.error("Failed to load oauthlib module")
        return
    if requests_oauthlib is None:
        logging.error("Failed to load requests_oauthlib module")
        return

    # OAuth2 client-credentials session; token is fetched lazily.
    self.client_key = self.config['client_key']
    self.client_secret = self.config['client_secret']
    self.floor_seconds = int(self.config['floor_seconds'])
    self.queue = []
    self.queue_max_age = int(self.config['queue_max_age'])
    backend_client = oauth2.BackendApplicationClient(
        client_id=self.client_key)
    self.session = requests_oauthlib.OAuth2Session(client=backend_client)
    self.token = None
def test_get_all(self):
    """get_all() should return a list of News of exactly `lim` items."""
    url = "https://news.yahoo.com/rss/"
    lim = 3
    hand = Handler(url, lim)
    # Fetch once: the original called get_all() three times, tripling the
    # work against a live feed whose results may differ between calls.
    items = hand.get_all()
    self.assertIsInstance(items, list)
    self.assertIsInstance(items[0], News)
    self.assertEqual(len(items), lim)
def save_img(imgs, stri):
    """Save each image of `imgs` (values in [0, 1]) as a PNG under `stri`.

    Files are named "0.png", "1.png", ... in array order.
    """
    # Hoisted: the original constructed Handler() twice per iteration
    # just to read the invariant img_size.
    size = Handler().img_size
    for i in range(imgs.shape[0]):
        A = imgs[i].copy() * 255  # scale back to 8-bit pixel values
        A = np.reshape(np.ravel(A), (size, size))
        new_p = Image.fromarray(A)
        if new_p.mode != 'RGB':
            new_p = new_p.convert('RGB')
        new_p.save(os.path.join(stri, str(i) + ".png"))
def __init__(self, config=None):
    """Create a new instance of the HostedGraphiteHandler class."""
    Handler.__init__(self, config)
    # API key is normalised once; a wrapped GraphiteHandler does the I/O.
    self.key = self.config['apikey'].lower().strip()
    self.graphite = GraphiteHandler(self.config)
def __init__(self, h, w):
    """Initialise the editor: window, buffer, cursor and modal state."""
    self.win = Window(top=0, x=w, y=h)
    self.buffer = Buffer()
    self.filename = ""
    self.cursor = Cursor()
    # Modal-editing state
    self.mode = "normal"
    self.exit = False
    self.handlers = Handler()
    self.command = ""
    self.message = ""
def __init__(self, config=None):
    """New instance of LogentriesDiamondHandler class."""
    Handler.__init__(self, config)
    self.log_token = self.config.get('log_token', None)
    self.queue_size = int(self.config['queue_size'])
    self.queue = deque([])
    if self.log_token is None:
        # BUG FIX: the original `raise Exception` carried no message,
        # leaving no hint of what was misconfigured. Same exception type,
        # so existing `except Exception` callers are unaffected.
        raise Exception('log_token is required for the Logentries handler')
def __init__(self, config=None):
    """Create a Signalfx handler: batching options and auth-token check."""
    Handler.__init__(self, config)
    self.metrics = []
    self.batch_size = int(self.config['batch'])
    self.url = self.config['url']
    self.auth_token = self.config['auth_token']
    self.batch_max_interval = self.config['batch_max_interval']
    self.resetBatchTimeout()
    if self.auth_token == "":
        # BUG FIX: the original message ("Failed to load Signalfx module")
        # described a module-import failure, not the real problem — an
        # empty auth token.
        logging.error("Signalfx auth_token is not set; handler disabled")
        return
# NOTE(review): this collapsed line opens an outer `try:` (around
# Handler.__init__) whose matching `except` clause is not visible here —
# the block appears truncated in this view. Left byte-identical; confirm
# against the original file before reformatting. Uses Python 2 syntax
# (`except KeyError, e`).
def __init__(self, config=None): """ initialize Netuitive api and populate agent host metadata """ if not netuitive: self.log.error('netuitive import failed. Handler disabled') self.enabled = False return try: Handler.__init__(self, config) logging.debug("initialize Netuitive handler") self.version = self._get_version() self.api = netuitive.Client(self.config['url'], self.config['api_key'], self.version) self.element = netuitive.Element( location=self.config.get('location')) self.batch_size = int(self.config['batch']) self.max_backlog_multiplier = int( self.config['max_backlog_multiplier']) self.trim_backlog_multiplier = int( self.config['trim_backlog_multiplier']) self._add_sys_meta() self._add_aws_meta() self._add_docker_meta() self._add_azure_meta() self._add_config_tags() self._add_config_relations() self._add_collectors() self.flush_time = 0 try: self.config['write_metric_fqns'] = str_to_bool( self.config['write_metric_fqns']) except KeyError, e: self.log.warning('write_metric_fqns missing from the config') self.config['write_metric_fqns'] = False if self.config['write_metric_fqns']: self.metric_fqns_path = self.config['metric_fqns_path'] truncate_fqn_file = open(self.metric_fqns_path, "w") truncate_fqn_file.close() logging.debug(self.config)
def train(self, X, Y):
    """Build, compile and fit the CNN binary classifier.

    X: image batch shaped (n, img_size, img_size, 1) — TODO confirm
    against the caller; Y: 0/1 labels. Trains for 20 epochs in place.
    """
    # Hoisted: Handler().img_size was constructed repeatedly just to read
    # this invariant.
    size = Handler().img_size

    # Convolutional feature extractor followed by a dense classifier head.
    self.model = tf.keras.models.Sequential([
        tf.keras.layers.Conv2D(64, kernel_size=(3, 3), strides=(1, 1),
                               activation='relu',
                               input_shape=(size, size, 1)),
        tf.keras.layers.Conv2D(32, kernel_size=(3, 3), strides=(1, 1),
                               activation='relu',
                               input_shape=(size, size, 1)),
        tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=None,
                                     padding='valid', data_format=None),
        tf.keras.layers.Flatten(),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(512, activation=tf.nn.relu,
                              kernel_initializer='random_uniform'),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(256, activation=tf.nn.relu,
                              kernel_initializer='random_uniform'),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(128, activation=tf.nn.relu,
                              kernel_initializer='random_uniform'),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(1, activation=tf.nn.sigmoid)
    ])
    # CLEANUP: the original carried a large block of dead, commented-out
    # model-building code inside a runtime no-op string literal; removed.
    self.model.compile(optimizer='adam', loss='binary_crossentropy',
                       metrics=['binary_accuracy'])
    self.model.fit(X, Y, epochs=20)
# NOTE(review): as with the other Netuitive variant above, the outer
# `try:` opened here has no visible matching `except` — the block appears
# truncated in this view. Left byte-identical; confirm against the
# original file before reformatting. Uses Python 2 `except KeyError, e`.
def __init__(self, config=None): """ initialize Netuitive api and populate agent host metadata """ if not netuitive: self.log.error('netuitive import failed. Handler disabled') self.enabled = False return try: Handler.__init__(self, config) logging.debug("initialize Netuitive handler") self.version = self._get_version() self.api = netuitive.Client(self.config['url'], self.config[ 'api_key'], self.version) self.element = netuitive.Element( location=self.config.get('location')) self.batch_size = int(self.config['batch']) self.max_backlog_multiplier = int( self.config['max_backlog_multiplier']) self.trim_backlog_multiplier = int( self.config['trim_backlog_multiplier']) self._add_sys_meta() self._add_aws_meta() self._add_docker_meta() self._add_azure_meta() self._add_config_tags() self._add_config_relations() self._add_collectors() self.flush_time = 0 try: self.config['write_metric_fqns'] = str_to_bool(self.config['write_metric_fqns']) except KeyError, e: self.log.warning('write_metric_fqns missing from the config') self.config['write_metric_fqns'] = False if self.config['write_metric_fqns']: self.metric_fqns_path = self.config['metric_fqns_path'] truncate_fqn_file = open(self.metric_fqns_path, "w") truncate_fqn_file.close() logging.debug(self.config)
def __init__(self, config=None):
    """Create a new instance of the InfluxdbeHandler."""
    Handler.__init__(self, config)

    # Options ('ssl' arrives as the string "True"/"False")
    self.ssl = (self.config['ssl'] == "True")
    self.hostname = self.config['hostname']
    self.port = int(self.config['port'])
    self.username = self.config['username']
    self.password = self.config['password']
    self.database = self.config['database']
    self.batch_size = int(self.config['batch_size'])
    self.metric_max_cache = int(self.config['cache_size'])
    self.batch_count = 0
    self.time_precision = self.config['time_precision']
    self.timeout = self.config['timeout']

    # Pick the client class matching the configured server version.
    self.influxdb_version = self.config['influxdb_version']
    self.using_0_8 = False
    if self.influxdb_version in ['0.8', '.8']:
        if not InfluxDB08Client:
            self.log.error(
                'influxdb.influxdb08.client.InfluxDBClient import failed. '
                'Handler disabled')
            self.enabled = False
            return
        self.client = InfluxDB08Client
        self.using_0_8 = True
    else:
        if not InfluxDBClient:
            self.log.error('influxdb.client.InfluxDBClient import failed. '
                           'Handler disabled')
            self.enabled = False
            return
        self.client = InfluxDBClient

    # Data
    self.batch = {}
    self.influx = None
    self.batch_timestamp = time.time()
    self.time_multiplier = 1

    # Connect
    self._connect()
def test_img_alt_2(self):
    """get_img_alt must not return the alt text prefixed with a stray '='."""
    # BUG FIX: the original literal contained an unescaped apostrophe in
    # "laboratory's" inside a single-quoted string, which terminated the
    # string early and made the module fail to parse; it is escaped below.
    text = (
        'img src="http://l.yimg.com/uu/api/res/1.2/I4AtbbFWPM.66LesQWxLqQ--/YXBwaWQ9eXRhY2h5b247aD04Njt3PTEzM'
        'Ds-/https://media.zenfs.com/en/the_new_york_times_articles_158/101bec76cc1717d8bfd63460b9443fd1" width='
        '"130" height="86" alt="She Texted About Dinner While Driving. Then a Pedestrian Was Dead." align="left" '
        'title="She Texted About Dinner While Driving. Then a Pedestrian Was Dead." border="0" ></a>FREEHOLD, N.J.'
        ' -- One woman was out for a walk and a taste of fresh air during a break from her job as a scientist at'
        ' a New Jersey fragrance manufacturer. She and her husband had been trying to get pregnant, and brief'
        ' bouts of exercise, away from the laboratory\'s smells and fumes, were part of that plan.A second '
        'woman was behind the wheel of a black Mercedes-Benz, headed to work as chief executive of a nonprofit in '
    )
    img_alt = '="She Texted About Dinner While Driving. Then a Pedestrian Was Dead.'
    hand = Handler("https://news.yahoo.com/rss/", 3)
    self.assertNotEqual(hand.get_img_alt(text)[0], img_alt)
def createConditionFromArray(self, condition):
    """Build a condition object from a list-style or dict-style definition."""
    if isinstance(condition, list) and Handler.list_get(condition, 0, None):
        # Operator-style definition: the first element names the operator;
        # the rest are its operands.
        operator = Handler.strToUpper(condition.pop(0))
        className = self._conditionClasses.get(operator, None)
        if not className:
            className = SimpleCondition
        return className.fromArrayDefinition(operator, condition)
    # Dict-style definition: {"key": "value"}
    return HashCondition(condition)
def __init__(self, config=None):
    """Create a new instance of the LibratoHandler class."""
    # Initialize Handler
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    # BUG FIX: the original read self.conf, which is never set on this
    # object — Handler.__init__ stores options as self.config, and every
    # other handler in this file reads self.config.
    api = librato.connect(self.config['user'], self.config['apikey'])
    self.queue = api.new_queue()
    self.batch_size = 300
    self.current_n_measurements = 0
def __init__(self, p=0.75, p1=0.8, p2=0.5, iteration_count=100000):
    """Set up the queueing-simulation state: source, queue, two handlers."""
    self.current_tick = 0
    self.handled_tasks = []
    self.states = []
    self.iteration_count = iteration_count
    self.busy_count = 0
    # Each component gets its own Lemer PRNG with identical parameters.
    self.source = Source(p, LemerGenerator(209715120, 3, 7))
    self.queue = TaskQueue(2)
    self.handlers = [Handler(prob, LemerGenerator(209715120, 3, 7))
                     for prob in (p1, p2)]
def __init__(self, config=None):
    """Create a new instance of the StatsdHandler class."""
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")
    # Options ('batch' defaults to 1 when absent)
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.batch_size = int(self.config.get('batch', 1))
    self.old_values = {}
    self.queues = {}
def __init__(self, config=None):
    """Create a new instance of the StatsdHandler class."""
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")
    # Options
    self.host = self.config['host']
    self.port = int(self.config['port'])
    # Connect
    self._connect()
def tick(self): Handler.tick(self) if self.nextHandler != self: nextHandler, self.nextHandler = self.nextHandler, self return nextHandler self.animationTimer.tick(self.delta()) self.attractTimer.tick(self.delta()) self.refreshTimer.tick(self.delta()) self.keyTimer.tick(self.delta()) if self.ffMode: self.ffTimer.tick(self.delta()) self.ffTimer.clock = min(self.ffTimer.clock, self.ffLength) u = self.ffTimer.clock / self.ffLength Handler.contest.revealUntil = int(0.0 + \ (1 - u) * Handler.contest.freezeTime + \ u * Handler.contest.contestTime) self.attractSpeed = 300 else: self.attractSpeed = 1000 curScroll = (abs(self.pendingScroll) + 4) / 5 if self.pendingScroll < 0: curScroll *= -1 self.offsetY += curScroll self.pendingScroll -= curScroll if self.offsetY < 0: self.offsetY = 0 self.pendingScroll = 0 # no need to scroll further elif self.offsetY > self.maxOffsetY: self.offsetY = self.maxOffsetY self.pendingScroll = 0 if (not self.awardsMode ) and self.refreshTimer.clock > self.refreshFreq: self.refreshTimer.reset() try: Handler.contest.refresh_runs() self.setup_animation() Handler.contest.load_clock() except IOError, e: pass if self.lockTo: self.offsetY += self.teamHeight * 12 self.attractTimer.clock = \ (self.offsetY * self.attractSpeed) / self.teamHeight self.lockTo = None for run in Handler.contest.newRunList[-1]: if run.answer == 'Y': self.lockTo = run.team
def __init__(self, config=None):
    """Create a riemann handler: host/port options and a bernhard client."""
    Handler.__init__(self, config)

    # Options
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.transport = self.config.get('transport', 'tcp')

    # Client: TCP unless UDP was explicitly requested.
    transport_cls = (bernhard.TCPTransport if self.transport == 'tcp'
                     else bernhard.UDPTransport)
    self.client = bernhard.Client(self.host, self.port, transport_cls)
def __init__(self, config=None):
    """Create a new instance of the MultiGraphitePickleHandler class."""
    Handler.__init__(self, config)
    self.handlers = []
    # Fan out: one GraphitePickleHandler per configured host, each with
    # its own copy of the config pointing at that single host.
    for host in self.config['host']:
        host_config = deepcopy(self.config)
        host_config['host'] = host
        self.handlers.append(GraphitePickleHandler(host_config))
def __init__(self, config=None):
    """
    @type config: configobj.ConfigObj
    """
    Handler.__init__(self, config)

    # Wire raven/sentry into a dedicated logger for this handler.
    self.sentry_log_handler = raven.handlers.logging.SentryHandler(
        self.config['dsn'])
    self.raven_logger = logging.getLogger(self.__class__.__name__)
    self.raven_logger.addHandler(self.sentry_log_handler)
    self.configure_sentry_errors()

    self.rules = self.compile_rules()
    self.hostname = get_hostname(self.config)
    if not len(self.rules):
        self.log.warning("No rules, this graphite handler is unused")
def __init__(self, p1=0.4, p2=0.5, iteration_count=100000):
    """Set up simulation counters, task source, queue and two handlers."""
    self.iteration_count = iteration_count
    self.task_in_system_count = 0
    self.current_tick = 0
    self.handled_count = 0
    self.refused_count = 0
    self.states = []
    self.p1 = p1
    self.source = Source()
    self.queue = TaskQueue(2)
    # Both handlers share the same Lemer PRNG parameters.
    self.handlers = [Handler(prob, LemerGenerator(209715120, 3, 7))
                     for prob in (p1, p2)]
def __init__(self, config=None):
    """Create a new instance of the StatsdHandler class."""
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")
    # Options
    self.host = self.config["host"]
    self.port = int(self.config["port"])
    self.batch_size = int(self.config["batch"])
    self.metrics = []
    self.old_values = {}
    # Connect
    self._connect()
def __init__(self, config=None):
    """New instance of DatadogHandler class."""
    Handler.__init__(self, config)
    logging.debug("Initialized Datadog handler.")

    if dogapi is None:
        logging.error("Failed to load dogapi module.")
        return

    # Shared dogapi HTTP client, keyed from config.
    self.api = dogapi.dog_http_api
    self.api.api_key = self.config.get('api_key', '')
    self.queue_size = self.config.get('queue_size', 1)
    self.queue = deque([])
def __init__(self, config=None):
    """Create a new instance of the LibratoHandler class."""
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    # Librato REST endpoint credentials and batching state.
    self.user = self.config['user']
    self.apikey = self.config['apikey']
    self.url = 'https://metrics-api.librato.com/v1/metrics'
    self.batch_size = 300
    self.batch = {'counters': [], 'gauges': []}
def client(ip_data):
    """Serve one connected client socket: log in, then loop handling tasks.

    Loops until the peer resets the connection, reading one JSON message
    per iteration and dispatching it to the work queue.
    """
    while True:
        try:
            SendManager.Send().login(ip_data)
            req = ip_data.recv(1024).decode()
            data = js.loads(req)
            push_dataLog(data, ip_data)
            Work.task(data)
            print([i[0].print_sost() for i in listing.robot], "robot")
            # print(listing.task_list_robot, "task")
            # print(listing.task_list_sort, "task")
            # sys.stdout.write("\r list_robot: {0}, list_sort: {1}, qr_flag: {2}, angle: {3}".format(listing.task_list_robot, listing.task_list_sort, listing.list_qr_flag, listing.list_cal))
        except ConnectionResetError:
            # NOTE(review): `sock` is not defined in this function —
            # presumably a module-level socket; confirm it is not meant
            # to be `ip_data`.
            delite_datalog(sock.getsockname())
            break
def __init__(self, config=None):
    """Create a new instance of cloudwatchHandler class."""
    Handler.__init__(self, config)

    if not boto:
        self.log.error(
            "CloudWatch: Boto is not installed, please install boto.")
        return

    # Connection state
    self.connection = None

    # Options
    self.region = self.config['region']

    # EC2 instance id, when running on an instance.
    instance_metadata = boto.utils.get_instance_metadata()
    if 'instance-id' in instance_metadata:
        self.instance_id = instance_metadata['instance-id']
        self.log.debug("Setting InstanceId: " + self.instance_id)
    else:
        self.instance_id = None
        self.log.error('CloudWatch: Failed to load instance metadata')

    self.valid_config = ('region', 'collector', 'metric', 'namespace',
                         'name', 'unit', 'collect_by_instance',
                         'collect_without_dimension')

    # One rule dict per config Section, seeded with the defaults.
    self.rules = []
    for key_name, section in self.config.items():
        if section.__class__ is Section:
            rules = self.get_default_rule_config()
            for key in section.keys():
                if key not in self.valid_config:
                    self.log.warning("invalid key %s in section %s",
                                     key, section.name)
                else:
                    rules[key] = section[key]
            self.rules.append(rules)

    # Create CloudWatch Connection
    self._bind()
def __init__(self, config):
    """Create a new instance of the ArchiveHandler class."""
    Handler.__init__(self, config)

    # Dedicated logger that receives the raw archive lines.
    self.archive = logging.getLogger('archive')
    self.archive.setLevel(logging.DEBUG)

    # Rotate the archive file at midnight, keeping config['days'] backups.
    handler = logging.handlers.TimedRotatingFileHandler(
        self.config['log_file'], 'midnight', 1,
        backupCount=int(self.config['days']))
    handler.setFormatter(logging.Formatter('%(message)s'))
    handler.setLevel(logging.DEBUG)
    self.archive.addHandler(handler)
def __init__(self, config=None):
    """Create a new instance of the TSDBHandler class."""
    Handler.__init__(self, config)
    # Connection state
    self.socket = None
    # Options
    self.host = self.config['host']
    self.port = int(self.config['port'])
    self.timeout = int(self.config['timeout'])
    # Connect
    self._connect()
def __init__(self, config=None):
    """Create a new instance of the LibratoHandler class."""
    Handler.__init__(self, config)
    logging.debug("Initialized statsd handler.")

    # Librato submission queue and flush thresholds (with defaults).
    api = librato.connect(self.config['user'], self.config['apikey'])
    self.queue = api.new_queue()
    self.queue_max_size = int(self.config.get('queue_max_size', 300))
    self.queue_max_interval = int(self.config.get('queue_max_interval', 60))
    self.queue_max_timestamp = int(time.time() + self.queue_max_interval)
    self.current_n_measurements = 0

    # Metric-name whitelist; defaults to matching everything.
    patterns = self.config.get('include_filters', ['^.*'])
    self.include_reg = re.compile(r'(?:%s)' % '|'.join(patterns))
def __init__(self, config=None):
    """Create a new instance of zmqHandler class."""
    Handler.__init__(self, config)
    # ZMQ state
    self.context = None
    self.socket = None
    # Options
    self.port = int(self.config['port'])
    # Create ZMQ pub socket and bind
    self._bind()
def __init__(self, config=None):
    """Create a Signalfx handler: batching options, name filters, token."""
    Handler.__init__(self, config)
    self.metrics = []
    self.batch_size = int(self.config['batch'])
    self.url = self.config['url']
    self.auth_token = self.config['auth_token']
    self.batch_max_interval = self.config['batch_max_interval']
    self.resetBatchTimeout()

    # If a user leaves off the ending comma, cast to a array for them
    include_filters = self.config['include_filters']
    if isinstance(include_filters, basestring):
        include_filters = [include_filters]
    self.include_reg = re.compile(r'(?:%s)' % '|'.join(include_filters))

    if self.auth_token == "":
        # BUG FIX: the original message ("Failed to load Signalfx module")
        # described a module-import failure, not the real problem — an
        # empty auth token.
        logging.error("Signalfx auth_token is not set; handler disabled")
        return