def start(self):
    """Run the bigquery-writer worker loop.

    Creates the Flask app context, connects to BigQuery, Redis and RabbitMQ
    (retrying on failure), then consumes from the unique-listens queue
    forever. Blocks indefinitely; if WRITE_TO_BIGQUERY is disabled it just
    sleeps so the process stays alive without doing work.
    """
    app = create_app()
    with app.app_context():
        current_app.logger.info("bigquery-writer init")
        self._verify_hosts_in_config()

        # if we're not supposed to run, just sleep
        if not current_app.config['WRITE_TO_BIGQUERY']:
            sleep(66666)
            return

        try:
            self.bigquery = create_bigquery_object()
        except NoCredentialsFileException:
            current_app.logger.critical("BigQuery credential file not present! Sleeping...")
            sleep(100000)
            # BUG FIX: without credentials we cannot write anything. The
            # original fell through into the Redis/RabbitMQ loops with
            # self.bigquery unset, which would raise AttributeError once a
            # message arrived. Bail out instead.
            return
        except NoCredentialsVariableException:
            current_app.logger.critical("BigQuery credentials environment variable not set!")
            sleep(100000)
            return

        # Retry Redis until it answers a ping; ERROR_RETRY_DELAY paces retries.
        while True:
            try:
                self.redis = Redis(
                    host=current_app.config['REDIS_HOST'],
                    port=current_app.config['REDIS_PORT'],
                )
                self.redis.ping()
                break
            except Exception as err:
                # logger.warn is deprecated; logger.warning is the supported name
                current_app.logger.warning("Cannot connect to redis: %s. Retrying in 3 seconds and trying again." % str(err), exc_info=True)
                sleep(self.ERROR_RETRY_DELAY)

        # Consume forever; on a dropped RabbitMQ connection, rebuild and retry.
        while True:
            self.connect_to_rabbitmq()
            self.channel = self.connection.channel()
            self.channel.exchange_declare(exchange=current_app.config['UNIQUE_EXCHANGE'], exchange_type='fanout')
            self.channel.queue_declare(current_app.config['UNIQUE_QUEUE'], durable=True)
            self.channel.queue_bind(exchange=current_app.config['UNIQUE_EXCHANGE'], queue=current_app.config['UNIQUE_QUEUE'])
            self.channel.basic_consume(
                lambda ch, method, properties, body: self.static_callback(ch, method, properties, body, obj=self),
                queue=current_app.config['UNIQUE_QUEUE'],
            )
            self.channel.basic_qos(prefetch_count=PREFETCH_COUNT)
            current_app.logger.info("bigquery-writer started")
            try:
                self.channel.start_consuming()
            except pika.exceptions.ConnectionClosed:
                current_app.logger.warning("Connection to rabbitmq closed. Re-opening.")
                self.connection = None
                self.channel = None
                continue
            self.connection.close()
def start(self):
    """Run the bigquery-writer worker loop (self-configured variant).

    Connects to BigQuery, Redis and RabbitMQ (retrying on failure), then
    consumes from the unique-listens queue forever. Blocks indefinitely;
    if WRITE_TO_BIGQUERY is disabled it just sleeps so the process stays
    alive without doing work.
    """
    self.log.info("bigquery-writer init")
    self._verify_hosts_in_config()

    # if we're not supposed to run, just sleep
    if not self.config.WRITE_TO_BIGQUERY:
        sleep(66666)
        return

    try:
        self.bigquery = create_bigquery_object()
    except (NoCredentialsFileException, NoCredentialsVariableException):
        self.log.error(
            "Credential File not present or invalid! Sleeping...")
        sleep(1000)
        # BUG FIX: the original fell through after the sleep and continued
        # into the consume loops with self.bigquery unset, which would raise
        # AttributeError on the first message. Bail out instead.
        return

    # Retry Redis until it answers a ping; ERROR_RETRY_DELAY paces retries.
    while True:
        try:
            self.redis = Redis(host=self.config.REDIS_HOST,
                               port=self.config.REDIS_PORT)
            self.redis.ping()
            break
        except Exception as err:
            self.log.error(
                "Cannot connect to redis: %s. Retrying in 3 seconds and trying again." % str(err))
            sleep(self.ERROR_RETRY_DELAY)

    # Consume forever; on a dropped RabbitMQ connection, rebuild and retry.
    while True:
        self.connect_to_rabbitmq()
        self.channel = self.connection.channel()
        self.channel.exchange_declare(exchange=self.config.UNIQUE_EXCHANGE, exchange_type='fanout')
        self.channel.queue_declare(self.config.UNIQUE_QUEUE, durable=True)
        self.channel.queue_bind(exchange=self.config.UNIQUE_EXCHANGE, queue=self.config.UNIQUE_QUEUE)
        self.channel.basic_consume(
            lambda ch, method, properties, body: self.static_callback(
                ch, method, properties, body, obj=self),
            queue=self.config.UNIQUE_QUEUE,
        )
        self.channel.basic_qos(prefetch_count=PREFETCH_COUNT)
        self.log.info("bigquery-writer started")
        try:
            self.channel.start_consuming()
        except pika.exceptions.ConnectionClosed:
            self.log.info("Connection to rabbitmq closed. Re-opening.")
            self.connection = None
            self.channel = None
            continue
        self.connection.close()
def start(self):
    """Start the job runner.

    Runs perpetually: connects to BigQuery, the database and Redis, then
    monitors the bigquery-jobs RabbitMQ queue and dispatches each entry to
    ``self.callback``. Never returns; if BigQuery support is disabled it
    sleeps forever instead.
    """
    with self.app.app_context():
        # No BigQuery support configured: idle forever instead of exiting,
        # so the process keeps running without doing any work.
        if not current_app.config['WRITE_TO_BIGQUERY']:
            while True:
                time.sleep(10000)

        # Hoist the frequently-used app-context attributes into locals.
        logger = current_app.logger
        config = current_app.config

        logger.info('Connecting to Google BigQuery...')
        self.bigquery = bigquery.create_bigquery_object()
        logger.info('Connected!')

        logger.info('Connecting to database...')
        db.init_db_connection(config['SQLALCHEMY_DATABASE_URI'])
        logger.info('Connected!')

        logger.info('Connecting to redis...')
        self.redis = utils.connect_to_redis(
            host=config['REDIS_HOST'],
            port=config['REDIS_PORT'],
            log=logger.error,
        )
        logger.info('Connected!')

        # Consume forever; when RabbitMQ drops the connection, rebuild
        # the connection and channel and resume consuming.
        while True:
            self.init_rabbitmq_connection()
            self.incoming_ch = utils.create_channel_to_consume(
                connection=self.connection,
                exchange=config['BIGQUERY_EXCHANGE'],
                queue=config['BIGQUERY_QUEUE'],
                callback_function=self.callback,
            )
            logger.info('Stats calculator started!')
            try:
                self.incoming_ch.start_consuming()
            except pika.exceptions.ConnectionClosed:
                logger.warning("Connection to rabbitmq closed. Re-opening.")
                self.connection = None
                continue
            self.connection.close()
def init_bigquery_connection():
    """ Initiates the connection to Google BigQuery.

    Rebinds the module-level ``bigquery`` name to the client returned by
    ``create_bigquery_object`` rather than returning it; callers are
    expected to use the global afterwards.
    """
    # NOTE(review): this shadows/overwrites whatever ``bigquery`` referred to
    # at module level (elsewhere in this codebase it appears to be a module
    # name) — confirm the intended target before renaming.
    global bigquery
    bigquery = create_bigquery_object()