def get_subscriber():
    client = pubsub.SubscriberClient()
    try:
        client.create_subscription(SUBSCRIPTION, TOPIC)
    except Exception:  # already created
        pass
    return client
def subrun(send):
    subscriber = pubsub.SubscriberClient()
    print('inside subscriber')

    def callback(message):
        print(message.data)
        message.ack()

    print(subscriber.subscribe(subscription_name, callback))
def create_clients() -> None:
    global __subs_client
    global __pubs_client
    if __subs_client is None:
        __subs_client = pubsub.SubscriberClient()
    if __pubs_client is None:
        __pubs_client = pubsub.PublisherClient()
def waitOnFunctionsStart(splitFiles, recv_topic, project_id, bucket, topicId,
                         work_dir, alignsDir, uploadDir, startTimeout,
                         finishTimeout, checkInterval=30):
    reinvokeFutures = {}
    waitStartTime = timer()
    # for reinvoke
    publisher = pubsub.PublisherClient()
    topic_path = createTopic(publisher, project_id, topicId)
    fullUploadDir = os.path.join(work_dir, uploadDir)
    # create client to read/write to pubsub queue
    client = pubsub.SubscriberClient()
    subscription_path = client.subscription_path(project_id, recv_topic)
    recv_path = client.topic_path(project_id, recv_topic)
    create_subscription(client, subscription_path, recv_path)
    storageClient = storage.Client()
    waitIntervalTime = timer()
    unstartedSplitFiles = splitFiles
    # unstartedSplitFiles=listFunctionsNotStarted(splitFiles,bucket,work_dir,uploadDir)
    # wait on start
    while (not checkAllFunctionsStarted(unstartedSplitFiles, storageClient,
                                        bucket, work_dir, uploadDir)
           and timer() - waitStartTime < startTimeout):
        streaming_pull_future = client.subscribe(
            subscription_path,
            callback=lambda message: recv_callback(
                message, publisher, topic_path, bucket, work_dir, recv_topic,
                fullUploadDir, project_id, reinvokeFutures))
        try:
            if streaming_pull_future:
                streaming_pull_future.result(timeout=10)
        except:  # noqa
            streaming_pull_future.cancel()
        if timer() - waitIntervalTime > checkInterval:
            sys.stderr.write(
                "Checking start functions at time (queue empty) {}\n".format(
                    timer() - waitStartTime))
            waitIntervalTime = timer()
            unstartedSplitFiles = listFunctionsNotStarted(
                unstartedSplitFiles, bucket, work_dir, uploadDir)
    unstartedSplitFiles = listFunctionsNotStarted(unstartedSplitFiles, bucket,
                                                  work_dir, uploadDir)
    if unstartedSplitFiles:
        for unstartedSplitFile in unstartedSplitFiles:
            sys.stderr.write('{} not started\n'.format(unstartedSplitFile))
    else:
        sys.stderr.write(
            'Time after last message for functions to start is {}\n'.format(
                timer() - waitStartTime))
    return unstartedSplitFiles
def __init__(self, jobs_denylist=None, jobs_allowlist=None):
    """Initialization for PSQ Worker.

    Args:
        jobs_denylist (Optional[list[str]]): Jobs we will exclude from running
        jobs_allowlist (Optional[list[str]]): The only Jobs we will include
            to run
    """
    setup()
    psq_publisher = pubsub.PublisherClient()
    psq_subscriber = pubsub.SubscriberClient()
    datastore_client = datastore.Client(project=config.TURBINIA_PROJECT)
    try:
        self.psq = psq.Queue(
            psq_publisher, psq_subscriber, config.TURBINIA_PROJECT,
            name=config.PSQ_TOPIC,
            storage=psq.DatastoreStorage(datastore_client))
    except exceptions.GoogleCloudError as e:
        msg = 'Error creating PSQ Queue: {0:s}'.format(str(e))
        log.error(msg)
        raise TurbiniaException(msg)

    # Deregister jobs from denylist/allowlist.
    job_manager.JobsManager.DeregisterJobs(jobs_denylist, jobs_allowlist)
    disabled_jobs = list(config.DISABLED_JOBS) if config.DISABLED_JOBS else []
    disabled_jobs = [j.lower() for j in disabled_jobs]
    # Only actually disable jobs that have not been allowlisted.
    if jobs_allowlist:
        disabled_jobs = list(set(disabled_jobs) - set(jobs_allowlist))
    if disabled_jobs:
        log.info(
            'Disabling non-allowlisted jobs configured to be disabled in the '
            'config file: {0:s}'.format(', '.join(disabled_jobs)))
        job_manager.JobsManager.DeregisterJobs(jobs_denylist=disabled_jobs)

    # Check for valid dependencies/directories.
    dependencies = config.ParseDependencies()
    if config.DOCKER_ENABLED:
        try:
            check_docker_dependencies(dependencies)
        except TurbiniaException as e:
            log.warning(
                'DOCKER_ENABLED=True is set in the config, but there is an '
                'error checking for the docker daemon: {0:s}'.format(str(e)))
    check_system_dependencies(dependencies)
    check_directory(config.MOUNT_DIR_PREFIX)
    check_directory(config.OUTPUT_DIR)
    check_directory(config.TMP_DIR)
    register_job_timeouts(dependencies)

    jobs = job_manager.JobsManager.GetJobNames()
    log.info('Dependency check complete. The following jobs are enabled '
             'for this worker: {0:s}'.format(','.join(jobs)))
    log.info('Starting PSQ listener on queue {0:s}'.format(self.psq.name))
    self.worker = psq.Worker(queue=self.psq)
def delete_subscription(project, subscription_name):
    """Deletes an existing Pub/Sub subscription."""
    subscriber = pubsub.SubscriberClient()
    subscription_path = subscriber.subscription_path(project,
                                                     subscription_name)
    subscriber.delete_subscription(subscription_path)
    print('Subscription deleted: {}'.format(subscription_path))
def main():
    client = monitoring.MetricServiceClient()
    # Opens a connection to the message queue asynchronously
    subscriber = pubsub.SubscriberClient()
    subscription_path = 'projects/{}/subscriptions/{}'.format(
        PROJECT, SUBSCRIPTION)
    future = subscriber.subscribe(subscription_path, callback=handle_message)
    time.sleep(5)
def __init__(self, msg_queue):
    Thread.__init__(self)
    self.shutdown_flag = Event()
    self.msg_queue = msg_queue
    # Create a new pull subscription on the given topic
    subscriber = pubsub.SubscriberClient(credentials=creds)
    topic_name = 'projects/fiery-celerity-194216/topics/YogiMessages'
    sub_name = 'projects/fiery-celerity-194216/subscriptions/PythonYogiSub'
    self.subscription = subscriber.subscribe(sub_name)
def subscription_path(topic_path):
    subscriber = pubsub.SubscriberClient()
    subscription_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION)
    try:
        subscriber.delete_subscription(subscription_path)
    except Exception:
        pass
    subscription = subscriber.create_subscription(subscription_path,
                                                  topic_path)
    yield subscription.name
    subscriber.delete_subscription(subscription_path)
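The generator above reads like a pytest fixture (the @pytest.fixture decorator is presumably elided from the snippet). A minimal sketch of a test consuming it, assuming the same module-level SUBSCRIPTION constant; the test name is hypothetical:

def test_subscription_created(subscription_path):
    # pytest injects the fixture by parameter name; the yielded value is
    # the full subscription resource name.
    assert subscription_path.endswith(SUBSCRIPTION)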
def open_subscription(sub_name, datatype):
    subscriber = pubsub.SubscriberClient()
    sub_name = 'projects/kartees-188316/subscriptions/%s' % sub_name
    subscription = subscriber.subscribe(sub_name)
    bucket = 'kartees-raw-data'

    # Define the callback.
    # Note that the callback is defined *before* the subscription is opened.
    def callback(message):
        source = "Stubhub"
        # datatype = "inventory"
        time_obj = datetime.utcnow()
        year = time_obj.year
        file = time_obj.strftime("%m_%d_%H-%M-%S")
        print("Received message!")
        message.ack()
        try:
            team = str(eval(message.data)['team'])
        except:
            team = 'Unknown-Team'
        gcp_filename = "%s/%s/%s/%s/%s.txt" % (source, datatype, team,
                                               str(year), file)
        local_filename = str(int(time.time())) + '.txt'
        write = False
        with open(local_filename, 'w+') as f:
            try:
                f.write(str(eval(message.data)))
                write = True
            except:
                print("Not valid JSON, error writing to: " + local_filename)
        if write:
            upload_object(bucket, local_filename, gcp_filename, [], [])
            try:
                os.remove(local_filename)
            except:
                print("Nothing to delete")

    # Open the subscription, passing the callback.
    future = subscription.open(callback)
    future.result()
def subscribe_async(latencies_local, latencies_server, *, count: int,
                    duration: int, rate: int):
    subscriber = pubsub.SubscriberClient()
    subscription = f'projects/{PROJECT}/subscriptions/{TOPIC}-0'
    cb = functools.partial(callback, latencies_local, latencies_server)
    future = subscriber.subscribe(subscription, cb)
    try:
        future.result(timeout=duration + 5)
    except concurrent.futures.TimeoutError:
        pass
def main():
    client = monitoring.Client(project=PROJECT)
    subscriber = pubsub.SubscriberClient()
    subscription = subscriber.subscribe('projects/{}/subscriptions/{}'.format(
        PROJECT, SUBSCRIPTION))
    subscription.open(handle_message)
    time.sleep(60)
    while not queue_empty(client):
        pass
    subscription.close()
def create_subscription(project, topic_name, subscription_name):
    """Create a new pull subscription on the given topic."""
    subscriber = pubsub.SubscriberClient()
    topic_path = subscriber.topic_path(project, topic_name)
    subscription_path = subscriber.subscription_path(
        project, subscription_name)
    subscription = subscriber.create_subscription(
        subscription_path, topic_path)
    print('Subscription created: {}'.format(subscription))
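A hedged usage sketch for the helper above; 'my-project', 'my-topic', and 'my-sub' are placeholder names, and the topic is assumed to already exist:

# create_subscription() prints the resulting subscription resource.
create_subscription('my-project', 'my-topic', 'my-sub')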
def test(self):
    project_id = self.connection.project
    subscriptions = []
    subscriber = pubsub.SubscriberClient(
        credentials=self.connection.credentials)
    project_path = subscriber.project_path(project_id)
    for x in subscriber.list_subscriptions(project_path):
        subscriptions.append(x.name)
    return {"project_path": project_path}
def pubsub_client():
    """Google Cloud PubSub client."""
    try:
        from google.cloud import pubsub
        return pubsub.SubscriberClient()
    except Exception:
        LOGGER.exception("unable to initialise PubSub client")
        return None
def pubsub_listen_for_change(topic_name, topic):
    subscriber = pubsub.SubscriberClient()
    sub_name = 'projects/dzproject20180301/subscriptions/ztestsub'
    subscriber.create_subscription(name=sub_name, topic=topic_name)
    subscription = subscriber.subscribe(sub_name)

    def callback(message):
        print(message.data)
        message.ack()

    subscription.open(callback)
def get_message(project_id, subscription_name):
    subscriber = pubsub.SubscriberClient()
    subscription_path = subscriber.subscription_path(project_id,
                                                     subscription_name)

    def callback_subscribe(message):
        print('Received message: {}'.format(message))
        message.ack()

    subscriber.subscribe(subscription_path, callback=callback_subscribe)
    print('Listening for messages on {}'.format(subscription_path))
    while True:
        time.sleep(60)
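Sleeping in a while True loop keeps the process alive but offers no shutdown path. A sketch of an alternative, assuming a google-cloud-pubsub version whose subscribe() returns a streaming pull future; the function name and timeout parameter are hypothetical:

def get_message_with_timeout(project_id, subscription_name, timeout=60):
    subscriber = pubsub.SubscriberClient()
    subscription_path = subscriber.subscription_path(project_id,
                                                     subscription_name)

    def callback(message):
        print('Received message: {}'.format(message))
        message.ack()

    streaming_pull_future = subscriber.subscribe(subscription_path,
                                                 callback=callback)
    print('Listening for messages on {}'.format(subscription_path))
    try:
        # Block until the timeout expires or the stream fails, then stop
        # pulling so the process can exit cleanly.
        streaming_pull_future.result(timeout=timeout)
    except Exception:
        streaming_pull_future.cancel()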
def run(self, project_id, pubsub_subscription):
    """The main loop. Consumes messages from the Pub/Sub subscription."""
    subscriber = pubsub.SubscriberClient()
    subscription_path = subscriber.subscription_path(
        project_id, pubsub_subscription)

    def callback(message):
        """Logic executed when a message is received from subscribed topic."""
        try:
            stream = jsonpickle.decode(message.data)
            # Add the message to the Queue to be processed later by the
            # asynchronous worker.
            QUEUE.put(stream)
            print("Message acknowledged")
        except ValueError as e:
            print('Loading Payload ({}) threw an Exception: {}.'.format(
                message.data, e))
            message.ack()
            return

        # Get the registry id and device id from the attributes. These are
        # automatically supplied by IoT, and allow the server to determine
        # which device sent the event.
        device_project_id = message.attributes['projectId']
        device_registry_id = message.attributes['deviceRegistryId']
        device_id = message.attributes['deviceId']
        device_region = message.attributes['deviceRegistryLocation']

        # Send the config to the device.
        # self._update_device_config(
        #     device_project_id,
        #     device_region,
        #     device_registry_id,
        #     device_id,
        #     data)

        # Acknowledge the consumed message. This ensures that it is not
        # redelivered to this subscription.
        message.ack()

    print('Listening for messages on {}'.format(subscription_path))
    subscriber.subscribe(subscription_path, callback=callback)

    # The subscriber is non-blocking, so keep the main thread from
    # exiting to allow it to process messages in the background.
    while True:
        time.sleep(60)
def add_subscription(topic_project, topic_name, subscription_project,
                     subscription_name, push_endpoint):
    push_config = None
    if push_endpoint:
        push_config = PushConfig()
        push_config.push_endpoint = push_endpoint
    subscriber = pubsub.SubscriberClient()
    topic = 'projects/{}/topics/{}'.format(topic_project, topic_name)
    subscription = 'projects/{}/subscriptions/{}'.format(
        subscription_project, subscription_name)
    subscription = subscriber.create_subscription(subscription, topic,
                                                  push_config)
    print('Subscription created: {}'.format(subscription))
def receive_messages(project, subscription_name):
    """Receives messages from a pull subscription."""
    subscriber = ps.SubscriberClient()
    subscription_path = "projects/fcr-it/subscriptions/oliverpull"

    def callback(message):
        print('Received message: {}'.format(message))
        message.ack()

    subscriber.subscribe(subscription_path, callback=callback)
    print('Listening for messages on {}'.format(subscription_path))
    while True:
        time.sleep(2)
def pubSubWorker():
    credentials = GoogleCredentials.get_application_default()
    subscriber = pubsub.SubscriberClient()
    subscription = subscriber.subscribe(
        'projects/' + app.config['PROJECT_ID'] +
        '/subscriptions/' + app.config['SUBSCRIPTION_NAME'])
    future = subscription.open(callback)
    try:
        future.result()
    except Exception:
        subscription.close()
        raise
    return "{status:'finished'}"
def run_ack_logs(worker_name: str, sfm_queue: Queue):
    logging_context = LoggingContext(worker_name)
    subscriber_client = pubsub.SubscriberClient()
    subscription_path = subscriber_client.subscription_path(
        LOGS_SUBSCRIPTION_PROJECT, LOGS_SUBSCRIPTION_ID)
    logging_context.log("Starting processing")
    worker_state = WorkerState(worker_name)
    while True:
        try:
            perform_pull(worker_state, sfm_queue, subscriber_client,
                         subscription_path)
        except Exception:
            logging_context.exception("Failed to pull messages")
def setup():
    publisher = pubsub.PublisherClient()
    topic = f'projects/{PROJECT}/topics/{TOPIC}'
    try:
        publisher.create_topic(topic)
    except Exception:
        pass
    subscriber = pubsub.SubscriberClient()
    subscription = f'projects/{PROJECT}/subscriptions/{TOPIC}-0'
    try:
        subscriber.create_subscription(name=subscription, topic=topic)
    except Exception:
        pass
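Catching bare Exception above also hides permission and quota errors. A narrower variant of the same idempotent setup, a sketch assuming google.api_core is available (it ships with google-cloud-pubsub) and reusing the same PROJECT/TOPIC constants; setup_strict is a hypothetical name:

from google.api_core import exceptions as gapi_exceptions

def setup_strict():
    publisher = pubsub.PublisherClient()
    topic = f'projects/{PROJECT}/topics/{TOPIC}'
    try:
        publisher.create_topic(topic)
    except gapi_exceptions.AlreadyExists:
        # Topic was created on a previous run; safe to continue.
        pass
    subscriber = pubsub.SubscriberClient()
    subscription = f'projects/{PROJECT}/subscriptions/{TOPIC}-0'
    try:
        subscriber.create_subscription(name=subscription, topic=topic)
    except gapi_exceptions.AlreadyExists:
        pass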
def subscribe_sync(latencies_local, latencies_server, *, count: int,
                   duration: int, rate: int):
    subscriber = pubsub.SubscriberClient()
    subscription = f'projects/{PROJECT}/subscriptions/{TOPIC}-0'
    for _ in range(duration * rate // count):
        # theoretically, timeout should only apply on the first message, eg.
        # since we are still spinning up `fn_count` processes...
        response = subscriber.pull(subscription, max_messages=1, timeout=100)
        for msg in response.received_messages:
            callback(latencies_local, latencies_server, msg.message)
        ack_ids = [msg.ack_id for msg in response.received_messages]
        subscriber.acknowledge(subscription, ack_ids)
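For reference, google-cloud-pubsub 2.x removed the positional pull()/acknowledge() signatures used above in favor of keyword (or request-dict) arguments. A hedged sketch of one loop iteration under that API, reusing the same subscriber, subscription, and callback names; pull_once_v2 is a hypothetical helper:

def pull_once_v2(subscriber, subscription, latencies_local, latencies_server):
    response = subscriber.pull(subscription=subscription, max_messages=1,
                               timeout=100)
    for msg in response.received_messages:
        callback(latencies_local, latencies_server, msg.message)
    ack_ids = [msg.ack_id for msg in response.received_messages]
    if ack_ids:
        # acknowledge() also takes keyword arguments in 2.x; skip the call
        # when the pull returned no messages.
        subscriber.acknowledge(subscription=subscription, ack_ids=ack_ids)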
def __init__(self, jobs_blacklist=None, jobs_whitelist=None):
    """Initialization for PSQ Worker.

    Args:
        jobs_blacklist (Optional[list[str]]): Jobs we will exclude from
            running
        jobs_whitelist (Optional[list[str]]): The only Jobs we will include
            to run
    """
    config.LoadConfig()
    psq_publisher = pubsub.PublisherClient()
    psq_subscriber = pubsub.SubscriberClient()
    datastore_client = datastore.Client(project=config.TURBINIA_PROJECT)
    try:
        self.psq = psq.Queue(
            psq_publisher, psq_subscriber, config.TURBINIA_PROJECT,
            name=config.PSQ_TOPIC,
            storage=psq.DatastoreStorage(datastore_client))
    except exceptions.GoogleCloudError as e:
        msg = 'Error creating PSQ Queue: {0:s}'.format(str(e))
        log.error(msg)
        raise TurbiniaException(msg)

    # Deregister jobs from blacklist/whitelist.
    disabled_jobs = list(config.DISABLED_JOBS) if config.DISABLED_JOBS else []
    job_manager.JobsManager.DeregisterJobs(jobs_blacklist, jobs_whitelist)
    if disabled_jobs:
        log.info(
            'Disabling jobs that were configured to be disabled in the '
            'config file: {0:s}'.format(', '.join(disabled_jobs)))
        job_manager.JobsManager.DeregisterJobs(jobs_blacklist=disabled_jobs)

    # Check for valid dependencies/directories.
    dependencies = config.ParseDependencies()
    if config.DOCKER_ENABLED:
        check_docker_dependencies(dependencies)
    check_system_dependencies(dependencies)
    check_directory(config.MOUNT_DIR_PREFIX)
    check_directory(config.OUTPUT_DIR)
    check_directory(config.TMP_DIR)

    jobs = job_manager.JobsManager.GetJobNames()
    log.info('Dependency check complete. The following jobs will be enabled '
             'for this worker: {0:s}'.format(','.join(jobs)))
    log.info('Starting PSQ listener on queue {0:s}'.format(self.psq.name))
    self.worker = psq.Worker(queue=self.psq)
def poll_notifications(project, subscription_name):
    # [BEGIN poll_notifications]
    subscriber = pubsub.SubscriberClient()
    subscription_path = subscriber.subscription_path(project,
                                                     subscription_name)

    def callback(message):
        print('Received message:\n{}'.format(summarize(message)))
        message.ack()

    subscriber.subscribe(subscription_path, callback=callback)
    print('Listening for messages on {}'.format(subscription_path))
    while True:
        time.sleep(60)
def __init__(self, project_id, topic_name, subscription_name):
    """
    Initialize a Kernel CI message queue subscriber.

    Args:
        project_id:         ID of the Google Cloud project to which the
                            message queue belongs.
        topic_name:         Name of the message queue topic to subscribe to.
        subscription_name:  Name of the subscription to use.
    """
    self.client = pubsub.SubscriberClient()
    self.subscription_path = \
        self.client.subscription_path(project_id, subscription_name)
    self.topic_path = self.client.topic_path(project_id, topic_name)
def main():
    subscriber = pubsub.SubscriberClient()
    flow_control = pubsub.types.FlowControl(max_messages=10)

    # Subscription is a Future.
    print('Listening to subscription for messages')
    subscription = subscriber.subscribe(SUBSCRIPTION,
                                        callback=handle_message,
                                        flow_control=flow_control)
    try:
        subscription.result()
    except Exception as ex:
        print('Error occurred with subscription')
        logging.exception(ex)
        # The streaming pull future is stopped with cancel(), not close().
        subscription.cancel()
        raise
def gcp_pubsub_subscribe(project_id, topic_name, subscription_name):
    subscriber = pubsub.SubscriberClient()
    topic = 'projects/' + str(project_id) + '/topics/' + str(topic_name)
    subscriber_name = 'projects/' + str(project_id) + '/subscriptions/' + str(
        subscription_name)
    # subscriber.create_subscription(subscriber_name, topic)
    subscription = subscriber.subscribe(subscriber_name)

    def callback(message):
        print(message.data)
        message.ack()

    future = subscription.open(callback)
    future.result()
def main():
    # I should publish wtkn0_s to topic10:
    publisher = pubsub.PublisherClient()
    topic_s = 'projects/' + PROJECT + '/topics/' + TOPIC
    for t_i in range(99):
        wtkn_s = str(t_i)
        # publish() requires a bytes payload, not str.
        publisher.publish(topic_s, wtkn_s.encode('utf-8'))
    # I should open a connection to the message queue asynchronously
    flow_control = pubsub.types.FlowControl(max_messages=1)
    subscriber = pubsub.SubscriberClient()
    sub_s = 'projects/' + PROJECT + '/subscriptions/' + SUBSCRIPTION
    subscription = subscriber.subscribe(sub_s, flow_control=flow_control)
    subscription.open(print_message)
    time.sleep(2)
    subscription.close()
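publish() returns a future per message rather than blocking. A small sketch that waits for each publish to complete, under the same PROJECT/TOPIC assumptions as main() above; publish_blocking is a hypothetical helper:

def publish_blocking(publisher, topic_s, payloads):
    for payload in payloads:
        # The future's result() is the server-assigned message ID, so
        # result() also surfaces any publish error.
        future = publisher.publish(topic_s, payload.encode('utf-8'))
        print(future.result())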