def __init__(self,
                 config_path: str = configPath,
                 project_id: str = None,
                 client_id: str = None,
                 client_secret: str = None,
                 topic_id: str = None,
                 gcp_project_id: str = None,
                 sub_id: str = None):
        """
        Load or get an authorization config. If a new authorization is being
        made, it needs the project_id, client_id, and client_secret.
        """
        configKeys = [
            'clientID', 'clientSecret', 'projectID', 'topicID', 'subID',
            'gcpProjectID', 'accessToken', 'expiresAt', 'refreshToken'
        ]
        self.configPath = config_path
        self.streamInfo = None

        if os.path.isfile(self.configPath):
            with open(self.configPath) as configFile:
                config = json.load(configFile)

                # Check that all expected config keys are present
                for key in configKeys:
                    if key not in config:
                        raise Exception(f'Config file missing key: {key}')

            print('Config loaded.')
            self.config = config
            self.baseURL = f'https://smartdevicemanagement.googleapis.com/' \
                           f'v1/enterprises/{self.config["projectID"]}/'

            # Create authorization headers
            self.authHeaders = {
                'Content-Type': 'application/json',
                'Authorization': f'Bearer {self.config["accessToken"]}'
            }

            self.devices = self._getDevices()['devices']
            self.structures = self._getStructures()['structures']

        else:
            if project_id is None or client_id is None or client_secret is None:
                raise Exception('Missing authorization args')

            # Create OAuth2 link
            authLink = f'https://nestservices.google.com/partnerconnections/' \
                       f'{project_id}/auth?' \
                       f'redirect_uri=https://www.google.com&' \
                       f'access_type=offline&prompt=consent&' \
                       f'client_id={client_id}' \
                       f'&response_type=code&' \
                       f'scope=https://www.googleapis.com/auth/sdm.service'

            print('Follow the link below and come back with the code')
            print(authLink)

            authCode = input('Auth Code: ')
            while len(authCode) == 0:
                authCode = input('Auth Code: ')

            # Do authorization
            params = {
                'client_id': client_id,
                'client_secret': client_secret,
                'code': authCode,
                'grant_type': 'authorization_code',
                'redirect_uri': 'https://www.google.com'
            }
            r = rq.post('https://www.googleapis.com/oauth2/v4/token',
                        data=params)

            if r.status_code != 200:
                print(r.text)
                raise Exception('Bad response.')

            # Parse response
            response = r.json()
            accessToken = response['access_token']
            refreshToken = response['refresh_token']
            expiresAt = response['expires_in'] + time()

            self.baseURL = f'https://smartdevicemanagement.googleapis.com/' \
                           f'v1/enterprises/{project_id}/'

            # Send device request to finish authorization
            url = f'{self.baseURL}devices'
            self.authHeaders = {
                'Content-Type': 'application/json',
                'Authorization': f'Bearer {accessToken}'
            }
            r = rq.get(url=url, headers=self.authHeaders)

            if r.status_code != 200:
                print(r.text)
                raise Exception('Bad response.')

            self.devices = r.json()['devices']

            config = {
                'clientID': client_id,
                'clientSecret': client_secret,
                'projectID': project_id,
                'topicID': topic_id,
                'subID': sub_id,
                'gcpProjectID': gcp_project_id,
                'accessToken': accessToken,
                'expiresAt': expiresAt,
                'refreshToken': refreshToken
            }

            # Save json
            with open(self.configPath, 'w') as out:
                json.dump(config, out)

            print('Authorization complete.')
            self.config = config

            self.structures = self._getStructures()['structures']

        # Check if a subscription is available for pubsub
        subber = pubsub_v1.SubscriberClient()
        subPath = f'projects/{self.config["gcpProjectID"]}'
        response = subber.list_subscriptions(subPath)

        self.eventsReady = False
        for sub in response:
            if sub.name.split('/')[-1] == self.config['subID']:
                self.eventsReady = True
                break

        if not self.eventsReady:
            print('No valid subscriptions!')
            print('Create subscription online. Events will not work.')
            print(
                'More Info: https://developers.google.com/nest/device-access/subscribe-to-events'
            )
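Since the config stores refreshToken and expiresAt, a refresh helper is the natural companion to this constructor. A minimal sketch of one (a hypothetical method, not shown above; it assumes the same rq/requests alias and time import the fragment already relies on — the refresh_token grant against the same Google token endpoint is standard OAuth2):

def _refreshToken(self):
    """Exchange the stored refreshToken for a new access token."""
    params = {
        'client_id': self.config['clientID'],
        'client_secret': self.config['clientSecret'],
        'refresh_token': self.config['refreshToken'],
        'grant_type': 'refresh_token'
    }
    r = rq.post('https://www.googleapis.com/oauth2/v4/token', data=params)

    if r.status_code != 200:
        print(r.text)
        raise Exception('Bad response.')

    response = r.json()
    self.config['accessToken'] = response['access_token']
    self.config['expiresAt'] = response['expires_in'] + time()
    self.authHeaders['Authorization'] = f'Bearer {self.config["accessToken"]}'

    # Persist the refreshed token so the next run can load it
    with open(self.configPath, 'w') as out:
        json.dump(self.config, out)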
Example #2
def synchronous_pull_with_lease_management(project_id, subscription_id):
    """Pulling messages synchronously with lease management"""
    # [START pubsub_subscriber_sync_pull_with_lease]
    import logging
    import multiprocessing
    import sys
    import time

    from google.api_core import retry
    from google.cloud import pubsub_v1

    multiprocessing.log_to_stderr()
    logger = multiprocessing.get_logger()
    logger.setLevel(logging.INFO)
    processes = dict()

    # TODO(developer)
    # project_id = "your-project-id"
    # subscription_id = "your-subscription-id"

    subscriber = pubsub_v1.SubscriberClient()
    subscription_path = subscriber.subscription_path(project_id,
                                                     subscription_id)

    response = subscriber.pull(
        request={
            "subscription": subscription_path,
            "max_messages": 3
        },
        retry=retry.Retry(deadline=300),
    )

    # Start a process for each message based on its size modulo 10.
    for message in response.received_messages:
        process = multiprocessing.Process(target=time.sleep,
                                          args=(sys.getsizeof(message) % 10, ))
        processes[process] = (message.ack_id, message.message.data)
        process.start()

    while processes:
        # Take a break every second.
        time.sleep(1)

        for process in list(processes):
            ack_id, msg_data = processes[process]
            # If the process is running, reset the ack deadline.
            if process.is_alive():
                subscriber.modify_ack_deadline(
                    request={
                        "subscription": subscription_path,
                        "ack_ids": [ack_id],
                        # Must be between 10 and 600.
                        "ack_deadline_seconds": 15,
                    })
                logger.info(f"Reset ack deadline for {msg_data}.")

            # If the process is complete, acknowledge the message.
            else:
                subscriber.acknowledge(request={
                    "subscription": subscription_path,
                    "ack_ids": [ack_id]
                })
                logger.info(f"Acknowledged {msg_data}.")
                processes.pop(process)
    print(
        f"Received and acknowledged {len(response.received_messages)} messages from {subscription_path}."
    )

    # Close the underlying gRPC channel. Alternatively, wrap the subscriber in
    # a 'with' block to automatically call close() when done.
    subscriber.close()
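Assuming the TODO placeholders are filled in, the sample runs standalone; a hypothetical invocation:

if __name__ == "__main__":
    # Substitute real IDs for these placeholder values.
    synchronous_pull_with_lease_management("your-project-id", "your-subscription-id")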
Example #3
import os, uuid, time, json, random, datetime
from google.cloud import pubsub_v1
from google.api_core.exceptions import AlreadyExists

CHANNEL_MAP = {22: "Cartoon", 23: "YoYo TV", 52: "News"}

project_id = os.environ.get('GCP_PROJECT_ID')  # Google Project Id
topic_id = "channel"  # Topic Id
topic_path = f"projects/{project_id}/topics/{topic_id}"
container_name = os.environ.get('HOSTNAME')
sub = f"sub-{container_name}-{uuid.uuid4().hex}"
sub_path = f"projects/{project_id}/subscriptions/{sub}"
subscriber = pubsub_v1.SubscriberClient()

# Create a new subscription
try:
    subscriber.create_subscription(name=sub_path, topic=topic_path)
except AlreadyExists:
    print(f"{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
          f' [WARNING] Subscription already exists, sub_path: {sub_path}')


def callback(message):
    data = json.loads(message.data.decode())
    msg_type = data.get('type')  # avoid shadowing the builtin `type`
    if msg_type == 'show_channel_info':
        print(
            f"{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
            f" [INFO] Message received, the channel is {CHANNEL_MAP.get(data.get('id'))}"
        )
    message.ack()
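The snippet creates the subscription and defines callback, but never opens the stream. A minimal sketch of the missing wiring (hypothetical continuation, reusing the same subscriber and sub_path):

future = subscriber.subscribe(sub_path, callback=callback)
try:
    future.result()  # block so the callback keeps receiving messages
except KeyboardInterrupt:
    future.cancel()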
Example #4
def run_main(event, context):
  project_id = google.auth.default()[1]
  logger = alert_handler.AlertHandler(project_id)

  # Retrieve pubsub messages for all the tests that have been kicked off by
  # the test runner.
  subscriber = pubsub_v1.SubscriberClient()
  project = subscriber.project_path(project_id)
  subscription = None
  for s in subscriber.list_subscriptions(project):
    if s.topic.split('/')[-1] == METRICS_WRITTEN_TOPIC:
      subscription = s.name
      break
  if not subscription:
    subscription_id = subscriber.subscription_path(
        project_id, 'metrics-handler-subscription')
    topic = subscriber.topic_path(project_id, METRICS_WRITTEN_TOPIC)
    subscription = subscriber.create_subscription(
        subscription_id, topic, ack_deadline_seconds=300).name
  try:
    all_msgs = subscriber.pull(subscription, 100).received_messages
  except google.api_core.exceptions.DeadlineExceeded:
    logger.info(
        'No messages found for subscription: {}'.format(subscription))
    return

  # Group messages by test. Each test might have made multiple attempts and
  # therefore could have multiple messages.
  test_name_to_msgs = collections.defaultdict(list)
  ids_to_ack = []
  for msg in all_msgs:
    data_str = msg.message.data
    try:
      data = json.loads(data_str)
      data['publish_time'] = msg.message.publish_time.seconds
      data['ack_id'] = msg.ack_id
      test_name_to_msgs[data['test_name']].append(data)
    except Exception as e:
      logger.error(
          'Metrics handler encountered an invalid message in pubsub queue '
          'for topic `{}` which led to Exception: {}. This message will '
          'be acknowledged and ignored. The message was: {}'.format(
              METRICS_WRITTEN_TOPIC, e, msg))
      ids_to_ack.append(msg.ack_id)

  # Grab the latest message for each test. We will process only that message
  # and all other messages for that test will be ack'ed without being processed.
  msgs_to_process = []
  for test_name, msgs in test_name_to_msgs.items():
    sorted_msgs = sorted(msgs, key=lambda x: x['publish_time'])
    ids_to_ack.extend([msg['ack_id'] for msg in sorted_msgs[:-1]])
    msgs_to_process.append(sorted_msgs[-1])
  logger.info('Finished deduplicating messages from test runs.')

  # Note: it's good to ack early and often since pubsub will resend messages
  # that are not ack'ed within the queue's deadline.
  if ids_to_ack:
    logger.info('Will ack these ids: {}'.format(ids_to_ack))
    subscriber.acknowledge(subscription, ids_to_ack)
    logger.info('Successful ack for ids: {}'.format(ids_to_ack))

  if not msgs_to_process:
    logger.info('No messages to process. Stopping early.')
    return

  # TODO: Add support for multi-zone and/or multi-cluster setups.
  zone = msgs_to_process[0].get('zone')
  cluster = msgs_to_process[0].get('cluster_name')
  status_handler = job_status_handler.JobStatusHandler(
      project_id, zone, cluster, logger)

  # Handle the metrics for each test. Ack if the process was successful or if
  # the message is permanently invalid. Do not ack if the test is still running
  # so that we will retry again later once that test has finished running.
  for msg in msgs_to_process:
    try:
      logger.info('Pubsub message to process: {}'.format(msg))
      should_ack = _process_pubsub_message(msg, status_handler, logger)
    except Exception:
      logger.error(
          'Encountered exception while attempting to process message {}. '
          'The message will be acknowledged to prevent more crashes. '
          'Exception: {}'.format(msg, traceback.format_exc()))
      should_ack = True
    if should_ack:
      logger.info('Finished processing message. Will ack')
      subscriber.acknowledge(subscription, [msg['ack_id']])
      logger.info('Acknowledged ack_id: {}'.format(msg['ack_id']))
    else:
      logger.info('Finished processing message. Will not ack')
  logger.info('Processed a message for each of the following tests: '
              '{}'.format([x['test_name'] for x in msgs_to_process]))
  logger.send_email()
Example #5
# GOOGLE_APPLICATION_CREDENTIALS_PATH, PROJECT_ID and SUBSCRIPTION_ID are
# constants defined elsewhere in the original script.
from concurrent.futures import TimeoutError

from google.cloud import pubsub_v1
from google.oauth2 import service_account

credentials = service_account.Credentials.from_service_account_file(
    GOOGLE_APPLICATION_CREDENTIALS_PATH)


# Callback function, called when a message arrives
def callback(message):
    # Do something with the message
    print("Message received:")
    print(f"{message}.")

    # Acknowledge receipt; otherwise the message will be resent
    message.ack()


# Create a subscriber client to access pub/sub service
subscriber = pubsub_v1.SubscriberClient(credentials=credentials)

# Create subscription object
subscription_path = subscriber.subscription_path(PROJECT_ID, SUBSCRIPTION_ID)

# Subscribe (start receiving messages) and pass the callback that will process each incoming message
streaming_pull_future = subscriber.subscribe(subscription_path,
                                             callback=callback)

try:
    # Block and wait; without this the example program would exit
    # immediately without receiving any messages
    streaming_pull_future.result(timeout=5)
except TimeoutError as err:
    # The wait timed out; shut the stream down cleanly
    streaming_pull_future.cancel()
    print(f"Exception: {err}.")
Example #6
import picamera
import base64
import subprocess
import sys
import face_recognition
import numpy
import re
import glob

import RPi.GPIO as GPIO

from PIL import Image

from google.cloud import pubsub_v1
from google.cloud import firestore
from google.cloud import storage

subscriber_pi = pubsub_v1.SubscriberClient()
camera = picamera.PiCamera()
authorized_encodings = []

pir = 8  # Pin 8  : PIR
yellow = 10  # Pin 10 : yellow LED
lock = 12  # Pin 12 : door lock
green = 16  # Pin 16 : green LED
red = 18  # Pin 18 : red LED
blue = 22  # Pin 22 : blue LED
pair_switch = 24  # Pin 24 : user pairing button
lock_switch = 26  # Pin 26 : toggles door lock

GPIO.setmode(GPIO.BOARD)  # set GPIO mode to correct physical numbering
GPIO.setup(pir, GPIO.IN)  # setup GPIO pin PIR as input
GPIO.setup(yellow, GPIO.OUT)  # setup led outputs
Example #7
def subscriber_client():
    subscriber_client = pubsub_v1.SubscriberClient()
    yield subscriber_client
    # Close the subscriber client properly during teardown.
    subscriber_client.close()
Example #8
def poll_notifications(project, subscription_name):
    """Polls a Cloud Pub/Sub subscription for new GCS events for display."""
    # [BEGIN poll_notifications]
    import json
    import subprocess
    import time

    from google.auth import jwt
    from google.cloud import pubsub_v1
    from google.cloud import storage

    service_account_info = json.load(open("service_account.json"))
    audience = "https://pubsub.googleapis.com/google.pubsub.v1.Subscriber"

    credentials = jwt.Credentials.from_service_account_info(
        service_account_info, audience=audience)
    subscriber = pubsub_v1.SubscriberClient(credentials=credentials)

    subscription_path = subscriber.subscription_path(project,
                                                     subscription_name)

    def download_image(name, bucketName="dynartwork-277815.appspot.com"):
        storage_client = storage.Client.from_service_account_json(
            'service_account.json')
        bucket = storage_client.get_bucket(bucketName)
        # get bucket data as blob
        print(f'Opening {bucketName}/{name}')
        blob = bucket.get_blob(name)
        json_data = blob.download_as_string()
        with open(name, "wb") as text_file:
            text_file.write(json_data)

    def callback(message):
        #show_image(message.attributes['objectId'])
        #print("Received message:\n{}".format(summarize(message)))
        data = message.data.decode("utf-8")
        attributes = message.attributes

        event_type = attributes["eventType"]
        bucket_id = attributes["bucketId"]
        object_id = attributes["objectId"]
        print(object_id)
        message.ack()
        var = "Message not important"
        if "data" in object_id:
            originalName = object_id[5:-4]
            imageName = f'{originalName}_showed.jpg'
            download_image(imageName, 'processed_artworks')
            # display the image
            print("Showing image:{}".format(imageName))
            global image
            previousImage = image
            image = subprocess.Popen([
                "feh", "--hide-pointer", "-x", "-q", "-B", "black",
                f"/home/pi/Documents/{imageName}"
            ])
            time.sleep(2)
            previousImage.kill()
            var = "Image correctly showed"
        print("Result of callback:{}".format(var))

    subscriber.subscribe(subscription_path, callback=callback)
    # The subscriber is non-blocking, so we must keep the main thread from
    # exiting to allow it to process messages in the background.
    print("Listening for messages on {}".format(subscription_path))
    global image
    image = subprocess.Popen([
        "feh", "--hide-pointer", "-x", "-F",
        f"/home/pi/Documents/dynartwork.png"
    ])
    while True:
        time.sleep(60)
Example #9
import asyncio

import google.auth
from google.cloud import pubsub_v1
from xialib_gcp import PubsubSubscriber

project_id = google.auth.default()[1]


def callback(s: PubsubSubscriber, message: dict, source, subscription_id):
    header, data, id = s.unpack_message(message)
    print("{}: {}".format(subscription_id, header))
    s.ack(project_id, subscription_id, id)


loop = asyncio.get_event_loop()
task_backlog = PubsubSubscriber(
    sub_client=pubsub_v1.SubscriberClient()).stream('x-i-a-test',
                                                    'insight-backlog-debug',
                                                    callback=callback)
task_cockpit = PubsubSubscriber(
    sub_client=pubsub_v1.SubscriberClient()).stream('x-i-a-test',
                                                    'insight-cockpit-debug',
                                                    callback=callback)
task_cleaner = PubsubSubscriber(
    sub_client=pubsub_v1.SubscriberClient()).stream('x-i-a-test',
                                                    'insight-cleaner-debug',
                                                    callback=callback)
task_loader = PubsubSubscriber(sub_client=pubsub_v1.SubscriberClient()).stream(
    'x-i-a-test', 'insight-loader-debug', callback=callback)
task_merger = PubsubSubscriber(sub_client=pubsub_v1.SubscriberClient()).stream(
    'x-i-a-test', 'insight-merger-debug', callback=callback)
task_packager = PubsubSubscriber(
    sub_client=pubsub_v1.SubscriberClient()).stream('x-i-a-test',
                                                    'insight-packager-debug',
                                                    callback=callback)
# (the snippet was cut off here; the subscription name above is inferred
# from the pattern of the other tasks)
Example #10
 def __init__(self, project, topic_name, subscription_name):
     logger.debug("Subscriber client created")
     self.subscriber = pubsub_v1.SubscriberClient()
     self.subscription_path = self.subscriber.subscription_path(
         project, subscription_name)
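A natural companion method for this wrapper (hypothetical, not part of the original snippet) that opens the stream with a caller-supplied callback:

 def listen(self, callback):
     """Start streaming messages to `callback` and return the pull future."""
     return self.subscriber.subscribe(self.subscription_path, callback=callback)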
Example #11
def subscribe(subscription_name, worker, give_up=False):
    """Receives and spawns threads to handle jobs received in Pub/Sub"""
    global shutdown_requested

    message = None  # The current active message
    lock = threading.Lock()

    client = pubsub_v1.SubscriberClient()
    subscription_path = client.subscription_path(GCLOUD_PROJECT_ID,
                                                 subscription_name)

    def renew_deadline():
        """Repeatedly give the active message more time to be processed to prevent it being resent"""
        while not (message is None and shutdown_requested):
            if message is not None:
                try:
                    with lock:
                        client.modify_ack_deadline(subscription_path,
                                                   [str(message.ack_id)],
                                                   SUB_ACK_DEADLINE)
                        logging.debug(
                            'Reset ack deadline for {} for {}s'.format(
                                message.message.data.decode(),
                                SUB_ACK_DEADLINE))
                    time.sleep(SUB_SLEEP_TIME)
                except Exception as e:
                    logging.warning('Could not reset ack deadline', exc_info=e)

    watcher = threading.Thread(target=renew_deadline)
    watcher.start()

    # Repeatedly check for new jobs until SIGINT/SIGTERM received
    logging.info('Listening for jobs')
    try:
        while not shutdown_requested:
            response = client.pull(subscription_path,
                                   max_messages=1,
                                   return_immediately=True)

            if not response.received_messages:
                logging.info('Job queue is empty')
                time.sleep(SUB_SLEEP_TIME)
                continue

            if len(response.received_messages) > 1:
                logging.warning(
                    'Received more than one job when only one expected')

            with lock:
                message = response.received_messages[0]

            logging.info('Beginning: {}'.format(message.message.data.decode()))
            process = multiprocessing.Process(
                target=worker, args=(message.message.data.decode(), ))
            process.start()
            process.join()

            if process.exitcode == 0:
                # Success; acknowledge and return
                try:
                    client.acknowledge(subscription_path, [message.ack_id])
                    logging.info('Ending and acknowledged: {}'.format(
                        message.message.data.decode()))
                except Exception as e:
                    logging.error('Could not end and acknowledge: {}'.format(
                        message.message.data.decode()),
                                  exc_info=e)
            elif give_up and (int(time.time()) -
                              message.message.publish_time.seconds) > 600:
                # Failure; give up and acknowledge
                try:
                    client.acknowledge(subscription_path, [message.ack_id])
                    logging.error('Failed but acknowledged: {}'.format(
                        message.message.data.decode()))
                except Exception as e:
                    logging.error(
                        'Failed but could not acknowledge: {}'.format(
                            message.message.data.decode()),
                        exc_info=e)
            else:
                # Failure; refuse to acknowledge
                logging.error('Failed, not acknowledged: {}'.format(
                    message.message.data.decode()))

            # Stop extending this message's deadline in the "watcher" thread
            with lock:
                message = None
    except Exception as e:
        logging.critical('Exception encountered. ', exc_info=e)
    finally:
        # If there is an exception, make sure the "watcher" thread shuts down
        shutdown_requested = True
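The snippet checks a global shutdown_requested flag but never shows where it is set; the SIGINT/SIGTERM handlers it implies might look like this (a minimal sketch):

import signal

def request_shutdown(signum, frame):
    """Ask the subscribe loop and the watcher thread to exit cleanly."""
    global shutdown_requested
    shutdown_requested = True

signal.signal(signal.SIGINT, request_shutdown)
signal.signal(signal.SIGTERM, request_shutdown)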
Example #12
 def subscriber_client(self):
     self.log_debug("initialize subscriber client")
     return pubsub_v1.SubscriberClient(credentials=self.credentials)
Example #13
def pubsub_interface(subscription_path,
                     input_topic,
                     initial_data=None,
                     delay_seconds=1.0,
                     callback=None):
    import functools
    from google.cloud import pubsub_v1
    from google.cloud.pubsub_v1.subscriber.message import Message
    from rillbeam import tapp

    if callback is None:

        def callback(pane, message):
            # type: (tapp.Pane, Message) -> None
            message.ack()
            with pane.batch:
                pane.write(
                    '{} : {}'.format(message.publish_time,
                                     message.data.decode()), 'cyan')

    subscriber = pubsub_v1.SubscriberClient()
    publisher = pubsub_v1.PublisherClient()

    with tapp.App() as app:
        app.write('Beginning interactive pubsub session.',
                  'yellow',
                  attrs=['bold'])
        app.write()
        app.write('Subscriber {!r}...'.format(subscription_path), 'yellow')
        app.write('Publisher {!r}...'.format(input_topic), 'yellow')
        app.write()
        app.write(
            'Send messages to pubsub. Output messages will print '
            'when they are received.', 'green')
        app.write('Type \'exit\' to stop.', 'green', attrs=['bold'])

        prompt = app.pane(3, app.width, app.line, 0)
        y = app.line + prompt.height

        height = app.height - 1 - y
        ypos = y
        col1 = int(app.width * 0.4)
        col2 = int(app.width * 0.6)

        app.write('stdout', y=ypos, attrs=['bold'])
        app.write('subscriber', x=col1, y=y, attrs=['bold'])
        logpane = app.pane(height, col1, ypos + 1, 0)
        logging.getLogger().addHandler(CursesHandler(logpane))
        streampane = app.pane(height, col2, ypos + 1, col1)

        sub_future = subscriber.subscribe(subscription_path,
                                          callback=functools.partial(
                                              callback, streampane))

        if initial_data:
            _logger.info('Sending {} initial packages...'.format(
                len(initial_data)))
            with streampane.batch:
                for msg in initial_data:
                    _logger.info('Sending {!r}...'.format(msg))
                    time.sleep(delay_seconds)
                    publisher.publish(input_topic, data=msg)
        try:
            while True:
                prompt.win.clear()
                prompt.win.border()
                try:
                    msg = prompt.prompt()
                except KeyboardInterrupt:
                    continue
                if not msg:
                    continue
                elif msg.lower() == 'exit':
                    break
                else:
                    _logger.info('Sending {!r}...'.format(msg))
                    publisher.publish(input_topic, data=msg)
        finally:
            sub_future.cancel()
Example #14
import os

from google.cloud import pubsub_v1
from memory_profiler import profile  # assumed source of the @profile decorator used below

os.environ.setdefault('GOOGLE_CLOUD_PROJECT', 'project-id')
os.environ.setdefault('MY_TOPIC_NAME', 'test_memory_leak-sub')

topic_name = 'projects/{project_id}/topics/{topic}'.format(
    project_id=os.getenv('GOOGLE_CLOUD_PROJECT'),
    topic=os.getenv('MY_TOPIC_NAME'),  # Set this to something appropriate.
)

subscription_name = 'projects/{project_id}/subscriptions/{sub}'.format(
    project_id=os.getenv('GOOGLE_CLOUD_PROJECT'),
    sub=os.getenv('MY_TOPIC_NAME'),  # Set this to something appropriate.
)


@profile
def callback(message):
    # Memory intensive operation
    # x = [n for n in range(int(1e5))]
    message.ack()
    print("ack")


with pubsub_v1.SubscriberClient() as subscriber:
    future = subscriber.subscribe(subscription_name, callback)
    print("Starting subscriber")
    try:
        print("Listening...")
        future.result(timeout=30)
    except KeyboardInterrupt:
        future.cancel()
Example #15
def test_check_messages():
    import gzip
    import json

    from google.cloud import pubsub_v1
    from xialib_gcp import PubsubSubscriber

    sub = PubsubSubscriber(sub_client=pubsub_v1.SubscriberClient())
    for message in sub.pull('x-i-a-test', 'xialib-sub-01'):
        header, data, id = sub.unpack_message(message)
        assert len(json.loads(gzip.decompress(data).decode())) == 2
        sub.ack('x-i-a-test', 'xialib-sub-01', id)
Example #16
# Fragment: assumes `publisher`, `project_path`, and `topic_name` were created
# earlier with pubsub_v1.PublisherClient(), publisher.project_path(...) and
# publisher.topic_path(...).
found = False  # Check if topic exists in project
for topic in publisher.list_topics(
        project_path):  # topic is a fully qualified topic path
    if topic.name == topic_name:
        found = True
if not found:  # If not found, create it
    publisher.create_topic(topic_name)

# Publish message. Note: since the subscription does not exist yet, this message will not be delivered
#future = publisher.publish (topic_name, b'My first message!', spam='this is spam') # Publish a message
#if future._completed: # Check if successful
#    print ("Message sent successfully!")

# Create subscriber
subscriber = pubsub_v1.SubscriberClient()  # Creates a subscriber client
subscription_name = 'projects/{project_id}/subscriptions/{sub}'.format(
    project_id="famous-store-237108", sub="mysubscription")
subscription_path = subscriber.subscription_path(
    "famous-store-237108",
    "mysubscription")  # Creates a fully qualified subscriber path

found = False  # Check if subscription exists in project
for subscription in subscriber.list_subscriptions(
        project_path):  # subscription is a fully qualified subscription path
    if subscription.name == subscription_name:
        found = True
if not found:  # If not found, create it
    sub_instance = subscriber.create_subscription(name=subscription_name,
                                                  topic=topic_name)
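The list-and-scan check works, but the same idempotent setup can reuse the AlreadyExists pattern from Example #3 and skip the full listing (a minimal alternative sketch):

from google.api_core.exceptions import AlreadyExists

try:
    publisher.create_topic(topic_name)
except AlreadyExists:
    pass  # topic already exists

try:
    subscriber.create_subscription(name=subscription_name, topic=topic_name)
except AlreadyExists:
    pass  # subscription already exists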
Example #17
def synchronous_pull_with_lease_management(project_id, subscription_name):
    """Pulling messages synchronously with lease management"""
    # [START pubsub_subscriber_sync_pull_with_lease]
    import logging
    import multiprocessing
    import random
    import time

    from google.cloud import pubsub_v1

    # TODO project_id = "Your Google Cloud Project ID"
    # TODO subscription_name = "Your Pub/Sub subscription name"

    subscriber = pubsub_v1.SubscriberClient()
    subscription_path = subscriber.subscription_path(project_id,
                                                     subscription_name)

    NUM_MESSAGES = 2
    ACK_DEADLINE = 30
    SLEEP_TIME = 10

    # The subscriber pulls a specific number of messages.
    response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES)

    multiprocessing.log_to_stderr()
    logger = multiprocessing.get_logger()
    logger.setLevel(logging.INFO)

    def worker(msg):
        """Simulates a long-running process."""
        RUN_TIME = random.randint(1, 60)
        logger.info("{}: Running {} for {}s".format(
            time.strftime("%X", time.gmtime()), msg.message.data, RUN_TIME))
        time.sleep(RUN_TIME)

    # `processes` stores process as key and ack id and message as values.
    processes = dict()
    for message in response.received_messages:
        process = multiprocessing.Process(target=worker, args=(message, ))
        processes[process] = (message.ack_id, message.message.data)
        process.start()

    while processes:
        for process in list(processes):
            ack_id, msg_data = processes[process]
            # If the process is still running, reset the ack deadline to
            # ACK_DEADLINE once every SLEEP_TIME seconds.
            if process.is_alive():
                # `ack_deadline_seconds` must be between 10 and 600.
                subscriber.modify_ack_deadline(
                    subscription_path,
                    [ack_id],
                    ack_deadline_seconds=ACK_DEADLINE,
                )
                logger.info("{}: Reset ack deadline for {} for {}s".format(
                    time.strftime("%X", time.gmtime()),
                    msg_data,
                    ACK_DEADLINE,
                ))

            # If the process is finished, acknowledge the message using its ack_id.
            else:
                subscriber.acknowledge(subscription_path, [ack_id])
                logger.info("{}: Acknowledged {}".format(
                    time.strftime("%X", time.gmtime()), msg_data))
                processes.pop(process)

        # If there are still processes running, sleep before checking again.
        if processes:
            time.sleep(SLEEP_TIME)

    print("Received and acknowledged {} messages. Done.".format(
        len(response.received_messages)))

    subscriber.close()
Example #18
 def __init__(self, project_id, client_identifier='default'):
     self.publisher = pubsub_v1.PublisherClient()
     self.subscriber = pubsub_v1.SubscriberClient()
     self.client_identifier = client_identifier
     self.project_id = project_id
Example #19
cur = connectionsql.cursor(pymysql.cursors.DictCursor) # create a cursor to read from and write to the database


# Settings for controlling the receiving email

SCOPES = 'https://mail.google.com/' 			# scopes set the permissions the app requests; this scope can perform all actions on a Gmail account
CLIENT_SECRET_FILE = './storage/secret.json'	# the secret file is generated by the API manager and identifies the email account to connect to
APPLICATION_NAME = name							# name of the app that connects to the email account
project_id = 'locationbasedimages'				# name of your project on Google Cloud Platform

# Creating workers for the pub/sub system


pubsubclient = pubsub_v1.PublisherClient()		# publisher worker to create and manage topics
subscriber = pubsub_v1.SubscriberClient()		# subscriber worker to create and manage subscriptions for those topics
project = pubsubclient.project_path(project_id) # set the project the pub/sub workers operate on

gmailHistoryId = '' # holds the last history ID, so the process only checks mails newer than this historyID
checkedEmails = []  # list of IDs of already-checked mails, so they are not checked again by mistake

# Gets credentials for the receiving email ID; checks whether authentication has already been performed and, if not, performs it.
def Getcredentials():
	store = Storage('./storage/myauth.json')
	credentials = store.get()
	if not credentials or credentials.invalid:									# fetch the credentials and check that they exist and are still valid
		flow = oauth2client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)	# start the authentication process
		flow.user_agent = APPLICATION_NAME										# give the auth process your application name
		credentials = tools.run_flow(flow,store)								# get the new credentials and save them to myauth.json
		print('fetched new credentials')
	return credentials  														# return the new credentials to the main program
Example #20
 def get_subscriber(self):
     return pubsub.SubscriberClient()
Example #21
    def read_from_topic(self, topic_name):

        # Get Pub/Sub properties from the config table
        pubsub_config_pandas_df = self.get_pubsub_config(
            topic_name, self.tenant)

        project_id = pubsub_config_pandas_df.iloc[0]['project']
        subscriber_id = pubsub_config_pandas_df.iloc[0]['subscription_name']

        self.logger.info(f"Project_id ==> {project_id}")
        self.logger.info(f"subscriber_id ==> {subscriber_id}")

        # initialize subscriber client
        subscriber = pubsub_v1.SubscriberClient()

        # build the subscription path from the subscription id
        subscription_path = subscriber.subscription_path(
            project_id, subscriber_id)

        self.logger.info(f"Subscription Path ==> {subscription_path}")

        # subscribe to the subscription, which receives messages from its registered topic
        future = subscriber.subscribe(subscription_path, self.callback)
        self.logger.info(
            f"Subscribed and listening for messages on {subscription_path} ..."
        )

        # the 'with' block closes the subscriber automatically when done
        with subscriber:
            try:
                # When `timeout` is not set, result() will block indefinitely,
                # unless an exception is encountered first.
                future.result(timeout=20)
            except TimeoutError:  # concurrent.futures.TimeoutError
                future.cancel()  # Trigger the shutdown.
                future.result()  # Block until the shutdown is complete.

        self.logger.info(f"Total messages consumed : {self.message_count}")

        # Create spark dataframe
        df, schema = None, None
        if len(self.consumed_message_list) > 0:
            self.logger.info("found records")
            df = self.spark_session.createDataFrame(self.consumed_message_list,
                                                    ["key", "value"])
            df = df.withColumn('run_date', f.lit(self.run_date)) \
                .withColumn('run_time', f.to_timestamp(f.lit(self.ADAPTER_RUN_START))) \
                .withColumn('topic', f.lit(topic_name))
            df = self.spark_session.createDataFrame(df.rdd,
                                                    self.pubsub_df_schema)

        # archive to consolidated disposition table
        if df is None or df.rdd.isEmpty():
            self.logger.info(
                f"No records consumed from {topic_name} on {subscription_path} to archive"
            )
        else:
            self.logger.info(f"Archiving to : {self.pubsub_disposition_table}")
            self.bq_api.save_as_bigquery_table(df,
                                               self.pubsub_disposition_table,
                                               'append')
            self.logger.info(
                f"Archiving completed with {self.run_date} and {self.ADAPTER_RUN_START}"
            )
            schema = self.spark_session.read.json(
                df.rdd.map(lambda row: row.value)).schema
        return df, schema  # df with schema as key,value,run_date,run_time,topic name
Example #22
 def create_patron(self, patron):
     if self._subscriber is None:
         self._subscriber = pubsub_v1.SubscriberClient()
Example #23
 async def new_subscriber(
         self, creds, subscription_name,
         callback) -> pubsub_v1.subscriber.futures.StreamingPullFuture:
     subscriber = pubsub_v1.SubscriberClient(credentials=creds)
     return subscriber.subscribe(subscription_name, callback)
Example #24
import os
import uuid

from google.api_core.exceptions import AlreadyExists
from google.cloud import pubsub_v1
import mock
import pytest

import sub  # noqa

UUID = uuid.uuid4().hex
PROJECT = os.environ["GCLOUD_PROJECT"]
TOPIC = "quickstart-sub-test-topic-" + UUID
SUBSCRIPTION = "quickstart-sub-test-topic-sub-" + UUID

publisher_client = pubsub_v1.PublisherClient()
subscriber_client = pubsub_v1.SubscriberClient()


@pytest.fixture(scope="module")
def topic_path():
    topic_path = publisher_client.topic_path(PROJECT, TOPIC)

    try:
        topic = publisher_client.create_topic(topic_path)
        yield topic.name
    except AlreadyExists:
        yield topic_path

    publisher_client.delete_topic(topic_path)

Example #25
def update_subscription_with_dead_letter_policy(project_id,
                                                topic_id,
                                                subscription_id,
                                                dead_letter_topic_id,
                                                max_delivery_attempts=5):
    """Update a subscription's dead letter policy."""
    # [START pubsub_dead_letter_update_subscription]
    from google.cloud import pubsub_v1
    from google.cloud.pubsub_v1.types import DeadLetterPolicy, FieldMask

    # TODO(developer)
    # project_id = "your-project-id"
    # TODO(developer): This is an existing topic that the subscription
    # with dead letter policy is attached to.
    # topic_id = "your-topic-id"
    # TODO(developer): This is an existing subscription with a dead letter policy.
    # subscription_id = "your-subscription-id"
    # TODO(developer): This is an existing dead letter topic that the subscription
    # with dead letter policy will forward dead letter messages to.
    # dead_letter_topic_id = "your-dead-letter-topic-id"
    # TODO(developer): This is the maximum number of delivery attempts allowed
    # for a message before it gets delivered to a dead letter topic.
    # max_delivery_attempts = 5

    publisher = pubsub_v1.PublisherClient()
    subscriber = pubsub_v1.SubscriberClient()

    topic_path = publisher.topic_path(project_id, topic_id)
    subscription_path = subscriber.subscription_path(project_id,
                                                     subscription_id)
    dead_letter_topic_path = publisher.topic_path(project_id,
                                                  dead_letter_topic_id)

    subscription_before_update = subscriber.get_subscription(
        request={"subscription": subscription_path})
    print(f"Before the update: {subscription_before_update}.")

    # Indicates which fields in the provided subscription to update.
    update_mask = FieldMask(paths=["dead_letter_policy"])

    # Construct a dead letter policy you expect to have after the update.
    dead_letter_policy = DeadLetterPolicy(
        dead_letter_topic=dead_letter_topic_path,
        max_delivery_attempts=max_delivery_attempts,
    )

    # Construct the subscription with the dead letter policy you expect to have
    # after the update. Here, values in the required fields (name, topic) help
    # identify the subscription.
    subscription = pubsub_v1.types.Subscription(
        name=subscription_path,
        topic=topic_path,
        dead_letter_policy=dead_letter_policy,
    )

    with subscriber:
        subscription_after_update = subscriber.update_subscription(
            request={
                "subscription": subscription,
                "update_mask": update_mask
            })

    print(f"After the update: {subscription_after_update}.")
    # [END pubsub_dead_letter_update_subscription]
    return subscription_after_update
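A hypothetical invocation, echoing the placeholder IDs from the TODO comments:

update_subscription_with_dead_letter_policy(
    "your-project-id",
    "your-topic-id",
    "your-subscription-id",
    "your-dead-letter-topic-id",
    max_delivery_attempts=10,
)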
Example #26
def worker(msg):
    start_time = time.time()
    print(msg.message.data)

    task_id = int(msg.message.data)
    client = datastore.Client(project_id)
    key = client.key(task_kind, task_id)
    params = client.get(key)

    # Setting the status to 'InProgress'
    mark_in_progress(client, task_id)

    cancertype = params['cancertype']
    category = params['category']
    shard_length = int(params['shard_length'])
    shard_index = int(params['shard_index'])
    gcs_output_path = params['gcs_output_path']

    print('Loading metadata...')
    image_file_metadata_filename = 'data/caches_basic_annotations.txt'
    util.gsutil_cp('{}/{}/caches_basic_annotations.txt'.format(gcs_ann_path, cancertype), 'data/', make_dir=True)
    image_files_metadata = pd.read_csv(image_file_metadata_filename, skiprows=range(1, shard_index*shard_length+1), nrows=shard_length)

    shard_length_tiles = len(image_files_metadata.index)

    label_names = ['is_tumor']

    print('Downloading cache files...')
    image_files_metadata['cache_values'] = choose_input_list.load_cache_values(image_files_metadata, 
                                                                               bucket_name = tiles_input_bucket,
                                                                               notebook = False)
    
#    print('Downloading tiles...')
#    bucket = handle_google_cloud_apis.gcsbucket(project_id, tiles_input_bucket)
#    def download_tile(df_row, bucket):
#        gcs_rel_path = df_row['GCSurl'][len('gs://' + bucket.bucket_name)+1:]
#        bucket.download_from_gcs(gcs_rel_path, output_dir=df_row['rel_path'])

#    tqdm.pandas()
#    image_files_metadata.progress_apply(lambda df_row: download_tile(df_row, bucket), axis=1)

    crossval_groups = ['training','testing','validation']
    if category not in crossval_groups+['all']:
        raise Exception('Unknown cross validation category.')

    # Create tfrecords for each category
    if category != 'all': # keyword 'all' will loop through all three categories
        crossval_groups = [category]

    tfrecords_folder = 'tfrecords_{}'.format(cancertype)
    util.mkdir_if_not_exist(tfrecords_folder)

    for category in crossval_groups:
        print('Creating TFRecord for {:s}...'.format(category))
        handle_tfrecords.create_tfrecords_per_category_for_caches(image_files_metadata, label_names, category,
                                                                 tfrecord_file_name_prefix = tfrecords_folder + '/tfrecord{:d}'.format(shard_index))

    tfrecords_bucket = re.search('gs://(.+?)/', gcs_output_path).group(1)
    prefix = 'gs://' + tfrecords_bucket + '/'
    gcs_directory = "".join(gcs_output_path.rsplit(prefix))

    bucket = handle_google_cloud_apis.gcsbucket(project_name=project_id, bucket_name=tfrecords_bucket)
    bucket.copy_files_to_gcs(tfrecords_folder, gcs_directory, verbose=True)

    command = 'du -s ' + tfrecords_folder + '/'
    tfrecord_size_MBi = round(int(os.popen(command).read().split()[0])/1000,1)  # in MB

    # Removing local files
    command = "rm -rf " + tfrecords_folder
    os.popen(command)
    os.popen("rm -rf tcga_tiles")

    elapsed_time_s = round((time.time() - start_time), 1)  # in seconds

    completed_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())

    # We can now confirm the job
    client = datastore.Client(project_id)
    mark_done(client=client, task_id=task_id, completed_time=completed_time,
              elapsed_time_s=elapsed_time_s, shard_length_tiles=shard_length_tiles,
              tfrecord_size_MBi=tfrecord_size_MBi)

    print('Finish Timestamp: {} - Time elapsed: {} seconds.'.format(completed_time, elapsed_time_s))

    subscriber = pubsub.SubscriberClient()
    subscription_path = subscriber.subscription_path(project_id, subscription_name)

    # Acknowledging the message
    subscriber.acknowledge(subscription_path, [msg.ack_id])
    print("{}: Acknowledged {}".format(
        time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), msg.message.data))
Example #27
def subscriber():
    yield pubsub_v1.SubscriberClient()
Example #28
import sys
import re
from tqdm import tqdm
import json
import glob

from google.cloud import pubsub

project_id = PROJECT_ID
subscription_name = SUBSCRIPTION_NAME
input_bucket = INPUT_BUCKET
task_kind = TASK_KIND
annotations_path = ANNOTATIONS_PATH
results_path = RESULTS_PATH
pancancer_tfrecords_path = PANCANCER_TFRECORDS_PATH
gcs_output_path = GCS_OUTPUT_PATH

subscriber = pubsub.SubscriberClient()
subscription_path = subscriber.subscription_path(project_id, subscription_name)

NUM_MESSAGES = 1
ACK_DEADLINE = 60
SLEEP_TIME = 30


def mark_done(client, task_id, completed_time, elapsed_time_s):
    with client.transaction():
        key = client.key(task_kind, task_id)
        task = client.get(key)

        if not task:
            raise ValueError('{} {} does not exist.'.format(
                task_kind, task_id))
Example #29
def subscriber_client():
    subscriber_client = pubsub_v1.SubscriberClient()
    yield subscriber_client
    subscriber_client.close()
Example #30
# Fragment of a Flask view; assumes `app`, `mail` (Flask-Mail), `ml_model`,
# and the usual Flask / pubsub_v1 imports are defined elsewhere in the module.
def predict():
    print("I was here 1")
    if request.method == 'POST':
        print(request.form.get('age'))
        try:
            age = float(request.form['age'])
            income = float(request.form['income'])
            Gender = float(request.form['Gender'])
            MaritalStatus = float(request.form['MaritalStatus'])
            HaveKids = float(request.form['HaveKids'])
            isVeg = float(request.form['isVeg'])
            IsStudent = float(request.form['IsStudent'])
            email= request.form['email']
            offer_received = 0
            pred_args = [age, income, Gender,MaritalStatus,HaveKids,isVeg,IsStudent]
            pred_args_arr = np.array(pred_args)
            pred_args_arr = pred_args_arr.reshape(1, -1)
            # mul_reg = open("multiple_regression_model.pkl", "rb")
            # ml_model = joblib.load(mul_reg)
            model_prediction = ml_model.predict(pred_args_arr)
            model_prediction = round(float(model_prediction), 2)

            cred = service_account.Credentials.from_service_account_file(
                'subway_cred.json')

            # Setting up the Configuration Variables:
            project_id = "subwayoffers"
            bucket_name = "bucket_subway"
            topic_name = "subwaytopic"
            subscription_name = "subwayoffers"
            dataset_name = "data"
            table_name = "Profile"

            publisher = pubsub_v1.PublisherClient(credentials=cred)
            topic_path = publisher.topic_path(project_id, topic_name)
            print(topic_path)

            subscriber = pubsub_v1.SubscriberClient(credentials=cred)
            topic_path = subscriber.topic_path(project_id, topic_name)
            subscription_path = subscriber.subscription_path(project_id, subscription_name)

            model_prediction_dict = 'dict'

            if isVeg == 0:
                model_prediction_dict = "Free Subway Chicken Sandwich"
            elif HaveKids == 1:
                model_prediction_dict = "40% on Happy Meal"
            else:
                model_prediction_dict = "30% off on any Subway Sandwich"

            if model_prediction == 1:
                offer_received = '1'
                model_prediction = 0
                msg = Message(subject="Subway offers",
                          sender=app.config.get("MAIL_USERNAME"),
                          recipients=[email],  # replace with your email for testing
                          body="Thank you for taking this survey. Your offer is "+str(model_prediction_dict))
                print(str(model_prediction))
                mail.send(msg)

            else:
                msg = Message(subject="Subway offers",
                          sender=app.config.get("MAIL_USERNAME"),
                          recipients=[email],  # replace with your email for testing
                          body="Thank you for taking this survey. You have earned a cookie")
                mail.send(msg)
                # print(str(model_prediction))

                model_prediction = 0

            data_row = {"age": age, "income": income, "Gender": Gender, "MaritalStatus": MaritalStatus,
                    "HaveKids": HaveKids, "isVeg": isVeg, "IsStudent": IsStudent, "offer_received": offer_received,
                    "offer_completed": model_prediction, "email": email}


            # print(data_row)
            message_data = json.dumps(data_row)
            message_data = message_data.encode('utf-8')

            print(message_data)
            # Publishing a message on the PubSub Topic Created:
            response = publisher.publish(topic_path, message_data, origin='python-sample')
            print(response)


        except ValueError:
            return "Please check if the values are entered correctly"
    return render_template('predict.html', prediction = model_prediction)