def setUpModule():
    """Point the shared test client at the emulator when one is configured."""
    _helpers.PROJECT = TESTS_PROJECT
    if os.getenv(PUBSUB_EMULATOR) is not None:
        # Emulator in play: fake credentials and a plain (un-authorized)
        # HTTP transport are sufficient.
        Config.CLIENT = pubsub.Client(credentials=EmulatorCreds(),
                                      http=httplib2.Http())
    else:
        Config.CLIENT = pubsub.Client()
def __init__(self, player_id):
    """Initialize session object.

    Sets up DB API connections to Redis and Pub/Sub for this session,
    and performs the initial fetch of player and cards from the database.

    Args:
        player_id: Hashed player name.
    """
    # Logging and configuration
    self.log = open('backend_issues.log', 'a+')
    self.cfg = cfg
    self.session_id = player_id

    # Connect to DB API Cloud Pub/Sub and Redis
    logger.info("Connecting to DB API...")
    ps_client = pubsub.Client(project=self.cfg['gcp']['project'])
    logger.info("Connecting: DB API pubsub topic '%s'",
                self.cfg['pubsub']['topic'])
    self.workq = ps_client.topic(self.cfg['pubsub']['topic'])

    redis_cfg = self.cfg['redis_con']
    logger.info("Connecting: DB API Redis instance at '%s:%s'",
                redis_cfg['hostname'], redis_cfg['port'])
    self.redis = StrictRedis(host=redis_cfg['hostname'],
                             port=redis_cfg['port'],
                             db=redis_cfg['db'],
                             password=redis_cfg['password'])

    # Start empty; populated from the DB by the fetches below.
    self.player = None
    self.cards = {}
    self._get_player(player_id)
    self._get_cards()
def test_topic():
    """Yield a freshly created Pub/Sub topic; delete it on teardown."""
    created = pubsub.Client().topic(TEST_TOPIC)
    created.create()
    yield created
    # Only delete if the test itself did not already remove it.
    if created.exists():
        created.delete()
def signUpUser():
    """Accept a sign-up JSON payload, publish it to Pub/Sub, echo it back."""
    print("main.signUpUser")
    payload = request.json
    print("main.signUpUser request.json")
    print(payload)
    serialized = json.dumps(payload)
    print("main.signUpUser json.dumps(contentJSON)")
    print(serialized)
    print("main.signUpUser exclusions")
    print(payload['exclusions'])

    # Pub/Sub payloads must be bytestrings.
    topic = pubsub.Client(PROJECT_ID).topic(TOPIC)
    message_id = topic.publish(serialized.encode("utf-8"))
    print("main.signUpUser published message{}".format(message_id))
    print(serialized)
    return json.dumps({
        'received_exclusions': payload['exclusions'],
        'received_python_segments': payload['segmentForFFMPEG'],
        'received_file': payload['fileName'],
        'message_id': message_id,
    })
def get_visitors_queue():
    """Return a psq Queue bound to this app's configured GCP project.

    NOTE(review): the queue is named 'books' even though the function is
    about visitors -- looks copied from a sample; confirm the intended name.
    """
    ps_client = pubsub.Client(project=current_app.config['PROJECT_ID'])
    # Pass the Flask application context so tasks can reach any extensions
    # or configuration registered on the app (such as models).
    return psq.Queue(ps_client, 'books',
                     extra_context=current_app.app_context)
def publish(msg):
    """Publish *msg* to the 'ffmpeg-pool' topic, creating it if absent."""
    pool_topic = pubsub.Client(PROJECT_ID).topic("ffmpeg-pool")
    if not pool_topic.exists():
        pool_topic.create()
    pool_topic.publish(msg)
def delete_topic(topic_name):
    """Delete the named Pub/Sub topic and report the result."""
    client = pubsub.Client()
    doomed = client.topic(topic_name)
    doomed.delete()
    print('Topic {} deleted.'.format(doomed.name))
def create_topic(topic_name):
    """Create the named Pub/Sub topic and report the result."""
    client = pubsub.Client()
    fresh = client.topic(topic_name)
    fresh.create()
    print('Topic {} created.'.format(fresh.name))
def create_subscription(topic_name, subscription_name):
    """Create a new pull subscription on the given topic."""
    client = pubsub.Client()
    parent = client.topic(topic_name)
    sub = parent.subscription(subscription_name)
    sub.create()
    print('Subscription {} created on topic {}.'.format(
        sub.name, parent.name))
def delete_subscription(topic_name, subscription_name):
    """Delete an existing pull subscription on the given topic."""
    client = pubsub.Client()
    parent = client.topic(topic_name)
    sub = parent.subscription(subscription_name)
    sub.delete()
    print('Subscription {} deleted on topic {}.'.format(
        sub.name, parent.name))
def index():
    """Render the page on GET; publish the form payload on POST."""
    if request.method == 'GET':
        return render_template('index.html', messages=MESSAGES)

    client = pubsub.Client()
    target = client.topic(current_app.config['PUBSUB_TOPIC'])
    payload = request.form.get('payload', 'Example payload')
    target.publish(payload.encode('utf-8'))
    return 'OK', 200
def check_topic_permissions(topic_name):
    """Report which of a fixed set of permissions the topic grants us."""
    client = pubsub.Client()
    target = client.topic(topic_name)
    wanted = ['pubsub.topics.publish', 'pubsub.topics.update']
    allowed = target.check_iam_permissions(wanted)
    print('Allowed permissions for topic {}: {}'.format(
        target.name, allowed))
def publish_message(topic_name, data):
    """Publish *data* (a text string) to the named Pub/Sub topic."""
    client = pubsub.Client()
    target = client.topic(topic_name)
    # The API accepts only bytestrings as payloads.
    message_id = target.publish(data.encode('utf-8'))
    print('Message {} published.'.format(message_id))
def subscribe():
    """Ensure the ffmpeg-pool topic and this host's subscription exist.

    Stores the client, topic and subscription in module globals so other
    functions can reuse them.
    """
    global psclient, pstopic, pssub
    psclient = pubsub.Client(PROJECT_ID)
    pstopic = psclient.topic("ffmpeg-pool")
    # One subscription per worker host, named after the hostname.
    pssub = pstopic.subscription("ffmpeg-worker-" + socket.gethostname())
    if not pstopic.exists():
        pstopic.create()
    if not pssub.exists():
        pssub.create()
def get_topic_policy(topic_name):
    """Print the IAM policy for the given topic, one role set per line."""
    client = pubsub.Client()
    target = client.topic(topic_name)
    policy = target.get_iam_policy()
    print('Policy for topic {}:'.format(target.name))
    # Same output as individual prints, driven by a label/value table.
    for label, value in (('Version', policy.version),
                         ('Owners', policy.owners),
                         ('Editors', policy.editors),
                         ('Viewers', policy.viewers),
                         ('Publishers', policy.publishers),
                         ('Subscribers', policy.subscribers)):
        print('{}: {}'.format(label, value))
def list_topics():
    """Lists all Pub/Sub topics in the current project.

    Pages through the API, accumulating every topic, then prints the
    full names.
    """
    pubsub_client = pubsub.Client()

    topics = []
    next_page_token = None
    while True:
        # BUG FIX: the continuation token must be passed back to the API.
        # The original called list_topics() with no arguments, so any
        # project with more than one page re-fetched page 1 forever.
        page, next_page_token = pubsub_client.list_topics(
            page_token=next_page_token)
        topics.extend(page)
        if not next_page_token:
            break

    for topic in topics:
        print(topic.name)
def set_topic_policy(topic_name):
    """Grant viewer access to all users and editor access to a group."""
    client = pubsub.Client()
    target = client.topic(topic_name)

    policy = target.get_iam_policy()
    policy.viewers.add(policy.all_users())                   # everyone views
    policy.editors.add(policy.group('*****@*****.**'))       # group edits
    target.set_iam_policy(policy)

    print('IAM policy for topic {} set.'.format(target.name))
def list_subscriptions(topic_name):
    """Lists all subscriptions for a given topic.

    Pages through the API, accumulating every subscription, then prints
    the full names.
    """
    pubsub_client = pubsub.Client()
    topic = pubsub_client.topic(topic_name)

    subscriptions = []
    next_page_token = None
    while True:
        # BUG FIX: the continuation token must be passed back to the API.
        # The original called list_subscriptions() with no arguments, so
        # a topic with more than one page re-fetched page 1 forever.
        page, next_page_token = topic.list_subscriptions(
            page_token=next_page_token)
        subscriptions.extend(page)
        if not next_page_token:
            break

    for subscription in subscriptions:
        print(subscription.name)
def check_subscription_permissions(topic_name, subscription_name):
    """Report which of a fixed set of permissions the subscription grants."""
    client = pubsub.Client()
    parent = client.topic(topic_name)
    sub = parent.subscription(subscription_name)
    wanted = [
        'pubsub.subscriptions.consume',
        'pubsub.subscriptions.update',
    ]
    allowed = sub.check_iam_permissions(wanted)
    print('Allowed permissions for subscription {} on topic {}: {}'.format(
        sub.name, parent.name, allowed))
def transcode():
    """Publish a transcode request (video name from the URL or a default)."""
    message_string = request.args.get('video', None)
    if message_string:
        print("main.transcode video received in URL is :{}".format(message_string))
        # The request names the video without extension; add it here.
        message_var = (message_string + ".mp4").encode("utf-8")
    else:
        message_var = DEFAULT_VIDEO.encode("utf-8")

    topic = pubsub.Client(PROJECT_ID).topic(TOPIC)
    print("main.transcode about to publish message '{}' to youtube_partners: ".format(message_var))
    message_id = topic.publish(message_var)
    print("main.transcode Message {} published to youtube_partners.".format(message_id))
    return message_var
def receive_message(topic_name, subscription_name):
    """Pull and acknowledge available messages from a subscription."""
    client = pubsub.Client('safe-browsing-notification-api')
    parent = client.topic(topic_name)
    sub = parent.subscription(subscription_name)

    # Change return_immediately=False to block until messages arrive.
    results = sub.pull(return_immediately=True)

    print('Received {} messages.'.format(len(results)))
    for ack_id, message in results:
        print('* {}: {}, {}'.format(message.message_id, message.data,
                                    message.attributes))

    # Unacknowledged messages are redelivered by Pub/Sub.
    if results:
        sub.acknowledge([ack_id for ack_id, _ in results])
def main_func():
    """Run a sample ffmpeg trim job, then poll Pub/Sub for further work.

    NOTE(review): ffmpeg is invoked twice on the same command
    (check_call, then call) -- looks like leftover debugging; confirm
    whether both runs are intended.
    """
    #if __name__ == '__main__':
    # Logging configuration
    logging.basicConfig(
        level=logging.DEBUG,
        format="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S")
    # Hard-coded [start, end] second pairs to keep from the input video.
    segments = [[10, 25], [30, 40], [50, 55]]  #,[ 65 , 75 ],[ 80 , 90 ] ]
    ffmpeg_command = trim_blacks_reencoding_command("audio_video.mp4", segments, "200")
    print("ffmpeg command is{}".format(ffmpeg_command))
    # (previous experiments kept for reference)
    #ffmpeg_command = './ffmpeg -i _ -strict -2 -y -filter_complex "[0:v]trim=10:25,setpts=PTS-STARTPTS[v0]; ... concat=n=9:v=1[out] " -map "[out]" test_video_nb_re.mp4'
    #ffmpeg_command = './ffmpeg -framerate 1/5 -i final_%d.png -c:v libx264 -r 30 -pix_fmt yuv420p new_merged_test.mp4'
    subprocess.check_call(ffmpeg_command, shell=True)
    print("calling ffmpeg with: {}".format(ffmpeg_command))
    subprocess.call(ffmpeg_command, shell=True)
    print("ffmpeg call finished is{}".format(ffmpeg_command))
    # Poll the work topic: pull one message at a time, ack, and print it.
    pubsub_client = pubsub.Client(PROJECT_ID)
    topic = pubsub_client.topic("youtube_partners")
    subscription = topic.subscription("set_videos")
    sys.stderr.write("Polling the topic")
    while True:
        # Blocking pull of at most one message per iteration.
        results = subscription.pull(return_immediately=False, max_messages=1)
        if results:
            print("Received {} messages.".format(len(results)))
            for ack_id, message in results:
                print("worker.main message received in loop is {}: {}, {}".
                      format(message.message_id, message.data,
                             message.attributes))
                subscription.acknowledge(
                    [ack_id for ack_id, message in results])
                print("Aknowledged {}: ".format(message.message_id))
                # Payload is the UTF-8 name of the video to transcode.
                messageAsString = message.data.decode('utf-8')
                print("worker.main calling transcode with parameter:")
                print(messageAsString)
def test_create_sink_pubsub_topic(self):
    """Create a sink that exports to a Pub/Sub topic and verify it exists."""
    from gcloud import pubsub

    # Destination topic; grant Stackdriver Logging write access via IAM.
    topic = pubsub.Client().topic(TOPIC_NAME)
    topic.create()
    self.to_delete.append(topic)
    policy = topic.get_iam_policy()
    policy.owners.add(policy.group('*****@*****.**'))
    topic.set_iam_policy(policy)

    TOPIC_URI = 'pubsub.googleapis.com/%s' % (topic.full_name, )
    sink = Config.CLIENT.sink(DEFAULT_SINK_NAME, DEFAULT_FILTER, TOPIC_URI)
    self.assertFalse(sink.exists())
    sink.create()
    self.to_delete.append(sink)
    self.assertTrue(sink.exists())
def transcode():
    """Subscribe to 'mysub' and print incoming messages for ~100 seconds.

    Nothing is published here (the publish call is commented out); the
    function only drains the subscription via a streaming pull, acking
    each message, then returns a fixed string.
    """
    pubsub_client = pubsub.Client(PROJECT_ID)
    topic = pubsub_client.topic("message")
    # topic.publish(b"this is a hmt messageasdhajkshdui", foo="201812241613")

    subscriber = pubsub_v1.SubscriberClient()
    # `subscription_path` builds the fully qualified identifier:
    # `projects/{project_id}/subscriptions/{subscription_name}`.
    subscription_name = 'mysub'
    subscription_path = subscriber.subscription_path(PROJECT_ID,
                                                     subscription_name)

    # FIX: was named `list`, shadowing the builtin.
    received = []

    def callback(message):
        """Print, record, and ack one delivered message."""
        print('Received message: {}'.format(message.data))
        if message.attributes:
            print("Attributes")
            for key in message.attributes:
                value = message.attributes.get(key)
                print("{}: {}".format(key, value))
        received.append(message)
        message.ack()

    # FIX: the original bound the streaming-pull future to `message`,
    # shadowing the callback parameter name; also removed the no-op
    # annotation statement `pprint: message`, which evaluated nothing.
    future = subscriber.subscribe(subscription_path, callback=callback)

    # Keep the process alive so the background pull can deliver messages.
    n = 0
    while (n < 10):
        time.sleep(10)
        n += 1
    return 'hihi'
# NOTE(review): this span begins mid-function -- the `def` owning the next
# four statements (which use `vision`, `storage`, `image_urls`, presumably
# `label_images`) is outside the visible chunk; kept as-is.
    response = vision.detect_labels(image_contents)
    for image_url, labels in zip(image_urls, response):
        storage.add_labels(labels)
        storage.add_image(image_url, labels)


def label_images_task(image_urls):
    """Task entry point: label the given image URLs and store the results."""
    vision = VisionApi()
    storage = Storage()
    label_images(vision, storage, image_urls)


def scrape_reddit(subreddit, pages=10):
    """Yield one page of image previews per iteration.

    NOTE(review): the `subreddit` parameter is ignored -- 'aww' is
    hard-coded below; confirm which is intended.
    """
    after = None
    for _ in range(pages):
        posts, after = reddit.get_hot('aww', after=after)
        yield reddit.get_previews(posts)


def scrape_reddit_task(subreddit, pages=20):
    """Enqueue a labeling task for every scraped page of image URLs."""
    for image_urls in scrape_reddit(subreddit, pages):
        q = psq.Queue(pubsub.Client(), 'images')
        q.enqueue('main.label_images_task', image_urls)
        print("Enqueued {} images".format(len(image_urls)))


# Module-level queue handle used elsewhere in the app.
q = psq.Queue(pubsub.Client(), 'images')
def zones_current_proj():
    """Return zones for the project the default Pub/Sub client resolves to."""
    # The client infers the project from the environment / credentials.
    project = pubsub.Client().project
    return zones(project)
import time

from gcloud import pubsub

# Publish ten numbered greetings, one per second, to a fixed topic.
client = pubsub.Client(project='thedataclouds')
topic = client.topic('mytopic234')
for i in range(10):
    # FIX: Pub/Sub payloads must be bytestrings; the original passed a
    # str, which raises TypeError on Python 3. Same payload bytes.
    topic.publish(('Hello ' + str(i) + '!').encode('utf-8'))
    time.sleep(1)
# NOTE(review): this span begins mid-function -- the `def` owning the next
# statements (which use `fileName` and `segments`) is outside the visible
# chunk; kept as-is.
    ffmpegCommand = trim_blacks_reencoding_command(fileName, segments)
    print("calling ffmpeg with: {} ".format(ffmpegCommand))
    #subprocess.call(ffmpeg_command, shell=True)
    #print("ffmpeg call finished is{}".format(ffmpeg_command))
    # Non-zero exit status from ffmpeg means the trim failed.
    ret = subprocess.call(ffmpegCommand, shell=True)
    if ret:
        sys.stderr.write("FAILED")
        print('useFFMPEG OS command failed')
        return "Failed"
    print('useFFMPEG OS command succedded')
    return "SUCCESS"


if __name__ == '__main__':
    # Worker loop: poll the 'set_videos' subscription and ack each message.
    pubsub_client = pubsub.Client(PROJECT_ID)
    topic = pubsub_client.topic("youtube_partners")
    subscription = topic.subscription("set_videos")
    print("worker.main Polling the topic...\n")
    while True:
        # return_immediately=True makes this a busy-poll; results may be
        # empty on any iteration.
        results = subscription.pull(return_immediately=True, max_messages=1)
        if results:
            print("worker.main received {} messages.".format(len(results)))
            for ack_id, message in results:
                print("worker.main message received in loop is {}: {}, {}".
                      format(message.message_id, message.data,
                             message.attributes))
                subscription.acknowledge(
                    [ack_id for ack_id, message in results])
                print("worker.main Aknowledged {}: ".format(
                    message.message_id))
def scrape_reddit_task(subreddit, pages=20):
    """Enqueue a label_images_task job for each scraped page of URLs."""
    for image_urls in scrape_reddit(subreddit, pages):
        work_queue = psq.Queue(pubsub.Client(), 'images')
        work_queue.enqueue('main.label_images_task', image_urls)
        print("Enqueued {} images".format(len(image_urls)))
# Best-effort: enable the Cloud Debugger agent if it is installed.
try:
    import googleclouddebugger
    googleclouddebugger.enable()
# NOTE(review): bare `except` is deliberately best-effort here, but it also
# swallows KeyboardInterrupt/SystemExit; consider `except Exception`.
except:
    for e in sys.exc_info():
        print(e)

app = Flask(__name__)

# if debugging or running on localhost
if __name__ == "__main__":
    gcp_project_id = os.environ['GCP_PROJECT']
# running in Google Cloud Run
else:
    # The Pub/Sub client infers the project from the runtime environment.
    pubsub_client = pubsub.Client()
    gcp_project_id = pubsub_client.project

error_reporting_client = error_reporting.Client()


@app.route('/health', methods=['GET'])
def health_check():
    # Liveness probe: reports the Flask and Python versions in use.
    import flask
    return 'Running Flask {0} on Python {1}!\n'.format(flask.__version__, sys.version)


# full stacktrace is in Cloud Error reporting
@app.errorhandler(Exception)
def handle_uncaught_exception(err):
    # NOTE(review): the visible chunk may end mid-handler -- an error
    # handler usually returns a response after reporting; confirm in the
    # full file.
    error_reporting_client.report_exception()