class TestQueue(unittest.TestCase):
    default_queue = greenstalk.Client(host='127.0.0.1', port=12000)
    custom_queue = greenstalk.Client(host='127.0.0.1', port=12000, use="custom")

    def test_default_queue(self):
        self.default_queue.put("hello")
        job = self.default_queue.reserve(timeout=5)
        self.assertEqual("hello", job.body)
        self.default_queue.delete(job)
        with self.assertRaises(greenstalk.TimedOutError):
            self.default_queue.reserve(timeout=5)

    def test_custom_queue(self):
        self.custom_queue.put("hello")
        job = self.custom_queue.reserve(timeout=5)
        self.assertEqual("hello", job.body)
        self.custom_queue.delete(job)
        with self.assertRaises(greenstalk.TimedOutError):
            self.custom_queue.reserve(timeout=5)
def __init__(self, host, port, in_queue=None, out_queue=None):
    """Set up Beanstalk connections.

    :param host: Beanstalk host.
    :param port: Beanstalk port.
    :param in_queue: Optional input queue name.
    :param out_queue: Optional output queue name.
    """
    self.in_queue = None
    self.out_queue = None
    if in_queue:
        self.in_queue = greenstalk.Client(host, port, watch=in_queue)
    if out_queue:
        self.out_queue = greenstalk.Client(host, port, use=out_queue)
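# A minimal usage sketch for the two-client setup above (not from the original
# code): the class name `Worker` and the tube names "raw" and "processed" are
# assumptions introduced only for illustration.
worker = Worker(host="127.0.0.1", port=11300, in_queue="raw", out_queue="processed")
while True:
    job = worker.in_queue.reserve()     # block until a job arrives on the watched tube
    worker.out_queue.put(job.body)      # forward the body to the output tube
    worker.in_queue.delete(job)         # acknowledge only after forwarding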
def image_event(team):
    """
    Route function for handling the image event and pushing it to the queue.

    @param team (string) team to which this image belongs
    @returns JSON response if it's a url_verification request, blank otherwise
    """
    host = app_config['beanstalk']['host']
    port = app_config['beanstalk']['port']
    if team not in app_config['teams']:
        return abort(404)
    app_token = app_config['teams'][team]['app_token']
    if app_token == request.json['token']:
        if request.json['type'] == 'url_verification':
            return jsonify({'challenge': request.json['challenge']})
        else:
            image_id = request.json['event']['file']['id']
            stalk = greenstalk.Client(host, port)
            jobid = stalk.put(json.dumps((image_id, team)))
            stalk.close()
            return ''
    else:
        return ''
def connect(self, beanstalk_host: Text = "127.0.0.1", beanstalk_port: int = 11300) -> None:
    """Try to create a beanstalk connection."""
    if self.client:
        # Close the existing connection first.
        self.client.close()

    while not self.client_ready():
        try:
            logging.info(
                "Trying to connect to beanstalkd on %s:%s",
                beanstalk_host,
                beanstalk_port,
            )
            self.client = greenstalk.Client((beanstalk_host, beanstalk_port))
        except ConnectionRefusedError as err:
            logging.warning("Server.connect: %s", err)
            logging.info("Server.connect: waiting 5 seconds")
            time.sleep(5)

    # We only want to receive messages addressed specifically to scio. The
    # default beanstalk tube is ignored.
    self.client.watch("scio_doc")
    self.client.ignore("default")
    self.client.use("scio_analyze")
def main():
    """Main process to loop on the queue."""
    log.info("Starting Server Creation Consumer")
    try:
        with greenstalk.Client(host=VPN_QUEUE_SERVER, port=VPN_QUEUE_PORT,
                               watch=VPN_QUEUE_TUBE, use=VPN_QUEUE_TUBE) as queue:
            log.info("Daemon Ready")
            while True:
                log.debug("Waiting for job ... ")
                job = queue.reserve()
                if job is not None:
                    log.debug("[%s] New job received. Data: %s" % (job.id, job.body))
                    json_body = json.loads(job.body)
                    try:
                        result = process_single_server(json_body)
                        if result:
                            queue.delete(job)
                        else:
                            queue.bury(job)
                    except Exception:
                        log.error("A major error has occurred. Script cannot process job",
                                  exc_info=True)
                        queue.bury(job)
    except ConnectionRefusedError:
        log.error("Unable to connect to %s:%s. Is the beanstalkd daemon running?"
                  % (VPN_QUEUE_SERVER, VPN_QUEUE_PORT), exc_info=True)
def check_beanstalk():
    # Check that beanstalkd is running in the correct place.
    try:
        queue = greenstalk.Client()
    except ConnectionRefusedError:
        raise RuntimeError('Could not connect to beanstalkd at 127.0.0.1:11300')
    return queue
def master():
    client = greenstalk.Client(host=config["Beanstalkd"]["host"],
                               port=config["Beanstalkd"]["port"],
                               use="job", watch="result")
    while True:
        try:
            job = client.reserve(0.1)
            parsed = json.loads(job.body)
            finished_id = parsed.get("id")
            print("job finished", finished_id)
            rotate_num = 0
            while user_q[0] != finished_id:
                user_q.rotate(-1)
                rotate_num += 1
            user_q.popleft()
            user_q.rotate(rotate_num)
            client.delete(job)
        except greenstalk.TimedOutError:
            pass
        try:
            req = req_q.get(timeout=0.1)
            print("put job queue", req["id"])
            client.put(json.dumps(req))
        except queue.Empty:
            pass
def init_beanstalk(self):
    print("[BEANSTALK SERVICE] Initializing Beanstalk Client")
    self.beanstalk_client = greenstalk.Client(
        host=BEANSTALK_IP, port=BEANSTALK_PORT, watch="ocean"
    )
def __init__(self, host="127.0.0.1", port=11300):
    """Initializes the client, connecting to the given host and port."""
    try:
        # Use the port argument rather than a hard-coded 11300.
        self.client = greenstalk.Client(host=host, port=port)
    except Exception:
        self.client = None
        raise
def consume(self):
    """Loop that reads the queue, deleting a message only once it has been accepted."""
    with greenstalk.Client(host=self.host, port=self.port) as queue:
        while True:
            job = queue.reserve()
            try:
                self.endpoint.send(job.body)
                queue.delete(job)
            except Exception:
                # Requeue the body with a delay so the send to the API is retried later.
                queue.delete(job)
                queue.put(job.body, delay=10)
                print("Delaying the send to the API.")
def publish_message(self, message: Any, dest: DefinedTubes):
    """
    Publish a JSON-serializable message object (e.g. dict) to the configured
    destination (e.g. queue, tube).

    :param message: A message object that can be serialized with json.dumps().
    :param dest: Destination tube to publish to.
    """
    queue = greenstalk.Client(host=self.host, port=self.port, use=dest.value)
    try:
        queue.put(self.serialize_message(message))
    except Exception:
        log.exception("Error publishing message.")
    finally:
        queue.close()
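# `serialize_message` is called above but not shown; a minimal sketch of the
# method, assuming it simply JSON-encodes the payload (assumed implementation,
# not from the original code).
import json
from typing import Any

def serialize_message(self, message: Any) -> str:
    """Encode a JSON-serializable message into a queue body."""
    return json.dumps(message)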
def start_processing_thread():
    queue = greenstalk.Client(host=os.getenv("GREENSTALK_HOST"),
                              port=os.getenv("GREENSTALK_PORT"),
                              watch=[os.getenv("GREENSTALK_TUBE_QUEUE")])
    while True:
        job = queue.reserve()
        try:
            connection = psycopg2.connect(
                dbname=os.getenv("POSTGRESQL_DB"),
                host=os.getenv("POSTGRESQL_HOST"),
                port=os.getenv("POSTGRESQL_PORT"),
                user=os.getenv("POSTGRESQL_USER"),
                password=os.getenv("POSTGRESQL_PASS"))
            payload = json.loads(job.body)
            misc.log_message(f"Processing job: {payload}")
            # Process screenshot or preview.
            if payload["type"] == "screenshot":
                process_screenshot(connection, payload)
            elif payload["type"] == "preview":
                process_preview(connection, payload)
            else:
                raise Exception(f"""Invalid payload type: {payload["type"]}""")
            # Delete job.
            queue.delete(job)
            misc.log_message(f"Deleted job: {payload}")
        except Exception:
            # Delete the failed job.
            queue.delete(job)
            misc.log_message(f"Deleting failed job: {payload}")
            # Erase the submission record if it exists.
            if payload["type"] == "preview":
                db.delete_submission_record(connection, payload["id"])
                connection.commit()
                misc.log_message(f"""Deleted submission record: {payload["id"]}""")
            # Update the error count for url_id.
            record = db.get_url_record(connection, payload["id"])
            error_count = record["errors"] + 1
            db.update_url_record(connection, payload["id"], {"errors": error_count})
            connection.commit()
            misc.log_message(f"""Updating errors for url_id {payload["id"]}: {error_count}""")
            raise
        finally:
            connection.close()
def worker(beanstalk_server, placeholder, beanstalk_port=11300):
    """
    Start the worker loop, consuming jobs from the "pbs" tube.

    :param beanstalk_server: hostname of the beanstalk server
    :param beanstalk_port: port of the beanstalk server
    """
    import greenstalk
    with greenstalk.Client((beanstalk_server, beanstalk_port)) as bq:
        queue_names = ["pbs"]
        for i in queue_names:
            bq.watch(i)
        while True:
            # greenstalk raises TimedOutError instead of returning None.
            try:
                job = bq.reserve(timeout=1)
            except greenstalk.TimedOutError:
                continue
            try:
                # Deal with the message.
                message = json.loads(job.body)
                module_name = message['cmd']
                single_element = message['arg']
                if module_name == 'end':
                    # Jobs are released/deleted/buried via the client, not the Job object.
                    bq.release(job)
                    break
                os.system(module_name + ' ' + single_element)
                bq.delete(job)
            except Exception:
                traceback.print_exc()
                # TODO: a failed job should probably not just be buried
                # (translated from the original note).
                bq.bury(job)
def api_capture(api_key):
    """API endpoint for capturing a new screenshot."""
    with db.connect() as connection, greenstalk.Client(
            host=os.getenv("GREENSTALK_HOST"),
            port=os.getenv("GREENSTALK_PORT"),
            use=os.getenv("GREENSTALK_TUBE_QUEUE")) as queue:
        db.lock_data_table(connection)
        form = v.CaptureForm(request.values)
        if form.validate():
            request_id = str(uuid4())
            settings = misc.screenshot_settings(request.values)
            user_id = db.get_api_key_record_by_api_key(connection, api_key)["user_id"]
            if db.check_pending_request(connection, settings["url"], user_id):
                return ({"error": "A request for this exact URL is currently pending."}, 429)
            data = {
                "request_id": request_id,
                "url": settings["url"],
                "block_id": 0,
                "user_id": user_id,
                "queued": "true",
                "pruned": "false",
                "flagged": "false",
                "removed": "false",
                "failed": "false"
            }
            db.create_data_record(connection, data)
            db.update_api_key_use_count(connection, api_key)
            connection.commit()
            payload = {"request_id": request_id, "settings": settings}
            queue.put(json.dumps(payload), ttr=int(os.getenv("WWW2PNG_PROCESSING_TTR")))
            payload = {
                "request_id": request_id,
                "status_url": f"""{os.getenv("WWW2PNG_BASE_URL")}/api/status/{api_key}/{request_id}""",
                "image_url": f"""{os.getenv("WWW2PNG_BASE_URL")}/api/image/{api_key}/{request_id}""",
                "proof_url": f"""{os.getenv("WWW2PNG_BASE_URL")}/api/proof/{api_key}/{request_id}""",
            }
            return (payload, 200)
        else:
            for key in form.errors:
                return ({"error": form.errors[key][0]}, 400)
def status():
    """
    Route function for handling the queue status page.

    @param None
    @returns JSON response containing the beanstalk stats
    """
    host = app_config['beanstalk']['host']
    port = app_config['beanstalk']['port']
    stalk = greenstalk.Client(host, port)
    stats = stalk.stats()
    return jsonify(stats)
def beanstalk_client(
        args: argparse.Namespace,
        use: Optional[Text] = None,
        watch: Optional[Text] = None) -> Optional[greenstalk.Client]:
    """Return a beanstalk client if args.beanstalk is set, otherwise return None."""
    client = None
    if args.beanstalk:
        logging.info("Connecting to beanstalk")
        client = greenstalk.Client((args.beanstalk, args.beanstalk_port), encoding=None)
        if watch:
            client.watch(watch)
        if use:
            client.use(use)
    return client
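# Hypothetical usage of the factory above; the CLI values and tube names are
# assumptions for illustration, not taken from the original code.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--beanstalk")
parser.add_argument("--beanstalk-port", type=int, default=11300)
args = parser.parse_args(["--beanstalk", "127.0.0.1"])

client = beanstalk_client(args, use="scio_analyze", watch="scio_doc")
if client:
    client.put(b"payload")  # encoding=None above, so job bodies are raw bytes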
def do_save(args):
    client = greenstalk.Client(host=args.host, port=args.port,
                               use="results", watch=["results"])
    while True:
        job = client.reserve()
        current = json.loads(job.body)
        name = current['name']
        # data = current['data']
        path = os.path.join(args.o, name)
        print(path)
        # with open(path, 'wb') as fd:
        #     fd.write(base64.b64decode(data))
        client.delete(job)
def main():
    """Main process to loop on the queue."""
    log.debug("Starting Server ...")
    try:
        with greenstalk.Client(host=VPN_QUEUE_SERVER, port=VPN_QUEUE_PORT,
                               watch=VPN_QUEUE_TUBE, use=VPN_QUEUE_TUBE) as queue:
            log.info("Daemon Ready")
            while True:
                log.debug("Waiting for job ... ")
                job = queue.reserve()
                if job is not None:
                    log.debug("[%s] New job received. Data: %s" % (job.id, job.body))
                    json_body = json.loads(job.body)
                    try:
                        result = process_single_certificate(json_body)
                        if result:
                            log.debug("Job processed successfully")
                            queue.delete(job)
                        else:
                            log.warning("Job finished with an error, burying job for analysis")
                            queue.bury(job)
                    except Exception:
                        log.error("A major error has occurred. Script cannot process job: %s"
                                  % json_body, exc_info=True)
                        queue.bury(job)
    except ConnectionRefusedError:
        log.error("Unable to connect to %s:%s. Is the beanstalkd daemon running?"
                  % (VPN_QUEUE_SERVER, VPN_QUEUE_PORT), exc_info=True)
def buried():
    with greenstalk.Client(host=os.getenv("GREENSTALK_HOST"),
                           port=os.getenv("GREENSTALK_PORT"),
                           use=os.getenv("GREENSTALK_TUBE_QUEUE")) as queue:
        form = v.BuriedForm()
        if form.validate_on_submit():
            try:
                print(form.data)
                if form.data["action"] == "delete" and form.data["job_id"] is not None:
                    queue.delete(form.data["job_id"])
                elif form.data["action"] == "kick" and form.data["job_id"] is not None:
                    queue.kick_job(form.data["job_id"])
            except greenstalk.NotFoundError:
                return redirect("/buried", code=302)
        try:
            job = queue.peek_buried()
            data = {"job_body": job.body, "job_id": job.id}
        except greenstalk.NotFoundError:
            data = {}
        return render_template("buried.html", page_title=misc.page_title("buried"), data=data)
def __init__(self, host, port, queue_name, artifact_types=None,
             filter_string=None, allowed_sources=None, **kwargs):
    """Beanstalk operator."""
    self.queue = greenstalk.Client(host, port, use=queue_name)
    super(Plugin, self).__init__(artifact_types=artifact_types,
                                 filter_string=filter_string,
                                 allowed_sources=allowed_sources,
                                 **kwargs)
    self.artifact_types = artifact_types or [
        threatingestor.artifacts.URL,
    ]
def _lock_instance(self, instance, reason="locked", timeout=30, job_id=None, flush_keys=[]):
    lock_key = self._instance_lock_key(instance)
    cache.set(lock_key, reason, timeout)
    locked_instances = cache.get('locked_instances')
    if locked_instances is not None:
        locked_instances["%s" % instance] = reason
        cache.set('locked_instances', locked_instances, 90)
    else:
        cache.set('locked_instances', {'%s' % instance: "%s" % reason}, 90)
    if job_id is not None:
        b = None
        for i in range(5):
            try:
                b = greenstalk.Client(host=settings.BEANSTALKD_HOST,
                                      port=settings.BEANSTALKD_PORT)
                break
            except Exception:
                sleep(1)
        if b is None:
            return
        if BEANSTALK_TUBE:
            b.use(BEANSTALK_TUBE)
        b.put(json.dumps({
            "type": "JOB_LOCK",
            "cluster": self.slug,
            "instance": instance,
            "job_id": job_id,
            "lock_key": lock_key,
            "flush_keys": flush_keys + [self._instance_cache_key(instance)]
        }))
def pushend():
    """
    Put an end signal onto the labkit queue.
    """
    parser = argparse.ArgumentParser(description="""
    Put an end signal onto the labkit queue.
    """)
    port = 11300
    parser.add_argument("beanstalk_server", help="hostname of beanstalk server")
    args = parser.parse_args()
    beanstalk_server = args.beanstalk_server
    import greenstalk
    with greenstalk.Client((beanstalk_server, port)) as bq:
        push_pbs(bq, 'end', 'end')
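# `push_pbs` is used above but not shown here; a minimal sketch, assuming it
# enqueues a {"cmd": ..., "arg": ...} JSON message on the "pbs" tube that the
# worker() loop above consumes (assumed implementation, not from the original).
import json
import greenstalk

def push_pbs(bq: greenstalk.Client, cmd: str, arg: str) -> int:
    bq.use("pbs")
    return bq.put(json.dumps({"cmd": cmd, "arg": arg}))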
def monitor_jobs():
    # We have to open one socket per Greenlet, as currently socket sharing is
    # not allowed.
    try:
        b = greenstalk.Client(host=settings.BEANSTALKD_HOST,
                              port=settings.BEANSTALKD_PORT)
    except Exception as err:
        logger.error("Error connecting to beanstalkd: %s" % str(err))
        sleep(5)
        return
    try:
        b.watch(settings.BEANSTALK_TUBE)
        b.ignore("default")
    except AttributeError:
        # We are watching "default" anyway.
        pass
    while True:
        job = b.reserve()
        stats = b.stats_job(job)
        # Check for erratic jobs and bury them.
        if stats["reserves"] > RESERVE_ERROR_THRESHOLD:
            logger.error("Job %d reserved %d (> %d) times, burying"
                         % (job.id, stats["reserves"], RESERVE_ERROR_THRESHOLD))
            b.bury(job)
            continue
        try:
            data = json.loads(job.body)
        except ValueError:
            logger.error("Job %d has malformed body '%s', burying" % (job.id, job.body))
            b.bury(job)
            continue
        if "type" in data and data["type"] in DISPATCH_TABLE:
            DISPATCH_TABLE[data["type"]](b, job)
def web_capture():
    """Web endpoint to capture a new screenshot."""
    with db.connect() as connection, greenstalk.Client(
            host=os.getenv("GREENSTALK_HOST"),
            port=os.getenv("GREENSTALK_PORT"),
            use=os.getenv("GREENSTALK_TUBE_QUEUE")) as queue:
        db.lock_data_table(connection)
        form = v.CaptureForm()
        if form.validate_on_submit():
            request_id = str(uuid4())
            settings = misc.screenshot_settings(request.values)
            data = {
                "request_id": request_id,
                "url": settings["url"],
                "block_id": 0,
                "user_id": 1,
                "queued": "true",
                "pruned": "false",
                "flagged": "false",
                "removed": "false",
                "failed": "false"
            }
            db.create_data_record(connection, data)
            connection.commit()
            payload = {"request_id": request_id, "settings": settings}
            queue.put(json.dumps(payload), ttr=int(os.getenv("WWW2PNG_PROCESSING_TTR")))
            return redirect("/web/view/" + request_id, code=303)
        else:
            for key in form.errors:
                return render_template("error.html",
                                       page_title=misc.page_title("500"),
                                       data={
                                           "header": "Error",
                                           "error": f"""{key}: {form.errors[key][0]}"""
                                       }), 400
def api_request():
    """API endpoint to register a user and request a new API key."""
    with db.connect() as connection, greenstalk.Client(
            host=os.getenv("GREENSTALK_HOST"),
            port=os.getenv("GREENSTALK_PORT"),
            use=os.getenv("GREENSTALK_TUBE_ACTIONS")) as actions:
        form = v.ApiKeyForm()
        if form.validate_on_submit():
            challenge = str(uuid4())
            email = request.values["email"]
            data = {"email": email, "challenge": challenge}
            db.create_unverified_user_record(connection, data)
            connection.commit()
            payload = {"action": "send_api_request_email", "data": data}
            actions.put(json.dumps(payload), ttr=int(os.getenv("WWW2PNG_PROCESSING_TTR")))
            return render_template("web_api_key_requested.html",
                                   page_title=misc.page_title("api_request"),
                                   data=data)
        else:
            for key in form.errors:
                return ({"error": form.errors[key][0]}, 400)
def process_image_queue():
    """
    Queue worker entrypoint.

    @param None
    @returns None
    """
    global team_config
    host = app_config['beanstalk']['host']
    port = app_config['beanstalk']['port']
    stalk = greenstalk.Client(host, port)
    print("ImgSlack Processor Has Started")
    while not exit_loop:
        # greenstalk returns a Job object; use its body/id rather than unpacking a tuple.
        job = stalk.reserve()
        body = tuple(json.loads(job.body))
        team_config = app_config['teams'][body[1]]
        metadata = get_data(body[0])
        if metadata is not False:
            image = download_image(metadata)
            metadata['local_path'] = image
            for channel in metadata['channels']:
                notify_channel(channel[1], generate_link(image))
            print(body[0] + " complete")
        stalk.delete(job)
    stalk.close()
def fetcher():
    client = greenstalk.Client(host=config["Beanstalkd"]["host"],
                               port=config["Beanstalkd"]["port"],
                               use="result", watch="job")
    while True:
        print("waiting for job..")
        job = client.reserve()
        print("got job", job.body)
        parsed = json.loads(job.body)
        reqq.put(parsed)
        while True:
            try:
                client.touch(job)
                resq.get(timeout=0.5)
                client.delete(job)
                client.put(json.dumps({"id": parsed["id"]}))
                break
            except queue.Empty:
                continue
            except Exception as e:
                print(e)
                client.release(job)
                break
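# master() and fetcher() above share several module-level structures that are
# not shown; a minimal sketch of what they might look like (the names come from
# the code, the types are assumptions).
import queue
from collections import deque

user_q = deque()        # ordering of users waiting for results
req_q = queue.Queue()   # requests flowing from the caller into master()
reqq = queue.Queue()    # jobs handed from fetcher() to a local processor
resq = queue.Queue()    # completion signals back to fetcher()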
def do_load(args):
    with open(args.fuzz_config, 'r') as fd:
        config = yaml.safe_load(fd)
    with open(args.fuzz_file, 'r') as fd:
        fuzzd = yaml.safe_load(fd)
    host, port = config['vars']['queue'].split(':')
    queue = greenstalk.Client(host=host, port=int(port), use='jobs', watch=['jobs'])
    for target in fuzzd['fuzz_targets']:
        props = copy.copy(fuzzd[target])
        meta = config[props['key']]
        for i in range(1, config['vars']['trials'] + 1):
            props['trial'] = i
            props['timeout'] = config['vars']['time']
            # Normalize the timeout to seconds: hours -> minutes -> seconds.
            if props['timeout'].endswith('h'):
                props['timeout'] = "%sm" % (int(props['timeout'][:-1]) * 60)
            if props['timeout'].endswith('m'):
                props['timeout'] = int(props['timeout'][:-1]) * 60
            elif props['timeout'].endswith('s'):
                props['timeout'] = int(props['timeout'][:-1])
            props['jobs'] = config['vars']['jobs']
            props['ncores'] = config['vars']['ncores']
            props['cmd'] = meta['cmd'].replace("bin", props['path'])
            props['seed'] = os.path.join(config['vars']['seeds'], meta['seed'])
            props['workdir'] = os.path.join(config['vars']['workdir'], target, str(i))
            queue.put(json.dumps(props))
def main():
    """Stats reporting tool to send beanstalk tube stats to graylog."""
    #
    # Set up arg parsing and gather CLI args
    parser = argparse.ArgumentParser()
    parser.add_argument("-b", "--beanstalk", type=str,
                        help="beanstalk server and port")
    parser.add_argument("-g", "--graylog", type=str,
                        help="graylog server and port for GELF input")
    parser.add_argument("-t", "--tubes", type=str, default="default",
                        help="list of tubes to watch (comma-separated)")
    parser.add_argument("-u", "--udp", action="store_true",
                        help="use UDP for GELF graylog connection (TCP is default)")
    args = parser.parse_args()

    if not args.beanstalk:
        print("You need to provide a beanstalk server to connect to")
        sys.exit(1)
    else:
        beanstalk = {'host': None, 'port': None}
        beanstalk['host'], _, beanstalk['port'] = args.beanstalk.partition(':')

    if not args.graylog:
        print("You need to provide a graylog server to send data to")
        sys.exit(1)
    else:
        graylog = {'host': None, 'port': None}
        graylog['host'], _, graylog['port'] = args.graylog.partition(':')

    if not args.tubes:
        print("You need to provide one or more tubes to monitor stats for")
        sys.exit(1)

    #
    # Set up logger
    logging.basicConfig(level=logging.INFO)
    gelf_handler = (GelfUdpHandler if args.udp else GelfTcpHandler)(
        host=graylog.get('host', '127.0.0.1'),
        port=int(graylog.get('port', '12201')),
        include_extra_fields=True)
    logger = logging.getLogger()
    logger.handlers = []
    logger.addHandler(gelf_handler)

    # Default record info
    logging_defaults = {
        'name': 'graybean',
        'level': logging.getLevelName(logging.INFO),
        'levelno': logging.INFO,
        'pathname': osp.realpath(__file__),
        'msg': '',
        'args': None,
        'exc_info': None
    }

    # Get connected
    queue = gs.Client(**beanstalk)
    tubes = [x.strip() for x in args.tubes.split(',')]
    print("Collecting stats for tube{} '{}' every 5 seconds...".format(
        's' if len(tubes) > 1 else '', ', '.join(tubes)))
    while True:
        try:
            for tube in tubes:
                record = merged(
                    logging_defaults,
                    {
                        'msg': "Stats for tube '{}' from '{}'".format(
                            tube, beanstalk['host']),
                        'tube': tube
                    },
                    queue.stats_tube(tube))
                logger.handle(logging.makeLogRecord(record))
            # Sleep for 5s
            time.sleep(5)
        except KeyboardInterrupt:
            print("\nExiting gracefully")
            sys.exit(0)
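# The `merged` helper used to build the log record above is not defined in this
# snippet; a minimal sketch, assuming it performs a left-to-right dict merge.
def merged(*dicts):
    """Merge dictionaries left to right, with later keys overriding earlier ones."""
    result = {}
    for d in dicts:
        result.update(d)
    return result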
import json
import os
import subprocess
import time

import greenstalk

DOWNLOAD_FOLDER = os.environ.get('DOWNLOAD_FOLDER', '.')
BEANSTALK_PORT = int(os.environ.get('BEANSTALK_PORT', -1))

os.chdir(DOWNLOAD_FOLDER)
print("cd to folder " + DOWNLOAD_FOLDER)

if BEANSTALK_PORT == -1:
    print("BEANSTALK_PORT required")
    exit(-1)

queue = greenstalk.Client(host='127.0.0.1', port=BEANSTALK_PORT)
print("Connected to beanstalk")


def downloadVideo(body):
    url = body['url']
    time.sleep(1)
    print("*** DOWNLOADING *** " + url)
    args = [
        'youtube-dl', '--restrict-filenames', '--write-description',
        '--write-info-json', '--no-playlist', '--write-thumbnail', url
    ]
    print(args)
    result = subprocess.run(args)
    print("*** END PROCESS *** ")
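# The snippet above stops after defining downloadVideo; a minimal sketch of the
# reserve loop that could drive it, assuming the producer puts JSON bodies with
# a "url" field (this loop is not part of the original snippet).
while True:
    job = queue.reserve()                    # block until a download request arrives
    try:
        downloadVideo(json.loads(job.body))
        queue.delete(job)                    # acknowledge on success
    except Exception:
        queue.bury(job)                      # keep failed jobs around for inspection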