Example #1
def run(queue_name=None, 
        queue_name_output=None,
        write_delay=None,
        flush_queue=None,
        batch_size=None,
        format_any=None,
        trigger_none_msg=None, trigger_topic=None,
        delete_on_error=False, dont_pass_through=False,
        simulate_error=False, error_msg=None,
        log_network_error=False,
        **_):

    if write_delay > 900 or write_delay < 1:
        raise Exception("Invalid value for 'write_delay': must be between 1 and 900")
    
    # SETUP PRIVATE QUEUE
    #####################
    def setup_queue(qn):
        """closure for setting up a private queue"""
        conn = boto.connect_sqs()
        q = conn.create_queue(qn)
        if format_any:
            q.set_message_class(RawMessage)
        else:
            q.set_message_class(JSONMessage)
        return q
        
    logging.info("Creating Input Queue... (automatic retry)")
    try:
        qi = retry(partial(setup_queue, queue_name))
        logging.info("Input Queue successfully created")
    except Exception as e:
        raise Exception("Creating queue '%s': %s" % (queue_name, str(e)))
Example #2
def run(queue_name=None, flush_queue=None,
        batch_size=None, polling_interval=None,
        format_any=None, propagate_error=None,
        retry_always=None, wait_trigger=None,
        trigger_none_msg=None, trigger_topic=None,
        delete_on_error=False, dont_pass_through=False,
        simulate_error=False, error_msg=None,
        **_):

    ## we need a minimum of one second between polls
    polling_interval = max(1, polling_interval)
   
    # SETUP PRIVATE QUEUE
    #####################
    def setup_private_queue():
        """closure for setting up a private queue"""
        conn = boto.connect_sqs()
        q = conn.create_queue(queue_name)
        if format_any:
            q.set_message_class(RawMessage)
        else:
            q.set_message_class(JSONMessage)
        return q
        
    try:
        q = retry(setup_private_queue, always=retry_always)
        logging.info("Queue successfully created")
    except Exception as e:
        raise Exception("Creating queue '%s': %s" % (queue_name, str(e)))
Example #3
def run(
    queue_name=None,
    flush_queue=None,
    format_any=None,
    retry_always=None,
    topics=None,
    error_msg=None,
    simulate_error=None,
    **_
):

    queue_name = queue_name.strip()

    # SETUP PRIVATE QUEUE
    def setup_private_queue():
        conn = boto.connect_sqs()
        q = conn.create_queue(queue_name)
        if not format_any:
            q.set_message_class(JSONMessage)
        else:
            q.set_message_class(RawMessage)
        return q

    try:
        q = retry(setup_private_queue, always=retry_always)
    except Exception as e:
        raise Exception("Creating queue '%s': %s" % (queue_name, str(e)))
Example #4
def run(bucket_name=None, 
        path_source=None, 
        path_move=None,
        delete_source=False,
        polling_interval=60,
        extd=None,
        extf=None,
        **_):

    if not delete_source and path_move is None:
        raise Exception("Options 'delete source' or 'move path' is required")
    
    if delete_source and path_move is not None:
        raise Exception("Options 'delete source' and 'move path' are mutually exclusive")
    
    
    bucket_name = bucket_name.strip()
    path_source = path_source.strip()

    code, p_src = resolve_path(path_source)
    if not code.startswith("ok"):
        raise Exception("Invalid source path: %s" % path_source)

    mkdir_p(p_src)

    if path_move is not None:
        code, path_move = resolve_path(path_move)
        if not code.startswith("ok"):
            raise Exception("Invalid move path: %s" % path_move)

        code, _ = mkdir_p(path_move)
        if not code.startswith("ok"):
            raise Exception("Can't create move path: %s" % path_move)
    
    
    try:
        conn = boto.connect_s3()
    except Exception:
        ## not much we can do here, and no remote calls are actually
        ## made at this point, so a failure is highly improbable
        raise Exception("Can't 'connect' to S3")
    
    ###################### BUCKET
    logging.info("Getting/creating bucket (unlimited retries with backoff)")
    def _get_create_bucket():
        return conn.create_bucket(bucket_name)
              
    bucket = retry(_get_create_bucket)
    logging.info("Got bucket: %s" % bucket_name)
    #############################

    logging.debug("Starting loop...")

    ppid=os.getppid()
    logging.info("Process pid: %s" % os.getpid())
    logging.info("Parent pid:  %s" % ppid)
    while True:
        if os.getppid() != ppid:
            logging.warning("Parent terminated... exiting")
            break
        #################################################

        logging.debug("Start processing...")
        
        code, dirs = get_root_dirs(p_src)
        if not code.startswith("ok"):
            raise Warning("Source path disappeared: %s" % p_src)

        dirs = filter_dirs(extd, dirs)
        
        for _dir in dirs:
            process_dir(bucket, _dir, delete_source, extf, path_move)
        

        #####################################################
        logging.debug("...sleeping for %s seconds" % polling_interval)
        sleep(polling_interval)
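
resolve_path and mkdir_p are external helpers not shown in these excerpts. Their call sites unpack a (code, path) pair and test code.startswith("ok"), so a plausible sketch under that assumption (the exact code strings are guesses) is:

import errno
import os

def resolve_path(path):
    """Hypothetical sketch: expand '~' and environment variables and
    absolutize the path; ("ok", path) on success."""
    try:
        p = os.path.abspath(os.path.expandvars(os.path.expanduser(path)))
        return ("ok", p)
    except Exception:
        return ("error", path)

def mkdir_p(path):
    """Hypothetical sketch: 'mkdir -p' semantics; an already existing
    directory still counts as ok."""
    try:
        os.makedirs(path)
        return ("ok", path)
    except OSError as e:
        if e.errno == errno.EEXIST and os.path.isdir(path):
            return ("ok", path)
        return ("error", path)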
Example #5
        if format_any:
            q.set_message_class(RawMessage)
        else:
            q.set_message_class(JSONMessage)
        return q 
        
    logging.info("Creating Input Queue... (automatic retry)")
    try:
        qi = retry(partial(setup_queue, queue_name))
        logging.info("Input Queue successfully created")
    except Exception as e:
        raise Exception("Creating queue '%s': %s" % (queue_name, str(e)))

    logging.info("Creating Output Queue... (automatic retry)")
    try:
        qo = retry(partial(setup_queue, queue_name_output))
        logging.info("Output Queue successfully created")
    except Exception as e:
        raise Exception("Creating queue '%s': %s" % (queue_name_output, str(e)))

    # FLUSH QUEUE
    #############
    if flush_queue:
        try:
            qi.clear()
            logging.info("Input Queue flushed")
        except Exception:
            pass


    # MAIN LOOP
    ###########
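
The excerpt ends before the loop body. Purely as an illustration (this is not the original code), a boto pass-through step over the two queues could look like the sketch below; mapping write_delay onto SQS DelaySeconds is an assumption suggested by the 1-900 validation in Example #1.

    while True:
        msgs = qi.get_messages(num_messages=batch_size or 1)
        for msg in msgs:
            body = msg.get_body()
            if not dont_pass_through:
                ## assumption: write_delay maps onto SQS DelaySeconds
                qo.write(qo.new_message(body), delay_seconds=write_delay)
            qi.delete_message(msg)
        if not msgs:
            sleep(1)  ## avoid a busy loop on an empty queue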
Example #6
def run(
    enable_simulate=False,
    bucket_name=None,
    path_source=None,
    path_moveto=None,
    path_check=None,
    num_files=5,
    enable_delete=False,
    propagate_error=False,
    prefix=None,
    polling_interval=None,
    only_ext=None,
    filename_input_full=False,
    filename_input_regex=None,
    key_output_format=None,
    enable_progress_report=False,
    write_done=False,
    **_
):

    if key_output_format is not None:
        if filename_input_regex is None:
            raise Exception("-ifnr and -okf options work in tandem")

    if filename_input_regex is not None:

        if key_output_format is None:
            raise Exception("Input filename regex specified but no output S3 key format specified")

        logging.info("Compiling input filename regex...")
        try:
            ireg = re.compile(filename_input_regex.strip("'"))
            ofmt = key_output_format.strip("'")
        except Exception:
            raise Exception("Can't compile input filename regex pattern")
    else:
        ireg = None
        ofmt = None

    bucket_name = bucket_name.strip()
    path_source = path_source.strip()

    if prefix is not None:
        prefix = prefix.strip()

    if path_moveto is not None:
        path_moveto = path_moveto.strip()

    if path_check is not None:
        code, path_check = resolve_path(path_check)
        if not code.startswith("ok"):
            logging.warning("path_check '%s' might be in error..." % path_check)

    ### VALIDATE PARAMETERS
    if not enable_delete and path_moveto is None:
        raise Exception("either -d or -m must be used")

    if enable_delete and path_moveto is not None:
        raise Exception("-d can't be used with -m")

    code, p_src = resolve_path(path_source)
    if not code.startswith("ok"):
        raise Exception("Invalid source path: %s" % path_source)

    if path_moveto is not None:
        code, p_dst = resolve_path(path_moveto)
        if not code.startswith("ok"):
            raise Exception("Invalid moveto path: %s" % path_moveto)
    else:
        p_dst = None

    ### wait for 'source' path to be available
    logging.info("Waiting for source path to be accessible... CTRL-c to stop")
    while True:
        if os.path.isdir(p_src):
            break
        sleep(1)
    logging.info("* Source path accessible")

    if path_moveto is not None:
        logging.info("Creating 'moveto' directory if required")
        code, _ = mkdir_p(p_dst)
        if not code.startswith("ok"):
            raise Exception("Can't create 'moveto' directory: %s" % p_dst)
        logging.info("* Created moveto directory")

    if not enable_simulate:
        try:
            conn = boto.connect_s3()
        except Exception:
            ## not much we can do here, and no remote calls are actually
            ## made at this point, so a failure is highly improbable
            raise Exception("Can't 'connect' to S3")

    if not enable_simulate:
        ###################### BUCKET
        logging.info("Getting/creating bucket (unlimited retries with backoff)")

        def _get_create_bucket():
            return conn.create_bucket(bucket_name)

        bucket = retry(_get_create_bucket)
        logging.info("Got bucket")
        #############################

    if enable_simulate:
        logging.info("Begin simulation...")
    else:
        logging.debug("Starting loop...")

    ppid = os.getppid()
    logging.info("Process pid: %s" % os.getpid())
    logging.info("Parent pid:  %s" % ppid)
    while True:
        if os.getppid() != ppid:
            logging.warning("Parent terminated... exiting")
            break
        #################################################

        _code, path_exists = safe_path_exists(path_check)

        count = 0
        if path_check is None or path_exists:
            try:
                logging.debug("Start processing...")
                gen = gen_walk(p_src, max_files=num_files, only_ext=only_ext)

                for src_filename in gen:

                    if enable_progress_report:
                        logging.info("Processing file: %s" % src_filename)

                    if write_done:
                        if is_done_file(src_filename):
                            continue

                    try:
                        s3key_name = gen_s3_key(ireg, ofmt, p_src, src_filename, prefix, filename_input_full)
                    except Exception as e:
                        raise Exception(
                            "Error generating S3 key... check your command line parameters... use the 'simulate' facility: %s"
                            % e
                        )

                    if enable_simulate:
                        simulate(src_filename, s3key_name, enable_delete, p_dst)
                    else:
                        k = S3Key(bucket)
                        k.key = s3key_name
                        was_uploaded = process_file(
                            enable_progress_report,
                            bucket_name,
                            prefix,
                            k,
                            src_filename,
                            p_dst,
                            enable_delete,
                            propagate_error,
                            write_done,
                        )
                        if was_uploaded:
                            count = count + 1

            except Exception as e:
                logging.error("Error processing files...(%s)" % str(e))
        else:
            logging.info("path_check '%s' not present... skipping this pass" % path_check)

        if count > 0:
            logging.info("Progress> uploaded %s files" % count)

        #####################################################
        logging.debug("...sleeping for %s seconds" % polling_interval)
        sleep(polling_interval)
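
gen_s3_key is not part of the excerpt either. Given how ireg (the compiled input-filename regex) and ofmt (the output key format) are prepared above, one plausible reading of the naming rule is sketched below; feeding the captured groups into a %-style format string is an assumption.

import os

def gen_s3_key(ireg, ofmt, p_src, src_filename, prefix, use_full_path):
    ## hypothetical sketch, not the original helper
    name = src_filename if use_full_path else os.path.relpath(src_filename, p_src)
    if ireg is not None:
        m = ireg.match(name)
        if m is None:
            raise Exception("filename does not match input regex: %s" % name)
        key = ofmt % m.groups()  ## assumption: groups feed the output format
    else:
        key = name
    return "%s/%s" % (prefix, key) if prefix else key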
Example #7
def run_aws(node_id, proc, polling_interval, queue_name, topic_name, dst_path, delete_queue):

    if topic_name is None:
        raise Exception("Need a topic_name")

    auto_queue = False
    if queue_name is None:
        auto_queue = True
        queue_name = gen_queue_name()

    def setup_private_queue():
        conn = boto.connect_sqs()
        q = conn.create_queue(queue_name)
        q.set_message_class(RawMessage)
        return (conn, q)

    # SETUP PRIVATE QUEUE
    logging.info("Creating queue '%s'" % queue_name)
    sqs_conn, q = retry(setup_private_queue, logmsg="Having trouble creating queue...")

    topic_arn = build_topic_arn(sqs_conn, topic_name)

    logging.info("topic_arn: %s" % topic_arn)

    ### create topic
    def create_topic():
        """
        {'CreateTopicResponse': 
            {'ResponseMetadata': 
                {'RequestId': '5e2c6700-4dd0-11e1-b421-41716ce69b95'}, 
            'CreateTopicResult': {'TopicArn': 'arn:aws:sns:us-east-1:674707187858:election'}}}
        """
        snsconn = boto.connect_sns()
        snsconn.create_topic(topic_name)

    retry(create_topic, logmsg="Having trouble creating topic...")

    # SUBSCRIBE TO TOPIC
    def sub_topic():
        snsconn = boto.connect_sns()
        snsconn.subscribe_sqs_queue(topic_arn, q)
        return snsconn

    snsconn = retry(sub_topic, logmsg="Having trouble subscribing queue to topic...")

    logging.info("Subscribed to topic '%s'" % topic_name)

    current_state = "NL"

    MSGS = {"NL": "Leadership lost", "L": "Leadership gained", "ML": "Leadership momentum"}

    poll_count = 0

    def cleanup():
        rm(dst_path)
        if auto_queue or delete_queue:
            logging.info("... deleting queue")
            try:
                sqs_conn.delete_queue(q)
            except Exception:
                pass

    ppid = os.getppid()
    logging.info("Process pid: %s" % os.getpid())
    logging.info("Parent pid: %s" % ppid)
    logging.info("Starting loop...")
    while True:
        if os.getppid() != ppid:
            logging.warning("Parent terminated... exiting")
            cleanup()
            break

        try:
            ## check whether a SIGTERM was received so we can clean up
            global sigtermReceived
            if sigtermReceived:
                cleanup()
                raise SignalTerminate()
            #########################################

            try:
                ### BATCH PROCESS - required!!!
                while True:
                    rawmsg = q.read()
                    if rawmsg is not None:
                        jsonmsg = json.loads(rawmsg.get_body())
                        q.delete_message(rawmsg)

                        ## SNS encapsulates the original message...
                        nodeid = str(jsonmsg["Message"])

                        transition, current_state = proc.send((poll_count, nodeid))
                        jstdout({"state": current_state})

                        if transition:
                            logging.info(MSGS[current_state])
                            if current_state == "L":
                                code, _ = touch(dst_path)
                                logging.info("Created '%s': %s" % (dst_path, code))
                            else:
                                code, _ = rm(dst_path)
                                logging.info("Deleted '%s': %s" % (dst_path, code))
                    else:
                        break

            except SQSDecodeError:
                logging.warning("Message decoding error")

            except Exception as e:
                logging.error(str(e))
                continue

            msg = str(node_id)

            logging.debug("Publishing our 'node id': %s" % node_id)
            try:
                snsconn.publish(topic_arn, msg)
            except Exception:
                try:
                    snsconn.publish(topic_arn, msg)
                except Exception:
                    logging.warning("Can't publish to topic '%s'" % topic_name)

            logging.debug("... sleeping for %s seconds" % polling_interval)
            sleep(polling_interval)
            poll_count = poll_count + 1

        except KeyboardInterrupt:
            cleanup()
            raise
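
build_topic_arn is not shown. SNS topic ARNs have the shape arn:aws:sns:<region>:<account-id>:<topic> (compare the sample in the create_topic docstring above), and the account id can be recovered from a queue ARN. The probe-queue trick below is an assumption, not the original implementation.

def build_topic_arn(sqs_conn, topic_name):
    ## hypothetical sketch: region comes from the connection, the account
    ## id from an existing queue's ARN attribute
    region = sqs_conn.region.name
    probe = sqs_conn.create_queue("arn-probe")  ## assumed probe queue name
    queue_arn = probe.get_attributes("QueueArn")["QueueArn"]
    account_id = queue_arn.split(":")[4]
    return "arn:aws:sns:%s:%s:%s" % (region, account_id, topic_name)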