s3cxn = boto.connect_s3()
    sqscxn = boto.connect_sqs()
    qin = sqscxn.create_queue(options.inqueue)
    qout = sqscxn.create_queue(options.outqueue)
    qin.set_message_class(MHMessage)
    qout.set_message_class(MHMessage)
    retries = 0
    while retries < options.max_retries:
        result_set = qin.get_messages(1, visibility_timeout=60*30)
        if len(result_set) >= 1:
            m = result_set[0] 
            print "Received message: ", m.get_body()
            input_file = m['input_file'] 
            bucket = m['bucket'] 
            s3_download_file.get_s3_file_to_local(s3cxn, bucket, input_file, input_file, STORAGE_PREFIX)	

            full_input_path = STORAGE_PREFIX + input_file
            hdf_file = input_file.replace('csv.gz', 'hdf')
            full_hdf_path = FEATURE_DIR + hdf_file
            if os.path.isfile(full_hdf_path) and hdf_complete(full_hdf_path):
                print "HDF generated and complete - skipping feature extraction" 
            else:
                print "Processing file: ", input_file
                command = "python %s -d %s %s" % \
                  (feature_extractor, FEATURE_DIR, full_input_path) 
                (code, string) = commands.getstatusoutput(command) 
                if options.debug is True:
                    print "Processing retured: ", code, string

            print "Moving processed file file to bucket"
# --- Example #2 (score: 0) ---
    bucket = s3cxn.get_bucket(options.bucket_name)
    if len(args) == 0:
        keys = bucket.get_all_keys()
        keys = [k.name for k in keys]
        if options.pair_name is not None:
            filter = upper(options.pair_name)
            try:
                keys = [k for k in keys if string.find(k.name, filter) != -1]
            except ValueError:
                print "On to the next one..." 
             
    else:
        keys = args

    for k in keys:
        if k[-4:] == '.hdf':
            logging.debug("Downloading %s", k)
            if os.path.isfile(EPHEMERAL0 + k):
                print "Skipping download - file exists ", k
            else:
                s3_download_file.get_s3_file_to_local(s3cxn, bucket, k, k, EPHEMERAL0)
            for params in TEST_PARAMS:
                for strategy in TEST_STRATEGIES:
                    strategy_func = strategy[strategy.keys()[0]]
                    strategy_name = strategy.keys()[0]
                    test_strategy(strategy_name, strategy_func, EPHEMERAL0+k, min_profit_prct=params['profit'], signal_window_time=params['signal_window'], min_window_signals=params['min_signal_count']) 



# --- Example #3 (score: 0) ---
    s3cxn = boto.connect_s3()
    sqscxn = boto.connect_sqs()
    qin = sqscxn.create_queue(options.inqueue)
    qout = sqscxn.create_queue(options.outqueue)
    qin.set_message_class(MHMessage)
    qout.set_message_class(MHMessage)
    retries = 0
    while retries < options.max_retries:
        result_set = qin.get_messages(1, visibility_timeout=60 * 30)
        if len(result_set) >= 1:
            m = result_set[0]
            print "Received message: ", m.get_body()
            input_file = m['input_file']
            bucket = m['bucket']
            s3_download_file.get_s3_file_to_local(s3cxn, bucket, input_file,
                                                  input_file, STORAGE_PREFIX)

            full_input_path = STORAGE_PREFIX + input_file
            hdf_file = input_file.replace('csv.gz', 'hdf')
            full_hdf_path = FEATURE_DIR + hdf_file
            if os.path.isfile(full_hdf_path) and hdf_complete(full_hdf_path):
                print "HDF generated and complete - skipping feature extraction"
            else:
                print "Processing file: ", input_file
                command = "python %s -d %s %s" % \
                  (feature_extractor, FEATURE_DIR, full_input_path)
                (code, string) = commands.getstatusoutput(command)
                if options.debug is True:
                    print "Processing retured: ", code, string

            print "Moving processed file file to bucket"