Example #1
0
 def test_integration(self):
     """End-to-end check: run the loader and verify the produced error CSV.

     The bad record (invalid TradeType) must appear in the error file with
     its validation message appended in the Status column.
     """
     loader = CSVTradeLoader("trade_file", source=os.path.dirname(__file__))
     loader.run()
     with open(os.path.join(tempfile.gettempdir(), loader.error_file),
               'r') as f:
         expect = 'Quantity,Price,Action,Trade Date,Instrument,Fund,TradeType,Trade ID,Msg Type,Status\n\n'
         expect += "333,112,Buy,2015-03-03,BAC.N,Fund1,,FF235,New,'TradeType': 'Valid values for TradeType are [Vanilla]'"
         # Strip newline variants so the comparison is line-ending agnostic.
         # assertEquals is a deprecated alias; use assertEqual.
         self.assertEqual(
             expect.replace("\n", "").replace("\r", ""),
             f.read().replace("\n", "").replace("\r", ""))
Example #2
0
 def test_write_csv(self):
     """write_csv should emit a header row plus one row per record.

     Fields missing from a record (F2 in the first row) are written blank.
     """
     loader = CSVTradeLoader("some_csv")
     headers = ['F1', 'F2']
     data = [{'F1': 'V1'}, {'F1': 'V1', 'F2': 'V2'}]
     csv_file = os.path.join(tempfile.gettempdir(), 'some_csv.csv')
     loader.write_csv(csv_file, headers, data)
     with open(csv_file, 'r') as f:
         expect = 'F1,F2\n\nV1,\n\nV1,V2\n'
         # assertEquals is a deprecated alias; use assertEqual.
         self.assertEqual(
             expect.replace("\n", "").replace("\r", ""),
             f.read().replace("\n", "").replace("\r", ""))
Example #3
0
    def test_load_trade_file(self):
        """load_trade_file should return the CSV header fields and parsed rows.

        Verifies the field order, the row count, and the full contents of the
        first trade record (all values arrive as strings from the CSV reader).
        """
        loader = CSVTradeLoader("trade_file", source=os.path.dirname(__file__))
        fields, trades = loader.load_trade_file()
        expect = [
            'Quantity', 'Price', 'Action', 'Trade Date', 'Instrument', 'Fund',
            'TradeType', "Trade ID", 'Msg Type'
        ]
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(expect, fields)
        self.assertEqual(3, len(trades))
        expect = {
            'Action': 'Buy',
            'Fund': 'Fund1',
            'Instrument': 'BAC.N',
            'Price': '22',
            'Quantity': '200',
            'Trade Date': '2015-03-03',
            'TradeType': 'Vanilla',
            'Trade ID': 'FF123',
            'Msg Type': 'New'
        }

        self.assertEqual(expect, trades[0])
Example #4
0
    def build(self, file_name, number, msg_type="new"):
        """Generate *number* randomized trade records and write them to *file_name*.

        For msg types other than "new", trade IDs are taken from existing
        Trade rows in the database so the messages reference real trades;
        otherwise fresh IDs are minted from a random base.
        """
        # Random base so IDs from separate build() calls don't collide.
        ID_BASE = hash(random.random())
        header = default_record.keys()
        records = []
        trade_ids = None
        if msg_type != "new":
            s = Session()
            trade_ids = [t.trade_id for t in s.query(Trade).limit(number)]
            s.close()

        for i in range(number):
            rec = default_record.copy()
            # BUG FIX: "{:5f}" is a field *width* of 5 with the default six
            # decimal places; "{:.5f}" limits to 5 decimal places as intended
            # (matching the round(..., 5) applied to Price below).
            rnd = Decimal("{:.5f}".format(random.random()))
            rec['Price'] = round(rec['Price'] * rnd * 2, 5)
            rec['Quantity'] = round(rec['Quantity'] * rnd * 2 + 1, 0)
            rec['Instrument'] = INSTRUMENTS[int(rnd * 100) % len(INSTRUMENTS)]
            if trade_ids:
                rec['Trade ID'] = trade_ids[i]
            else:
                rec['Trade ID'] = "{}-{}".format(ID_BASE, i)
            rec["Msg Type"] = msg_type
            records.append(rec)

        CSVTradeLoader.write_csv(file_name, header, records)
    def build(self, file_name, number, msg_type="new"):
        """Generate *number* randomized trade records and write them to *file_name*.

        For msg types other than "new", trade IDs are taken from existing
        Trade rows in the database so the messages reference real trades;
        otherwise fresh IDs are minted from a random base.
        """
        # Random base so IDs from separate build() calls don't collide.
        ID_BASE = hash(random.random())
        header = default_record.keys()
        records = []
        trade_ids = None
        if msg_type != "new":
            s = Session()
            trade_ids = [t.trade_id for t in s.query(Trade).limit(number)]
            s.close()

        for i in range(number):
            rec = default_record.copy()
            # BUG FIX: "{:5f}" is a field *width* of 5 with the default six
            # decimal places; "{:.5f}" limits to 5 decimal places as intended
            # (matching the round(..., 5) applied to Price below).
            rnd = Decimal("{:.5f}".format(random.random()))
            rec['Price'] = round(rec['Price'] * rnd * 2, 5)
            rec['Quantity'] = round(rec['Quantity'] * rnd * 2 + 1, 0)
            rec['Instrument'] = INSTRUMENTS[int(rnd * 100) % len(INSTRUMENTS)]
            if trade_ids:
                rec['Trade ID'] = trade_ids[i]
            else:
                rec['Trade ID'] = "{}-{}".format(ID_BASE, i)
            rec["Msg Type"] = msg_type
            records.append(rec)

        CSVTradeLoader.write_csv(file_name, header, records)
def run_once(cnt):
    """Run one load of the module-level trade file and print timing/status.

    *cnt* is only an iteration label for the printed line.
    """
    # Commented-out memory-leak instrumentation (gc/objgraph/muppy) removed;
    # dead code belongs in version control history, not the source.
    start = time.time()
    loader = CSVTradeLoader.create_from_path(file_name)
    status = loader.run()
    print("{}. Took {}. {}".format(cnt, time.time() - start, status))
Example #7
0
def run_once(cnt):
    """Run one load of the module-level trade file and print timing/status.

    *cnt* is only an iteration label for the printed line.
    """
    # Commented-out memory-leak instrumentation (gc/objgraph/muppy) removed;
    # dead code belongs in version control history, not the source.
    start = time.time()
    loader = CSVTradeLoader.create_from_path(file_name)
    status = loader.run()
    print("{}. Took {}. {}".format(cnt, time.time() - start, status))
Example #8
0
    def process(self, conn):
        """Download a user-uploaded trade file from S3, load it, and upload the
        resulting error report back to the same bucket.

        Returns a human-readable status string. Files not flagged with the
        'user_upload' metadata are skipped.
        """
        bucket = conn.get_bucket(self.bucket)
        trade_file_key = bucket.get_key(self.file_name)

        # Only process files explicitly flagged as user uploads.
        if str(trade_file_key.get_metadata('user_upload')).lower() != 'true':
            return ("Skipping {}. Not user uploaded file".format(self.file_name))

        trade_file = os.path.join(tempfile.gettempdir(), self.file_name)
        trade_file_key.get_contents_to_filename(trade_file)

        loader = CSVTradeLoader.create_from_path(trade_file)
        try:
            loader.run()

            error_file_key = bucket.new_key(loader.error_file)
            error_file_key.set_contents_from_filename(loader.error_filepath)
        finally:
            # Clean up temp files even if the load or upload raised, so
            # failures don't leave stale files in the temp directory.
            os.remove(trade_file)
            if os.path.exists(loader.error_filepath):
                os.remove(loader.error_filepath)
        # BUG FIX: message previously read "filed produced".
        return ("Trade file processed. {} file produced".format(loader.error_file))
Example #9
0
    def process(self, conn):
        """Download a user-uploaded trade file from S3, load it, and upload the
        resulting error report back to the same bucket.

        Returns a human-readable status string. Files not flagged with the
        'user_upload' metadata are skipped.
        """
        bucket = conn.get_bucket(self.bucket)
        trade_file_key = bucket.get_key(self.file_name)

        # Only process files explicitly flagged as user uploads.
        if str(trade_file_key.get_metadata('user_upload')).lower() != 'true':
            return ("Skipping {}. Not user uploaded file".format(self.file_name))

        trade_file = os.path.join(tempfile.gettempdir(), self.file_name)
        trade_file_key.get_contents_to_filename(trade_file)

        loader = CSVTradeLoader.create_from_path(trade_file)
        try:
            loader.run()

            error_file_key = bucket.new_key(loader.error_file)
            error_file_key.set_contents_from_filename(loader.error_filepath)
        finally:
            # Clean up temp files even if the load or upload raised, so
            # failures don't leave stale files in the temp directory.
            os.remove(trade_file)
            if os.path.exists(loader.error_filepath):
                os.remove(loader.error_filepath)
        # BUG FIX: message previously read "filed produced".
        return ("Trade file processed. {} file produced".format(loader.error_file))
Example #10
0
 def test_create_from_path(self):
     """create_from_path should split a path into file root, source and output dirs."""
     file_name = '/foo/bar/test.csv'
     loader = CSVTradeLoader.create_from_path(file_name)
     # assertEquals is a deprecated alias; use assertEqual.
     self.assertEqual('test', loader.file_root)
     self.assertEqual('/foo/bar', loader.source)
     self.assertEqual('/foo/bar', loader.output)
Example #11
0
 def test_trade_file(self):
     """trade_file should be the file root with a .csv extension."""
     loader = CSVTradeLoader("file_root")
     # assertEquals is a deprecated alias; use assertEqual.
     self.assertEqual('file_root.csv', loader.trade_file)
Example #12
0
 def test_error_filepath(self):
     """error_filepath should join the output directory with the error file name."""
     loader = CSVTradeLoader("file_root", output='/foo/bar')
     # assertEquals is a deprecated alias; use assertEqual.
     self.assertEqual(os.path.join('/foo/bar', 'file_root_errors.csv'),
                      loader.error_filepath)
Example #13
0
 def test_error_file(self):
     """error_file should be the file root plus an _errors.csv suffix."""
     loader = CSVTradeLoader("file_root", output='/foo/bar')
     # assertEquals is a deprecated alias; use assertEqual. The raw-string
     # prefix on the expected literal was pointless (no backslashes) and
     # has been dropped.
     self.assertEqual('file_root_errors.csv', loader.error_file)
 def build_csv(cls, file_name):
     """Collect every model's records into a data table and write it to *file_name* as CSV."""
     model_classes = RecordProcessor.model_list()
     field_map = RecordProcessor.FIELD_MAP
     rows = cls.get_data_table(field_map, model_classes)
     CSVTradeLoader.write_csv(file_name, cls.get_headers(model_classes), rows)