def run_query(self, engine_type, line, cell):
    """Parse the magic's arguments, build the query and engine, then either
    queue the job, show the translated code (dry run), or run it via
    td.read_td_query and return the post-processed result."""
    ip = get_ipython()
    try:
        args = self.parse_query_args(engine_type, line)
    except SystemExit:
        return
    self.code_list = []
    self.push_code("# translated code")
    query = self.build_query(cell)
    engine = self.build_engine(engine_type, args.database, args)
    # queue
    if args.queue:
        return self.submit_query(query, engine, args)
    # read_td_query
    self.push_code("_d = td.read_td_query(_q, _e)")
    if args.dry_run:
        return self.display_code_block()
    d = td.read_td_query(query, engine)
    # output
    r = self.post_process(d, args)
    if args.verbose:
        self.display_code_block()
    return r
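The run_query method above backs a query cell magic: it parses the magic's arguments, builds the query from the cell body and an engine for the requested database, then either queues the job, displays the translated code (dry run), or fetches the result with td.read_td_query. Below is a minimal sketch of that last path in plain pandas-td calls, mirroring the API used in load.py further down; the connection values and query are illustrative, and "TD_APIKEY" is a placeholder for a real API key.

import pandas_td as td

# Connect and build a Presto engine for the sample_datasets database.
con = td.connect(apikey="TD_APIKEY", endpoint='https://api.treasuredata.com')
engine = td.create_engine('presto:sample_datasets', con=con)

# Equivalent of run_query's non-queued, non-dry-run branch.
df = td.read_td_query('SELECT time, close FROM nasdaq LIMIT 100', engine)
print(df.head())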
def test_ok(self):
    read_td_query("select 1", self.engine)
    self.assert_query("select 1")
#!/usr/bin/python
import os
import sys

import pandas as pd
import pandas_td as td

print("load.py started")

# "TD_APIKEY" is a placeholder: replace it with your Treasure Data API key.
con = td.connect(apikey="TD_APIKEY", endpoint='https://api.treasuredata.com')

# Type: Presto, Database: sample_datasets
engine = td.create_engine('presto:sample_datasets', con=con)

# Read the result of a Treasure Data query into a DataFrame.
df = td.read_td_query('''
    SELECT time, close FROM nasdaq LIMIT 100
''', engine, index_col='time', parse_dates={'time': 's'})
print(df.head())

# Output the DataFrame to Treasure Data via Streaming Import.
# (If your dataset is large, this method is not recommended.)
td.to_td(df, 'workflow_temp.test_emr', con, if_exists='replace', index=False)

print("load.py finished")
def test_ok(self):
    read_td_query('select 1', self.engine)
    self.assert_query('select 1')