# NOTE(review): whitespace-mangled tail fragment of a `mail` finalizer — the
# `def mail(...)` header and the opening `if format == "html":` branch are not
# visible in this chunk (the leading `buf.write`/`else:` belong to that unseen
# conditional). It appears to either concatenate all per-file buffers into one
# HTML MIME part, or attach each buffer as a base64-encoded file named
# `<filename>.<format>`, then send the message via SMTP with optional STARTTLS
# — confirm against the full, unmangled source before editing.
buf.write('<html><body>\n') for filename in d: buf.write(d[filename].getvalue()) buf.write('\n') buf.write('\n</body></html>') att = MIMEText(buf.getvalue(), "html") msg.attach(att) else: for filename in d: c = d[filename].getvalue() (maintype, subtype) = BabeBase.getMimeType(format) att = MIMEBase(maintype, subtype) att.set_payload(c) encoders.encode_base64(att) att.add_header('Content-Disposition', 'attachment', filename=filename + "." + format) msg.attach(att) s = smtplib.SMTP(smtp_server, smtp_port) s.ehlo() if smtp_tls: s.starttls() s.ehlo() s.login(smtp_login, smtp_password) s.sendmail(author, recipients, msg.as_string()) s.quit() BabeBase.registerFinalMethod('mail', mail)
def pull_mongo(false_stream, db, collection, spec=None, **kwargs):
    """ Pull objects from mongo as rows """
    # Strip the babe-specific options before forwarding the remaining kwargs
    # to both the Connection constructor and collection.find().
    passthrough = kwargs.copy()
    for reserved in ('fields', 'typename'):
        if reserved in passthrough:
            del passthrough[reserved]
    connection = Connection(**passthrough)
    coll = connection[db][collection]
    header = None
    for doc in coll.find(spec, **passthrough):
        if not header:
            # Emit the StreamHeader lazily, on the first document only.
            fields = kwargs.get('fields', None)
            if not fields:
                # Mandatory sort for determinism across runs.
                fields = sorted(StreamHeader.keynormalize(n) for n in doc)
            typename = kwargs.get('typename', collection)
            header = StreamHeader(**dict(kwargs, typename=typename,
                                         fields=fields))
            yield header
        yield header.t(*(doc[field] for field in fields))
    # Only emit a footer if at least one document produced a header.
    if header:
        yield StreamFooter()
BabeBase.registerFinalMethod("push_mongo", push_mongo)
BabeBase.register("pull_mongo", pull_mongo)
# NOTE(review): whitespace-mangled interior fragment of `push_sql` — the `def`
# header and the enclosing row-dispatch `if` (whose `elif isinstance(row,
# StreamFooter):` and trailing `else:` appear dangling here) are outside this
# chunk. On header it appears to start a DB client subprocess fed either via a
# temp FIFO (`import_query`) or a templated `load_command` pipe, wrapping the
# write end in a UTF-8 UnicodeCSVWriter; on footer it closes the stream and
# waits for the child. `print load_command` is a Python-2 debug print —
# presumably leftover; confirm against the full source before removing.
p = Popen(c, stdin=PIPE, stdout=None, stderr=None) tmpfifo = TempFifo() import_query = db_params['import_query'] % (tmpfifo.filename, table_name) p.stdin.write(import_query) p.stdin.flush() writestream = tmpfifo.open_write() elif 'load_command' in db_params: load_command = [Template(s).substitute(table=table_name, database=database) for s in db_params['load_command']] print load_command pp = Popen(load_command, stdin=PIPE, stdout=None, stderr=None) writestream = pp.stdin else: raise Exception("Missing load_command or import_query in db_kind spec") writer = UnicodeCSVWriter(writestream, dialect=sql_dialect(), encoding="utf-8") #writer = csv.writer(writestream, dialect=sql_dialect()) elif isinstance(row, StreamFooter): if "import_query" in db_params: tmpfifo.close() p.stdin.close() p.wait() elif 'load_command' in db_params: pp.stdin.close() pp.wait() else: writer.writerow(row) BabeBase.register('pull_sql', pull_sql) BabeBase.registerFinalMethod('push_sql', push_sql)
# NOTE(review): whitespace-mangled interior fragment of `push_sql`, starting at
# a bare `elif` whose governing `if 'import_query' ...` branch is outside this
# chunk. Appears to duplicate the chunk above: spawn the templated
# `load_command` subprocess (or raise if neither load option is configured),
# stream rows through a UTF-8 UnicodeCSVWriter, and on StreamFooter close the
# child's stdin and wait for it. The `print load_command` statement confirms
# Python 2. Do not edit in isolation — reconcile with the sibling copies of
# this fragment in the unmangled source first.
elif 'load_command' in db_params: load_command = [ Template(s).substitute(table=table_name, database=database) for s in db_params['load_command'] ] print load_command pp = Popen(load_command, stdin=PIPE, stdout=None, stderr=None) writestream = pp.stdin else: raise Exception( "Missing load_command or import_query in db_kind spec") writer = UnicodeCSVWriter(writestream, dialect=sql_dialect(), encoding="utf-8") #writer = csv.writer(writestream, dialect=sql_dialect()) elif isinstance(row, StreamFooter): if "import_query" in db_params: tmpfifo.close() p.stdin.close() p.wait() elif 'load_command' in db_params: pp.stdin.close() pp.wait() else: writer.writerow(row) BabeBase.register('pull_sql', pull_sql) BabeBase.registerFinalMethod('push_sql', push_sql)
# NOTE(review): whitespace-mangled interior fragment of `pull_bigquery` — the
# `def` header, the initial job submission, and the bindings of `bigquery`,
# `timeout`, `job_ref`, `num_retries` and the initial `metainfo = None` are
# outside this chunk. It appears to be the pagination loop body: build a
# StreamHeader lazily from the response schema's field names, yield one row
# per result (unwrapping each cell's 'v'), fetch the next page via
# getQueryResults(pageToken=...), and yield StreamFooter when pageToken is
# exhausted. Confirm the enclosing loop shape against the full source.
page_token = response.get('pageToken', None) query_complete = response.get('jobComplete', False) if query_complete: if not metainfo: fields = [f['name'] for f in response['schema']['fields']] typename = kwargs.get('typename', 'BigQuery') metainfo = StreamHeader(**dict(kwargs, typename=typename, fields=fields)) yield metainfo for row in response['rows']: yield metainfo.t(*[field['v'] for field in row['f']]) if page_token is None: # The query is done and there are no more results # to read. yield StreamFooter() break response = bigquery.jobs().getQueryResults( pageToken=page_token, timeoutMs=timeout, **job_ref ).execute( num_retries=num_retries ) BabeBase.register('pull_bigquery', pull_bigquery) BabeBase.registerFinalMethod('push_bigquery', push_bigquery)
# NOTE(review): whitespace-mangled tail fragment of a `mail` finalizer — the
# `def mail(...)` header is outside this chunk, so `d`, `msg`, `format`,
# `author`, `recipients` and the smtp_* parameters are unbound here. It appears
# to build the message body one of two ways — HTML format: concatenate every
# per-file buffer into a single <html><body> MIMEText part; otherwise: one
# base64-encoded MIMEBase attachment per buffer, named `<filename>.<format>` —
# then deliver via smtplib with EHLO, optional STARTTLS (re-EHLO after), login,
# sendmail, quit. Confirm against the full, unmangled source before editing.
if format == "html": buf = StringIO() buf.write('<html><body>\n') for filename in d: buf.write(d[filename].getvalue()) buf.write('\n') buf.write('\n</body></html>') att = MIMEText(buf.getvalue(),"html") msg.attach(att) else: for filename in d: c = d[filename].getvalue() (maintype, subtype) = BabeBase.getMimeType(format) att = MIMEBase(maintype, subtype) att.set_payload(c) encoders.encode_base64(att) att.add_header('Content-Disposition', 'attachment', filename=filename + "." + format) msg.attach(att) s = smtplib.SMTP(smtp_server, smtp_port) s.ehlo() if smtp_tls: s.starttls() s.ehlo() s.login(smtp_login, smtp_password) s.sendmail(author, recipients, msg.as_string()) s.quit() BabeBase.registerFinalMethod('mail', mail)
def pull_mongo(false_stream, db, collection, spec=None, **kwargs):
    """ Pull objects from mongo as rows """
    # 'fields' and 'typename' are consumed here; everything else is handed
    # through to pymongo's Connection() and to collection.find().
    mongo_kwargs = dict((key, value) for key, value in kwargs.items()
                        if key not in ('fields', 'typename'))
    coll = Connection(**mongo_kwargs)[db][collection]
    metainfo = None
    for doc in coll.find(spec, **mongo_kwargs):
        if not metainfo:
            # First document: derive the header. Default field list is the
            # normalized document keys, sorted — mandatory for determinism.
            fields = kwargs.get('fields', None)
            if not fields:
                fields = [StreamHeader.keynormalize(name) for name in doc]
                fields.sort()
            metainfo = StreamHeader(
                **dict(kwargs,
                       typename=kwargs.get('typename', collection),
                       fields=fields))
            yield metainfo
        yield metainfo.t(*[doc[field] for field in fields])
    if metainfo:
        # Footer only when the cursor yielded at least one document.
        yield StreamFooter()
BabeBase.registerFinalMethod("push_mongo", push_mongo)
BabeBase.register("pull_mongo", pull_mongo)
# NOTE(review): whitespace-mangled interior fragment of `push_sql` — the `def`
# header, the Popen/TempFifo setup for the `import_query` path, and the
# row-dispatch `if` that these dangling `elif`/`else` clauses belong to are all
# outside this chunk. Appears to mirror the sibling fragments: interpolate the
# FIFO filename and table name into import_query, or spawn a templated
# `load_command` pipe, write rows as UTF-8 CSV, and on StreamFooter close and
# wait for the child process. `print load_command` is Python-2 debug output —
# presumably leftover; verify against the full source.
import_query = db_params["import_query"] % (tmpfifo.filename, table_name) p.stdin.write(import_query) p.stdin.flush() writestream = tmpfifo.open_write() elif "load_command" in db_params: load_command = [ Template(s).substitute(table=table_name, database=database) for s in db_params["load_command"] ] print load_command pp = Popen(load_command, stdin=PIPE, stdout=None, stderr=None) writestream = pp.stdin else: raise Exception("Missing load_command or import_query in db_kind spec") writer = UnicodeCSVWriter(writestream, dialect=sql_dialect(), encoding="utf-8") # writer = csv.writer(writestream, dialect=sql_dialect()) elif isinstance(row, StreamFooter): if "import_query" in db_params: tmpfifo.close() p.stdin.close() p.wait() elif "load_command" in db_params: pp.stdin.close() pp.wait() else: writer.writerow(row) BabeBase.register("pull_sql", pull_sql) BabeBase.registerFinalMethod("push_sql", push_sql)