Example #1
	def __init__(self, report, selector):
		self.report_number = report
		self.original_report_number = report
		self.year = selector.report_list['year'][1]
		self.parsed = parse()  # for parsing inputs from rows
		self.connection = connect()  # connects to the DB
		self.queries = queries()  # query text for all tables
		self.agg = agg()  # aggregation functions for all tables

		self.json_builder = self.JSON_constructor_return()
		self.parse_return_string = self.parse_return()
		self.aggregation = self.aggregation_return(self.year, report)
		self.id_compiler = id_compiler()
		self.report_types = {'A':'Aggregate', 'D':'Disclosure', 'N':'National'}
Example #2
	def __init__(self, report, selector):
		self.report_number = report
		self.original_report_number = report
		self.year = selector.report_list['year'][1]
		self.parsed = parse()  # for parsing inputs from rows
		self.connection = connect()  # connects to the DB
		self.queries = queries()  # query text for all tables
		self.agg = agg()  # aggregation functions for all tables

		self.json_builder_return = self.JSON_constructor_return()
		self.parse_return_string = self.parse_return()
		self.aggregation = self.aggregation_return(self.year, report)
		self.id_compiler = id_compiler()
		self.report_types = {'A':'Aggregate', 'D':'Disclosure', 'N':'National'}
Example #3
    def write(self, cr, uid, ids, vals, context=None):
        res = super(res_partner_custom, self).write(cr, uid, ids, vals, context=context)
        # Fetch the OpenMRS connection settings once instead of one browse per field
        openmrs_object = self.pool.get('openmrs.connect')
        recId = openmrs_object.search(cr, uid, [], offset=0, limit=1)[0]
        config = openmrs_object.browse(cr, uid, recId)
        for rec in ids:
            partner = self.browse(cr, uid, rec)
            address = partner.address[0]
            values = {
                'state': address.state_id.name,
                'street': address.street,
                'city': address.city,
                'country': address.country_id.name,
                'last': partner.name,
                'fname': partner.fname,
                'mname': partner.mname,
                'gender': partner.gender,
                'birthdate': partner.birthdate,
                'number': partner.ref,
                'mother': partner.mother.whole_name,
                'birthplace': partner.birthplace,
            }
            patientid = partner.openmrs_number
            # Replace missing values with a single space so OpenMRS accepts them
            for item in values:
                if (values[item] is None) or (values[item] is False):
                    values[item] = " "
            if patientid != 0:
                # Patient already known to OpenMRS: push an update
                try:
                    connect_write(config.ip_address, config.port, config.username,
                                  config.password, config.database, patientid,
                                  values, config.identifier_type)
                    super(res_partner_custom, self).write(
                        cr, uid, rec, {'for_synchronization': False})
                except Exception:
                    super(res_partner_custom, self).write(
                        cr, uid, rec, {'for_synchronization': True})
            else:
                # No OpenMRS id yet: create the patient and store the new id
                try:
                    id_openmrs = connect(config.ip_address, config.port,
                                         config.username, config.password,
                                         config.database, values,
                                         config.identifier_type)
                    super(res_partner_custom, self).write(
                        cr, uid, rec,
                        {'openmrs_number': id_openmrs,
                         'for_synchronization': False})
                except Exception:
                    super(res_partner_custom, self).write(
                        cr, uid, rec, {'for_synchronization': True})
        return res
Example #4
def main():
    scope = [
        'https://spreadsheets.google.com/feeds',
        'https://www.googleapis.com/auth/drive'
    ]
    credentials = 'breast-cancer-dataset-credentials.json'

    worksheet = connector.connect(
        scope=scope,
        credentials=credentials).open('breast-cancer-dataset').sheet1

    all_records = worksheet.get_all_values()
    headers = all_records[0]

    # Append the new category column one past the existing headers
    new_col = len(headers) + 1
    worksheet.update_cell(1, new_col, 'clump_thickness_cat2')
    for row, data in enumerate(all_records[1:]):
        worksheet.update_cell(
            row + 2, new_col,
            fuzzifier(value=data[headers.index('clump_thickness')]))
        time.sleep(1)  # throttle writes to stay under the API rate limit
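fuzzifier is not defined in this snippet; a minimal sketch of what it could look like, assuming clump_thickness is the usual 1-10 integer score and coarse labels are wanted (thresholds and labels here are illustrative, not from the original):

def fuzzifier(value):
    # Hypothetical bucketing of a 1-10 clump thickness score
    score = int(value)
    if score <= 3:
        return 'low'
    if score <= 6:
        return 'medium'
    return 'high'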
Example #5
def getData(company, amount, datef, datet):

    news_dates, news, news_count = downloadNews(company, amount)
    writeNews(news_dates, news, news_count,
              path + 'news' + sep + '{}.csv'.format(company))
    #news_dates, news, news_count = readNews(path + 'news' + sep + '{}.csv'.format(company))

    stocks_dates, stocks, stocks_count = downloadStock(company, datef, datet)
    writeStock(stocks_dates, stocks, stocks_count,
               path + 'stocks' + sep + '{}.csv'.format(company))
    #stocks_dates, stocks, stocks_count = readStock(path + 'stocks' + sep + '{}.csv'.format(company))

    stems_dates, stems, stems_count = stem(news_dates, news, news_count)
    writeNews(stems_dates, stems, stems_count,
              path + 'stems' + sep + '{}.csv'.format(company))
    #stems_dates, stems, stems_count = readNews(path + 'stems' + sep + '{}.csv'.format(company))

    connections_dates, connections_news, connections_stocks, connections_count = connect(
        stems_dates, stems, stems_count, stocks_dates, stocks, stocks_count)
    writeConnections(connections_dates, connections_news, connections_stocks,
                     connections_count,
                     path + 'connections' + sep + '{}.csv'.format(company))
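A call-site sketch, assuming company is a ticker string, amount is the number of news items, and datef/datet are from/to dates (the values and formats are assumptions; the fragment does not show the caller):

getData('AAPL', 100, '2018-01-01', '2018-12-31')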
Example #6
def upload(state_dic):

    total_states = len(state_dic.keys())

    connection, metadata, engine = connect()

    db_tables = set(metadata.tables.keys())

    print("Loading DataFrames to Database Tables")

    for progress, k in enumerate(state_dic.keys(), 1):
        if k in db_tables:
            update_progress(progress / total_states)
            continue
        try:
            state_dic[k].to_sql(k, con=engine, if_exists='replace')
        except exc.SQLAlchemyError:
            print("****Couldn't insert {0} table. Investigate!****".format(k))

        update_progress(progress / total_states)
        state_dic[k] = ''  # free the DataFrame once it has been written

    print("Done uploading state DataFrames to the Database")
Example #7
#!/usr/bin/env python

# Use connector directly
import connector
cursor = connector.connect('default').cursor()
cursor.execute('SELECT * FROM test LIMIT 10')
print(cursor.fetchone())

# Register SQLAlchemy dialect
from sqlalchemy.dialects import registry
registry.register("clickhouse", "base", "dialect")

# Test engine and table
from sqlalchemy import *
from sqlalchemy.engine import create_engine
from sqlalchemy.schema import *

engine = create_engine('clickhouse://default:@localhost:8123/default')
logs = Table('test', MetaData(bind=engine), autoload=True)
print(select([func.count('*')], from_obj=logs).scalar())
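This snippet assumes the pre-2.0 SQLAlchemy query API (select([...], from_obj=...) and MetaData(bind=engine)). In the same style, a filtered count might look like this, assuming the test table has an id column (the schema is not shown):

print(select([func.count('*')],
             whereclause=logs.c.id > 0,
             from_obj=logs).scalar())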
Example #8
    def __init__(self):
        self.max_key = 0
        self.old_max_key = ''
        self.next_conn_time = 10
        self.client = connect()
        self.data = {}
Example #9
import connector
conn = connector.connect()

cur = conn.cursor()

# TODO

cur.close()
conn.close()
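At the TODO, before the close calls, a typical round-trip with this cursor might be (the table name is a placeholder assumption):

cur.execute('SELECT * FROM example_table LIMIT 5')  # placeholder table
for row in cur.fetchall():
    print(row)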
Example #10
from proxy import ProxyList
import parser
import connector

proxies = ProxyList()
for proxy in proxies:
    connector.connect(proxy.addr, proxy.port)
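ProxyList is imported from proxy but not shown; a minimal sketch consistent with the attributes the loop reads (the entries are placeholders):

class Proxy(object):
    def __init__(self, addr, port):
        self.addr = addr
        self.port = port

class ProxyList(object):
    def __init__(self):
        # placeholder entries; a real list would be loaded or scraped
        self._proxies = [Proxy('127.0.0.1', 8080)]

    def __iter__(self):
        return iter(self._proxies)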
Example #11
#import json
import time
import psycopg2
import psycopg2.extras
from connector import connect_DB as connect
from builder import build_JSON as build
from selector import report_selector as selector
from constructor import report_construction
from file_check import check_file
from report_list import report_list_maker

connection = connect()  # connects to the DB
selector = selector()  # holds lists of reports to be generated for each MSA (rebinds the imported name)
cur = connection.connect()  # cursor into the locally hosted HMDAPub2013 Postgres database
selector.get_report_lists('MSAinputs2013.csv')  # fills the dictionary of lists of reports to be generated
build_msa = build()  # instantiate the build object for file paths and Jekyll files
#build_msa.msas_in_state(cur, selector, 'aggregate') #creates a list of all MSAs in each state and places the file in the state's aggregate folder


#List of Alabama MSAs for test state case
#AL_MSAs = ['45180', '45980', '11500', '10760', '42460', '13820', '19460', '23460', '46740', '17980', '12220', '20020', '18980', '33860', '46260', '33660', '19300', '22840', '21460','10700','21640','42820','26620','22520','46220']
AL_MSAs = ['33660']

#report lists for testing
#selector.reports_to_run = ['A 3-1', 'D 3-1']
#selector.reports_to_run = ['A 11-1']
#selector.reports_to_run = ['D 4-1', 'D 4-2', 'D 4-3', 'D 4-4', 'D 4-6', 'D 4-7']
#selector.reports_to_run = ['A 5-1', 'A 5-2', 'A 5-3', 'A 5-4', 'A 5-5', 'A 5-7']
#selector.reports_to_run = ['A 7-1', 'A 7-2', 'A 7-3', 'A 7-4', 'A 7-5', 'A 7-6', 'A 7-7']
#selector.reports_to_run = ['A 8-1', 'A 8-2', 'A 8-3', 'A 8-4', 'A 8-5', 'A 8-6', 'A 8-7']
#selector.reports_to_run = ['A 11-1', 'A 11-2', 'A 11-3', 'A 11-4', 'A 11-5', 'A 11-6', 'A 11-7', 'A 11-8', 'A 11-9', 'A 11-10']
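connect_DB itself is not shown; given the psycopg2 imports and the connection.connect() call that yields a cursor, a minimal sketch could be (database name and credentials are assumptions):

import psycopg2
import psycopg2.extras

class connect_DB(object):
    def __init__(self):
        # placeholder credentials for the locally hosted HMDAPub2013 database
        self.conn = psycopg2.connect(dbname='HMDAPub2013',
                                     user='postgres', host='localhost')

    def connect(self):
        # dict-style cursor so rows can be read by column name
        return self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)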
Example #12
def upload(name_dic):

    year_list = list(range(1880, 2018))

    total_keys = len(name_dic.keys())
    total_years = len(year_list)

    connection, metadata, engine = connect()

    db_tables = set(metadata.tables.keys())

    if "Names" in db_tables:
        print("Names Table already exists")
        name_table = Table('Names', metadata)

    else:
        name_table = Table('Names', metadata,
                           Column('id', Integer, primary_key=True),
                           Column('name', String(30)))

    if "Years" in db_tables:
        print("Years Table already exists")
        year_table = Table('Years', metadata)

    else:
        year_table = Table('Years', metadata,
                           Column('id', Integer, primary_key=True),
                           Column('year', Integer))

    try:
        metadata.create_all(engine)

    except exc.SQLAlchemyError:
        print("Tables already existed")

    print("Loading DataFrames to Database Tables")

    for progress, k in enumerate(name_dic.keys(), 1):

        if k in db_tables:
            update_progress(progress/total_keys)
            continue

        try:
            name_dic[k].to_sql(k, con=engine, if_exists='replace')
        except exc.SQLAlchemyError:
            print("****Couldn't insert {0} table. Investigate!****".format(k))

        ins = name_table.insert().values(name=k)
        connection.execute(ins)

        update_progress(progress/total_keys)
        name_dic[k] = ''  # free the DataFrame once it has been written

    print("Loading Years to Year table")

    for progress, y in enumerate(year_list, 1):

        ins = year_table.insert().values(year=y)
        connection.execute(ins)
        update_progress(progress/total_years)

    connection.close()

    print("Done Loading Database")
Example #13
    container = containers[0]
    l5 = container.a["href"]
    t5 = container.img["title"]

    containers = page_soup.findAll("div", {"class": "blk23"})

    container = containers[0]
    l6 = container.a["href"]
    t6 = container.img["title"]

    data = {t1: l1, t2: l2, t3: l3, t4: l4, t5: l5, t6: l6}

    if data != data_chk:
        data_chk = data
        try:
            cnx = connector.connect(user='******', database='mindhub')
        except connector.Error as err:
            if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
                print("Something is wrong with your user name or password")
            elif err.errno == errorcode.ER_BAD_DB_ERROR:
                print("Database does not exist")
            else:
                print(err)
        else:
            try:
                cursor = cnx.cursor()
                add_news = ("INSERT INTO news "
                            "(news_title, news_author, news_url, date_posted) "
                            "VALUES (%s, %s, %s, %s )")

                data_news = (t1, 'IndiaTimes', data[t1], datetime.now().date())
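The fragment cuts off after building data_news; the conventional continuation for this mysql.connector pattern would be to execute, commit, and clean up (a sketch, not the original code):

                cursor.execute(add_news, data_news)
                cnx.commit()
                cursor.close()
                cnx.close()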
Example #14
import connector

cnx = connector.connect(user='******',
                        password='******',
                        host='127.0.0.1',
                        database='employees')
cnx.close()
Example #15
    cursor = database.cursor()
    fourteenDays = timedelta(14)
    val = (bookName, today, today + fourteenDays, name)
    try:
        cursor.execute(requests.borrowBookRequest, val)
        database.commit()
        print("Borrowing the book was successful")
    except connector.Error as error:
        database.rollback()
        print("Borrowing the book was not successful: {}".format(error))
    cursor.close()

db = connector.connect(
  host="localhost",
  user="******",
  passwd="",
  database="tp2_plourdefalardeaulescieux")

choices = "0. Add a book\n1. Add book copy\n2. Add borrower\n3. Search for a book\n4. Get book copies"\
    +"\n5. Get Loaned Book Copies\n6. Borrow Book\nEnter exit to quit"

print(choices)
request = input("Choice : ")
while(request != "exit"):
    if (request == "0"):
        AddBook(db)
    elif (request == "1"):
        AddBookCopy(db)
    elif (request == "2"):
        AddBorrower(db)
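The menu fragment ends mid-chain; note that request is read once before the loop, so unless the truncated part re-reads it, the loop can never exit. A minimal closing sketch:

    # ... remaining menu choices ...
    else:
        print(choices)
    request = input("Choice : ")  # re-read so "exit" can end the loop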
Example #16
#import json
import time
import psycopg2
import psycopg2.extras
from connector import connect_DB as connect
from builder import build_JSON as build
from selector import report_selector as selector
from constructor import report_construction
from file_check import check_file
from report_list import report_list_maker

connection = connect()  # connects to the DB
selector = selector()  # holds lists of reports to be generated for each MSA (rebinds the imported name)
cur = connection.connect()  # cursor into the locally hosted HMDAPub2013 Postgres database
selector.get_report_lists('MSAinputs2013.csv')  # fills the dictionary of lists of reports to be generated
build_msa = build()  # instantiate the build object for file paths and Jekyll files
#build_msa.msas_in_state(cur, selector, 'aggregate') #creates a list of all MSAs in each state and places the file in the state's aggregate folder


#List of Alabama MSAs for test state case
#AL_MSAs = ['45180', '45980', '11500', '10760', '42460', '13820', '19460', '23460', '46740', '17980', '12220', '20020', '18980', '33860', '46260', '33660', '19300', '22840', '21460','10700','21640','42820','26620','22520','46220']

AL_MSAs = ['33340']


#report lists for testing
#selector.reports_to_run = ['A B']
#selector.reports_to_run = ['A 11-2']
#selector.reports_to_run = ['D 4-1', 'D 4-2', 'D 4-3', 'D 4-4', 'D 4-6', 'D 4-7']

#selector.reports_to_run = ['A 7-1', 'A 7-2', 'A 7-3', 'A 7-4', 'A 7-5', 'A 7-6', 'A 7-7']
Example #17
class Connection:

    # Evaluated once at class-definition time, so every instance shares
    # the same connection, metadata, and engine
    connection, metadata, engine = connect()

    conn_vars = dict(connection=connection, metadata=metadata, engine=engine)
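The connect() used here (and in Examples #6 and #12) evidently returns a (connection, metadata, engine) triple; a minimal SQLAlchemy sketch, with the database URL as a placeholder assumption:

from sqlalchemy import create_engine, MetaData

def connect(url='sqlite:///example.db'):  # placeholder URL
    engine = create_engine(url)
    metadata = MetaData()
    metadata.reflect(bind=engine)  # load existing tables into the metadata
    connection = engine.connect()
    return connection, metadata, engine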
Example #18
	def __init__(self):
		self.max_key = 0
		self.old_max_key = ''
		self.next_conn_time = 10
		self.client = connect()
		self.data = {}
Example #19
import os
from StringIO import StringIO

from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
import datetime

from connector import connect
from metashare.tmx_management.forms import TmxQueryForm

SESSION = connect("tmx_test")


@login_required
def db_info(request):
    return HttpResponse(SESSION.execute("INFO DB").replace("\n", "<br/>"))


@login_required()
def get_by_lang_pair(request):
    l1 = request.GET['l1']
    l2 = request.GET['l2']

    query_string = None

    # read the predefined xquery file
    with open(
            os.path.join(os.path.dirname(__file__),
                         'queries/get_tus_by_lang_pair.xq'), 'r') as xq:
        query_string = xq.read()
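The view ends after reading the XQuery file; with a BaseX-style session like the one created above, the usual next step is to bind the language pair and run the query (a sketch; the binding names depend on the .xq file, which is not shown):

    query = SESSION.query(query_string)
    query.bind('$l1', l1)  # hypothetical binding names
    query.bind('$l2', l2)
    return HttpResponse(query.execute(), content_type='text/xml')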
Example #20
#!/usr/bin/env python

from connector import connect


class Plan(dict):
    @property
    def total(self):
        return sum(l['amount'] for l in self['lines'])


rows = connect()

plan = Plan(theme_num=rows[0][0],
            theme_text=rows[0][1],
            lesson_num=rows[0][2],
            lesson_text=rows[0][3],
            department_num=rows[0][4],
            time=rows[0][5])
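A usage sketch for the total property; the constructor call above never sets 'lines', so total raises KeyError until they are assigned (the amounts here are illustrative):

plan['lines'] = [{'amount': 2.0}, {'amount': 1.5}]
print(plan.total)  # 3.5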
Example #21
    def create(self, cr, uid, vals, context=None):
        # Fetch the OpenMRS connection settings once instead of one browse per field
        openmrs_object = self.pool.get('openmrs.connect')
        recId = openmrs_object.search(cr, uid, [], offset=0, limit=1)[0]
        config = openmrs_object.browse(cr, uid, recId)

        res = super(res_partner_custom, self).create(cr, uid, vals)

        partner = self.browse(cr, uid, res)
        address = partner.address[0]
        values = {
            'state': address.state_id.name,
            'street': address.street,
            'city': address.city,
            'country': address.country_id.name,
            'last': partner.name,
            'fname': partner.fname,
            'mname': partner.mname,
            'gender': partner.gender,
            'birthdate': partner.birthdate,
            'number': partner.ref,
            'mother': partner.mother.whole_name,
            'birthplace': partner.birthplace,
        }
        # Replace missing values with a single space so OpenMRS accepts them
        for item in values:
            if (values[item] is None) or (values[item] is False):
                values[item] = " "
        try:
            id_openmrs = connect(config.ip_address, config.port, config.username,
                                 config.password, config.database,
                                 values, config.identifier_type)
            super(res_partner_custom, self).write(
                cr, uid, res,
                {'openmrs_number': id_openmrs, 'for_synchronization': False})
        except Exception:
            super(res_partner_custom, self).write(
                cr, uid, res, {'for_synchronization': True})
        return res
Example #22
    def __init__(self):
        self.driver = connect()
        self.config = configparser.ConfigParser()
        self.config.read("config.ini")