def PostgresCreateTable(_schema, table_name):
    """Build and execute a CREATE TABLE statement for PostgreSQL.

    Parameters
    ----------
    _schema : iterable of dict
        Field descriptors with 'name' and 'type' keys, where 'type' is a
        pandas-style dtype string ('object', 'int64', 'float64',
        'datetime64[ns]').  Fields with unrecognized types are skipped,
        matching the original behavior.
    table_name : str
        Table to create.  Interpolated verbatim into the SQL -- the caller
        must ensure it is a trusted identifier (SQL-injection risk otherwise).

    Returns
    -------
    str
        The generated SQL statement (returned even when execution fails so
        callers can log/inspect it).  Previously the function returned None,
        so returning the SQL is backward-compatible.
    """
    # pandas dtype -> PostgreSQL column type
    type_map = {
        'object': 'character varying',
        'int64': 'integer',
        'float64': 'NUMERIC',
        'datetime64[ns]': 'timestamp',
    }

    print("Table creation started!")

    columns = []
    for field in _schema:
        field_type = field['type']
        print(field_type)
        pg_type = type_map.get(field_type)
        if pg_type is not None:
            columns.append('{0} {1}'.format(field['name'], pg_type))

    # Join the column definitions instead of the old append-comma-then-slice
    # trick: the old `sql[:len(sql)-1]` chopped off the opening '(' whenever
    # no column matched (empty schema), producing malformed SQL.
    sql = 'CREATE TABLE {0}\n({1}\n)'.format(table_name, ','.join(columns))
    print(sql)

    try:
        # `pg` is the module-level PostgreSQL handler defined elsewhere in
        # this project.
        result = pg.execute_query(sql)
        print(result)
        print("Table creation successful!")
    except Exception as err:  # best-effort: log and continue, as before
        print(err)
        print("Error occurred in table creation!")
    return sql
# --- Example no. 2 (scraped-sample separator; original marker "Ejemplo n.º 2", score 0) ---
                # NOTE(review): incomplete fragment of a larger request handler.  The
                # enclosing function, the `try:` this `except` belongs to, and the
                # definitions of db/query/logger/cmg/mssql/PG/BQ/postgres/fields/dictb/
                # df/pd/sys live above or outside this view.  Python 2 syntax
                # ("except Exception, err") is used throughout.
                except Exception, err:
                    logger.error('Error occurred while getting data from BigQuery Handler! %s' % err)
                    return cmg.format_response(False,None,'Error occurred while getting data from BigQuery Handler!',sys.exc_info())

            elif db.lower() == 'mssql':
                try:
                    result = mssql.execute_query(query)
                    logger.info('Data received!')
                    logger.debug('Result %s' % result)
                except Exception, err:
                    logger.error('Error occurred while getting data from sql Handler! %s' % err)
                    # NOTE(review): this is the MSSQL branch, but the returned message
                    # says 'BigQuery Handler' -- looks like a copy/paste slip; confirm
                    # before changing the user-facing string.
                    return cmg.format_response(False,None,'Error occurred while getting data from BigQuery Handler!',sys.exc_info())

            elif db.lower() == 'postgresql':
                try:
                    result = PG.execute_query(query)
                    logger.info('Data received!')
                    logger.debug('Result %s' % result)
                except Exception, err:
                    logger.error('Error occurred while getting data from pgsql Handler! %s' % err)
                    return cmg.format_response(False,None,'Error occurred while getting data from Postgres Handler!',sys.exc_info())

            # NOTE(review): if none of the visible branches assigned it, `result` may
            # be unbound here -- TODO confirm the branches above this view cover all
            # db values.
            result_dict = result
            #  sets up json
            #  levels_memory = {'vehicle_usage': [], 'vehicle_type': [], 'vehicle_class': []}
            # The first row of the result is expected to carry a 'total' column.
            total = result_dict[0]["total"]
            levels_memory_f = []
            levels_memory_str = '{%s}'
            # One "'<field>': []" bucket string per requested field.
            for i in range(0, len(fields)):
                levels_memory_f.append("'{0}': []".format(fields[i]))
            # Inverse lookup of dictb: value -> key.
            levels_index = dict(zip(dictb.values(), dictb.keys()))
        elif dbtype.lower() == 'bigquery':
            # NOTE(review): this branch switches from `db` to `dbtype` -- presumably it
            # belongs to a different enclosing conditional above this view; verify.

            try:
                query = 'SELECT {0} FROM {1}'.format(fields_str, tables_str)
                result = BQ.execute_query(query, user_id=user_id, tenant=tenant)

            except Exception, err:

                result = cmg.format_response(False, err, 'Error occurred while getting data from BigQuery Handler!', sys.exc_info())
                return result

        elif dbtype.lower() == 'postgresql':

            try:
                # NOTE(review): the ::FLOAT cast is applied to the whole formatted
                # field list -- only valid SQL when fields_str is a single column;
                # TODO confirm against callers.
                query = 'SELECT {0}::FLOAT FROM {1}'.format(fields_str, tables_str)
                result = postgres.execute_query(query)

            except Exception, err:

                result = cmg.format_response(False, err, 'Error occurred while getting data from Postgres Handler!', sys.exc_info())
                return result

        # Accumulate each per-source result as new columns of the running DataFrame.
        if df.empty:
            df = pd.DataFrame(result)
        else:
            df1 = pd.DataFrame(result)
            df = pd.concat([df, df1], axis=1)

    return df

# --- Example no. 4 (scraped-sample separator; original marker "Ejemplo n.º 4", score 0) ---
               try:
                    logger.info('Inserting to cache..')
                    # p = Process(target=MEM_insert,args=(cache_key,json.dumps(result),query,cache_timeout))
                    # p.start()
                    t = threading.Thread(target=MEM_insert, args=(cache_key,json.dumps(result),query,cache_timeout))
                    t.start()
               except Exception, err:
                    logger.error("Cache insertion failed. %s" % err)
                    pass
               return  comm.format_response(True,result,query,exception=None)

          elif db.lower() == 'postgresql':
              if offset_ is not None:
                  query +=  ' OFFSET ' + str(offset_)
              query +=  ' LIMIT ' + str(limit_)
              data = pgsqlhandler.execute_query(query)
              try:
                    logger.info('Inserting to cache..')
                    # p = Process(target=MEM_insert,args=(cache_key,json.dumps(data),query,cache_timeout))
                    # p.start()
                    t = threading.Thread(target=MEM_insert, args=(cache_key,json.dumps(data),query,cache_timeout))
                    t.start()
              except Exception, err:
                    logger.error("Cache insertion failed. %s" % err)
                    pass
              return  comm.format_response(True,data,query,exception=None)

          elif db.lower() == 'mysql':
                try:
                    resultSet = mysqlhandler.execute_query(query,params.db_name)
                except Exception, err:
# --- Example no. 5 (scraped-sample separator; original marker "Ejemplo n.º 5", score 0) ---
                    # NOTE(review): incomplete fragment of a larger PostgreSQL query
                    # handler -- the enclosing function/conditional and the definitions
                    # of Order_bys, intersect_groups_orders, aggregation_fields_str,
                    # query_body, conditions, group_bys_str, order_bys_str, PG, cmg,
                    # logger, pkey, cache_timeout, MEMcache_insert, threading and sys
                    # live above/outside this view.  Python 2 syntax ("print query",
                    # "except Exception, err").
                    intersect_groups_orders.extend(x for x in Order_bys if x not in intersect_groups_orders)
                    fields_list = intersect_groups_orders + [aggregation_fields_str]

                fields_str = ' ,'.join(fields_list)

                logger.info("Select statement creation started!")

                query = 'SELECT {0} FROM {1} {2} {3} {4}'.format(fields_str, query_body, conditions, group_bys_str,
                                                                 order_bys_str)
                print query
                logger.info('Query formed successfully! : %s' % query)
                logger.info('Fetching data from SQL...')
                # Pre-initialise so the `finally: return result` below is always bound.
                result = ''

                try:
                    result_ = PG.execute_query(query)
                    result = cmg.format_response(True,result_,query,None)
                    logger.info('Data received!')
                    # p = Process(target=MEMcache_insert,args=(result_,query,pkey,cache_timeout))
                    # p.start()
                    # Cache insertion is fire-and-forget on a background thread.
                    t = threading.Thread(target=MEMcache_insert, args=(result_,query,pkey,cache_timeout))
                    t.start()
                    logger.debug('Result %s' % result)
                    logger.info("PostgreSQL - Processing completed!")
                except Exception, err:
                    logger.error('Error occurred while getting data from PG Handler!')
                    logger.error(err)
                    result = cmg.format_response(False,None,'Error occurred while getting data from PG Handler!',sys.exc_info())
                # result_dict = json.loads(result)
                # NOTE(review): `return` inside `finally` suppresses any exception
                # still propagating at this point (including ones raised inside the
                # except block itself).
                finally:
                    return result