def ret_data(dbtype, rec_data, user_id=None, tenant=None):

    # Each element of rec_data maps one table name to the list of fields to
    # select from it.
    df = pd.DataFrame()
    for rec in rec_data:
        tables = rec.keys()
        fields = list(rec.values())[0]

        fields_str = ', '.join(fields)
        tables_str = ', '.join(tables)

        if dbtype.lower() == 'mssql':

            try:
                query = 'SELECT {0} FROM {1}'.format(fields_str, tables_str)
                result = mssql.execute_query(query)

            except Exception as err:

                result = cmg.format_response(False, err, 'Error occurred while getting data from MSSQL!', sys.exc_info())
                return result

        elif dbtype.lower() == 'bigquery':

            try:
                query = 'SELECT {0} FROM {1}'.format(fields_str, tables_str)
                result = BQ.execute_query(query, user_id=user_id, tenant=tenant)

            except Exception as err:

                result = cmg.format_response(False, err, 'Error occurred while getting data from BigQuery Handler!', sys.exc_info())
                return result
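
# A quick sketch of the input shape ret_data expects, inferred from the loop above:
# each element of rec_data maps one table name to the list of fields to pull from it.
# The table and field names below are made up purely for illustration.
sample_rec_data = [{'Orders': ['OrderID', 'UnitPrice', 'Quantity']}]
for rec in sample_rec_data:
    print('SELECT {0} FROM {1}'.format(', '.join(list(rec.values())[0]),
                                       ', '.join(rec.keys())))
    # -> SELECT OrderID, UnitPrice, Quantity FROM Orders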
def slr_get(dbtype, db, table, x, y, predict):

    #http://localhost:8080/linear?dbtype=MSSQL&db=Demo&table=OrdersDK&x=Unit_Price&y=Sales&predict=[5,8]
    if dbtype == 'MSSQL':

        try:
            query = 'SELECT {0} as x, {1} as y FROM {2}'.format(x, y, table)
            result = mssql.execute_query(query)

        except Exception as err:
            result = cmg.format_response(False, None, 'Error occurred while getting data from MSSQL!', sys.exc_info())
            return result
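
# The rest of slr_get is not shown here. Judging by the /linear endpoint and the
# `predict` parameter, it fits a simple linear regression of y on x and evaluates
# it at the requested points. A minimal sketch of that step (numpy assumed to be
# available, helper name hypothetical), given rows shaped like [{'x': ..., 'y': ...}]:
import json
import numpy as np

def _fit_and_predict(rows, predict):
    xs = [float(r['x']) for r in rows]
    ys = [float(r['y']) for r in rows]
    slope, intercept = np.polyfit(xs, ys, 1)   # least-squares fit, degree 1
    points = json.loads(predict)               # e.g. "[5,8]" -> [5, 8]
    return [slope * p + intercept for p in points]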
def get_fields(params):

          tablename = params.tableName
          db = params.db

          if db.lower() == 'bigquery':
                datasetname = params.dataSetName
                fields = bqhandler.get_fields(datasetname, tablename)
                return  comm.format_response(True,fields,"",exception=None)
          elif db.lower() == 'mssql':
                fields = mssqlhandler.get_fields(tablename)
                return comm.format_response(True,fields,"",exception=None)
          elif db.lower() == 'postgresql':
                schema_name = params.schema
                colnames = pgsqlhandler.get_fields(tablename,schema_name)
                return comm.format_response(True,colnames,"",exception=None)
          elif db.lower() == 'mysql':
                colnames = mysqlhandler.get_fields(params.tableName)
                return comm.format_response(True,colnames,"",exception=None)
          else:
                return comm.format_response(False,db,"DB not implemented!",exception=None)
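
# comm.format_response / cmg.format_response is not included in these snippets.
# A plausible minimal stand-in with the same call signature used above (the exact
# payload keys are an assumption):
import json

def format_response(success, data, message, exception=None):
    return json.dumps({
        'success': success,
        'data': data,
        'message': message,
        'exception': repr(exception) if exception is not None else None,
    })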
                # fields_str, tables_str and where_clause are assumed to be built
                # earlier in this snippet (its opening lines are not shown).
                query = 'SELECT {0} FROM {1} {2}'.format(fields_str, tables_str,
                                                         where_clause)

                print query
            result = ''
            if db.lower() == 'bigquery':
                try:
                    result = BQ.execute_query(query,user_id=user_id, tenant=tenant)
                    logger.info('Data received!')
                    logger.debug('Result %s' % result)
                except Exception as err:
                    logger.error('Error occurred while getting data from BigQuery Handler! %s' % err)
                    return cmg.format_response(False, None, 'Error occurred while getting data from BigQuery Handler!', sys.exc_info())

            elif db.lower() == 'mssql':
                try:
                    result = mssql.execute_query(query)
                    logger.info('Data received!')
                    logger.debug('Result %s' % result)
                except Exception as err:
                    logger.error('Error occurred while getting data from MSSQL Handler! %s' % err)
                    return cmg.format_response(False, None, 'Error occurred while getting data from MSSQL Handler!', sys.exc_info())

            elif db.lower() == 'postgresql':
                try:
                    result = PG.execute_query(query)
                    logger.info('Data received!')
                    logger.debug('Result %s' % result)
                except Exception as err:
                    logger.error('Error occurred while getting data from Postgres Handler! %s' % err)
                    return cmg.format_response(False, None, 'Error occurred while getting data from Postgres Handler!', sys.exc_info())
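
# The if/elif chains above all dispatch on db.lower(). The same routing can be kept
# in a single table of handler callables. A sketch only: the run_query helper is
# hypothetical, while BQ, mssql and PG are the handler objects already used above.
def run_query(db, query, user_id=None, tenant=None):
    executors = {
        'bigquery': lambda q: BQ.execute_query(q, user_id=user_id, tenant=tenant),
        'mssql': mssql.execute_query,
        'postgresql': PG.execute_query,
    }
    try:
        return executors[db.lower()](query)
    except KeyError:
        raise ValueError('DB not implemented: %s' % db)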
               try:
                    logger.info('Inserting to cache..')
                    # p = Process(target=MEM_insert,args=(cache_key,json.dumps(results),query,cache_timeout))
                    # p.start()
                    t = threading.Thread(target=MEM_insert, args=(cache_key,json.dumps(results),query,cache_timeout))
                    t.start()
               except Exception as err:
                    logger.error("Cache insertion failed. %s" % err)
               return comm.format_response(True, results, query, exception=None)

          elif db.lower() == 'mssql':
               sql = text(query)
               # MSSQL has no LIMIT clause, so inject "TOP <limit>" right after the
               # first SELECT keyword instead.
               sql = re.sub(r'(SELECT)', r'\1 TOP {0} '.format(limit_), '{0}'.format(sql), count=1, flags=re.IGNORECASE)
               result = mssqlhandler.execute_query(sql)
               try:
                    logger.info('Inserting to cache..')
                    # p = Process(target=MEM_insert,args=(cache_key,json.dumps(result),query,cache_timeout))
                    # p.start()
                    t = threading.Thread(target=MEM_insert, args=(cache_key, json.dumps(result), query, cache_timeout))
                    t.start()
               except Exception as err:
                    logger.error("Cache insertion failed. %s" % err)
               return comm.format_response(True, result, query, exception=None)

          elif db.lower() == 'postgresql':
              if offset_ is not None:
                  query +=  ' OFFSET ' + str(offset_)
              query +=  ' LIMIT ' + str(limit_)
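
# MEM_insert (the target of the cache threads above) is not part of these snippets.
# A plausible minimal version backed by memcached via the python-memcached client;
# the server address and the stored payload shape are assumptions.
import memcache

def MEM_insert(cache_key, results_json, query, cache_timeout):
    client = memcache.Client(['127.0.0.1:11211'])
    # Store the serialized result set together with the query that produced it.
    client.set(cache_key, {'query': query, 'results': results_json}, time=cache_timeout)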
                else:
                    # Union of the GROUP BY and ORDER BY fields; copy so group_bys
                    # itself is not mutated by the extend below.
                    intersect_groups_orders = list(group_bys)
                    intersect_groups_orders.extend(x for x in Order_bys if x not in intersect_groups_orders)
                    fields_list = intersect_groups_orders + [aggregation_fields_str]

                fields_str = ', '.join(fields_list)
                logger.info("Select statement creation completed!")
                query = 'SELECT {0} FROM {1} {2} {3} {4}'.format(fields_str, query_body, conditions, group_bys_str,
                                                                 order_bys_str)
                print query
                logger.info('Query formed successfully! : %s' % query)
                logger.info('Fetching data from SQL...')
                result = ''

                try:
                    result_ = mssql.execute_query(query)
                    logger.info('Data received!')
                    # p = Process(target=MEMcache_insert,args=(result_,query,pkey,cache_timeout))
                    # p.start()
                    t = threading.Thread(target=MEMcache_insert, args=(result_,query,pkey,cache_timeout))
                    t.start()
                    logger.debug('Result %s' % result_)
                    logger.info("MSSQL - Processing completed!")
                    result = cmg.format_response(True,result_,query)
                except Exception as err:
                    logger.error('Error occurred while getting data from sql Handler!')
                    logger.error(err)
                    result = cmg.format_response(False, None, 'Error occurred while getting data from sql Handler!', sys.exc_info())
                #result_dict = json.loads(result)
                finally:
                    return result
            elif db_type.lower() == 'postgresql':
                query = "SELECT date_part('epoch',{0}::date) as date, SUM({1})::FLOAT as value from {2} group by date order by date".\
                    format(field_name_date,field_name_forecast,table_name)
                try:
                    result = PG.execute_query(query)
                except Exception:
                    result = cmg.format_response(False,None,'Error occurred while getting data from PG Handler!',sys.exc_info())
                    return result
            elif db_type.lower() == 'mssql':
                query = "SELECT DATEDIFF(s, '1970-01-01 00:00:00', cast({0} as Date)) date, SUM({1}) as value from {2} " \
                        "group by DATEDIFF(s, '1970-01-01 00:00:00', cast({0} as Date))  " \
                        "order by DATEDIFF(s, '1970-01-01 00:00:00', cast({0} as Date)) ".format(field_name_date,field_name_forecast,table_name)
                print query
                try:
                    result = mssql.execute_query(str(query))
                except Exception as err:
                    print err
                    result = cmg.format_response(False,None,'Error occurred while getting data from MSSQL Handler!',sys.exc_info())
                    return result

            datapoints = []
            for row in result:
                datapoints.append([row['value'], row['date']])
            data_in = [{"target": "Historical_values", "datapoints": datapoints}]

            # Translate the data. There may be better ways if you're prepared
            # to use pandas or if the input data is proper JSON.
            time_series = data_in[0]["datapoints"]