def ret_data(dbtype, rec_data, user_id=None, tenant=None):
    """Run SELECT queries built from *rec_data* against the given backend.

    Args:
        dbtype: backend name, 'mssql' or 'bigquery' (case-insensitive).
        rec_data: list of single-entry dicts mapping a table name to a
            list of field names, e.g. [{'my_table': ['col_a', 'col_b']}].
        user_id, tenant: forwarded to the BigQuery handler only.

    Returns:
        A cmg.format_response(...) error payload as soon as any query
        fails; None when every query succeeds (NOTE(review): successful
        results are discarded — looks like a missing return; confirm
        with callers before changing).
    """
    for record in rec_data:
        tables_str = ', '.join(record.keys())
        # Each dict carries exactly one table -> field-list entry.
        fields_str = ', '.join(list(record.values())[0])

        # NOTE(review): table/field names are interpolated straight into
        # SQL; safe only if rec_data never carries user-controlled input.
        query = 'SELECT {0} FROM {1}'.format(fields_str, tables_str)

        if dbtype.lower() == 'mssql':
            try:
                result = mssql.execute_query(query)
            except Exception as err:
                return cmg.format_response(False, err, 'Error occurred while getting data from MSSQL!', sys.exc_info())
        elif dbtype.lower() == 'bigquery':
            try:
                result = BQ.execute_query(query, user_id=user_id, tenant=tenant)
            except Exception as err:
                return cmg.format_response(False, err, 'Error occurred while getting data from BigQuery Handler!', sys.exc_info())
def file_upload(params, file_obj, data_set_name, user_id, domain):
    """Store an uploaded file under the per-user/domain directory tree.

    Only the 'logo' branch is implemented here: the file is written to
    <user files path>/digin_user_data/<user_id>/<domain>/logos.

    Returns a cmg success payload on completion, an error payload when
    the write fails, and None for other `params.other_data` values or
    when no file part is present.
    """
    start_time = datetime.datetime.now()
    print("File received.. Uploading started..")
    o_data = params.other_data

    if o_data == 'logo':
        upload_path = conf.get_conf('FilePathConfig.ini', 'User Files')[
                          'Path'] + '/digin_user_data/' + user_id + '/' + domain + '/logos'
        try:
            os.makedirs(upload_path)
        except OSError:
            # Directory may already exist; re-raise anything else.
            if not os.path.isdir(upload_path):
                raise
        if 'file' in file_obj:  # the request actually carries a file part
            try:
                # Normalise Windows-style separators before using the name.
                filename = file_obj.file.filename.replace('\\', '/')
                # Bug fix: use a with-block so the handle is closed even
                # when the write raises (the original leaked it).
                with open(upload_path + '/' + filename, 'wb') as fout:
                    fout.write(file_obj.file.file.read())
            except Exception as err:
                print(err)
                return cmg.format_response(False, err, "Error occurred while uploading file", sys.exc_info())

            time_taken = datetime.datetime.now() - start_time
            print("Upload completed! Time taken - " + str(time_taken))
            return cmg.format_response(True, 1, "File Upload successful!")
def fb_posts_with_summary(params):
    """Fetch Facebook page posts with summary data.

    Optional request attributes `page`, `limit`, `since`, `until` fall
    back to defaults when absent.

    Returns:
        A cmg.format_response payload — data on success, an
        access-token error payload on ValueError.
    """
    token = params.token
    page = str(getattr(params, 'page', 'me'))
    limit = getattr(params, 'limit', '')
    since = getattr(params, 'since', '')
    until = getattr(params, 'until', '')
    logger.info('Request received: %s' % params.values())
    try:
        data_ = FB.get_page_posts(token, limit, since, until, page=page)
        data = cmg.format_response(True, data_, 'Data successfully processed!')
    except ValueError as err:
        data = cmg.format_response(False, err, 'Error validating access token: This may be because the user logged out or may be due to a system error.', sys.exc_info())
    # Bug fix: the response was built but never returned.
    return data
def fb_promotional_info(params):
    """Fetch promotional info for params.metric_name via the FB handler.

    Returns a cmg payload: the data on success, an access-token error
    payload on ValueError.
    """
    token = params.token
    promotional_name = params.metric_name
    try:
        data_ = FB.get_promotional_info(token, promotional_name)
        data = cmg.format_response(True, data_, 'Data successfully processed!')
    except ValueError as err:
        data = cmg.format_response(False, err, 'Error validating access token: This may be because the user logged out or may be due to a system error.', sys.exc_info())
    # Bug fix: the response was built but never returned.
    return data
def twitter_acc_info(params):
    """Return an account summary for the Twitter ids in params.ids.

    `params.tokens` and `params.ids` are Python-literal strings (parsed
    with ast.literal_eval).  Returns a cmg payload either way.
    """
    tokens = ast.literal_eval(params.tokens)
    id_list = ast.literal_eval(params.ids)
    try:
        api = SMAuth.tweepy_auth(tokens['consumer_key'], tokens['consumer_secret'], tokens['access_token'], tokens['access_token_secret'])
        data_ = Tw.get_account_summary(api, id_list)
        data = cmg.format_response(True, data_, 'Data successfully processed!')
    except Exception as err:
        data = cmg.format_response(False, err, 'Error occurred while getting data from Twitter API', sys.exc_info())
    # Bug fix: the response was built but never returned.
    return data
def get_tables(params):
    """Return the table list of dataset params.dataSetName.

    Only the BigQuery backend is implemented; other values of params.db
    fall through and return None (NOTE(review): confirm that is intended
    rather than an unimplemented branch).
    """
    datasetID = params.dataSetName
    db = params.db
    if db.lower() == 'bigquery':
        try:
            result = bqhandler.get_tables(datasetID)
        except Exception as err:
            return comm.format_response(False, err, "Error Occurred when retrieving tables!", exception=sys.exc_info())
        return comm.format_response(True, result, "Tables retrieved!", exception=None)
def fb_page_user_locations(params):
    """Fetch the city breakdown of a Facebook page's fans.

    Returns a cmg payload: the data on success, an access-token error
    payload on ValueError.
    """
    token = params.token
    logger.info('Requested received: %s' % params.values())
    try:
        data_ = FB.get_page_fans_city(token)
        data = cmg.format_response(True, data_, 'Data successfully processed!')
    except ValueError as err:
        data = cmg.format_response(False, err, 'Error validating access token: This may be because the user logged out or may be due to a system error.', sys.exc_info())
    # Bug fix: only the error path returned; return the success payload too.
    return data
def build_word_cloud(params):
    """Build word-cloud JSON from tweets matching params.hash_tag.

    `params.tokens` is a Python-literal string of Twitter credentials.
    Returns a cmg payload: the word-cloud dict on success, an
    access-token error payload on ValueError.
    """
    tokens = ast.literal_eval(params.tokens)
    hash_tag = params.hash_tag
    try:
        api = SMAuth.tweepy_auth(tokens['consumer_key'], tokens['consumer_secret'], tokens['access_token'], tokens['access_token_secret'])
        data_ = Tw.hashtag_search(api, hash_tag)
        wc_data = json.loads(wc.wordcloud_json(data_))
        data = cmg.format_response(True, wc_data, 'Data successfully processed!')
    except ValueError as err:
        data = cmg.format_response(False, err, 'Error validating access token: This may be because the user logged out or may be due to a system error.', sys.exc_info())
    # Bug fix: the response was built but never returned.
    return data
def clear_cache():
    """Truncate every cached table unless caching is disabled.

    Relies on module-level `cache_state` (0 means caching is off — noop)
    and `tables` (a string literal of the table-name list).

    Returns True when caching is off, otherwise a cmg payload describing
    the outcome; stops at the first table that fails to truncate.
    """
    if cache_state == 0:
        return True
    with get_connection() as conn:
        for table in ast.literal_eval(tables):
            try:
                conn.query("TRUNCATE TABLE {0}".format(table))
            except Exception as err:
                print("Error clearing cache")
                print(err)
                return cmg.format_response(False, None, "Error Occurred while clearing cache!", exception=sys.exc_info())
        return cmg.format_response(True, None, "Cache cleared successfully!")
def get_report_names(params):
    """Return the report names: every sub-directory of Reports_path."""
    names = [entry for entry in os.listdir(Reports_path)
             if os.path.isdir(os.path.join(Reports_path, entry))]
    return cmg.format_response(True, names, 'Data successfully processed!', None)
def get_user_settings(user_id, domain):
    """Load a user's saved settings for *domain* from the cache DB.

    Returns a cmg payload: the settings dict on success, an informative
    payload when no row exists.  Re-raises (after logging) on query
    failure.
    """
    query = "SELECT components, user_role, cache_lifetime, widget_limit, " \
            "query_limit, logo_path, dp_path, theme_config, modified_date_time, created_date_time, " \
            "domain FROM digin_user_settings WHERE user_id = '{0}' AND domain = '{1}'".format(user_id, domain)
    # Logo/dp paths are stored absolute; strip the document root so the
    # client receives a web-relative path.
    logo_path = conf.get_conf('FilePathConfig.ini', 'User Files')['Path']
    document_root = conf.get_conf('FilePathConfig.ini', 'Document Root')['Path']
    path = re.sub(document_root, '', logo_path)
    try:
        user_data = CC.get_data(query)
        if user_data['rows'] == ():
            logger.info('No user settings saved for given user ' + user_id)
            return cmg.format_response(True, user_id, "No user settings saved for given user and domain")
        row = user_data['rows'][0]
        data = {
            'components': row[0],
            'user_role': row[1],
            'cache_lifetime': int(row[2]),
            'widget_limit': int(row[3]),
            'query_limit': int(row[4]),
            'logo_path': path + row[5],
            'dp_path': path + row[6],
            'theme_config': row[7],
            'modified_date_time': row[8],
            'created_date_time': row[9],
            'domain': row[10]
        }
        # Bug fix: the settings dict was built but never returned.
        return cmg.format_response(True, data, "User settings retrieved!")
    except Exception as err:
        logger.error("Error retrieving user settings")
        logger.error(err)
        print("Error retrieving user settings")
        print(err)
        raise
def build_word_cloud_fb(params):
    """Fetch Facebook page post comments for word-cloud building.

    Optional request attributes: `limit`, `since`, `until` (default ''),
    `post_ids` (a Python-literal list, default None) and `page`
    (default 'me').

    Returns the comment data on success, or a cmg access-token error
    payload on ValueError.
    """
    limit = getattr(params, 'limit', '')
    since = getattr(params, 'since', '')
    until = getattr(params, 'until', '')
    raw_post_ids = getattr(params, 'post_ids', None)
    post_ids = ast.literal_eval(raw_post_ids) if raw_post_ids is not None else None
    page = str(getattr(params, 'page', 'me'))
    token = params.token

    try:
        data = FB.get_page_posts_comments(token, limit, since, until, page, post_ids)
    except ValueError as err:
        return cmg.format_response(False, err, 'Error validating access token: This may be because the user logged out or may be due to a system error.', sys.exc_info())
    # Bug fix: the comment data was fetched but never returned.
    return data
# --- Example #13 (scraped-snippet separator; original marker "示例#13" / vote count 0) ---
    def get_package_detail(self):
        """Return the tenant's current package rows as a list of dicts.

        Aggregates digin_packagedetails joined to the tenant mapping,
        summing value and price per package, and flags expiry.

        Returns:
            cmg.format_response(True, [row dicts], ...) on success;
            an error payload on query failure.
        """
        query = "SELECT " \
                "a.package_id, " \
                "a.package_name, " \
                "a.package_attribute, " \
                "SUM(a.package_value), " \
                "SUM(a.package_price), " \
                "b.expiry_datetime, " \
                "TIMESTAMPDIFF(DAY, CURRENT_TIMESTAMP, expiry_datetime) as remaining_days, " \
                "CURRENT_TIMESTAMP > b.expiry_datetime " \
                "FROM digin_packagedetails a " \
                "INNER JOIN digin_tenant_package_details b " \
                "ON a.package_id = b.package_id " \
                "WHERE b.tenant_id = '{0}' AND b.package_status = 'current_package' " \
                "GROUP BY a.package_id, a.package_name, a.package_attribute, b.expiry_datetime, remaining_days".format(self.tenant)

        try:
            result = db.get_data(query)['rows']
            data_list = []
            for row in result:
                data = {'package_id': row[0],
                        'package_name': row[1],
                        'package_attribute': row[2],
                        'package_value_sum': row[3],
                        'package_price_sum': row[4],
                        'expiry_datetime': row[5],
                        'remaining_days': row[6],
                        # column 7 is the boolean expression CURRENT_TIMESTAMP > expiry
                        'is_expired': bool(row[7])}
                data_list.append(data)
        except Exception as err:
            print(err)
            return cmg.format_response(False, err, "Error occurred while getting data", exception=sys.exc_info())
        # Bug fix: the details were built but never returned.
        return cmg.format_response(True, data_list, "Package details retrieved!")
def delete_component(params, user_id, domain):
    """Delete components permanently or temporarily (soft delete).

    Args:
        params: iterable of dicts with 'comp_id' and 'permanent_delete'.
        user_id, domain: scoping for the deletion helpers.

    Returns a cmg error payload on the first failing component; None when
    every component was processed.
    """
    def _fan_out(target, comp_id):
        # One worker per component table; join all so this component's
        # deletion is complete before the next one is processed.
        workers = [threading.Thread(target=target,
                                    args=(comp_id, table, user_id, domain))
                   for table in ('digin_component_header',
                                 'digin_component_page_detail',
                                 'digin_component_detail')]
        for worker in workers:
            worker.start()
        for worker in workers:
            worker.join()

    for comp in params:
        print("Deleting component %s ..." % comp)
        if comp['permanent_delete']:
            try:
                _fan_out(_permanent_delete_components, comp['comp_id'])
            except Exception as err:
                print(err)
                logger.error("Permanent deletion failed. %s" % err)
                return cmg.format_response(False, 0, "Error Occurred in deletion!", exception=sys.exc_info())
        else:
            try:
                _fan_out(_temporary_delete_components, comp['comp_id'])
            except Exception as err:
                print(err)
                logger.error("Temporary deletion failed. %s" % err)
                return cmg.format_response(False, 0, "Error Occurred in deletion!", exception=sys.exc_info())
def create_Dataset(params):
    """Create a BigQuery dataset named params.dataSetName.

    Returns a comm success payload on creation, False on failure
    (NOTE(review): the bare False is inconsistent with the
    format_response error payloads used elsewhere — confirm callers
    before unifying), and None when params.db is not 'bigquery'.
    """
    datasetID = params.dataSetName
    db = params.db
    if db.lower() == 'bigquery':
        try:
            result = bqhandler.create_dataset(datasetID)
            return comm.format_response(True, result, "", exception=None)
        except Exception as err:
            print(err)
            return False
# --- Example #16 (scraped-snippet separator; original marker "示例#16" / vote count 0) ---
 def deactivate_packages(self):
     """Mark the tenant's free/default packages as deactivated.

     Applies only when self.is_default is truthy.  Updates
     digin_tenant_package_details for the module-level package ids
     free_package and default_1..default_3.

     Returns a cmg error payload on failure, otherwise None.
     """
     if self.is_default:
         try:
             db.update_data('digin_tenant_package_details'," WHERE tenant_id = '{0}' AND package_id IN ({1},{2},{3},{4})".format(self.tenant,int(free_package),int(default_1),int(default_2),int(default_3)),
                            package_status = 'deactivated')
         except Exception as err:
             # NOTE(review): message says "inserting" but this is an update.
             print("Error inserting to DB!")
             return cmg.format_response(False, err, "Error occurred while deactivate_packages.. \n" + str(err),
                                           exception=sys.exc_info())
 def get_usage(self):
     """Return the tenant's total data usage (download + upload, BigQuery).

     Only runs when self.attribute == "data"; otherwise returns None.
     Returns the single aggregate row on success, a cmg error payload on
     failure.
     """
     if self.attribute == "data":
         try:
             # Bug fix: the original WHERE clause was
             #   parameter = 'download_bq' OR 'upload_size_bq'
             # which evaluates 'upload_size_bq' as a standalone truthy
             # literal (matching every row and bypassing the tenant
             # filter due to AND/OR precedence); use IN (...) instead.
             summary = db.get_data(" SELECT sum(value) FROM digin_usage_summary "
                                   " WHERE tenant = '{0}' AND parameter IN ('download_bq', 'upload_size_bq') ".format(self.tenant))['rows']
             return summary[0]
         except Exception as err:
             print("Error inserting to DB!")
             return cmg.format_response(False, err, "Error occurred while getting packages details .. \n" + str(err),
                                           exception=sys.exc_info())
def executeKTR(params):
    """Run a KTR report job via the jar wrapper and return the report URL."""
    eportName = params.ReportName
    job_parameters = json.dumps(ast.literal_eval(params.parameters))
    reportName = 'C:\\Reports\\' + eportName + "\\" + eportName
    html_file = reportName + '.html'
    # Remove any stale rendering before the job regenerates it.
    if os.path.isfile(html_file):
        os.remove(html_file)
    renderedReport = 'http://104.131.48.155/reports/' + eportName + '/' + eportName
    job_output = jarWrapper('ktrjob.jar', reportName, job_parameters)
    return cmg.format_response(True, job_output, renderedReport + '.html', None)
# --- Example #19 (scraped-snippet separator; original marker "示例#19" / vote count 0) ---
def slr_get(dbtype, db, table, x, y, predict):
    """Fetch (x, y) column pairs for simple linear regression.

    Example request:
    http://localhost:8080/linear?dbtype=MSSQL&db=Demo&table=OrdersDK&x=Unit_Price&y=Sales&predict=[5,8]

    Returns the query result on success, a cmg error payload on failure,
    and None for unsupported dbtype values (only 'MSSQL' is handled in
    this chunk).
    """
    if dbtype == 'MSSQL':
        try:
            query = 'SELECT {0} as x, {1} as y From {2}'.format(x, y, table)
            result = mssql.execute_query(query)
        except Exception as err:
            return cmg.format_response(False, None, 'Error occurred while getting data from MSSQL!', sys.exc_info())
        # Bug fix: the successful result was computed but never returned.
        return result
# --- Example #20 (scraped-snippet separator; original marker "示例#20" / vote count 0) ---
    def do_share(self):
        """Share self.component_ids with self.user_ids (and group members).

        Group ids are first expanded into user ids via
        _set_group_user_ids; when that succeeded (self.is_success), one
        access-detail row per (user, component) pair is inserted.

        Returns a cmg payload for insert success/failure; None when
        self.is_success is falsy.
        """
        self._users_groups_segregator()
        print("Component sharing UserIDs: {0}, GroupIds: {1}, Tenant: {2}, ComponentIds: {3}".format(self.user_ids, self.group_ids, self.tenant, self.component_ids))
        if self.group_ids:
            # NOTE(review): the returned error payload is discarded here;
            # failures are only visible through self.is_success.
            result = self._set_group_user_ids()
        if self.is_success:
            data = [{'component_id': component,
                     'user_id': user,
                     'type': 'dashboard',
                     'domain': self.tenant}
                    for user in self.user_ids
                    for component in self.component_ids]
            try:
                db.CacheController.insert_data(data, 'digin_component_access_details')
            except Exception as err:
                print(err)
                return cmg.format_response(False, err, "Component already shared!", exception=sys.exc_info())
            return cmg.format_response(True, 0, "Components shared successfully")
def get_fields(params):
    """Return the column list of params.tableName for the requested backend."""
    tablename = params.tableName
    db = params.db
    backend = db.lower()

    if backend == 'bigquery':
        fields = bqhandler.get_fields(params.dataSetName, tablename)
        return comm.format_response(True, fields, "", exception=None)
    if backend == 'mssql':
        fields = mssqlhandler.get_fields(tablename)
        return comm.format_response(True, fields, "", exception=None)
    if backend == 'postgresql':
        colnames = pgsqlhandler.get_fields(tablename, params.schema)
        return comm.format_response(True, colnames, "", exception=None)
    if backend == 'mysql':
        colnames = mysqlhandler.get_fields(params.tableName)
        return comm.format_response(True, colnames, "", exception=None)
    return comm.format_response(False, db, "DB not implemented!", exception=None)
def get_all_components(params, user_id, domain):
    """List dashboard/report components the user can access in *domain*.

    Returns a cmg payload with a list of
    {"compID", "compName", "compType"} dicts, or an error payload on
    query failure.
    """
    try:
        data = CC.get_data("SELECT h.digin_comp_id, h.digin_comp_name, h.digin_comp_type "
                           "FROM digin_component_access_details a "
                           "INNER JOIN digin_component_header h "
                           "ON a.component_id = h.digin_comp_id "
                           "WHERE h.is_active = TRUE AND a.domain = '{0}' AND a.user_id = '{1}' "
                           "AND a.type IN ('dashboard','report')".format(domain, user_id))
        print(data["rows"])
        comps = [{"compID": comp[0],
                  "compName": comp[1],
                  "compType": comp[2]} for comp in data["rows"]]
        return cmg.format_response(True, comps, "Successful!")

    except Exception as err:
        logger.error("Error getting data from cache. %s" % err)
        return cmg.format_response(False, 0, "Error Occurred!", exception=sys.exc_info())
def fb_overview(params):
    """Fetch Facebook page overview metrics.

    Optional request attributes: `metric_names` (a Python-literal list),
    `since` and `until`.  Returns a cmg payload either way.
    """
    token = params.token
    raw_names = getattr(params, 'metric_names', None)
    metric_names = ast.literal_eval(raw_names) if raw_names is not None else None
    # Bug fix: the original read since/until inside one try block, so a
    # missing `since` also discarded a supplied `until`; read them
    # independently instead.
    since = getattr(params, 'since', None)
    until = getattr(params, 'until', None)
    logger.info('Requested received: %s' % params.values())
    try:
        data_ = FB.get_overview(token, metric_names, since, until)
        data = cmg.format_response(True, data_, 'Data successfully processed!')
    except ValueError as err:
        data = cmg.format_response(False, err, 'Error validating access token: This may be because the user logged out or may be due to a system error.', sys.exc_info())
    # Bug fix: the response was built but never returned.
    return data
# --- Example #24 (scraped-snippet separator; original marker "示例#24" / vote count 0) ---
    def _set_group_user_ids(self):
        """Resolve self.group_ids into DigIn user ids, appending to self.user_ids.

        For the first group member e-mail that has no registered DigIn
        account, sets self.is_success = False and returns an error
        payload; otherwise returns None after appending the resolved ids.
        """

        user_emails = []
        for _id in self.group_ids:
            user_emails.append(auth.get_group_users(self.tenant, _id))

        # NOTE(review): only user_emails[0] (the first group's members) is
        # processed — looks like a bug when several group_ids are
        # supplied; confirm intent before changing.
        for email in user_emails[0]:
            # NOTE(review): the email column is matched against
            # email['Id'] — presumably the auth service returns the
            # address in its 'Id' field; verify against auth.get_group_users.
            query = "SELECT user_id, email FROM digin_user_settings WHERE email = '{0}'".format(email['Id'])
            user_id = db.CacheController.get_data(query)['rows']
            if user_id != ():
                self.user_ids.append(user_id[0][0])
            else:
                self.is_success = False
                return cmg.format_response(False, email, "Error sharing components no user registered in DigIn", exception=None)
 def get_rating_summary(self):
     """Aggregate usage rows into {user_id: {parameter: value}}.

     Admins see all users of the tenant (values summed per parameter);
     other users see only their own rows.  The payload also includes the
     exceed/blocked calculation for the tenant.

     Returns a cmg success payload with the rated usage dict.
     """
     user_dict = {}
     if self.security_level == 'admin':
         summary = db.get_data("SELECT user_id, parameter, SUM(value) as value FROM digin_usage_summary "
                               "WHERE tenant = '{0}' GROUP BY user_id, parameter".format(self.tenant))['rows']
     else:
         summary = db.get_data("SELECT user_id, parameter, value FROM digin_usage_summary "
                           "WHERE user_id = '{0}' AND tenant = '{1}'".format(self.user_id, self.tenant))['rows']
     for usage_user, parameter, value in summary:
         user_dict.setdefault(usage_user, {})[parameter] = value
     rated_dict = {'usage': [{self.tenant: user_dict}],
                   'exceed_blocked': euc.ExceedUsageCalculator(tenant=self.tenant, attribute=None).calculation()}
     # Bug fix: success flag was the string 'True'; use the boolean True
     # as every other response in this module does.
     return cmg.format_response(True, rated_dict, "Usage data retrieved")
# --- Example #26 (scraped-snippet separator; original marker "示例#26" / vote count 0) ---
    def get_ledger(self):
        """Return the tenant's package ledger between start_date and end_date.

        Each mapping row is augmented with the package attribute details
        resolved via PackageProcessor.

        Returns:
            cmg.format_response(True, [row dicts], ...) on success;
            an error payload on query failure.
        """
        query = "SELECT " \
                "package_id, " \
                "expiry_datetime, " \
                "TIMESTAMPDIFF(DAY, CURRENT_TIMESTAMP, expiry_datetime) as remaining_days, " \
                "package_status, " \
                "created_datetime " \
                "FROM digin_tenant_package_details  " \
                "WHERE tenant_id = '{0}' " \
                "AND created_datetime >= TIMESTAMP('{1}') AND  created_datetime <= TIMESTAMP('{2}') " \
                "ORDER BY created_datetime ".format(self.tenant, self.start_date, self.end_date)

        try:
            result = db.get_data(query)['rows']
            data_list = []
            for row in result:
                data = {'package_id': row[0],
                        # Resolve the package's attribute rows for this ledger entry.
                        'package_Details': PackageProcessor(package_name=None, package_attribute=None, package_value=None, package_price=None, is_default=False, tenant=self.tenant, package_id=int(row[0])).get_package_attributes(),
                        'expiry_datetime': row[1],
                        'remaining_days': row[2],
                        'package_status': row[3],
                        'created_datetime': row[4]}
                data_list.append(data)
        except Exception as err:
            print(err)
            return cmg.format_response(False, err, "Error occurred while getting data", exception=sys.exc_info())
        # Bug fix: the ledger was built but never returned.
        return cmg.format_response(True, data_list, "Ledger retrieved!")
def Forecasting(params):
        """Parse and validate forecasting request parameters.

        Reads fcast_days, steps_pday, pred_error_level, model, m,
        table_name, field_name_d, field_name_f, interval and dbtype from
        *params*, converting the numeric ones.

        Returns a cmg error payload when any parameter is missing or
        fails conversion.  NOTE(review): on success the parsed values are
        unused and the function falls through returning None — the
        forecasting body appears to be truncated in this file; confirm
        against the original module before relying on this function.
        """
        try:
            fcast_days = int(params.fcast_days)  # forecast horizon in days
            timesteps_per_day = int(params.steps_pday)
            pred_error_level = float(params.pred_error_level)
            model = str(params.model)
            m = int(params.m)  # presumably the seasonal period — confirm
            #alpha = params.alpha
            #beta = params.beta
            #gamma = params.gamma
            table_name = params.table_name
            field_name_date = params.field_name_d
            field_name_forecast = params.field_name_f
            interval = str(params.interval)
            db_type = params.dbtype

        except Exception, err:
            return cmg.format_response(False, err, 'Input parameters caused the service to raise an error',
                                       sys.exc_info())
def set_initial_user_env(params, email, user_id, domain):
    """Create the initial per-user dataset when sign-up dataset creation is on.

    The dataset name is the e-mail address with '.' and '@' mapped to '_'.

    Returns a cmg error payload when creation fails; raises
    NotImplementedError for unsupported backends; otherwise None.
    """
    default_sys_settings = conf.get_conf('DefaultConfigurations.ini', 'System Settings')
    dataset_name = email.replace(".", "_").replace("@", "_")

    if ast.literal_eval(default_sys_settings['signup_dataset_creation']):
        db = params['db']
        if db.lower() == 'bigquery':
            logger.info("Creation of dataset started!")
            print("Creation of dataset started!")
            try:
                result_ds = bq.create_dataset(dataset_name)
                print(result_ds)
                logger.info("Creation of dataset status " + str(result_ds))
                print("Creation of dataset " + str(result_ds))
            except Exception as err:
                print(err)
                print("Creation of dataset failed!")
                return cmg.format_response(False, err, "Error Occurred while creating dataset in bigquery!", exception=sys.exc_info())
        else:
            # Bug fix: a bare `raise` with no active exception is itself a
            # runtime error; raise an explicit exception instead.
            raise NotImplementedError("Dataset creation is only implemented for BigQuery (got %r)" % db)
# --- Example #29 (scraped-snippet separator; original marker "示例#29" / vote count 0) ---
    def set_packages(self):
        """Map the tenant to self.package_id in digin_tenant_package_details.

        The free package expires 30 days from now; any other package
        expires at the last second of the current calendar month.

        Returns a cmg error payload when the insert fails, otherwise None.
        """
        time_now = datetime.datetime.now()
        _, num_days = calendar.monthrange(time_now.year, time_now.month)
        free_package = conf.get_conf('DefaultConfigurations.ini', 'Package Settings')['Free']
        if self.package_id == int(free_package):
            last_day = time_now + datetime.timedelta(days=30)
        else:
            # Last second of the current month.
            last_day = datetime.datetime(time_now.year, time_now.month, num_days, 23, 59, 59)

        tenant_package_mapping = [{'tenant_id': self.tenant,
                                   'package_id': self.package_id,
                                   'created_datetime': time_now,
                                   'modified_datetime': time_now,
                                   'expiry_datetime': last_day,
                                   'package_status': 'current_package'}]
        try:
            db.insert_data(tenant_package_mapping, 'digin_tenant_package_details')
        except Exception as err:
            print("Error inserting to cacheDB!")
            return cmg.format_response(False, err, "Error occurred while inserting.. \n" + str(err),
                                          exception=sys.exc_info())
# --- Example #30 (scraped-snippet separator; original marker "示例#30" / vote count 0) ---
    def get_package_summary(self):
        """Summarise the tenant's current package by attribute.

        Sums package value and price per attribute across the tenant's
        current packages.

        Returns:
            cmg.format_response(True, [summary dicts], ...) on success;
            an error payload on query failure.
        """
        query = "SELECT " \
                "a.package_attribute, " \
                "SUM(a.package_value), " \
                "SUM(a.package_price) " \
                "FROM digin_packagedetails a " \
                "INNER JOIN digin_tenant_package_details b " \
                "ON a.package_id = b.package_id " \
                "WHERE b.tenant_id = '{0}' AND b.package_status = 'current_package' " \
                "GROUP BY a.package_attribute".format(self.tenant)

        try:
            result = db.get_data(query)['rows']
            data_list = [{'package_attribute': row[0],
                          'package_value_sum': row[1],
                          'package_price_sum': row[2]} for row in result]
        except Exception as err:
            print(err)
            return cmg.format_response(False, err, "Error occurred while getting data", exception=sys.exc_info())
        # Bug fix: the summary was built but never returned.
        return cmg.format_response(True, data_list, "Package summary retrieved!")