Code Example #1
def get_user_settings(user_id, domain):

    query = "SELECT components, user_role, cache_lifetime, widget_limit, " \
            "query_limit, logo_path, dp_path, theme_config, modified_date_time, created_date_time, " \
            "domain FROM digin_user_settings WHERE user_id = '{0}' AND domain = '{1}'".format(user_id, domain)
    logo_path = conf.get_conf('FilePathConfig.ini','User Files')['Path']
    document_root = conf.get_conf('FilePathConfig.ini','Document Root')['Path']
    path = logo_path.replace(document_root, '', 1)  # str.replace: the document root is a literal path, not a regex
    try:
        user_data = CC.get_data(query)
        if user_data['rows'] == ():
            logger.info('No user settings saved for given user ' + user_id)
            return cmg.format_response(True, user_id, "No user settings saved for given user and domain")
        data = {
            'components': user_data['rows'][0][0],
            'user_role': user_data['rows'][0][1],
            'cache_lifetime': int(user_data['rows'][0][2]),
            'widget_limit': int(user_data['rows'][0][3]),
            'query_limit': int(user_data['rows'][0][4]),
            'logo_path': path + user_data['rows'][0][5],
            'dp_path': path + user_data['rows'][0][6],
            'theme_config': user_data['rows'][0][7],
            'modified_date_time': user_data['rows'][0][8],
            'created_date_time': user_data['rows'][0][9],
            'domain': user_data['rows'][0][10]
        }
        return cmg.format_response(True, data, "User settings retrieved")  # assumed success path, mirroring the no-rows branch above

    except Exception, err:
        logger.error("Error retrieving user settings")
        logger.error(err)
        print "Error retrieving user settings"
        print err
        raise
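Building the query with str.format leaves get_user_settings open to SQL injection through user_id and domain. Below is a hedged sketch of the same lookup with server-side parameter binding; get_data_params is a hypothetical variant of CC.get_data, since the listing does not show whether the cache layer accepts bound parameters:

def get_user_settings_safe(user_id, domain):
    query = ("SELECT components, user_role, cache_lifetime, widget_limit, "
             "query_limit, logo_path, dp_path, theme_config, "
             "modified_date_time, created_date_time, domain "
             "FROM digin_user_settings WHERE user_id = %s AND domain = %s")
    # hypothetical parameterized call; the values never touch the SQL text
    return CC.get_data_params(query, (user_id, domain))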
Code Example #2
def file_upload(params, file_obj,data_set_name, user_id, domain):
    start_time = datetime.datetime.now()
    print "File received.. Uploading started.."
    o_data = params.other_data

    if o_data == 'logo':
        upload_path = conf.get_conf('FilePathConfig.ini', 'User Files')[
                          'Path'] + '/digin_user_data/' + user_id + '/' + domain + '/logos'
        try:
            os.makedirs(upload_path)
        except OSError:
            if not os.path.isdir(upload_path):
                raise
        if 'file' in file_obj:  # to check if the file-object is created
            try:
                filename = file_obj.file.filename.replace('\\',
                                                          '/')  # replaces the windows-style slashes with linux ones.
                # filename=filepath.split('/')[-1] # splits the path and chooses the last part (the filename with extension)
                fout = open(upload_path + '/' + filename,
                            'wb')  # creates the file where the uploaded file should be stored
                fout.write(file_obj.file.file.read())  # writes the uploaded file to the newly created file.
                fout.close()  # closes the file, upload complete.
            except Exception, err:
                print err
                return cmg.format_response(False, err, "Error occurred while uploading file", sys.exc_info())

            uploaded_time = datetime.datetime.now()
            time_taken = uploaded_time - start_time
            print "Upload completed! Time taken - " + str(time_taken)
            return cmg.format_response(True, 1, "File Upload successful!")
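The open/write/close sequence above leaks the file handle if write() fails partway. A minimal sketch of the same write step with a context manager (the function name and arguments are illustrative, not from the codebase):

import os

def save_upload(upload_path, filename, payload):
    # same intent as the makedirs/OSError guard above
    if not os.path.isdir(upload_path):
        os.makedirs(upload_path)
    # the context manager closes the handle even if write() raises
    with open(os.path.join(upload_path, filename), 'wb') as fout:
        fout.write(payload)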
Code Example #3
def get_folder_names(params,user_id,domain):


    if params.folder_type == 'data_source_folder':
        if params.folder_name == '' or params.folder_name is None:
            file_path = conf.get_conf('FilePathConfig.ini', 'User Files')[
                          'Path'] + '/digin_user_data/' + user_id + '/' + domain + '/data_sources'
            try:
                if not os.path.isdir(file_path):
                    print "no folder"
                    return []
                else:
                    directory = file_path
                    # root, dirs, files = os.walk(file_path).next()
                    # print dirs
                    # return dirs
                    file_list = []
                    for i in os.listdir(directory):
                        if os.path.isdir(os.path.join(directory, i)):
                            a = os.stat(os.path.join(directory, i))
                            file = {'file' : i,
                                    'created_date': time.ctime(a.st_ctime),
                                    'created_user': user_id
                            }
                            file_list.append(file)  # [file,user_id,created]
                    return file_list
            except OSError, err:
                print err
                if not os.path.isdir(file_path):
                    return 'No data_source_folder'
            except Exception, err:
                print err
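On Python 3 the same scan can be written in one pass with os.scandir, which caches the stat result per entry; the dict fields mirror the listing above:

import os
import time

def list_subfolders(directory, user_id):
    folders = []
    for entry in os.scandir(directory):
        if entry.is_dir():
            folders.append({'file': entry.name,
                            'created_date': time.ctime(entry.stat().st_ctime),
                            'created_user': user_id})
    return folders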
Code Example #4
def GetSession(SecurityToken):
    secToken = SecurityToken
    AuthURL = conf.get_conf('DatasourceConfig.ini', 'AUTH')
    url = AuthURL['URL'] + "/GetSession/" + secToken + "/" + 'Nil'
    try:
        response = requests.get(url)
    except URLError, err:
        print err
        logger.error("Authorization failed")
        logger.error(err)
        response = None
    return response  # callers read .text off the session response (see get_security_level)
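A defensive variant of the same call against the /GetSession/ route shown above; requests' RequestException also covers connection failures, and raise_for_status surfaces HTTP 4xx/5xx responses that the URLError handler would miss:

import logging
import requests

logger = logging.getLogger(__name__)

def get_session(auth_base_url, sec_token):
    url = auth_base_url + "/GetSession/" + sec_token + "/Nil"
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()  # turn HTTP error codes into exceptions
        return response
    except requests.exceptions.RequestException as err:
        logger.error("Authorization failed")
        logger.error(err)
        return None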
Code Example #5
def get_security_level(security_token):
    secToken = security_token
    tenant = json.loads(GetSession(secToken).text)['Domain']
    AuthURL = conf.get_conf('DatasourceConfig.ini', 'AUTH')
    url = AuthURL['URL'] + "/tenant/Autherized/" + tenant
    try:
        response = requests.get(url,headers={"Securitytoken":security_token})
        if json.loads(response.text)['Autherized']:
           security_level = json.loads(response.text)['SecurityLevel']
           return security_level
    except Exception, err:
        print err
        raise
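get_security_level parses the response body twice; a small tightening that parses it once, with the field spellings exactly as the auth service returns them above:

def get_security_level_from(response):
    data = response.json()        # one parse instead of two json.loads calls
    if data.get('Autherized'):    # spelling as used by the service
        return data.get('SecurityLevel')
    return None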
Code Example #6
def csv_uploader(parms, dataset_name, user_id=None, tenant=None):
    folder_name = parms.folder_name
    filename = parms.filename
    extension = filename.split('.')[-1]
    if extension == 'xlsx' or extension == 'xls':
        filename = filename.split('.')[0]+'.csv'

    file_path = conf.get_conf('FilePathConfig.ini', 'User Files')[
                    'Path'] + '/digin_user_data/' + user_id + '/' + tenant + '/data_sources/' + folder_name

    schema = {}
    if parms.folder_type.lower() == 'new':
        try:
            schema = json.loads(parms.schema)
            with open(file_path + '/schema.txt', 'w') as outfile:
                json.dump(schema, outfile)
        except Exception, err:
            print err
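Note that split('.') mis-handles names like report.v2.xlsx (the stem collapses to report). An equivalent extension check via os.path.splitext keeps dotted stems intact:

import os

def normalize_to_csv(filename):
    stem, ext = os.path.splitext(filename)  # 'report.v2.xlsx' -> ('report.v2', '.xlsx')
    if ext.lower() in ('.xlsx', '.xls'):
        return stem + '.csv'
    return filename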
Code Example #7
def _ktrConfig(user_id, domain, Filename):
    User_Reports_path = conf.get_conf('FilePathConfig.ini', 'User Files')['Path']
    filename = Filename.split('.')[0]
    try:

        xmldoc = minidom.parse(User_Reports_path + '/digin_user_data/' + user_id + '/' + domain + '/prpt_files/' + filename + '/' + filename + '.ktr')

        ReportDF = xmldoc.getElementsByTagName("item")[0]
        ReportDF.firstChild.nodeValue = User_Reports_path + '/digin_user_data/' + user_id + '/' + domain + '/prpt_files/' + filename + '/' + filename + '.prpt'
        OutPut = xmldoc.getElementsByTagName("item")[1]
        OutPut.firstChild.nodeValue = User_Reports_path + '/digin_user_data/' + user_id + '/' + domain + '/prpt_files/' + filename + '/' + filename + '.pdf'

        with open(
            User_Reports_path + '/digin_user_data/' + user_id + '/' + domain + '/prpt_files/' + filename + '/' + filename + '.ktr',"wb") as f:
            xmldoc.writexml(f)

    except Exception, err:
        print err
        print "No such Ktr file "
Code Example #8
    def set_packages(self):
        time_now = datetime.datetime.now()
        _, num_days = calendar.monthrange(time_now.year, time_now.month)
        free_package = conf.get_conf('DefaultConfigurations.ini', 'Package Settings')['Free']
        if self.package_id == int(free_package):
            last_day = time_now + datetime.timedelta(days=30)
        else:
            last_day = datetime.datetime(time_now.year, time_now.month, num_days, 23, 59, 59)

        tenant_package_mapping = [{'tenant_id': self.tenant,
                                   'package_id': self.package_id,
                                   'created_datetime': time_now,
                                   'modified_datetime': time_now,
                                   'expiry_datetime': last_day,
                                   'package_status':'current_package'}]
        try:
            db.insert_data(tenant_package_mapping, 'digin_tenant_package_details')
        except Exception, err:
            print "Error inserting to cacheDB!"
            return cmg.format_response(False, err, "Error occurred while inserting.. \n" + str(err),
                                          exception=sys.exc_info())
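For reference, calendar.monthrange returns (weekday_of_first_day, days_in_month), so non-free packages above expire at the last second of the current month:

import calendar
import datetime

now = datetime.datetime(2024, 2, 10)
_, num_days = calendar.monthrange(now.year, now.month)  # (3, 29) for Feb 2024
print(datetime.datetime(now.year, now.month, num_days, 23, 59, 59))  # 2024-02-29 23:59:59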
Code Example #9
def set_initial_user_env(params,email,user_id,domain):

    default_sys_settings = conf.get_conf('DefaultConfigurations.ini','System Settings')
    dataset_name = email.replace(".", "_").replace("@","_")

    if ast.literal_eval(default_sys_settings['signup_dataset_creation']):
        db = params['db']
        if db.lower() == 'bigquery':
            logger.info("Creation of dataset started!")
            print "Creation of dataset started!"
            try:
                result_ds= bq.create_dataset(dataset_name)
                print result_ds
                logger.info("Creation of dataset status " + str(result_ds))
                print "Creation of dataset " + str(result_ds)
            except Exception, err:
              print err
              print "Creation of dataset failed!"
              return cmg.format_response(False,err,"Error Occurred while creating dataset in bigquery!",exception=sys.exc_info())
        else:
            # only bigquery is handled for signup dataset creation
            raise NotImplementedError("Unsupported db type for dataset creation: " + str(db))
Code Example #10
#!/usr/bin/env python3

import pymongo
from pymongo import MongoClient
import sys
import bson
import os
import configs.ConfigHandler as conf
out = conf.get_conf('config.ini','output')
mongoconf = conf.get_conf('config.ini','mongo')

NO_ARRAY_ORDER = False

def count_unique_values(field_names, coll):
    #abominations to get mongo to count how many distinct values
    #exist for a field
    pipeline = []
    project_inner = {}
    for name in field_names:
        project_inner[name] = 1
    project_outer = {"$project": project_inner}
    group_inner = {}
    for name in field_names:
        group_inner[name] = "$" + name
    group_outer = {"$group": {"_id": group_inner}}
    pipeline = [
        project_outer,
        group_outer,
        #{"$project": { field_name: 1 } },
        #{"$group": { "_id": "$" + field_name } },
        {"$project": { "dummy": "dummy" } },
Code Example #11
import time
import pymongo
from pymongo import MongoClient
from bson.objectid import ObjectId
import configs.ConfigHandler as conf
default_data = conf.get_conf('config.ini', 'DefaultData')
mongoconf = conf.get_conf('config.ini', 'mongo')

db = None


def usergroups():
    try:
        db['usergroups'].update_many(
            {}, {'$set': {
                'businessUnit': default_data['bu']
            }})
    except Exception as err:
        print(err)


# def todos():
#     try:
#         db['todos'].update_many({}, {'$set': {'businessUnit': default_data['bu']}})
#     except Exception as err:
#         print(err)

# todos - No usage, ticketviews - active, order are present in singer db no need of migration

# ticketstatusnodes - discuss and update category field
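db is populated elsewhere before usergroups() runs; a plausible wiring is sketched below, where the mongo config key names are assumptions rather than the project's actual section layout:

from pymongo import MongoClient

client = MongoClient(mongoconf['host'], int(mongoconf['port']))  # key names assumed
db = client[mongoconf['database']]                               # key name assumed
usergroups()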
Code Example #12
import modules.CommonMessageGenerator as cmg
import scripts.PentahoReportingService as prs
import scripts.DigInRatingEngine.DigInRatingEngine as dre
import threading
import sys
import os
from shutil import copyfile
import datetime
import logging
import re
import ast
import configs.ConfigHandler as conf

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
path_settings = conf.get_conf('FilePathConfig.ini','Logs')
path = path_settings['Path']
log_path = path + '/UserManagementService.log'
handler = logging.FileHandler(log_path)
handler.setLevel(logging.INFO)

formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)

logger.addHandler(handler)
logger.info('--------------------------------------  UserManagementService  ------------------------------------------')
logger.info('Starting log')

default_user_settings = conf.get_conf('DefaultConfigurations.ini', 'User Settings')

def store_user_settings(params,user_id, domain):
Code Example #13
__author__ = 'Marlon Abeykoon'
__version__ = '1.0.0.3'
#code added by Thivatharan Jeganathan

import datetime
import modules.CommonMessageGenerator as cmg
import sys
import scripts.DigINCacheEngine.CacheController as db
import calendar
import configs.ConfigHandler as conf

free_package = conf.get_conf('DefaultConfigurations.ini', 'Package Settings')['Free']
default_1 = conf.get_conf('DefaultConfigurations.ini', 'Package Settings')['Personal Space']
default_2 = conf.get_conf('DefaultConfigurations.ini', 'Package Settings')['We Are A Mini Team']
default_3 = conf.get_conf('DefaultConfigurations.ini', 'Package Settings')['We Are the World']

class PackageProcessor():

    def __init__(self, package_name, package_attribute, package_value, package_price, is_default, tenant, package_id=None, start_date =None, end_date=None):
        self.package_id = self._unix_time_millis(datetime.datetime.now()) if not package_id else package_id
        self.package_name = package_name
        self.package_attribute = package_attribute
        self.package_value = package_value
        self.package_price = package_price
        self.is_default = is_default
        self.tenant = tenant
        self.start_date = start_date
        self.end_date = end_date

    def _unix_time_millis(self, dt):
        # milliseconds since the Unix epoch; used above as a fallback package_id
        epoch = datetime.datetime.utcfromtimestamp(0)
        return int((dt - epoch).total_seconds() * 1000)
Code Example #14
import difflib
import os
import sys
import PostgresHandler as pg
import configs.ConfigHandler as conf
out = conf.get_conf('config.ini', 'output')


def get_db_tables(pg1):
    tables = set()
    res = pg1.execute_query(
        'SELECT schemaname, tablename FROM pg_catalog.pg_tables')
    for table in res:
        if table['schemaname'] == 'public':
            tables.add(table['tablename'])
    return tables
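A quick usage sketch for get_db_tables, assuming PostgresHandler.execute_query returns dict-like rows as the loop above expects:

pg1 = pg.PostgresHandler('db1')
tables = get_db_tables(pg1)  # e.g. {'users', 'orders'}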


def compare_number_of_items(db_obj, db1_items, db2_items, items_name):

    if db1_items != db2_items:
        additional_db1 = db1_items - db2_items
        additional_db2 = db2_items - db1_items

        if additional_db1:
            print('{}: additional in "{}"\n'.format(items_name, 'db1'))
            for t in additional_db1:
                print('\t{}\n'.format(t))
            print('\n')

        if additional_db2:
Code Example #15
__author__ = 'Marlon'
__version__ = '1.0.0.0'

import sys
sys.path.append("...")
from sqlalchemy import create_engine
import configs.ConfigHandler as conf
pg_conf = conf.get_conf('config.ini', 'postgres')


class PostgresHandler():
    def __init__(self, db):

        self.db = db
        if db == 'db1':
            try:
                self.database_name = pg_conf['database_1_name']
                self.database_username = pg_conf['database_1_username']
                self.database_password = pg_conf['database_1_password']
                self.database_host = pg_conf['database_1_host']
                self.database_port = pg_conf['database_1_port']
            except Exception as err:
                print(err)

        else:
            try:
                self.database_name = pg_conf['database_2_name']
                self.database_username = pg_conf['database_2_username']
                self.database_password = pg_conf['database_2_password']
                self.database_host = pg_conf['database_2_host']
                self.database_port = pg_conf['database_2_port']
Code Example #16
__author__ = 'Marlon Abeykoon'
__version__ = '1.0.0.0'

import datetime
import time
import sys
import ast
import logging
sys.path.append("...")
import CacheController as CC
import configs.ConfigHandler as conf

caching_tables = conf.get_conf('CacheConfig.ini', 'Caching Tables')
tables = caching_tables['table_names']

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
path_settings = conf.get_conf('FilePathConfig.ini','Logs')
path = path_settings['Path']
log_path = path + '/CacheGarbageCleaner.log'
handler = logging.FileHandler(log_path)
handler.setLevel(logging.INFO)

formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)

logger.addHandler(handler)

logger.info('Starting log')
datasource_settings = conf.get_conf('CacheConfig.ini','Cache Expiration')
cache_cleaning_interval = float(datasource_settings['cache_cleaning_interval'])
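The file-logger boilerplate above recurs across several of these modules; a small factory they could share (the name and signature are ours, not the codebase's):

import logging

def make_file_logger(name, log_path, level=logging.INFO):
    logger = logging.getLogger(name)
    logger.setLevel(level)
    handler = logging.FileHandler(log_path)
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    logger.addHandler(handler)
    return logger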
Code Example #17
__author__ = 'Marlon Abeykoon'
__version__ = '2.0.0.0'

from memsql.common import database
import ast
#import threading
from memsql.common.query_builder import multi_insert
from memsql.common.query_builder import update
import sys
sys.path.append("...")
import configs.ConfigHandler as conf
import modules.CommonMessageGenerator as cmg

datasource_settings = conf.get_conf('DatasourceConfig.ini','MemSQL')
query = ""
DATABASE = datasource_settings['DATABASE']
USER = datasource_settings['USER']
PASSWORD = datasource_settings['PASSWORD']
HOST = datasource_settings['HOST']
PORT = datasource_settings['PORT']

caching_tables = conf.get_conf('CacheConfig.ini', 'Caching Tables')
tables = caching_tables['table_names']

cache_state_conf = conf.get_conf('CacheConfig.ini', 'Cache Expiration')
cache_state = int(cache_state_conf['default_timeout_interval'])

# The number of workers to run
NUM_WORKERS = 20

# Run the workload for this many seconds
Code Example #18
                            file = {'file' : i,
                                    'created_date': time.ctime(a.st_ctime),
                                    'created_user': user_id
                            }
                            file_list.append(file)  # [file,user_id,created]
                    return file_list
            except OSError, err:
                print err
                if not os.path.isdir(file_path):
                    return 'No data_source_folder'
            except Exception, err:
                print err


        else:
            file_path = conf.get_conf('FilePathConfig.ini', 'User Files')[
                            'Path'] + '/digin_user_data/' + user_id + '/' + domain + '/data_sources/'+params.folder_name
            try:
                if not os.path.isdir(file_path):
                    print "no folder"
                    return []
                else:
                    filenames = os.listdir(file_path)
                    return [filename for filename in filenames if filename.endswith(".csv")]

            except OSError, err:
                print err
                if not os.path.isdir(file_path):
                    return 'No data_source_folder'
            except Exception, err:
                print err
Code Example #19
__author__ = 'Manura Omal Bhagya'
__version__ = '1.0.0.0'

import sys,os
sys.path.append("...")
currDir = os.path.dirname(os.path.realpath(__file__))
rootDir = os.path.abspath(os.path.join(currDir, '../..'))
if rootDir not in sys.path:  # add parent dir to paths
    sys.path.append(rootDir)
import algorithm_processor as ap
import ast
import configs.ConfigHandler as conf
import DiginAlgo as DA

datasource_settings = conf.get_conf('CacheConfig.ini','Cache Expiration')
default_cache_timeout = datasource_settings['default_timeout_interval']

def linear_regression(params):
        dbtype = params.dbtype
        db = params.db
        table = params.table
        x = params.x
        y = params.y
        predict = ast.literal_eval(params.predict)

        result = DA.slr_get(dbtype, db,table, x, y, predict)
        return result

def kmeans_calculation(params,key):

        rec_data = ast.literal_eval(params.data)
Code Example #20
import modules.SQLQueryHandler as mssql
import modules.PostgresHandler as PG
import scripts.DigINCacheEngine.CacheController as CC
import modules.CommonMessageGenerator as cmg
# from multiprocessing import Process
from datetime import date
import decimal
import json
import threading
import operator
import ast
import logging
import datetime
import configs.ConfigHandler as conf

datasource_settings = conf.get_conf('CacheConfig.ini','Cache Expiration')
default_cache_timeout = datasource_settings['default_timeout_interval']
path_settings = conf.get_conf('FilePathConfig.ini','Logs')
path = path_settings['Path']
log_path = path + '/LogicImplementer.log'
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

handler = logging.FileHandler(log_path)
handler.setLevel(logging.INFO)

formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)

logger.addHandler(handler)
logger.info('--------------------------------------  LogicImplementer  -----------------------------------------------')
Code Example #21
 def _get_next_scheduled_time(self):
     scheduled_time = conf.get_conf("JobSchedules.ini", self.job_name)
     return int(scheduled_time["job_run_frequency"])
Code Example #22
__author__ = "Marlon Abeykoon"
__version__ = "1.0.0.1"

import UsageCalculatorJob as ucj
import configs.ConfigHandler as conf

scheduled_time = conf.get_conf("JobSchedules.ini", "UsageCalculatorJob")


class DigInScheduler:
    def __init__(self, job_name, command):
        self.job_name = job_name
        self.command = command
        self.next_run = self._get_next_scheduled_time()

    def _get_next_scheduled_time(self):
        scheduled_time = conf.get_conf("JobSchedules.ini", self.job_name)
        return int(scheduled_time["job_run_frequency"])

    def start_job(self):
        if self.job_name == "UsageCalculatorJob":
            ucj.UsageCalculatorJob(self.next_run, self.command).initiate_usage_scheduler()
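Illustrative wiring for the scheduler; the job name must match a section in JobSchedules.ini, and the command payload is whatever UsageCalculatorJob expects (None here is a placeholder):

scheduler = DigInScheduler("UsageCalculatorJob", command=None)
scheduler.start_job()  # presumably re-runs every job_run_frequency seconds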
Code Example #23
import PostgresHandler as pg
import configs.ConfigHandler as config
default_data = config.get_conf('config.ini', 'DefaultData')

con = None

# CSDB_CacheUpdates ignore


def CSDB_CallCDRProcessed():  #tested
    try:
        print("CSDB_CallCDRProcesseds - BusinessUnit 'ADD' process started")
        con.execute_query(
            """ALTER TABLE "public"."CSDB_CallCDRProcesseds" ADD COLUMN IF NOT EXISTS "BusinessUnit" varchar(255);"""
        )
        print("CSDB_CallCDRProcesseds - BusinessUnit 'ADD' process completed")
        print("CSDB_CallCDRProcesseds - QueuePriority 'ADD' process started")
        con.execute_query(
            """ALTER TABLE "public"."CSDB_CallCDRProcesseds" ADD COLUMN IF NOT EXISTS "QueuePriority" integer;"""
        )
        print("CSDB_CallCDRProcesseds - QueuePriority 'ADD' process completed")
        print(
            "CSDB_CallCDRProcesseds - TimeAfterInitialBridge 'ADD' process started"
        )
        con.execute_query(
            """ALTER TABLE "public"."CSDB_CallCDRProcesseds" ADD COLUMN IF NOT EXISTS "TimeAfterInitialBridge" integer;"""
        )
        print(
            "CSDB_CallCDRProcesseds - TimeAfterInitialBridge 'ADD' process completed"
        )
    except Exception as err:
Code Example #24
__author__ = 'Marlon Abeykoon'
__version__ = '1.0.0.1'


from threading import Timer
import datetime
import logging
import sys
sys.path.append("...")
import modules.BigQueryHandler as bq
import scripts.DigInRatingEngine.DigInRatingEngine as dre
import scripts.DigINCacheEngine.CacheController as db
import configs.ConfigHandler as conf

path_settings = conf.get_conf('FilePathConfig.ini','Logs')
path = path_settings['Path']
log_path = path + '/UsageCalculatorJob.log'

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

handler = logging.FileHandler(log_path)
handler.setLevel(logging.INFO)

formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)

logger.addHandler(handler)

logger.info('Starting log')
Code Example #25
                # filename=filepath.split('/')[-1] # splits the path and chooses the last part (the filename with extension)
                fout = open(upload_path + '/' + filename,
                            'wb')  # creates the file where the uploaded file should be stored
                fout.write(file_obj.file.file.read())  # writes the uploaded file to the newly created file.
                fout.close()  # closes the file, upload complete.
            except Exception, err:
                print err
                return cmg.format_response(False, err, "Error occurred while uploading file", sys.exc_info())

            uploaded_time = datetime.datetime.now()
            time_taken = uploaded_time - start_time
            print "Upload completed! Time taken - " + str(time_taken)
            return cmg.format_response(True, 1, "File Upload successful!")

    elif o_data == 'dp':
        upload_path = conf.get_conf('FilePathConfig.ini', 'User Files')[
                          'Path'] + '/digin_user_data/' + user_id + '/' + domain + '/DPs'
        try:
            os.makedirs(upload_path)
        except OSError:
            if not os.path.isdir(upload_path):
                raise
        if 'file' in file_obj:  # to check if the file-object is created
            try:
                filename = file_obj.file.filename.replace('\\',
                                                          '/')  # replaces the windows-style slashes with linux ones.
                # filename=filepath.split('/')[-1] # splits the path and chooses the last part (the filename with extension)
                fout = open(upload_path + '/' + filename,
                            'wb')  # creates the file where the uploaded file should be stored
                fout.write(file_obj.file.file.read())  # writes the uploaded file to the newly created file.
                fout.close()  # closes the file, upload complete.
            except Exception, err:
Code Example #26
__author__ = 'Sajeetharan'
__version__ = '1.0.1.1'
from bigquery import get_client
import sys
sys.path.append("...")
import configs.ConfigHandler as conf
import scripts.DigInRatingEngine.DigInRatingEngine as dre
import threading
from googleapiclient import discovery
from googleapiclient.http import MediaFileUpload
from oauth2client.service_account import ServiceAccountCredentials

datasource_settings = conf.get_conf('DatasourceConfig.ini','BIG-QUERY')
project_id = datasource_settings['PROJECT_ID']
service_account = datasource_settings['SERVICE_ACCOUNT']
key = datasource_settings['KEY']

try:
    datasource_settings = conf.get_conf('DatasourceConfig.ini','BIG-QUERY')
    query = ""
    project_id = datasource_settings['PROJECT_ID']
    service_account = datasource_settings['SERVICE_ACCOUNT']
    key = datasource_settings['KEY']
except Exception, err:
    print err

def execute_query(querystate, offset=None, limit=None, user_id=None, tenant=None):
          query = querystate
          try:
              client = get_client(project_id, service_account=service_account,
                                private_key_file=key, readonly=False)
Code Example #27
__author__ = 'Marlon Abeykoon'
__version__ = '1.0.0.0'

import os, sys
import sqlalchemy as sql
from sqlalchemy import text
sys.path.append("...")
import configs.ConfigHandler as conf
currDir = os.path.dirname(os.path.realpath(__file__))
rootDir = os.path.abspath(os.path.join(currDir, '../..'))
if rootDir not in sys.path:  # add parent dir to paths
    sys.path.append(rootDir)

try:
    datasource_settings = conf.get_conf('DatasourceConfig.ini','MS-SQL')
    connection_string = "mssql+pyodbc://{0}:{1}@{2}:{5}/{3}?driver={4}"\
                        .format(datasource_settings['UID'],datasource_settings['PWD'],datasource_settings['SERVER'],
                                datasource_settings['DATABASE'],datasource_settings['DRIVER'],datasource_settings['PORT'])
except Exception, err:
    print err

try:
    engine = sql.create_engine(connection_string)
    metadata = sql.MetaData()
    connection = engine.connect()
except Exception, err:
    print "Error connecting to sqlserver"
    print err

def execute_query(query):
          sql = text(query)
Code Example #28
__author__ = 'Jeganathan Thivatharan'
__version__ = '1.0.0'

import os
import zipfile
from xml.dom import minidom
import logging
import ast
import configs.ConfigHandler as conf
from sqlalchemy.engine.url import make_url



Report_cnf = conf.get_conf('FilePathConfig.ini','User Files')
User_Reports_path = Report_cnf['Path']

path_settings = conf.get_conf('FilePathConfig.ini','Logs')
path = path_settings['Path']
log_path = path + '/PentahoReportingService.log'

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

handler = logging.FileHandler(log_path)
handler.setLevel(logging.INFO)

formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)

logger.addHandler(handler)
Code Example #29
__author__ = 'Sajeetharan'
__version__ = '1.0.0.0'

import psycopg2
import psycopg2.extras
import logging
import sys
sys.path.append("...")
import configs.ConfigHandler as conf

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
path_settings = conf.get_conf('FilePathConfig.ini','Logs')
path = path_settings['Path']
log_path = path + '/DiginStore.log'
handler = logging.FileHandler(log_path)
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.info('Starting log')

try:
    datasource_settings = conf.get_conf('DatasourceConfig.ini','PostgreSQL')
    query = ""
    database = datasource_settings['DATABASE']
    user = datasource_settings['USER']
    password = datasource_settings['PASSWORD']
    host = datasource_settings['HOST']
    port = datasource_settings['PORT']
except Exception, err: