Example #1
import json
import re

from file_generator import FileGenerator

# sqlToJson_offline and makeJsonResponse are helper functions assumed to be defined
# elsewhere in this module (they are not shown in this listing).


class DB_offline:

    _user = None
    _password = None
    _host = None
    _database = None
    _cnx = None
    _jsonResponse = None
    _query_type = 'modules'
    _fileGenerator = FileGenerator(template_path='templates')

    def connect(self, user, password, host, port, database):
        print('connected to offline database')

    def disconnect(self):
        print('disconnecting from offline database')

    def getData(self):
        return self._jsonResponse

    def query(self, sql):
        begin = re.search('FROM', sql).start()
        end = re.search('WHERE', sql).start()
        table_name = sql[begin + 5:end - 1]
        print('table name: ' + table_name)
        ID_project = re.findall(r'\d+', sql)[-1]

        # change all queries to selecting all module entries from a given project
        sql = 'SELECT * FROM ' + table_name + " WHERE ID_project = '"+ID_project+"';"
        print('ID_project = '+ID_project)
        for statement in re.sub(r'(\)\s*);', r'\1%;%', sql).split('%;%'):
            print('simulating executing: ' + statement)
        response = {}
        #subject_modules = ['X20AT2222', 'X20AI2622', 'X20DI9371', 'X20AO2622', 'X20DO9322']
        subject_modules = ['X20PS9400a', 'X20DC1376', 'X20AT2222', 'X20AI2622', 'X20DI9371', 'X20AO2622', 'X20DO9322']

        # simulation of database return data
        # data = [[1, 1, subject_modules[0], '[1,2]'], [1, 2, subject_modules[1], '[1,2]'],
        #         [1, 3, subject_modules[2], '[1,2]'], [1, 4, subject_modules[3], '[1,2]'],
        #         [1, 5, subject_modules[4], '[1,2]']]
        data = [[1, 1, subject_modules[0], '[]'], [1, 2, subject_modules[1], '[]'],
                [1, 3, subject_modules[2], '[]'], [1, 4, subject_modules[3], '[1]'],
                [1, 5, subject_modules[4], '[4]'], [1, 6, subject_modules[5], '[]'],
                [1, 7, subject_modules[6], '[1,11]']]
        data_processed = []

        # First query - returns the list of all necessary module configuration files to Automation Studio
        if table_name == 'modules':
            self._fileGenerator = FileGenerator(template_path='templates')
            for row in data:
                print(row)
                module_sub_idx = row[1]
                module_name = row[2]
                active_ports = eval(row[3])

                modules = self._fileGenerator.add_module(module_name, active_ports, module_sub_idx=module_sub_idx)
                # add_module returns a list of generated module info (including file names and contents).
                # If the subject module is an IO module, the list contains two entries: the subject module and its test module.
                if len(modules) == 2:
                    test_module = modules[1]
                    sub_module = modules[0]
                    data_processed.append([row[1], test_module.file_name+'.ar', test_module.content_ar,
                                           test_module.file_name+'.io', test_module.content_io,
                                           sub_module.file_name+'.ar', sub_module.content_ar,
                                           sub_module.file_name+'.io', sub_module.content_io])
                else:
                    sub_module = modules[0]
                    data_processed.append([row[1], 'empty.ar', '', 'empty.io', '',
                                           sub_module.file_name + '.ar', sub_module.content_ar,
                                           sub_module.file_name + '.io', sub_module.content_io])
            response = sqlToJson_offline(['ID', 'test_name_ar', 'test_file_ar', 'test_name_io', 'test_file_io', 'sub_name_ar',
                                  'sub_file_ar',  'sub_name_io', 'sub_file_io'], data_processed)
            # overwriting variable types - in the online version cursor.description describes the actual
            # database table, which has only 4 columns.
            response['types'] = ['TINY', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING',
                                 'VAR_STRING', 'VAR_STRING']
            self._query_type = 'conf'
            print('sending module configuration files: ')
            for row in response['data']:
                print([row[content][0:min(len(row[content]), 1000)] if content != 'ID' else row[content] for content in row])

        # Second query - the data returned from the database after this query is discarded.
        # It generates the main configuration file from the module data gathered during the first query.
        elif table_name == 'conf':
            response = sqlToJson_offline(['config'], [[self._fileGenerator.generate_main_file()]])
            response['types'] = ['VAR_STRING']
            print('main configuration file: \n'+str(response))
            self._query_type = 'io'

        # Third query - based on the active-ports data received in the first query, a table of
        # desired connections is generated. The table has the following structure:
        # [[di_conn[0], do_conn[0], ai_conn[0], ao_conn[0]], [di_conn[1], do_conn[1], ai_conn[1], ao_conn[1]], ...]
        # It is necessary that all the connection types (di, do, ai, ao) have the same length.
        elif table_name == 'io':
            conn = self._fileGenerator.connections

            # to ensure that connection lists of each type have the same length, they are filled with empty strings
            max_len = max([len(conn[key]) for key in conn])
            for key in conn:
                while len(conn[key]) < max_len:
                    conn[key].append('')

            # the connection list is converted from
            # {'di': [di1, di2, ...], 'do': [do1, do2, ...], 'ai': [ai1, ai2, ...], 'ao': [ao1, ao2, ...]}
            # to
            # [[di1, do1, ai1, ao1], [di2, do2, ai2, ao2], ...]
            response = sqlToJson_offline(['di', 'do', 'ai', 'ao'],
                                         [[conn['di'][i], conn['do'][i], conn['ai'][i], conn['ao'][i]] for i in range(len(conn['di']))])
            response['types'] = ['VAR_STRING', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING']
            self._query_type = 'modules'
            print('connections: \n'+str(response))
        #debug_log(response)
        self._jsonResponse = makeJsonResponse(0, "", response)
        return json.dumps({"responseSize": len(self._jsonResponse)})
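
A minimal driver sketch (not part of the original listing, and only an assumption about how the class is used): it steps DB_offline through the three-query cycle described in the comments above; the connection parameters and the project ID '1' are placeholders.

db = DB_offline()
db.connect('user', 'password', 'localhost', 3306, 'testdb')  # placeholders; the offline class only prints
for table in ('modules', 'conf', 'io'):
    # query() returns just the response size as JSON; the full payload is read with getData()
    print(db.query("SELECT * FROM " + table + " WHERE ID_project = '1';"))
    payload = db.getData()
db.disconnect()
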
Example #2
from file_generator import FileGenerator
from template_generator import generate_templates

processed_path = 'processed'
generate_templates(ar_file_name="backup/arconfig.ar.bup",
                   io_file_name="backup/iomap.io.bup",
                   templates_path="templates")

f = FileGenerator(template_path='templates')

subject_modules = [
    'X20AI2622', 'X20AT2222', 'X20DI9371', 'X20AO2622', 'X20DO9322'
]
for module in subject_modules:
    f.add_module(module, [1, 2])

f.store_files(processed_path)
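
For illustration only, and based on an assumption inferred from how the return value of add_module is consumed in Example #1: each generated record appears to expose file_name, content_ar and content_io, and IO modules yield two records (subject and test).

generated = f.add_module('X20DO9322', [1, 2], module_sub_idx=6)
for m in generated:
    # inspect the generated .ar and .io file names and content sizes
    print(m.file_name + '.ar', len(m.content_ar))
    print(m.file_name + '.io', len(m.content_io))
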
Example #3
import json
import re

from file_generator import FileGenerator

# sqlToJson, makeJsonResponse, debug_print and the parsed command-line arguments (args)
# are assumed to be defined elsewhere in this module (they are not shown in this listing).


class DB:

    _user = None
    _password = None
    _host = None
    _database = None
    _cnx = None
    _jsonResponse = None
    _query_type = 'modules'
    _fileGenerator = FileGenerator(template_path='templates')

    def connect(self, user, password, host, port, database):
        self._user = user
        self._password = password
        self._host = host
        self._database = database
        self._port = port
        if args.sqlType == 'mssql':
            import pyodbc
            server = str(self._host) + ',' + str(self._port)
            self._cnx = pyodbc.connect(driver='{SQL Server Native Client 11.0}',
                                       server=server,
                                       database=self._database,
                                       uid=self._user, pwd=self._password)
        else:
            import mysql.connector
            self._cnx = mysql.connector.connect(user=self._user, password=self._password,
                                                host=self._host,
                                                database=self._database,
                                                port=self._port)

    def disconnect(self):
        try:
            self._cnx.close()
            return makeJsonResponse(0, "disconnected", "")
        except Exception as ex:
            debug_print(1, str(ex))
            debug_print(1, 'not connected to sql server')
            return makeJsonResponse(1, "not connected to sql server", "")

    def getData(self):
        return self._jsonResponse

    def query(self, sql):
        try:
            if args.sqlType == 'mssql':
                cursor = self._cnx.cursor()
            else:
                cursor = self._cnx.cursor(buffered=True)
        except Exception as ex:
            debug_print(1, str(ex))
            return makeJsonResponse(1, "not connected to sql server", "")

        begin = re.search('FROM', sql).start()
        end = re.search('WHERE', sql).start()
        table_name = sql[begin + 5:end - 1]
        print('table name: ' + table_name)
        ID_project = re.findall(r'\d+', sql)[-1]

        # change all queries to selecting all module entries from a given project
        sql = "SELECT * FROM Configurations WHERE ID_project = '"+ID_project+"'"
        print('ID_project = '+ID_project)
        for statement in re.sub(r'(\)\s*);', r'\1%;%', sql).split('%;%'):
            print('executing: ' + statement)
            cursor.execute(statement)
        data = []
        response = {}
        # Always try to fetch data, regardless of whether the statement was an INSERT or a SELECT
        try:
            data = cursor.fetchall()
        except Exception as ex:
            pass
        self._cnx.commit()

        data_processed = []

        # First query - returns the list of all necessary module configuration files to Automation Studio
        if self._query_type == 'modules':
            self._fileGenerator = FileGenerator(template_path='templates')
            for row in data:
                module_sub_idx = row[1]
                module_name = row[2].strip()
                active_ports = eval(row[3].strip())

                modules = self._fileGenerator.add_module(module_name, active_ports, module_sub_idx=module_sub_idx)
                # add_module returns a list of generated module info (including file names and contents).
                # If the subject module is an IO module, the list contains two entries: the subject module and its test module.
                if len(modules) == 2:
                    test_module = modules[1]
                    sub_module = modules[0]
                    data_processed.append([row[1], test_module.file_name+'.ar', test_module.content_ar,
                                           test_module.file_name+'.io', test_module.content_io,
                                           sub_module.file_name+'.ar', sub_module.content_ar,
                                           sub_module.file_name+'.io', sub_module.content_io])
                else:
                    sub_module = modules[0]
                    data_processed.append([row[1], 'empty.ar', '', 'empty.io', '',
                                           sub_module.file_name + '.ar', sub_module.content_ar,
                                           sub_module.file_name + '.io', sub_module.content_io])
            response = sqlToJson(['ID', 'test_name_ar', 'test_file_ar', 'test_name_io', 'test_file_io', 'sub_name_ar',
                                  'sub_file_ar',  'sub_name_io', 'sub_file_io'], data_processed, cursor.description)
            # overwriting variable types - cursor.description describes the actual database
            # table, which has only 4 columns.
            response['types'] = ['TINY', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING',
                                 'VAR_STRING', 'VAR_STRING']
            self._query_type = 'conf'
            print('sending module configuration files: ')
            for row in response['data']:
                print([row[content][0:min(len(row[content]), 100)] if content != 'ID' else row[content] for content in row])

        # Second query - the data returned from the database after this query is discarded.
        # It generates the main configuration file from the module data gathered during the first query.
        elif self._query_type == 'conf':
            response = sqlToJson(['config'], [[self._fileGenerator.generate_main_file()]], cursor.description)
            response['types'] = ['VAR_STRING']
            print('main configuration file: \n'+str(response))
            self._query_type = 'io'

        # Third query - based on the active-ports data received in the first query, a table of
        # desired connections is generated. The table has the following structure:
        # [[di_conn[0], do_conn[0], ai_conn[0], ao_conn[0]], [di_conn[1], do_conn[1], ai_conn[1], ao_conn[1]], ...]
        # It is necessary that all the connection types (di, do, ai, ao) have the same length.
        elif self._query_type == 'io':
            conn = self._fileGenerator.connections

            # to ensure that connection lists of each type have the same length, they are filled with empty strings
            max_len = max([len(conn[key]) for key in conn])
            for key in conn:
                while len(conn[key]) < max_len:
                    conn[key].append('')

            # the connection list is converted from
            # {'di': [di1, di2, ...], 'do': [do1, do2, ...], 'ai': [ai1, ai2, ...], 'ao': [ao1, ao2, ...]}
            # to
            # [[di1, do1, ai1, ao1], [di2, do2, ai2, ao2], ...]
            response = sqlToJson(['di', 'do', 'ai', 'ao'],
                                 [[conn['di'][i], conn['do'][i], conn['ai'][i], conn['ao'][i]] for i in range(len(conn['di']))],
                                 cursor.description)
            response['types'] = ['VAR_STRING', 'VAR_STRING', 'VAR_STRING', 'VAR_STRING']
            self._query_type = 'modules'
            print('connections: \n'+str(response))
        cursor.close()
        #debug_log(response)
        self._jsonResponse = makeJsonResponse(0, "", response)
        return json.dumps({"responseSize": len(self._jsonResponse)})
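
For clarity, a small standalone illustration (not part of the original listing) of the regex parsing done at the top of query(): the table name is taken from between FROM and WHERE, and the last integer in the incoming statement is treated as ID_project.

import re

sql = "SELECT name FROM modules WHERE ID_project = 7;"
table_name = sql[re.search('FROM', sql).start() + 5:re.search('WHERE', sql).start() - 1]
ID_project = re.findall(r'\d+', sql)[-1]
print(table_name)   # modules
print(ID_project)   # 7
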
Example #4
from file_generator import FileGenerator

f = FileGenerator('templates')
modules = ['X20DC1376', 'X20AT2222', 'X20AI2622', 'X20DI9371']
i = 2
for module in modules:
    f.add_module(module, [], i)
    i += 1

f.store_files('processed_test')