Example 1
    def dhub(self, procs_file_name):
        """
        Return a configured ``DataHub`` using the given SQL procs file.

        """
        data_source = {
            self.key: {
                # @@@ this should depend on self.type
                # @@@ shouldn't have to specify this here and below
                "hub": "MySQL",
                "master_host": {
                    "host": self.host,
                    "user": settings.DATAZILLA_DATABASE_USER,
                    "passwd": settings.DATAZILLA_DATABASE_PASSWORD,
                    },
                "default_db": self.name,
                "procs": [
                    os.path.join(SQL_PATH, procs_file_name),
                    os.path.join(SQL_PATH, "generic.json"),
                    ],
                }
            }

        if self.read_only_host:
            data_source[self.key]['read_host'] = {
                "host": self.read_only_host,
                "user": settings.DATAZILLA_RO_DATABASE_USER,
                "passwd": settings.DATAZILLA_RO_DATABASE_PASSWORD,
                }

        BaseHub.add_data_source(data_source)
        # @@@ the datahub class should depend on self.type
        return MySQL(self.key)
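A minimal usage sketch for the hub returned above. The model instance name, the procs file name, and the proc path are placeholders (not from the original source); the execute() keyword arguments are the ones accepted by the RDBS hubs shown in the later examples.

    # Hypothetical caller; `ds` stands for whatever model instance defines
    # dhub() above, and "perftest.json" / the proc path are placeholder names.
    hub = ds.dhub("perftest.json")
    rows = hub.execute(
        proc="perftest.selects.get_stuff",  # <procs file>.<section>.<proc>
        placeholders=[42],                  # bound to the proc's parameters
        return_type="tuple",                # default return type for RDBS hubs
    )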
Example 2
    def dhub(self, procs_file_name):
        """
        Return a configured ``DataHub`` using the given SQL procs file.

        """
        data_source = {
            self.key: {
                # @@@ this should depend on self.type
                # @@@ shouldn't have to specify this here and below
                "hub":
                "MySQL",
                "master_host": {
                    "host": self.host,
                    "user": settings.TREEHERDER_DATABASE_USER,
                    "passwd": settings.TREEHERDER_DATABASE_PASSWORD,
                },
                "read_host": {
                    "host": self.read_only_host,
                    "user": settings.TREEHERDER_RO_DATABASE_USER,
                    "passwd": settings.TREEHERDER_RO_DATABASE_PASSWORD,
                },
                "require_host_type":
                True,
                "default_db":
                self.name,
                "procs": [
                    os.path.join(SQL_PATH, procs_file_name),
                    os.path.join(SQL_PATH, "generic.json"),
                ],
            }
        }

        BaseHub.add_data_source(data_source)
        # @@@ the datahub class should depend on self.type
        return MySQL(self.key)
Example 3
    def dhub(self, procs_file_name):
        """
        Return a configured ``DataHub`` using the given SQL procs file.

        """
        master_host_config = {
            "host": settings.DATABASES["default"]["HOST"],
            "user": settings.DATABASES["default"]["USER"],
            "passwd": settings.DATABASES["default"].get("PASSWORD") or "",
        }
        if "OPTIONS" in settings.DATABASES["default"]:
            master_host_config.update(settings.DATABASES["default"]["OPTIONS"])

        read_host_config = {
            "host": settings.DATABASES["read_only"]["HOST"],
            "user": settings.DATABASES["read_only"]["USER"],
            "passwd": settings.DATABASES["read_only"].get("PASSWORD") or "",
        }
        if "OPTIONS" in settings.DATABASES["read_only"]:
            read_host_config.update(settings.DATABASES["read_only"]["OPTIONS"])

        data_source = {
            self.key: {
                "hub": "MySQL",
                "master_host": master_host_config,
                "read_host": read_host_config,
                "require_host_type": True,
                "default_db": self.name,
                "procs": [os.path.join(SQL_PATH, procs_file_name), os.path.join(SQL_PATH, "generic.json")],
            }
        }

        BaseHub.add_data_source(data_source)
        return MySQL(self.key)
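For reference, the method above only relies on settings.DATABASES having the following shape; every value below is an illustrative placeholder, and OPTIONS is optional in both aliases.

    # Illustrative Django settings fragment (values are placeholders).
    DATABASES = {
        "default": {
            "NAME": "treeherder",
            "HOST": "localhost",
            "USER": "app_user",
            "PASSWORD": "secret",
            "OPTIONS": {"charset": "utf8"},  # merged into the master_host config
        },
        "read_only": {
            "HOST": "replica.internal",
            "USER": "ro_user",
            "PASSWORD": "secret",
        },
    }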
Example 4
   def __init__(self, sqlFileName):

      self.DATAZILLA_DATABASE_NAME     = os.environ["DATAZILLA_DATABASE_NAME"]
      self.DATAZILLA_DATABASE_USER     = os.environ["DATAZILLA_DATABASE_USER"]
      self.DATAZILLA_DATABASE_PASSWORD = os.environ["DATAZILLA_DATABASE_PASSWORD"]
      self.DATAZILLA_DATABASE_HOST     = os.environ["DATAZILLA_DATABASE_HOST"]
      self.DATAZILLA_DATABASE_PORT     = os.environ["DATAZILLA_DATABASE_PORT"]

      self.sqlFileName = sqlFileName

      try:
         self.DEBUG = os.environ["DATAZILLA_DEBUG"] is not None
      except KeyError:
         self.DEBUG = False

      self.rootPath = os.path.dirname(os.path.abspath(__file__))

      ####
      #Configuration of datasource hub:
      #	1 Build the datasource struct
      # 	2 Add it to the BaseHub
      #	3 Instantiate a MySQL hub for all derived classes
      ####
      dataSource = {
          self.DATAZILLA_DATABASE_NAME: {
              "hub": "MySQL",
              "master_host": {"host": self.DATAZILLA_DATABASE_HOST,
                              "user": self.DATAZILLA_DATABASE_USER,
                              "passwd": self.DATAZILLA_DATABASE_PASSWORD},
              "default_db": self.DATAZILLA_DATABASE_NAME,
              "procs": ["%s%s%s" % (self.rootPath, "/sql/", sqlFileName)]
          }
      }
      BaseHub.addDataSource(dataSource)
      self.dhub = MySQL(self.DATAZILLA_DATABASE_NAME)
Example 5
    def __init__(self):
        procs_path = os.path.join(
            os.path.dirname(os.path.dirname(__file__)),
            'sql', 'reference.json')

        master_host_config = {
            "host": settings.DATABASES['default']['HOST'],
            "user": settings.DATABASES['default']['USER'],
            "passwd": settings.DATABASES['default'].get('PASSWORD') or '',
        }
        if 'OPTIONS' in settings.DATABASES['default']:
            master_host_config.update(settings.DATABASES['default']['OPTIONS'])

        read_host_config = {
            "host": settings.DATABASES['read_only']['HOST'],
            "user": settings.DATABASES['read_only']['USER'],
            "passwd": settings.DATABASES['read_only'].get('PASSWORD') or '',
        }
        if 'OPTIONS' in settings.DATABASES['read_only']:
            read_host_config.update(settings.DATABASES['read_only']['OPTIONS'])

        data_source = {
            'reference': {
                "hub": "MySQL",
                "master_host": master_host_config,
                "read_host": read_host_config,
                "require_host_type": True,
                "default_db": settings.DATABASES['default']['NAME'],
                "procs": [procs_path]
            }
        }

        BaseHub.add_data_source(data_source)
        self.dhub = DataHub.get("reference")
        self.DEBUG = settings.DEBUG

        # Support structure for reference data signatures
        self.build_signature_placeholders = []

        # Support structures for building option collection data structures
        self.oc_hash_lookup = dict()
        self.oc_where_in_list = []
        self.oc_placeholders = []
        self.oc_unique_collections = []

        # Support structures for building option data structures
        self.o_lookup = set()
        self.o_placeholders = []
        self.o_unique_options = []
        self.o_where_in_list = []

        # reference id lookup structure
        self.id_lookup = {}
Example 6
    def test_parse_data_sources(self):

        # Instantiating base hub triggers data_sources.json parsing
        BaseHub()
        if self.data_source not in BaseHub.data_sources:
            msg = "The required data source, %s, was not found in %s" % (self.data_source, BaseHub.source_list_file)
            self.fail(msg)
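For context, a data source entry that would satisfy this test looks roughly like the structure below once the source list file is deserialized; the project name, hosts, and credentials are made up, while the required keys (hub, plus master_host with host and user) follow the data_source_req_keys definition shown in the later examples.

    # Roughly what one entry in the source list file deserializes to
    # (all names and hosts are illustrative).
    example_source = {
        "my_project": {
            "hub": "MySQL",
            "master_host": {"host": "localhost",
                            "user": "app_user",
                            "passwd": "secret"},
            "default_db": "my_project_db",
            "procs": ["procs/my_project.json"],
        }
    }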
Example 7
    def loadvars():

        #####
        #Only load the database sources once when the module
        #is imported
        #####
        if not Model.projectHub:

            Model.DATAZILLA_DATABASE_NAME = settings.DATAZILLA_DATABASE_NAME
            Model.DATAZILLA_DATABASE_USER = settings.DATAZILLA_DATABASE_USER
            Model.DATAZILLA_DATABASE_PASSWORD = settings.DATAZILLA_DATABASE_PASSWORD
            Model.DATAZILLA_DATABASE_HOST = settings.DATAZILLA_DATABASE_HOST
            Model.DATAZILLA_DATABASE_PORT = settings.DATAZILLA_DATABASE_PORT

            ####
            #Configuration of datasource hub:
            # 1 Build the datasource struct
            # 2 Add it to the BaseHub
            # 3 Instantiate a MySQL hub for all derived classes
            ####
            Model.rootPath = os.path.dirname(os.path.abspath(__file__))

            dataSource = {
                Model.DATAZILLA_DATABASE_NAME: {
                    "hub": "MySQL",
                    "master_host": {
                        "host": Model.DATAZILLA_DATABASE_HOST,
                        "user": Model.DATAZILLA_DATABASE_USER,
                        "passwd": Model.DATAZILLA_DATABASE_PASSWORD
                    },
                    "default_db": Model.DATAZILLA_DATABASE_NAME,
                    "procs": ["%s/%s" % (Model.rootPath, 'sources.json')]
                }
            }

            BaseHub.addDataSource(dataSource)
            dzHub = MySQL(Model.DATAZILLA_DATABASE_NAME)

            Model.databaseSources = dzHub.execute(proc='sources.get_datasources',
                                                  key_column='project',
                                                  return_type='dict')

            Model.loadProjectHub(Model.databaseSources)
Example 8
    def loadProjectHub(databaseSources):

        for s in databaseSources:

            project = databaseSources[s]['project']

            dataSource = {
                project: {
                    "hub": "MySQL",
                    "master_host": {"host": databaseSources[s]['host'],
                                    "user": Model.DATAZILLA_DATABASE_USER,
                                    "passwd": Model.DATAZILLA_DATABASE_PASSWORD},
                    "default_db": databaseSources[s]['name'],
                    "procs": ["%s/%s" % (Model.rootPath, 'graphs.json')]
                }
            }

            BaseHub.addDataSource(dataSource)
            hub = MySQL( project )
            Model.projectHub[ project ] = hub
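Once loadvars() and loadProjectHub() have run, a caller can pull the per-project hub back out of Model.projectHub and execute a proc from graphs.json against it. This is a sketch only: the project name and proc path below are illustrative.

    # Illustrative read path; "talos" and the proc path are placeholders.
    hub = Model.projectHub["talos"]
    results = hub.execute(proc="graphs.get_test_runs",
                          placeholders=[42],
                          return_type="tuple")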
Example 9
    def _get_dhub(self):
        dataSource = {
            self.project : {
                "hub":"MySQL",
                "master_host":{
                    "host": settings.CLOUDSQL_INSTANCE,
                    # FIXME: CloudSQL has no users, but datasource requires it
                    "user": "******",
                    },
                "default_db": settings.CLOUDSQL_DATABASE,
                "procs": [os.path.join(SQL_PATH, self.procs_file_name)]
                }
            }
        BaseHub.addDataSource(dataSource)

        try:
            return CloudSQL(self.project)
        except KeyError:
            raise KeyError("Failed to create CloudSQL")
Example 10
    def handle(self, *args, **options):

        ##Load data views##
        views_file_obj = open("%s%s" %
                              (settings.ROOT, "/templates/data/views.json"))
        try:
            data_view_file = views_file_obj.read()
        finally:
            views_file_obj.close()
        ##Strip out comments and newlines##
        data_view_file = BaseHub.strip_python_comments(data_view_file)
        data_views = BaseHub.deserialize_json(data_view_file)

        Command.build_nav(data_views)

        #Uncomment to see datastructure for debugging
        #pp = pprint.PrettyPrinter(indent=3)
        #self.stdout.write( pp.pformat(data_views) )

        menu_file_obj = open(
            "%s%s" % (settings.ROOT, "/html/nav/nav_menu.html"), 'w+')
        try:
            menu_file_obj.write('<ul class="bh-viewtext">\n%s\n</ul>' %
                                (bh_unorderedlist(data_views)))
        finally:
            menu_file_obj.close()

        ##Write out json for the nav_lookup_hash##
        jstring = json.dumps(Command.nav_lookup_hash, ensure_ascii=False)

        html = """<input id="bh_nav_json" type="hidden" value="{{ json_data }}" />"""
        t = Template(html)
        c = Context({'json_data': jstring})
        templateString = t.render(c)

        nav_lookup_file_obj = open(
            "%s%s" % (settings.ROOT, "/templates/bughunter.navlookup.html"),
            'w+')
        try:
            nav_lookup_file_obj.write(templateString)
        finally:
            nav_lookup_file_obj.close()
Example 11
    def dhub(self, procs_file_name):
        """
        Return a configured ``DataHub`` using the given SQL procs file.

        """
        master_host_config = {
            "host": settings.DATABASES['default']['HOST'],
            "user": settings.DATABASES['default']['USER'],
            "passwd": settings.DATABASES['default'].get('PASSWORD') or '',
        }
        if 'OPTIONS' in settings.DATABASES['default']:
            master_host_config.update(settings.DATABASES['default']['OPTIONS'])

        read_host_config = {
            "host": settings.DATABASES['read_only']['HOST'],
            "user": settings.DATABASES['read_only']['USER'],
            "passwd": settings.DATABASES['read_only'].get('PASSWORD') or '',
        }
        if 'OPTIONS' in settings.DATABASES['read_only']:
            read_host_config.update(settings.DATABASES['read_only']['OPTIONS'])

        data_source = {
            self.key: {
                "hub":
                "MySQL",
                "master_host":
                master_host_config,
                "read_host":
                read_host_config,
                "require_host_type":
                True,
                "default_db":
                self.name,
                "procs": [
                    os.path.join(SQL_PATH, procs_file_name),
                    os.path.join(SQL_PATH, "generic.json"),
                ],
            }
        }

        BaseHub.add_data_source(data_source)
        return MySQL(self.key)
Example 12
    def dhub(self, procs_file_name):
        """
        Return a configured ``DataHub`` using the given SQL procs file.

        """
        data_source = {
            self.key: {
                "hub": "MySQL",
                "master_host": {
                    "host": self.host,
                    "user": settings.DATAZILLA_DATABASE_USER,
                    "passwd": settings.DATAZILLA_DATABASE_PASSWORD,
                    },
                "default_db": self.name,
                "procs": [os.path.join(SQL_PATH, procs_file_name)],
                }
            }
        BaseHub.addDataSource(data_source)
        # @@@ the datahub class should depend on self.type
        return MySQL(self.key)
Example 13
    def get_proc(self, data_source, proc):
        """
        Pass through to the BaseHub.get_proc() method.

        Parameters:
           data_source - data source to retrieve the proc from
           proc - full proc path, e.g. mysql.selects.get_stuff

        Returns:
           proc datastructure from the data source
        """
        return BaseHub.get_proc(data_source, proc)
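Judging from the proc paths used in these examples (sources.get_datasources, mysql.selects.get_stuff), the first path segment is the procs file's base name and the remaining segments walk the nested JSON. A file named mysql.json backing the path mysql.selects.get_stuff would therefore deserialize to something like the sketch below; the SQL itself is a placeholder.

    # Approximate contents of mysql.json for the proc path
    # "mysql.selects.get_stuff" (the statement is illustrative).
    procs_file_contents = {
        "selects": {
            "get_stuff": {
                "sql": "SELECT id, name FROM stuff WHERE id = ?"
            }
        }
    }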
Example 14
    def get_proc(self, data_source, proc):
        """
        Pass through to the BaseHub.get_proc() method.

        Parameters:
           data_source - data source to retrieve the proc from
           proc - full proc path, e.g. mysql.selects.get_stuff

        Returns:
           proc datastructure from the data source
        """
        return BaseHub.get_proc(data_source, proc)
Example 15
    def validate_data_source(self, data_source_name):
        """
        Iterate through data_source_req_keys and confirm that the required
        key/value pairs are present.  There is probably a more elegant way to
        do this; the goal is to give the caller clear messaging about any
        fields that are missing from the data source file.

        Parameters:
           data_source_name - name of the datasource to test

        Returns:
           None
        """
        for key in self.data_source_req_keys:
            if key == 'req':
                msg = 'the %s source object in %s' % (data_source_name, BaseHub.source_list_file)
                # Confirm required keys
                BaseHub.check_keys(self.data_source_req_keys[key], BaseHub.data_sources[data_source_name], True, msg)
            elif key == 'databases':

                if key in BaseHub.data_sources[data_source_name]:
                    for i in range(len(BaseHub.data_sources[data_source_name][key])):
                        db = BaseHub.data_sources[data_source_name][key][i]
                        msg = 'the %s.%s index position %i in %s' % (data_source_name, key, i, BaseHub.source_list_file)
                        BaseHub.check_keys(self.data_source_req_keys[key], db, True, msg)
            else:
                msg = 'the %s.%s in %s' % (data_source_name, key, BaseHub.source_list_file)
                if key in BaseHub.data_sources[data_source_name]:
                    BaseHub.check_keys(self.data_source_req_keys[key], BaseHub.data_sources[data_source_name][key], True, msg)
Example 16
    def dhub(self, procs_file_name):
        """
        Return a configured ``DataHub`` using the given SQL procs file.

        """
        master_host_config = {
            "host": self.host,
            "user": settings.TREEHERDER_DATABASE_USER,
            "passwd": settings.TREEHERDER_DATABASE_PASSWORD,
        }
        if 'OPTIONS' in settings.DATABASES['default']:
            master_host_config.update(settings.DATABASES['default']['OPTIONS'])

        read_host_config = {
            "host": self.read_only_host,
            "user": settings.TREEHERDER_RO_DATABASE_USER,
            "passwd": settings.TREEHERDER_RO_DATABASE_PASSWORD,
        }
        if 'OPTIONS' in settings.DATABASES['read_only']:
            read_host_config.update(settings.DATABASES['read_only']['OPTIONS'])

        data_source = {
            self.key: {
                # @@@ this should depend on self.type
                # @@@ shouldn't have to specify this here and below
                "hub": "MySQL",
                "master_host": master_host_config,
                "read_host": read_host_config,
                "require_host_type": True,
                "default_db": self.name,
                "procs": [
                    os.path.join(SQL_PATH, procs_file_name),
                    os.path.join(SQL_PATH, "generic.json"),
                ],
            }
        }

        BaseHub.add_data_source(data_source)
        # @@@ the datahub class should depend on self.type
        return MySQL(self.key)
Example 17
    def handle(self, *args, **options):

        ##Load data views##
        views_file_obj = open("%s%s" % (settings.ROOT, "/templates/data/views.json"))
        try:
            data_view_file = views_file_obj.read()
        finally:
            views_file_obj.close()
        ##Strip out comments and newlines##
        data_view_file = BaseHub.stripPythonComments(data_view_file)
        data_views = BaseHub.deserializeJson(data_view_file)

        Command.build_nav(data_views)

        #Uncomment to see datastructure for debugging
        #pp = pprint.PrettyPrinter(indent=3)
        #self.stdout.write( pp.pformat(data_views) )

        menu_file_obj = open("%s%s" % (settings.ROOT, "/media/html/nav_menu.html"), 'w+')
        try:
            menu_file_obj.write( '<ul class="dv-viewtext">\n%s\n</ul>' % (dv_unorderedlist(data_views)) )
        finally:
            menu_file_obj.close()

        ##Write out json for the nav_lookup_hash##
        jstring = json.dumps( Command.nav_lookup_hash, ensure_ascii=False )

        html = """<input id="dv_nav_json" type="hidden" value="{{ json_data }}" />"""
        t = Template(html)
        c = Context({ 'json_data':jstring })
        templateString = t.render(c)

        nav_lookup_file_obj = open("%s%s" % (settings.ROOT, "/templates/graphs.navlookup.html"), 'w+')
        try:
            nav_lookup_file_obj.write(templateString)
        finally:
            nav_lookup_file_obj.close()
Example 18
    def dhub(self, procs_file_name):
        """
        Return a configured ``DataHub`` using the given SQL procs file.

        """
        master_host_config = {
            "host": settings.DATABASES['default']['HOST'],
            "user": settings.DATABASES['default']['USER'],
            "passwd": settings.DATABASES['default']['PASSWORD'],
        }
        if 'OPTIONS' in settings.DATABASES['default']:
            master_host_config.update(settings.DATABASES['default']['OPTIONS'])

        read_host_config = {
            "host": settings.DATABASES['read_only']['HOST'],
            "user": settings.DATABASES['read_only']['USER'],
            "passwd": settings.DATABASES['read_only']['PASSWORD'],
        }
        if 'OPTIONS' in settings.DATABASES['read_only']:
            read_host_config.update(settings.DATABASES['read_only']['OPTIONS'])

        data_source = {
            self.key: {
                "hub": "MySQL",
                "master_host": master_host_config,
                "read_host": read_host_config,
                "require_host_type": True,
                "default_db": self.name,
                "procs": [
                    os.path.join(SQL_PATH, procs_file_name),
                    os.path.join(SQL_PATH, "generic.json"),
                ],
            }
        }

        BaseHub.add_data_source(data_source)
        return MySQL(self.key)
Example 19
    def validate_data_source(self, data_source_name):
        """
        Iterate through data_source_req_keys and confirm that the required
        key/value pairs are present.  There is probably a more elegant way to
        do this; the goal is to give the caller clear messaging about any
        fields that are missing from the data source file.

        Parameters:
           data_source_name - name of the datasource to test

        Returns:
           None
        """
        for key in self.data_source_req_keys:
            if key == 'req':
                msg = 'the %s source object in %s' % (data_source_name,
                                                      BaseHub.source_list_file)
                # Confirm required keys
                BaseHub.check_keys(self.data_source_req_keys[key],
                                   BaseHub.data_sources[data_source_name],
                                   True, msg)
            elif key == 'databases':

                if key in BaseHub.data_sources[data_source_name]:
                    for i in range(
                            len(BaseHub.data_sources[data_source_name][key])):
                        db = BaseHub.data_sources[data_source_name][key][i]
                        msg = 'the %s.%s index position %i in %s' % (
                            data_source_name, key, i, BaseHub.source_list_file)
                        BaseHub.check_keys(self.data_source_req_keys[key], db,
                                           True, msg)
            else:
                msg = 'the %s.%s in %s' % (data_source_name, key,
                                           BaseHub.source_list_file)
                if key in BaseHub.data_sources[data_source_name]:
                    BaseHub.check_keys(
                        self.data_source_req_keys[key],
                        BaseHub.data_sources[data_source_name][key], True, msg)
Example 20
####
#Configuration of datasource hub:
#	1 Build the datasource struct
# 	2 Add it to the BaseHub
#	3 Instantiate a MySQL hub
####
dataSource = {
    SISYPHUS_DATABASE: {
        "hub": "MySQL",
        "master_host": {"host": SISYPHUS_DATABASE_HOST,
                        "user": SISYPHUS_DATABASE_USER,
                        "passwd": SISYPHUS_DATABASE_PASSWORD},
        "default_db": SISYPHUS_DATABASE,
        "procs": ["%s%s" % (ROOT, "/procs/bughunter.json")]
    }
}
BaseHub.add_data_source(dataSource)
DHUB = MySQL(SISYPHUS_DATABASE)

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'file': {
            'level': 'DEBUG',
            'class': 'logging.FileHandler',
            'filename': 'debug.log',
        },
    },
    'loggers': {
        'django': {
            'handlers': ['file'],
Example 21
    def __init__(self):
        procs_path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                  'sql', 'reference.json')
        data_source = {
            'reference': {
                "hub": "MySQL",
                "master_host": {
                    "host": settings.DATABASES['default']['HOST'],
                    "user": settings.DATABASES['default']['USER'],
                    "passwd": settings.DATABASES['default']['PASSWORD']
                },
                "read_host": {
                    "host": settings.DATABASES['read_only']['HOST'],
                    "user": settings.DATABASES['read_only']['USER'],
                    "passwd": settings.DATABASES['read_only']['PASSWORD']
                },
                "require_host_type": True,
                "default_db": settings.DATABASES['default']['NAME'],
                "procs": [procs_path]
            }
        }
        BaseHub.add_data_source(data_source)
        self.dhub = DataHub.get("reference")
        self.DEBUG = settings.DEBUG

        # Support structure for reference data signatures
        self.reference_data_signature_lookup = {}
        self.build_signature_placeholders = []

        # Support structures for building build platform SQL
        self.build_platform_lookup = {}
        self.build_where_filters = []
        self.build_platform_placeholders = []
        self.build_unique_platforms = []

        # Support structures for building machine platform SQL
        self.machine_platform_lookup = {}
        self.machine_where_filters = []
        self.machine_platform_placeholders = []
        self.machine_unique_platforms = []

        # Support structures for building job group SQL
        self.job_group_lookup = {}
        self.job_group_where_filters = []
        self.job_group_placeholders = []
        self.job_group_names_and_symbols = []

        # Support structures for building job types SQL
        self.job_type_lookup = {}
        self.job_type_where_filters = []
        self.job_type_placeholders = []
        self.job_type_names_and_symbols = []

        # Use this structure to map the job to the group id
        self.job_type_to_group_lookup = {}

        # Support structures for building product SQL
        self.product_lookup = set()
        self.product_where_in_list = []
        self.product_placeholders = []
        self.unique_products = []

        # Support structures for building device SQL
        self.device_lookup = set()
        self.device_where_in_list = []
        self.device_placeholders = []
        self.unique_devices = []

        # Support structures for building machine SQL
        self.machine_name_lookup = set()
        self.machine_where_in_list = []
        self.machine_name_placeholders = []
        self.machine_unique_names = []
        self.machine_timestamp_update_placeholders = []

        # Support structures for building option collection data structures
        self.oc_hash_lookup = dict()
        self.oc_where_in_list = []
        self.oc_placeholders = []
        self.oc_unique_collections = []

        # Support structures for building option data structures
        self.o_lookup = set()
        self.o_placeholders = []
        self.o_unique_options = []
        self.o_where_in_list = []

        # reference id lookup structure
        self.id_lookup = {}
Example 22
    def __init__(self):
        procs_path = os.path.join(
            os.path.dirname(os.path.dirname(__file__)),
            'sql', 'reference.json')
        data_source = {
            'reference': {
                "hub": "MySQL",
                "master_host": {
                    "host": settings.DATABASES['default']['HOST'],
                    "user": settings.DATABASES['default']['USER'],
                    "passwd": settings.DATABASES['default']['PASSWORD']
                },
                "default_db": settings.DATABASES['default']['NAME'],
                "procs": [procs_path]
            }
        }
        BaseHub.add_data_source(data_source)
        self.dhub = DataHub.get("reference")
        self.DEBUG = settings.DEBUG

        # Support structures for building build platform SQL
        self.build_platform_lookup = {}
        self.build_where_filters = []
        self.build_platform_placeholders = []
        self.build_unique_platforms = []

        # Support structures for building machine platform SQL
        self.machine_platform_lookup = {}
        self.machine_where_filters = []
        self.machine_platform_placeholders = []
        self.machine_unique_platforms = []

        # Support structures for building job group SQL
        self.job_group_lookup = {}
        self.job_group_where_filters = []
        self.job_group_placeholders = []
        self.job_group_names_and_symbols = []

        # Support structures for building job types SQL
        self.job_type_lookup = {}
        self.job_type_where_filters = []
        self.job_type_placeholders = []
        self.job_type_names_and_symbols = []

        #Use this structure to map the job to the group id
        self.job_type_to_group_lookup = {}

        # Support structures for building product SQL
        self.product_lookup = set()
        self.product_where_in_list = []
        self.product_placeholders = []
        self.unique_products = []

        # Support structures for building machine SQL
        self.machine_name_lookup = set()
        self.machine_where_in_list = []
        self.machine_name_placeholders = []
        self.machine_unique_names = []
        self.machine_timestamp_update_placeholders = []

        # Support structures for building option collection data structures
        self.oc_hash_lookup = dict()
        self.oc_where_in_list = []
        self.oc_placeholders = []
        self.oc_unique_collections = []

        # Support structures for building option data structures
        self.o_lookup = set()
        self.o_placeholders = []
        self.o_unique_options = []
        self.o_where_in_list = []

        # reference id lookup structure
        self.id_lookup = {}
Example 23
    def __init__(self, data_source_name):
        """
        A class derived from BaseHub that serves as a base class for any
        relational database hubs.
        """
        BaseHub.__init__(self)

        # allowed keys in execute
        self.execute_keys = set([
            'db', 'proc', 'nocommit', 'sql', 'host_type', 'placeholders',
            'replace', 'replace_quote', 'limit', 'offset', 'chunk_size',
            'chunk_source', 'chunk_min', 'chunk_total', 'executemany',
            'return_type', 'key_column', 'callback', 'debug_show', 'debug_noex'
        ])

        # Default values for execute kwargs
        self.default_host_type = 'master_host'
        self.default_return_type = 'tuple'

        # replace string base for replace functionality in execute
        self.replace_string = 'REP'

        # set of return types that require a key_column
        self.return_type_key_columns = set(
            ['dict', 'dict_json', 'set', 'set_json'])

        # One of these keys must be provided to execute
        self.execute_required_keys = set(['proc', 'sql'])

        # This data structure is used to map the return_type provided to
        # execute() to the derived hub method.  Derived hubs have to map
        # their methods by setting the appropriate function reference to
        # its associated key in valid_return_types.
        self.valid_return_types = {
            'iter': None,
            'dict': None,
            'dict_json': None,
            'tuple': None,
            'tuple_json': None,
            'set': None,
            'set_json': None,
            'table': None,
            'table_json': None,
            'rowcount': None,
            'callback': None
        }

        # Dictionary of required keys for RDBS datasources
        self.data_source_req_keys = dict(
            # required keys
            req=set(['hub', 'master_host']),
            # optional keys but if present have additional key requirements
            databases=set(['name', 'procs']),
            master_host=set(['host', 'user']),
            read_host=set(['host', 'user']),
            dev_host=set(['host', 'user']))

        # List of SQL tokens that must follow a WHERE statement
        self.post_where_tokens = [
            'GROUP BY', 'HAVING', 'ORDER BY', 'LIMIT', 'OFFSET', 'PROCEDURE',
            'INTO', 'FOR UPDATE'
        ]

        # Validate the information in data_sources is complete
        # so we can provide the caller with useful messaging
        # regarding what is missing when a class is instantiated.
        self.validate_data_source(data_source_name)

        self.pretty_sql_regex = re.compile(r'\s+', re.DOTALL)

        self.default_placeholder = '?'

        __all__ = [
            'load_procs',  # noqa
            'get_proc',
            'get_data',
            'validate_data_source',
            'set_execute_rules',
            'get_execute_data'
        ]
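A sketch of how the knobs defined above combine in a typical execute() call on a concrete hub such as MySQL. The data source key and proc path are placeholders; the keyword arguments are all members of execute_keys, host_type defaults to 'master_host', and the 'dict' return type requires a key_column because it appears in return_type_key_columns.

    # Illustrative call; "myproject" and the proc path are placeholders for
    # whatever was registered with BaseHub.add_data_source().
    hub = MySQL("myproject")
    rows_by_id = hub.execute(
        proc="myproject.selects.get_stuff",
        placeholders=[1, 2, 3],
        host_type="read_host",   # overrides the 'master_host' default
        return_type="dict",      # key-based return type...
        key_column="id",         # ...so a key_column is required
    )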
Example 24
    def load_procs(self, data_source):
        BaseHub.load_procs(data_source)
Example 25
    def __init__(self, data_source_name):
        """
        A class derived from BaseHub that serves as a base class for any
        relational database hubs.
        """
        BaseHub.__init__(self)

        # allowed keys in execute
        self.execute_keys = set(['db',
                                 'proc',
                                 'nocommit',
                                 'sql',
                                 'host_type',
                                 'placeholders',
                                 'replace',
                                 'replace_quote',
                                 'limit',
                                 'offset',
                                 'chunk_size',
                                 'chunk_source',
                                 'chunk_min',
                                 'chunk_total',
                                 'executemany',
                                 'return_type',
                                 'key_column',
                                 'callback',
                                 'debug_show',
                                 'debug_noex'])

        # Default values for execute kwargs
        self.default_host_type = 'master_host'
        self.default_return_type = 'tuple'

        # replace string base for replace functionality in execute
        self.replace_string = 'REP'

        # set of return types that require a key_column
        self.return_type_key_columns = set(['dict', 'dict_json', 'set', 'set_json'])

        # One of these keys must be provided to execute
        self.execute_required_keys = set(['proc', 'sql'])

        # This data structure is used to map the return_type provided to
        # execute() to the derived hub method.  Derived hubs have to map
        # their methods by setting the appropriate function reference to
        # its associated key in valid_return_types.
        self.valid_return_types = {'iter': None,
                                   'dict': None,
                                   'dict_json': None,
                                   'tuple': None,
                                   'tuple_json': None,
                                   'set': None,
                                   'set_json': None,
                                   'table': None,
                                   'table_json': None,
                                   'rowcount': None,
                                   'callback': None}

        # Dictionary of required keys for RDBS datasources
        self.data_source_req_keys = dict(
            # required keys
            req=set(['hub', 'master_host']),
            # optional keys but if present have additional key requirements
            databases=set(['name', 'procs']),
            master_host=set(['host', 'user']),
            read_host=set(['host', 'user']),
            dev_host=set(['host', 'user']))

        # List of SQL tokens that must follow a WHERE statement
        self.post_where_tokens = ['GROUP BY', 'HAVING', 'ORDER BY', 'LIMIT', 'OFFSET', 'PROCEDURE', 'INTO', 'FOR UPDATE']

        # Validate the information in data_sources is complete
        # so we can provide the caller with useful messaging
        # regarding what is missing when a class is instantiated.
        self.validate_data_source(data_source_name)

        self.pretty_sql_regex = re.compile(r'\s+', re.DOTALL)

        self.default_placeholder = '?'

        __all__ = ['load_procs',  # noqa
                   'get_proc',
                   'get_data',
                   'validate_data_source',
                   'set_execute_rules',
                   'get_execute_data']
Example 26
    def loadProcs(self, dataSource):
        BaseHub.loadProcs(dataSource)
Example 27
    def load_procs(self, data_source):
        BaseHub.load_procs(data_source)