def database_usage(context={}):
    LOG.info("Notifying Database usage with context %s" % context)
    subject = _("[DBAAS] Database is almost full")
    template = "database_notification"
    addr_from = Configuration.get_by_name("email_addr_from")
    team = context.get("team")
    if team and team.email:
        addr_to = [
            team.email, Configuration.get_by_name("new_user_notify_email")]
    else:
        addr_to = Configuration.get_by_name("new_user_notify_email")

    context['domain'] = get_domain()

    send_mail_template(subject, template, addr_from, addr_to,
                       fail_silently=False, attachments=None, context=context)
Example #2
def get_clone_args(origin_database, dest_database):

    # origin
    origin_instance = origin_database.databaseinfra.instances.all()[0]

    db_orig = origin_database.name
    user_orig = origin_database.databaseinfra.user
    # pass_orig = "PASSWORD_ORIGIN=%s" % origin_database.databaseinfra.password
    pass_orig = origin_database.databaseinfra.password
    host_orig = origin_instance.address
    port_orig = origin_instance.port

    # destination
    dest_instance = dest_database.databaseinfra.instances.all()[0]

    db_dest = dest_database.name
    user_dest = dest_database.databaseinfra.user
    # pass_dest = "PASSWORD_DEST=%s" % dest_database.databaseinfra.password
    pass_dest = dest_database.databaseinfra.password
    host_dest = dest_instance.address
    port_dest = dest_instance.port

    path_of_dump = Configuration.get_by_name('database_clone_dir')

    args = [db_orig, user_orig, pass_orig, host_orig, str(int(port_orig)),
            db_dest, user_dest, pass_dest, host_dest, str(int(port_dest)),
            path_of_dump
            ]

    return args
    def do(self, workflow_dict):
        try:

            if 'databaseinfra' not in workflow_dict \
                    or 'clone' not in workflow_dict:
                return False

            args = get_clone_args(workflow_dict['clone'], workflow_dict['database'])
            script_name = factory_for(workflow_dict['clone'].databaseinfra).clone()

            python_bin = Configuration.get_by_name('python_venv_bin')

            return_code, output = call_script(script_name,
                                              working_dir=settings.SCRIPTS_PATH,
                                              args=args, split_lines=False,
                                              python_bin=python_bin)

            LOG.info("Script Output: {}".format(output))
            LOG.info("Return code: {}".format(return_code))

            if return_code != 0:
                workflow_dict['exceptions']['traceback'].append(output)
                return False

            return True
        except Exception:
            traceback = full_stack()

            workflow_dict['exceptions']['error_codes'].append(DBAAS_0017)
            workflow_dict['exceptions']['traceback'].append(traceback)

            return False
def database_analyzing(context={}):
    LOG.info("Notifying Database alayzing with context %s" % context)
    subject = _("[DBAAS] Database overestimated")
    template = "analyzing_notification"
    addr_from = Configuration.get_by_name("email_addr_from")
    send_email = Configuration.get_by_name("send_analysis_email")
    team = context.get("team")
    if team and team.email and send_email:
        addr_to = [
            team.email, Configuration.get_by_name("new_user_notify_email")]
    else:
        addr_to = Configuration.get_by_name("new_user_notify_email")

    context['domain'] = get_domain()

    send_mail_template(subject, template, addr_from, addr_to,
                       fail_silently=False, attachments=None, context=context)
def databaseinfra_ending(context={}):
    LOG.info("Notifying DatabaseInfra ending with context %s" % context)
    subject = _("[DBAAS] DatabaseInfra is almost full")
    template = "infra_notification"
    addr_from = Configuration.get_by_name("email_addr_from")
    addr_to = Configuration.get_by_name_as_list("new_user_notify_email")

    context['domain'] = get_domain()

    send_mail_template(subject, template, addr_from, addr_to,
                       fail_silently=False, attachments=None, context=context)
def get_configuration(context, configuration_name, context_var_name):
    """
    Usage: {% get_configuration config_name context_var %}

    Search config name on system configuration and set context_var on
    page context
    """
    config_val = Configuration.get_by_name(configuration_name) or ''

    context[context_var_name] = config_val

    return ''
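
# The tag above only sets a value on the template context and renders nothing.
# A minimal registration sketch, assuming it lives in a Django templatetags
# module; the registration style below is an assumption for illustration,
# not taken from the original source.
from django import template

register = template.Library()
register.simple_tag(takes_context=True)(get_configuration)

# In a template (mirroring the docstring's usage):
#   {% get_configuration config_name context_var %}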
Example #7
def external_links(request):
    iaas_status = Configuration.get_by_name('iaas_status')
    iaas_quota = Configuration.get_by_name('iaas_quota')

    try:
        credential = get_credentials_for(
            environment=Environment.objects.first(),
            credential_type=CredentialType.GRAFANA
        )

        sofia_dashboard = "{}/{}?var-datasource={}".format(
            credential.endpoint,
            credential.get_parameter_by_name('sofia_dbaas_dashboard'),
            credential.get_parameter_by_name('datasource')
        )
    except IndexError:
        sofia_dashboard = ""

    return {'iaas_status': iaas_status,
            'iaas_quota': iaas_quota,
            'sofia_main_dashboard': sofia_dashboard}
    def __mongo_client__(self, instance, default_timeout=False):
        connection_address = self.__get_admin_connection(instance)
        if not self.databaseinfra and instance:
            self.databaseinfra = instance.databaseinfra
        try:
            # mongo uses timeouts in milliseconds
            if default_timeout:
                connection_timeout_in_miliseconds = (
                    MONGO_CONNECTION_DEFAULT_TIMEOUT * 1000)
                server_selection_timeout_in_miliseconds = (
                    MONGO_SERVER_SELECTION_DEFAULT_TIMEOUT * 1000)
                socket_timeout_in_miliseconds = MONGO_SOCKET_TIMEOUT * 1000
            else:
                connection_timeout_in_miliseconds = (
                    Configuration.get_by_name_as_int(
                        'mongo_connect_timeout',
                        default=MONGO_CONNECTION_DEFAULT_TIMEOUT) * 1000)
                server_selection_timeout_in_miliseconds = (
                    Configuration.get_by_name_as_int(
                        'mongo_server_selection_timeout',
                        default=MONGO_SERVER_SELECTION_DEFAULT_TIMEOUT) * 1000)
                socket_timeout_in_miliseconds = (
                    Configuration.get_by_name_as_int(
                        'mongo_socket_timeout', default=MONGO_SOCKET_TIMEOUT) *
                    1000)

            if self.databaseinfra.ssl_configured and \
               self.databaseinfra.ssl_mode >= self.databaseinfra.PREFERTLS:
                tls = True
                tlsCAFile = Configuration.get_by_name('root_cert_file')
            else:
                tls = False
                tlsCAFile = None

            client = pymongo.MongoClient(
                connection_address,
                connectTimeoutMS=connection_timeout_in_miliseconds,
                serverSelectionTimeoutMS=server_selection_timeout_in_miliseconds,
                socketTimeoutMS=socket_timeout_in_miliseconds,
                tls=tls,
                tlsCAFile=tlsCAFile)
            if (not instance) or (instance and instance.instance_type !=
                                  instance.MONGODB_ARBITER):  # noqa
                if self.databaseinfra.user and self.databaseinfra.password:
                    LOG.debug('Authenticating databaseinfra %s',
                              self.databaseinfra)
                    client.admin.authenticate(self.databaseinfra.user,
                                              self.databaseinfra.password)
            return client
        except TypeError:
            raise AuthenticationError(message='Invalid address: %s' %
                                      connection_address)
Example #9
    def __mysql_client__(self, instance, database='mysql'):
        connection_address, connection_port = self.__get_admin_connection(instance)
        try:
            LOG.debug('Connecting to mysql databaseinfra %s', self.databaseinfra)
            # mysql uses timeout in seconds
            connection_timeout_in_seconds = int(Configuration.get_by_name('mysql_connect_timeout') or 5)

            client = mysqldb.connect(host=connection_address, port=int(connection_port),
                                     user=self.databaseinfra.user, passwd=self.databaseinfra.password,
                                     db=database, connect_timeout=connection_timeout_in_seconds)
            LOG.debug('Successfully connected to mysql databaseinfra %s' % (self.databaseinfra))
            return client
        except Exception as e:
            raise e
Example #10
def database_usage(context={}):
    LOG.info("Notifying Database usage with context %s" % context)
    subject = _("[DBAAS] Database is almost full")
    template = "database_notification"
    addr_from = Configuration.get_by_name("email_addr_from")
    team = context.get("team")
    if team and team.email:
        addr_to = [
            team.email,
            Configuration.get_by_name("new_user_notify_email")
        ]
    else:
        addr_to = Configuration.get_by_name("new_user_notify_email")

    context['domain'] = get_domain()

    send_mail_template(subject,
                       template,
                       addr_from,
                       addr_to,
                       fail_silently=False,
                       attachments=None,
                       context=context)
def notify_new_user_creation(user=None):
    subject=_("[DBAAS] a new user has just been created: %s" % user.username)
    template="new_user_notification"
    addr_from=Configuration.get_by_name("email_addr_from")
    addr_to=Configuration.get_by_name_as_list("new_user_notify_email")
    context={}
    context['user'] = user
    domain = get_domain()
    context['url'] = domain + reverse('admin:account_team_changelist')
    LOG.debug("user: %s | addr_from: %s | addr_to: %s" % (user, addr_from, addr_to))
    if user and addr_from and addr_to:
        send_mail_template(subject, template, addr_from, addr_to, fail_silently=False, attachments=None, context=context)
    else:
        LOG.warning("could not send email for new user creation")
def databaseinfra_ending(plan, environment, used, capacity, percent):
    LOG.info("Notifying DatabaseInfra ending")
    subject = _("[DBAAS] DatabaseInfra is almost full")
    template = "infra_notification"
    addr_from = Configuration.get_by_name("email_addr_from")
    addr_to = Configuration.get_by_name_as_list("new_user_notify_email")
    context = {}
    context['domain'] = get_domain()
    context['plan'] = plan
    context['environment'] = environment
    context['used'] = used
    context['capacity'] = capacity
    context['percent'] = percent
    send_mail_template(subject, template, addr_from, addr_to, fail_silently=False, attachments=None, context=context)
    
def database_analyzing(context={}):
    LOG.info("Notifying Database alayzing with context %s" % context)
    subject = _("[DBAAS] Database overestimated")
    template = "analyzing_notification"
    addr_from = Configuration.get_by_name("email_addr_from")
    send_email = Configuration.get_by_name("send_analysis_email")
    team = context.get("team")
    if team and team.email and send_email:
        addr_to = [
            team.email,
            Configuration.get_by_name("new_user_notify_email")
        ]
    else:
        addr_to = Configuration.get_by_name("new_user_notify_email")

    context['domain'] = get_domain()

    send_mail_template(subject,
                       template,
                       addr_from,
                       addr_to,
                       fail_silently=False,
                       attachments=None,
                       context=context)
def databaseinfra_ending(context={}):
    LOG.info("Notifying DatabaseInfra ending with context %s" % context)
    subject = _("[DBAAS] DatabaseInfra is almost full")
    template = "infra_notification"
    addr_from = Configuration.get_by_name("email_addr_from")
    addr_to = Configuration.get_by_name_as_list("new_user_notify_email")

    context['domain'] = get_domain()

    send_mail_template(subject,
                       template,
                       addr_from,
                       addr_to,
                       fail_silently=False,
                       attachments=None,
                       context=context)
Example #15
def notify_new_user_creation(user=None):
    subject = _("[DBAAS] a new user has just been created: %s" % user.username)
    template = "new_user_notification"
    addr_from = Configuration.get_by_name("email_addr_from")
    addr_to = Configuration.get_by_name_as_list("new_user_notify_email")
    context = {}
    context['user'] = user
    domain = get_domain()
    context['url'] = domain + reverse('admin:account_team_changelist')
    LOG.debug("user: %s | addr_from: %s | addr_to: %s" %
              (user, addr_from, addr_to))
    if user and addr_from and addr_to:
        send_mail_template(
            subject, template, addr_from, addr_to,
            fail_silently=False, attachments=None, context=context
        )
    else:
        LOG.warning("could not send email for new user creation")
def notify_team_change_for(user=None):
    LOG.info("Notifying team change for user %s" % user)
    subject=_("[DBAAS] your team has been updated!")
    template="team_change_notification"
    addr_from=Configuration.get_by_name("email_addr_from")
    if user.email:
        #addr_to=Configuration.get_by_name_as_list("new_user_notify_email") + [user.email]
        addr_to=[user.email]
        context={}
        context['user'] = user
        domain = get_domain()
        context['url'] = domain
        context['teams'] = [team.name for team in user.team_set.all()]
        if user and addr_from and addr_to:
            send_mail_template(subject, template, addr_from, addr_to, fail_silently=False, attachments=None, context=context)
        else:
            LOG.warning("could not send email for team change")
    else:
        LOG.warning("user %s has no email set and therefore cannot be notified!")
Example #17
def notify_team_change_for(user=None):
    LOG.info("Notifying team change for user %s" % user)
    subject = _("[DBAAS] your team has been updated!")
    template = "team_change_notification"
    addr_from = Configuration.get_by_name("email_addr_from")
    if user.email:
        addr_to = [user.email]
        context = {}
        context['user'] = user
        domain = get_domain()
        context['url'] = domain
        context['teams'] = [team.name for team in user.team_set.all()]
        if user and addr_from and addr_to:
            send_mail_template(
                subject, template, addr_from, addr_to,
                fail_silently=False, attachments=None, context=context
            )
        else:
            LOG.warning("could not send email for team change")
    else:
        LOG.warning(
            "user %s has no email set and therefore cannot be notified!" % user)
Example #18
    def do(self, workflow_dict):
        try:

            if 'databaseinfra' not in workflow_dict \
                    or 'clone' not in workflow_dict:
                return False

            args = get_clone_args(workflow_dict['clone'],
                                  workflow_dict['database'])
            script_name = factory_for(
                workflow_dict['clone'].databaseinfra).clone()

            python_bin = Configuration.get_by_name('python_venv_bin')

            return_code, output = call_script(
                script_name,
                working_dir=settings.SCRIPTS_PATH,
                args=args,
                split_lines=False,
                python_bin=python_bin)

            LOG.info("Script Output: {}".format(output))
            LOG.info("Return code: {}".format(return_code))

            if return_code != 0:
                workflow_dict['exceptions']['traceback'].append(output)
                return False

            return True
        except Exception:
            traceback = full_stack()

            workflow_dict['exceptions']['error_codes'].append(DBAAS_0017)
            workflow_dict['exceptions']['traceback'].append(traceback)

            return False
    def do(self, workflow_dict):
        try:
            region_migration_dir = Configuration.get_by_name('region_migration_dir')
            if not region_migration_dir:
                region_migration_dir = '/tmp'

            workflow_dict['region_migration_dir_infra_name'] = "{}/{}".format(region_migration_dir, workflow_dict['databaseinfra'].name)

            for index, source_instance in enumerate(workflow_dict['source_instances']):

                source_host = source_instance.hostname
                source_cs_host_attr = CS_HostAttr.objects.get(host=source_host)

                hostname = source_host.hostname.split('.')[0]
                localpath = "{}/{}".format(workflow_dict['region_migration_dir_infra_name'], hostname)
                os.makedirs(localpath)

                LOG.info('Get source host files to {}'.format(localpath))

                if not scp_get_file(server=source_host.address,
                                    username=source_cs_host_attr.vm_user,
                                    password=source_cs_host_attr.vm_password,
                                    localpath="{}/my.cnf".format(localpath),
                                    remotepath="/etc/my.cnf"):
                    raise Exception("FTP Error")

                if not scp_get_file(server=source_host.address,
                                    username=source_cs_host_attr.vm_user,
                                    password=source_cs_host_attr.vm_password,
                                    localpath="{}/mysql_statsd.conf".format(localpath),
                                    remotepath="/etc/mysql_statsd/mysql_statsd.conf"):
                    raise Exception("FTP Error")

                if not scp_get_file(server=source_host.address,
                                    username=source_cs_host_attr.vm_user,
                                    password=source_cs_host_attr.vm_password,
                                    localpath="{}/td-agent.conf".format(localpath),
                                    remotepath="/etc/td-agent/td-agent.conf"):
                    raise Exception("FTP Error")

                target_host = source_host.future_host
                LOG.info(target_host)
                target_cs_host_attr = CS_HostAttr.objects.get(host=target_host)

                if not scp_put_file(server=target_host.address,
                                    username=target_cs_host_attr.vm_user,
                                    password=target_cs_host_attr.vm_password,
                                    localpath="{}/my.cnf".format(localpath),
                                    remotepath="/etc/my.cnf"):
                    raise Exception("FTP Error")

                if not scp_put_file(server=target_host.address,
                                    username=target_cs_host_attr.vm_user,
                                    password=target_cs_host_attr.vm_password,
                                    localpath="{}/mysql_statsd.conf".format(localpath),
                                    remotepath="/etc/mysql_statsd/mysql_statsd.conf"):
                    raise Exception("FTP Error")

                if not scp_put_file(server=target_host.address,
                                    username=target_cs_host_attr.vm_user,
                                    password=target_cs_host_attr.vm_password,
                                    localpath="{}/td-agent.conf".format(localpath),
                                    remotepath="/etc/td-agent/td-agent.conf"):
                    raise Exception("FTP Error")

                script = test_bash_script_error()
                script += build_server_id_conf_script()

                context_dict = {
                    'SERVERID': index + 3,
                }

                script = build_context_script(context_dict, script)

                output = {}
                LOG.info(script)
                return_code = exec_remote_command(server=target_host.address,
                                                  username=target_cs_host_attr.vm_user,
                                                  password=target_cs_host_attr.vm_password,
                                                  command=script,
                                                  output=output)
                LOG.info(output)
                if return_code != 0:
                    raise Exception(str(output))

            shutil.rmtree(workflow_dict['region_migration_dir_infra_name'])

            return True
        except Exception:
            traceback = full_stack()

            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
            workflow_dict['exceptions']['traceback'].append(traceback)

            return False
def email_to(team):
    if team and team.email:
        return [team.email, Configuration.get_by_name("new_user_notify_email")]
    return Configuration.get_by_name("new_user_notify_email")
def email_from():
    return Configuration.get_by_name("email_addr_from")
def external_links(request):
    iaas_status = Configuration.get_by_name('iaas_status')
    iaas_quota = Configuration.get_by_name('iaas_quota')
    return {'iaas_status': iaas_status, 'iaas_quota': iaas_quota }
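
# Both external_links variants are Django context processors: they receive the
# request and return a dict that is merged into every template context. A
# minimal wiring sketch, assuming an older Django settings module; the setting
# name and dotted path are assumptions, not taken from the original project
# (newer Django versions use TEMPLATES[0]['OPTIONS']['context_processors']).
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.request',
    'myapp.context_processors.external_links',  # assumed module path
)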
Example #23
    def database_dex_analyze_view(self, request, database_id):
        import os
        import string

        def generate_random_string(length, stringset=string.ascii_letters + string.digits):
            return ''.join([stringset[i % len(stringset)]
                            for i in [ord(x) for x in os.urandom(length)]])

        database = Database.objects.get(id=database_id)

        if database.status != Database.ALIVE or not database.database_status.is_alive:
            self.message_user(
                request, "Database is not alive cannot be analyzed", level=messages.ERROR)
            url = reverse('admin:logical_database_changelist')
            return HttpResponseRedirect(url)

        if database.is_being_used_elsewhere():
            self.message_user(
                request, "Database cannot be analyzed because it is in use by another task.", level=messages.ERROR)
            url = reverse('admin:logical_database_changelist')
            return HttpResponseRedirect(url)

        parsed_logs = ''

        arq_path = Configuration.get_by_name(
            'database_clone_dir') + '/' + database.name + generate_random_string(20) + '.txt'

        arq = open(arq_path, 'w')
        arq.write(parsed_logs)
        arq.close()

        uri = 'mongodb://{}:{}@{}:{}/admin'.format(
            database.databaseinfra.user, database.databaseinfra.password,
            database.databaseinfra.instances.all()[0].address,
            database.databaseinfra.instances.all()[0].port)

        old_stdout = sys.stdout
        sys.stdout = mystdout = StringIO()

        md = dex.Dex(db_uri=uri, verbose=False, namespaces_list=[],
                     slowms=0, check_indexes=True, timeout=0)

        md.analyze_logfile(arq_path)

        sys.stdout = old_stdout

        dexanalyzer = loads(
            mystdout.getvalue().replace("\"", "&&").replace("'", "\"").replace("&&", "'"))

        os.remove(arq_path)

        import ast
        final_mask = """<div>"""

        for result in dexanalyzer['results']:

            final_mask += "<h3> Collection: " + result['namespace'] + "</h3>"
            final_mask += \
                """<li> Query: """ +\
                str(ast.literal_eval(result['queryMask'])['$query']) +\
                """</li>""" +\
                """<li> Index: """ +\
                result['recommendation']['index'] +\
                """</li>""" +\
                """<li> Command: """ +\
                result['recommendation']['shellCommand'] +\
                """</li>"""

            final_mask += """<br>"""

        final_mask += """</ul> </div>"""

        return render_to_response("logical/database/dex_analyze.html", locals(), context_instance=RequestContext(request))
Example #24
def get_clone_args(origin_database, dest_database):

    # origin
    origin_instance = origin_database.databaseinfra.instances.all()[0]

    db_orig = origin_database.name
    pass_orig = origin_database.databaseinfra.password
    host_orig = origin_instance.address
    port_orig = origin_instance.port

    # destination
    dest_instance = dest_database.databaseinfra.instances.all()[0]

    db_dest = dest_database.name
    pass_dest = dest_database.databaseinfra.password
    host_dest = dest_instance.address
    port_dest = dest_instance.port

    path_of_dump = Configuration.get_by_name('database_clone_dir')

    if origin_database.databaseinfra.engine.engine_type.name != "redis":
        user_orig = origin_database.databaseinfra.user
        user_dest = dest_database.databaseinfra.user

        args = [
            db_orig, user_orig, pass_orig, host_orig,
            str(int(port_orig)), db_dest, user_dest, pass_dest, host_dest,
            str(int(port_dest)), path_of_dump
        ]
    else:

        sys_credentials = get_credentials_for(origin_database.environment,
                                              models.CredentialType.VM)
        sys_user_orig = sys_user_dest = sys_credentials.user
        sys_pass_orig = sys_pass_dest = sys_credentials.password

        if path_of_dump.endswith('/'):
            path_of_dump += 'dump.rdb'
        else:
            path_of_dump += '/dump.rdb'

        args = [
            '--remove_dump', "60", pass_orig, host_orig,
            str(int(port_orig)), sys_user_orig, sys_pass_orig,
            '/data/data/dump.rdb', pass_dest, host_dest,
            str(int(port_dest)), sys_user_dest, sys_pass_dest,
            '/data/data/dump.rdb', path_of_dump
        ]

        if dest_database.plan.is_ha:
            cluster_info = []

            for instance in dest_database.databaseinfra.instances.filter(
                    instance_type=Instance.REDIS):
                cluster_info.append({
                    "sys_user": sys_user_dest,
                    "sys_pass": sys_pass_dest,
                    "remote_path": "/data/data/dump.rdb",
                    "host": instance.address,
                    "redis_pass": pass_dest,
                    "redis_port": str(int(port_dest))
                })

            args = [
                '--remove_dump', "60", pass_orig, host_orig,
                str(int(port_orig)), sys_user_orig, sys_pass_orig,
                '/data/data/dump.rdb', pass_dest, host_dest,
                str(int(port_dest)), sys_user_dest, sys_pass_dest,
                '/data/data/dump.rdb', path_of_dump, '--cluster_info',
                str(cluster_info)
            ]

    return args
def email_to(team):
    if team and team.email:
        return [team.email, Configuration.get_by_name("new_user_notify_email")]
    return Configuration.get_by_name("new_user_notify_email")
def email_from():
    return Configuration.get_by_name("email_addr_from")
Example #27
def get_clone_args(origin_database, dest_database):

    # origin
    origin_instance = origin_database.databaseinfra.instances.all()[0]

    db_orig = origin_database.name
    pass_orig = origin_database.databaseinfra.password
    host_orig = origin_instance.address
    port_orig = origin_instance.port

    # destination
    dest_instance = dest_database.databaseinfra.instances.all()[0]

    db_dest = dest_database.name
    pass_dest = dest_database.databaseinfra.password
    host_dest = dest_instance.address
    port_dest = dest_instance.port

    path_of_dump = Configuration.get_by_name('database_clone_dir')

    if origin_database.databaseinfra.engine.engine_type.name != "redis":
        user_orig = origin_database.databaseinfra.user
        user_dest = dest_database.databaseinfra.user

        args = [db_orig, user_orig, pass_orig, host_orig, str(int(port_orig)),
                db_dest, user_dest, pass_dest, host_dest, str(int(port_dest)),
                path_of_dump
                ]
    else:

        sys_credentials = get_credentials_for(
            origin_database.environment, models.CredentialType.VM)
        sys_user_orig = sys_user_dest = sys_credentials.user
        sys_pass_orig = sys_pass_dest = sys_credentials.password

        if path_of_dump.endswith('/'):
            path_of_dump += 'dump.rdb'
        else:
            path_of_dump += '/dump.rdb'

        args = ['--remove_dump', "60", pass_orig, host_orig,
                str(int(port_orig)), sys_user_orig, sys_pass_orig,
                '/data/data/dump.rdb', pass_dest, host_dest,
                str(int(port_dest)), sys_user_dest, sys_pass_dest,
                '/data/data/dump.rdb', path_of_dump
                ]

        if dest_database.plan.is_ha:
            cluster_info = []

            for instance in dest_database.databaseinfra.instances.filter(instance_type=Instance.REDIS):
                cluster_info.append({"sys_user": sys_user_dest, "sys_pass": sys_pass_dest,
                                     "remote_path": "/data/data/dump.rdb", "host": instance.address,
                                     "redis_pass": pass_dest, "redis_port": str(int(port_dest))})

            args = ['--remove_dump', "60", pass_orig, host_orig,
                    str(int(port_orig)), sys_user_orig, sys_pass_orig,
                    '/data/data/dump.rdb', pass_dest, host_dest,
                    str(int(port_dest)), sys_user_dest, sys_pass_dest,
                    '/data/data/dump.rdb', path_of_dump, '--cluster_info', str(
                        cluster_info)
                    ]

    return args
Example #28
    def do(self, workflow_dict):
        try:
            flipper_fox_migration_dir = Configuration.get_by_name(
                'flipper_fox_migration_dir')
            if not flipper_fox_migration_dir:
                flipper_fox_migration_dir = '/tmp'

            workflow_dict['flipper_fox_migration_dir_infra_name'] = "{}/{}".format(
                flipper_fox_migration_dir, workflow_dict['databaseinfra'].name)

            for index, source_instance in enumerate(workflow_dict['source_instances']):

                source_host = source_instance.hostname
                source_cs_host_attr = CS_HostAttr.objects.get(host=source_host)

                hostname = source_host.hostname.split('.')[0]
                localpath = "{}/{}".format(
                    workflow_dict['flipper_fox_migration_dir_infra_name'], hostname)
                os.makedirs(localpath)

                LOG.info('Get source host files to {}'.format(localpath))

                if not scp_get_file(server=source_host.address,
                                    username=source_cs_host_attr.vm_user,
                                    password=source_cs_host_attr.vm_password,
                                    localpath="{}/my.cnf".format(localpath),
                                    remotepath="/etc/my.cnf"):
                    raise Exception("FTP Error")

                target_host = source_host.future_host
                LOG.info(target_host)
                target_cs_host_attr = CS_HostAttr.objects.get(host=target_host)

                if not scp_put_file(server=target_host.address,
                                    username=target_cs_host_attr.vm_user,
                                    password=target_cs_host_attr.vm_password,
                                    localpath="{}/my.cnf".format(localpath),
                                    remotepath="/etc/my.cnf"):
                    raise Exception("FTP Error")

                script = test_bash_script_error()
                script += build_server_id_conf_script()

                context_dict = {
                    'SERVERID': index + 5,
                }

                script = build_context_script(context_dict, script)

                output = {}
                LOG.info(script)
                return_code = exec_remote_command(server=target_host.address,
                                                  username=target_cs_host_attr.vm_user,
                                                  password=target_cs_host_attr.vm_password,
                                                  command=script,
                                                  output=output)
                LOG.info(output)
                if return_code != 0:
                    raise Exception(str(output))

            shutil.rmtree(workflow_dict['flipper_fox_migration_dir_infra_name'])

            return True
        except Exception:
            traceback = full_stack()

            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
            workflow_dict['exceptions']['traceback'].append(traceback)

            return False
Example #29
    def do(self, workflow_dict):
        try:

            initial_script = '#!/bin/bash\n\ndie_if_error()\n{\n    local err=$?\n    if [ "$err" != "0" ]; then\n        echo "$*"\n        exit $err\n    fi\n}'

            region_migration_dir = Configuration.get_by_name(
                'region_migration_dir')
            if not region_migration_dir:
                region_migration_dir = '/tmp'

            workflow_dict['region_migration_dir_infra_name'] = "{}/{}".format(
                region_migration_dir, workflow_dict['databaseinfra'].name)

            for index, source_instance in enumerate(
                    workflow_dict['source_instances']):

                source_host = source_instance.hostname
                source_cs_host_attr = CS_HostAttr.objects.get(host=source_host)

                hostname = source_host.hostname.split('.')[0]
                localpath = "{}/{}".format(
                    workflow_dict['region_migration_dir_infra_name'], hostname)
                os.makedirs(localpath)

                LOG.info('Get source host files to {}'.format(localpath))

                if not scp_get_file(
                        server=source_host.address,
                        username=source_cs_host_attr.vm_user,
                        password=source_cs_host_attr.vm_password,
                        localpath="{}/mongodb.key".format(localpath),
                        remotepath="/data/mongodb.key"):
                    raise Exception("FTP Error")

                if not scp_get_file(
                        server=source_host.address,
                        username=source_cs_host_attr.vm_user,
                        password=source_cs_host_attr.vm_password,
                        localpath="{}/mongodb.conf".format(localpath),
                        remotepath="/data/mongodb.conf"):
                    raise Exception("FTP Error")

                if not scp_get_file(
                        server=source_host.address,
                        username=source_cs_host_attr.vm_user,
                        password=source_cs_host_attr.vm_password,
                        localpath="{}/td-agent.conf".format(localpath),
                        remotepath="/etc/td-agent/td-agent.conf"):
                    raise Exception("FTP Error")

                target_host = source_host.future_host
                LOG.info(target_host)
                target_cs_host_attr = CS_HostAttr.objects.get(host=target_host)

                if not scp_put_file(
                        server=target_host.address,
                        username=target_cs_host_attr.vm_user,
                        password=target_cs_host_attr.vm_password,
                        localpath="{}/mongodb.key".format(localpath),
                        remotepath="/data/mongodb.key"):
                    raise Exception("FTP Error")

                if not scp_put_file(
                        server=target_host.address,
                        username=target_cs_host_attr.vm_user,
                        password=target_cs_host_attr.vm_password,
                        localpath="{}/mongodb.conf".format(localpath),
                        remotepath="/data/mongodb.conf"):
                    raise Exception("FTP Error")

                if not scp_put_file(
                        server=target_host.address,
                        username=target_cs_host_attr.vm_user,
                        password=target_cs_host_attr.vm_password,
                        localpath="{}/td-agent.conf".format(localpath),
                        remotepath="/etc/td-agent/td-agent.conf"):
                    raise Exception("FTP Error")

                script = initial_script
                script += '\nmkdir /data/data'
                script += '\ndie_if_error "Error creating data dir"'

                script += '\nchown mongodb:mongodb /data'
                script += '\ndie_if_error "Error changing datadir permission"'
                script += '\nchown -R mongodb:mongodb /data/*'
                script += '\ndie_if_error "Error changing datadir permission"'

                script += '\nchmod 600 /data/mongodb.key'
                script += '\ndie_if_error "Error changing mongodb key file permission"'

                script += '\necho ""; echo $(date "+%Y-%m-%d %T") "- Starting the database"'
                script += '\n/etc/init.d/mongodb start > /dev/null'
                script += '\ndie_if_error "Error starting database"'
                script = build_context_script({}, script)
                output = {}
                LOG.info(script)
                return_code = exec_remote_command(
                    server=target_host.address,
                    username=target_cs_host_attr.vm_user,
                    password=target_cs_host_attr.vm_password,
                    command=script,
                    output=output)
                LOG.info(output)
                if return_code != 0:
                    raise Exception(str(output))

            shutil.rmtree(workflow_dict['region_migration_dir_infra_name'])

            return True
        except Exception:
            traceback = full_stack()

            workflow_dict['exceptions']['error_codes'].append(DBAAS_0019)
            workflow_dict['exceptions']['traceback'].append(traceback)

            return False
Example #30
        try:
            database_logs = json.loads(database_logs)
        except Exception as e:
            pass
        else:
            for database_log in database_logs:
                try:
                    items = database_log['items']
                except KeyError as e:
                    pass
                else:
                    parsed_logs = "\n".join(
                        (item['message'] for item in items))

        arq_path = Configuration.get_by_name(
            'database_clone_dir'
        ) + '/' + database.name + generate_random_string(20) + '.txt'

        arq = open(arq_path, 'w')
        arq.write(parsed_logs)
        arq.close()

        uri = 'mongodb://{}:{}@{}:{}/admin'.format(
            database.databaseinfra.user, database.databaseinfra.password,
            database.databaseinfra.instances.all()[0].address,
            database.databaseinfra.instances.all()[0].port)

        old_stdout = sys.stdout
        sys.stdout = mystdout = StringIO()

        md = dex.Dex(db_uri=uri,
Example #31
        parsed_logs = ''
        database_logs = provider.get_logs_for_group(environment, lognit_environment, uri)
        try:
            database_logs = json.loads(database_logs)
        except Exception as e:
            pass
        else:
            for database_log in database_logs:
                try:
                    items = database_log['items']
                except KeyError as e:
                    pass
                else:
                    parsed_logs = "\n".join((item['message'] for item in items))

        arq_path = Configuration.get_by_name(
            'database_clone_dir'
        ) + '/' + database.name + generate_random_string(20) + '.txt'

        arq = open(arq_path, 'w')
        arq.write(parsed_logs)
        arq.close()

        uri = 'mongodb://{}:{}@{}:{}/admin'.format(
            database.databaseinfra.user, database.databaseinfra.password,
            database.databaseinfra.instances.all()[0].address,
            database.databaseinfra.instances.all()[0].port)

        old_stdout = sys.stdout
        sys.stdout = mystdout = StringIO()

        md = dex.Dex(db_uri=uri, verbose=False, namespaces_list=[],
    def do(self, workflow_dict):
        try:
            
            initial_script = '#!/bin/bash\n\ndie_if_error()\n{\n    local err=$?\n    if [ "$err" != "0" ]; then\n        echo "$*"\n        exit $err\n    fi\n}'
            
            region_migration_dir = Configuration.get_by_name('region_migration_dir')
            if not region_migration_dir:
                region_migration_dir = '/tmp'
            
            workflow_dict['region_migration_dir_infra_name'] = "{}/{}".format(region_migration_dir, workflow_dict['databaseinfra'].name)
            
            for index, source_instance in enumerate(workflow_dict['source_instances']):
                
                source_host = source_instance.hostname
                source_cs_host_attr = CS_HostAttr.objects.get(host=source_host)
                
                hostname = source_host.hostname.split('.')[0]
                localpath = "{}/{}".format(workflow_dict['region_migration_dir_infra_name'], hostname)
                os.makedirs(localpath)
                
                LOG.info('Get source host files to {}'.format(localpath))
                
                if not scp_get_file(server=source_host.address,
                                    username=source_cs_host_attr.vm_user,
                                    password=source_cs_host_attr.vm_password,
                                    localpath="{}/mongodb.key".format(localpath),
                                    remotepath="/data/mongodb.key"):
                    raise Exception("FTP Error")

                if not scp_get_file(server=source_host.address,
                                    username=source_cs_host_attr.vm_user,
                                    password=source_cs_host_attr.vm_password,
                                    localpath="{}/mongodb.conf".format(localpath),
                                    remotepath="/data/mongodb.conf"):
                    raise Exception("FTP Error")

                if not scp_get_file(server=source_host.address,
                                    username=source_cs_host_attr.vm_user,
                                    password=source_cs_host_attr.vm_password,
                                    localpath="{}/td-agent.conf".format(localpath),
                                    remotepath="/etc/td-agent/td-agent.conf"):
                    raise Exception("FTP Error")
                
                target_host = source_host.future_host
                LOG.info(target_host)
                target_cs_host_attr = CS_HostAttr.objects.get(host=target_host)

                if not scp_put_file(server=target_host.address,
                                    username=target_cs_host_attr.vm_user,
                                    password=target_cs_host_attr.vm_password,
                                    localpath="{}/mongodb.key".format(localpath),
                                    remotepath="/data/mongodb.key"):
                    raise Exception("FTP Error")

                if not scp_put_file(server=target_host.address,
                                    username=target_cs_host_attr.vm_user,
                                    password=target_cs_host_attr.vm_password,
                                    localpath="{}/mongodb.conf".format(localpath),
                                    remotepath="/data/mongodb.conf"):
                    raise Exception("FTP Error")

                if not scp_put_file(server=target_host.address,
                                    username=target_cs_host_attr.vm_user,
                                    password=target_cs_host_attr.vm_password,
                                    localpath="{}/td-agent.conf".format(localpath),
                                    remotepath="/etc/td-agent/td-agent.conf"):
                    raise Exception("FTP Error")
                
                script = initial_script
                script += '\nmkdir /data/data'
                script += '\ndie_if_error "Error creating data dir"'
                
                script += '\nchown mongodb:mongodb /data'
                script += '\ndie_if_error "Error changing datadir permission"'
                script += '\nchown -R mongodb:mongodb /data/*'
                script += '\ndie_if_error "Error changing datadir permission"'

                script += '\nchmod 600 /data/mongodb.key'
                script += '\ndie_if_error "Error changing mongodb key file permission"'

                script += '\necho ""; echo $(date "+%Y-%m-%d %T") "- Starting the database"'
                script += '\n/etc/init.d/mongodb start > /dev/null'
                script += '\ndie_if_error "Error starting database"'
                script = build_context_script({}, script)
                output = {}
                LOG.info(script)
                return_code = exec_remote_command(server=target_host.address,
                                                  username=target_cs_host_attr.vm_user,
                                                  password=target_cs_host_attr.vm_password,
                                                  command=script,
                                                  output=output)
                LOG.info(output)
                if return_code != 0:
                    raise Exception(str(output))

            shutil.rmtree(workflow_dict['region_migration_dir_infra_name'])

            return True
        except Exception:
            traceback = full_stack()

            workflow_dict['exceptions']['error_codes'].append(DBAAS_0019)
            workflow_dict['exceptions']['traceback'].append(traceback)

            return False
Example #33
 def cluster_command(self):
     return Configuration.get_by_name('redis_trib_path')
Example #34
    def do(self, workflow_dict):
        try:
            region_migration_dir = Configuration.get_by_name('region_migration_dir')
            if not region_migration_dir:
                region_migration_dir = '/tmp'

            workflow_dict['region_migration_dir_infra_name'] = "{}/{}".format(region_migration_dir, workflow_dict['databaseinfra'].name)

            for index, source_instance in enumerate(workflow_dict['source_instances']):

                source_host = source_instance.hostname
                source_cs_host_attr = CS_HostAttr.objects.get(host=source_host)

                hostname = source_host.hostname.split('.')[0]
                localpath = "{}/{}".format(workflow_dict['region_migration_dir_infra_name'], hostname)
                os.makedirs(localpath)

                LOG.info('Get source host files to {}'.format(localpath))

                if not scp_get_file(server=source_host.address,
                                    username=source_cs_host_attr.vm_user,
                                    password=source_cs_host_attr.vm_password,
                                    localpath="{}/mongodb.key".format(localpath),
                                    remotepath="/data/mongodb.key"):
                    raise Exception("FTP Error")

                if not scp_get_file(server=source_host.address,
                                    username=source_cs_host_attr.vm_user,
                                    password=source_cs_host_attr.vm_password,
                                    localpath="{}/mongodb.conf".format(localpath),
                                    remotepath="/data/mongodb.conf"):
                    raise Exception("FTP Error")

                if not scp_get_file(server=source_host.address,
                                    username=source_cs_host_attr.vm_user,
                                    password=source_cs_host_attr.vm_password,
                                    localpath="{}/td-agent.conf".format(localpath),
                                    remotepath="/etc/td-agent/td-agent.conf"):
                    raise Exception("FTP Error")

                target_host = source_host.future_host
                LOG.info(target_host)
                target_cs_host_attr = CS_HostAttr.objects.get(host=target_host)

                target_instance = source_instance.future_instance
                if target_instance.instance_type == target_instance.MONGODB_ARBITER:
                    LOG.info("Cheking host ssh...")
                    host_ready = check_ssh(server=target_host.address,
                                           username=target_cs_host_attr.vm_user,
                                           password=target_cs_host_attr.vm_password,
                                           wait=5, interval=10)

                    if not host_ready:
                        raise Exception(str("Host %s is not ready..." % target_host))

                if not scp_put_file(server=target_host.address,
                                    username=target_cs_host_attr.vm_user,
                                    password=target_cs_host_attr.vm_password,
                                    localpath="{}/mongodb.key".format(localpath),
                                    remotepath="/data/mongodb.key"):
                    raise Exception("FTP Error")

                if not scp_put_file(server=target_host.address,
                                    username=target_cs_host_attr.vm_user,
                                    password=target_cs_host_attr.vm_password,
                                    localpath="{}/mongodb.conf".format(localpath),
                                    remotepath="/data/mongodb.conf"):
                    raise Exception("FTP Error")

                if not scp_put_file(server=target_host.address,
                                    username=target_cs_host_attr.vm_user,
                                    password=target_cs_host_attr.vm_password,
                                    localpath="{}/td-agent.conf".format(localpath),
                                    remotepath="/etc/td-agent/td-agent.conf"):
                    raise Exception("FTP Error")

                script = test_bash_script_error()
                script += build_permission_script()
                script += build_start_database_script()
                script = build_context_script({}, script)

                output = {}
                LOG.info(script)
                return_code = exec_remote_command(server=target_host.address,
                                                  username=target_cs_host_attr.vm_user,
                                                  password=target_cs_host_attr.vm_password,
                                                  command=script,
                                                  output=output)
                LOG.info(output)
                if return_code != 0:
                    raise Exception(str(output))

            shutil.rmtree(workflow_dict['region_migration_dir_infra_name'])

            return True
        except Exception:
            traceback = full_stack()

            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
            workflow_dict['exceptions']['traceback'].append(traceback)

            return False
Example #35
 def cluster_command(self):
     if self.current_redis == self.REDIS4:
         return Configuration.get_by_name('redis_trib_path')
     elif self.current_redis == self.REDIS5:
         return '/usr/local/redis/src/redis-cli'
 def cluster_command(self):
     return Configuration.get_by_name('redis_trib_path')
Example #37
    def database_dex_analyze_view(self, request, database_id):
        import json
        import random
        from dbaas_laas.provider import LaaSProvider
        from util import get_credentials_for
        from util.laas import get_group_name
        from dbaas_credentials.models import CredentialType
        import os
        import string
        from datetime import datetime, timedelta

        def generate_random_string(length,
                                   stringset=string.ascii_letters +
                                   string.digits):
            return ''.join([
                stringset[i % len(stringset)]
                for i in [ord(x) for x in os.urandom(length)]
            ])

        database = Database.objects.get(id=database_id)

        if database.status != Database.ALIVE or not database.database_status.is_alive:
            self.message_user(request,
                              "Database is not alive cannot be analyzed",
                              level=messages.ERROR)
            url = reverse('admin:logical_database_changelist')
            return HttpResponseRedirect(url)

        if database.is_beeing_used_elsewhere():
            self.message_user(
                request,
                "Database cannot be analyzed because it is in use by another task.",
                level=messages.ERROR)
            url = reverse('admin:logical_database_changelist')
            return HttpResponseRedirect(url)

        credential = get_credentials_for(environment=database.environment,
                                         credential_type=CredentialType.LAAS)

        db_name = database.name
        environment = database.environment
        endpoint = credential.endpoint
        username = credential.user
        password = credential.password
        lognit_environment = credential.get_parameter_by_name(
            'lognit_environment')

        provider = LaaSProvider()

        group_name = get_group_name(database)
        today = (datetime.now()).strftime('%Y%m%d')
        yesterday = (datetime.now() - timedelta(days=1)).strftime('%Y%m%d')
        uri = "group:{} text:query date:[{} TO {}] time:[000000 TO 235959]".format(
            group_name, yesterday, today)

        parsed_logs = ''
        database_logs = provider.get_logs_for_group(environment,
                                                    lognit_environment, uri)
        try:
            database_logs = json.loads(database_logs)
        except Exception as e:
            pass
        else:
            for database_log in database_logs:
                try:
                    items = database_log['items']
                except KeyError as e:
                    pass
                else:
                    parsed_logs = "\n".join(
                        (item['message'] for item in items))

        arq_path = Configuration.get_by_name(
            'database_clone_dir'
        ) + '/' + database.name + generate_random_string(20) + '.txt'

        arq = open(arq_path, 'w')
        arq.write(parsed_logs)
        arq.close()

        uri = 'mongodb://{}:{}@{}:{}/admin'.format(
            database.databaseinfra.user, database.databaseinfra.password,
            database.databaseinfra.instances.all()[0].address,
            database.databaseinfra.instances.all()[0].port)

        old_stdout = sys.stdout
        sys.stdout = mystdout = StringIO()

        md = dex.Dex(db_uri=uri,
                     verbose=False,
                     namespaces_list=[],
                     slowms=0,
                     check_indexes=True,
                     timeout=0)

        md.analyze_logfile(arq_path)

        sys.stdout = old_stdout

        dexanalyzer = loads(mystdout.getvalue().replace("\"", "&&").replace(
            "'", "\"").replace("&&", "'"))

        os.remove(arq_path)

        import ast
        final_mask = """<div>"""

        print(dexanalyzer['results'])

        for result in dexanalyzer['results']:

            final_mask += "<h3> Collection: " + result['namespace'] + "</h3>"
            final_mask += \
                """<li> Query: """ +\
                str(ast.literal_eval(result['queryMask'])['$query']) +\
                """</li>""" +\
                """<li> Index: """ +\
                result['recommendation']['index'] +\
                """</li>""" +\
                """<li> Command: """ +\
                result['recommendation']['shellCommand'] +\
                """</li>"""

            final_mask += """<br>"""

        final_mask += """</ul> </div>"""

        return render_to_response("logical/database/dex_analyze.html",
                                  locals(),
                                  context_instance=RequestContext(request))