Example #1
0
    def __init__(self,
                 webshell_url,
                 webshell_param=None,
                 encoded=True,
                 os=DEFAULT_OS):
        """Validate the webshell endpoint and store the session settings.

        Args:
            webshell_url: URL that must point at a ``.php`` endpoint.
            webshell_param: Request parameter used to carry commands;
                falls back to ``DEFAULT_WEBSHELL_PARAM`` when falsy.
            encoded: Whether payloads are encoded before being sent.
            os: Target OS name; anything other than UNIX/WINDOWS
                (case-insensitive) falls back to ``DEFAULT_OS``.

        Exits the process (non-zero) when the URL is invalid or the
        endpoint is unreachable.
        """
        try:
            webshell_url = web_utils.validate_url(webshell_url)
            # The endpoint must have a path component and end in .php.
            if '/' not in webshell_url or not webshell_url.endswith('.php'):
                raise web_utils.InvalidRequestError(
                    "NO FILE TO PHP FILE FOUND")
        except web_utils.InvalidRequestError:
            # Fixed: the message used to contain a stray quote after the URL.
            print(
                f"[-] - The provided url: '{webshell_url}' does not seem to be a valid url with a php file endpoint"
            )
            # Fixed: exit non-zero on failure, consistent with the
            # connection-check path below (previously exited with 0).
            sys.exit(1)

        if not web_utils.connection_check(webshell_url):
            sys.exit(1)

        self.webshell_url = webshell_url
        # Fall back to the class default when no parameter was supplied.
        if not webshell_param:
            webshell_param = self.DEFAULT_WEBSHELL_PARAM
        self.webshell_param = webshell_param
        self.encoded = encoded
        if os.upper() not in ("UNIX", "WINDOWS"):
            print(
                "[-] - The provided OS is not UNIX or WINDOWS... Defaulting to UNIX"
            )
            self.os = self.DEFAULT_OS
        else:
            self.os = os
Example #2
0
 def __init__(self,
              os='linux',
              mode='defensive',
              reset_iface=0,
              unlink_all_ifaces=0,
              iface=None,
              da_iface=None,
              command=None):
     """Bind to a network interface and resolve its MAC address plus
     the default gateway's IP and MAC.

     Args:
         os: target OS name; stored upper-cased.
         mode: operating mode flag; stored upper-cased.
         reset_iface: truthy to request an interface reset later.
         unlink_all_ifaces: truthy to request unlinking all interfaces.
         iface: interface name; must exist on this host.
         da_iface: secondary interface name, stored as given.
         command: optional command string, stored as given.

     Raises:
         ValueError: when ``iface`` is not a known interface name.
             (ValueError subclasses Exception, so existing
             ``except Exception`` handlers still catch it; the bare
             ``raise Exception`` carried no diagnostic at all.)
     """
     self.iface = None
     self.da_iface = da_iface
     self.gateway_IP = None
     # Fixed: initialize gateway_MAC unconditionally so the attribute
     # always exists even when the loop below finds no matching gateway
     # (it previously was only assigned inside the loop).
     self.gateway_MAC = None
     self.iface_MAC = None
     self.reset_iface = reset_iface
     self.os = os.upper()
     self.mode = mode.upper()
     self.unlink_all_ifaces = unlink_all_ifaces
     self.command = command
     if iface in netifaces.interfaces():
         self.iface = iface
         self.iface_MAC = getmac.get_mac_address(interface=self.iface)
     else:
         raise ValueError(f"unknown network interface: {iface!r}")
     # Index 2 is presumably netifaces.AF_INET; each entry looks like
     # (gateway_ip, iface_name, is_default) -- TODO confirm against the
     # netifaces documentation.
     for network in netifaces.gateways()[2]:
         if self.iface in network:
             self.gateway_IP = network[0]
             self.gateway_MAC = getmac.get_mac_address(ip=self.gateway_IP)
Example #3
0
File: views.py — Project: angrek/dashboard
def stacked_column_total(request, os, zone, service, period, time_range):
    """Django view: render a stacked-column chart of combined Linux and
    AIX server counts, one series per distinct value of ``service``,
    sampled across a range of dates stepped by ``period``.

    Args (all supplied by the URL route):
        request: the Django HttpRequest.
        os: 'aix' or 'linux'; only used to build a label below.
        zone: zone key, or 'all' to include every zone.
        service: model field name whose distinct values become series.
        period: 'day', 'week' or 'month' — the sampling interval.
        time_range: number of intervals to sample back from today.

    Returns:
        HttpResponse rendering 'server/stacked_column.html'.
    """
    # NOTE(review): these GET lookups discard their results; the actual
    # inputs are the URL-route parameters above — confirm intent.
    request.GET.get('os')
    request.GET.get('zone')
    request.GET.get('service')
    request.GET.get('period')
    request.GET.get('time_range')
    # Maps series label -> list of per-date counts (filled far below).
    data = {}

    #url = 'http://www.cnn.com/' + str(zone)
    #return HttpResponseRedirect(url)
    #sys(exit)

    # Resolve the zone key into its labels/urls/title for the template.
    zone, zone_label1, zone_label2, zone_url1, zone_url2, zone_title = get_zone(zone)


    # NOTE(review): os_label is computed but never used afterwards (it
    # is not passed to the template context) — confirm before relying on it.
    if os == 'aix':
        os_label = os.upper()
    elif os == "linux":
        os_label = os.capitalize()

    title = "Total number of Linux and AIX servers by " + period
    
    #doing this to cut down on the amount of calls to datetime
    # NOTE(review): `today` is never read; the same value is recomputed
    # inline at the time_interval.append() below.
    today = datetime.date.today().strftime('%Y-%m-%d')

    #time_interval is the list of dates to gather data from, whether by day, week, month 
    time_interval = []
    #number_of_servers = []

    #interval is the offset for timedelta to get last sunday every week, every month or whatever
    interval = 0


    #Populate time_interval with the dates for the labels and queries
    time_interval.append(datetime.date.today().strftime('%Y-%m-%d'))

    # day_offset positions the first historical sample: yesterday for
    # 'day', last week boundary for 'week', first of the month otherwise.
    if period == "day":
        day_offset = 1
    elif period == 'week':
        day_offset = int(datetime.date.today().weekday()) + 1
    else:
        day_offset = int(datetime.date.today().strftime('%d'))

    # Walk backwards time_range-1 steps, appending one sample date per step.
    for x in range (1, (int(time_range))):
        end_date = (datetime.date.today() - datetime.timedelta(days = (day_offset + interval))).strftime('%Y-%m-%d')
        
        #####APPEND HERE#####
        time_interval.append(end_date)

        if period == 'week':
            interval = interval + 7
        elif period == 'day':
            interval = interval + 1
        elif period == 'month':
            # NOTE(review): this branch is dead — x starts at 1, so
            # x == 0 can never be true here.
            if x == 0:
                #get the first day of the month, we're just adding today on the end of the graph here
                interval = interval + (int(datetime.date.today().strftime('%d')) )
            else:
            #this goes back and finds the first day of each of the last months in the time range and adjusts the year if it has to
            #the graph for days or weeks doesn't have to do this because it's a set 1 and 7 interval whereas days of the month vary
                my_year = int(datetime.date.today().strftime('%Y'))
                my_month = int(datetime.date.today().strftime('%m'))

                #We need to go to last year
                if (my_month - x) < 1:
                    my_year = my_year -1
                    my_month = my_month + 12
                next_month_back = calendar.monthrange(my_year, (my_month - x))[1]
                interval = interval + next_month_back
        else:
            #Not sure what to do here, 404? sys.exit?
            # NOTE(review): sys.exit() inside a web view kills the worker
            # process for a bad URL parameter; an Http404 would be safer.
            sys.exit()


    ################################################################
    #  Divider for gettin version list
    ################################################################

    #Here we'll go through each label date and use those to find which versions are on those specific dates
    version_list = []


    for my_date in time_interval:

        #FIXME Well crap.... I don't want this in here but I need that date to make the predicates....how...can I add it in after??
        if zone == 'all':
            predicates = [('active', True), ('decommissioned', False), ('date', my_date)]
        else:
            predicates = [('active', True), ('decommissioned', False), ('zone', zone), ('date', my_date)]

        # Build a dynamic AND-ed ORM filter from the predicate pairs.
        q_list = [Q(x) for x in predicates]

        #if os == 'aix':
        #   temp_list = HistoricalAIXData.objects.filter(reduce(operator.and_, q_list)).values_list(service , flat=True).distinct()
        #elif os == 'linux':
        #    temp_list = HistoricalLinuxData.objects.filter(reduce(operator.and_, q_list)).values_list(service , flat=True).distinct()
        #else:
        #    sys.exit()

        # Both OSes are always combined, regardless of the `os` argument.
        temp_list1 = HistoricalAIXData.objects.filter(reduce(operator.and_, q_list)).values_list(service , flat=True).distinct()
        temp_list2 = HistoricalLinuxData.objects.filter(reduce(operator.and_, q_list)).values_list(service , flat=True).distinct()
        temp_list = list(chain(temp_list1, temp_list2))

            
        temp_list = list(set(temp_list)) #quick way to make sure you have all uniques
        version_list = version_list + temp_list  #add em up

    version_list = list(set(version_list))


    #################################################################
    # Divider for iterating over the versions
    ################################################################

    #Ok, this is a bit different, we're going to have to iterate over the date and push the number of servers into a list across dates
    version_counter = 0
    date_counter = 0
    # NOTE(review): hard-capped at 14 series — an IndexError occurs if
    # version_list grows longer than this. TODO confirm upstream bound.
    my_array = [[], [], [], [], [], [], [], [], [], [], [], [], [], []]

    for version in version_list:
        for date in time_interval:
            #Using django Q objects to create a dynamic query here
            if zone == 'all':
                predicates = [('active', True), ('decommissioned', False), (service, version), ('date', date)]
            else:
                predicates = [('active', True), ('decommissioned', False), (service, version), ('zone', zone), ('date', date)]


            q_list = [Q(x) for x in predicates]

            #if os == 'aix':
            #    num = HistoricalAIXData.objects.filter(reduce(operator.and_, q_list)).count()
            #elif os == 'linux':
            #    num = HistoricalLinuxData.objects.filter(reduce(operator.and_, q_list)).count()

            num1 = HistoricalAIXData.objects.filter(reduce(operator.and_, q_list)).count()
            num2 = HistoricalLinuxData.objects.filter(reduce(operator.and_, q_list)).count()
            num = num1 + num2


            # NOTE(review): date_counter is never reset between versions,
            # so this `== 0` branch only runs for the very first version;
            # all later versions fall through to append(), which happens
            # to work only because my_array is pre-seeded with empty lists.
            if date_counter == 0:
                my_array[version_counter] = [num]
            else:
                my_array[version_counter].append(num)
            date_counter += 1

        #FIXME Need a proper call rather than hardcoding it
        # When charting by zone, map the numeric zone codes to display names.
        if service == 'zone':
            if version == 1:
                version = 'NonProduction'
            elif version == 2:
                version = 'Production'
            elif version == 3:
                version = 'Unsure'
        data[version] = my_array[version_counter]
        version_counter += 1
    # Dates were collected newest-first; flip to chronological order.
    time_interval.reverse()


    #remove empty sets
    my_array = [x for x in my_array if x]

    #reverse each list in the list of lists (of lists in lists....AHHH!)
    # Keep each series aligned with the now-chronological time_interval.
    for p in my_array:
            p.reverse()
        
    name = "Test Name"
    y_axis_title = 'Number of Servers'
    percentage = 0
    return render(request, 'server/stacked_column.html', {'data': data, 'name': name, 'title': title, 'y_axis_title':y_axis_title, 'version_list':version_list, 'time_interval':time_interval, 'my_array':my_array, 'os':os, 'service':service, 'zone_label1':zone_label1, 'zone_label2':zone_label2, 'zone_url1':zone_url1, 'zone_url2':zone_url2})
Example #4
0
def triage(os, build, component, username, password, job_list, format,
           ignore_list, restart_list, dispatcher_token, server_pool_id,
           add_pool_id):
    """Triage UNSTABLE jobs for a build: query the Couchbase ``server``
    bucket for matching jobs, download each job's test report, and
    bucket every failed case by reason ('connection' / 'no_logs' /
    'all_the_rest') into the ``failers`` list.

    Args:
        os: target OS; upper-cased into the N1QL query.
        build: build identifier to filter on.
        component: component name; upper-cased into the query.
        username / password: Couchbase credentials.
        job_list: comma-separated job names to include, or ''/None.
        format: output format flags (e.g. contains 'd' to download full
            logs); defaults to 'csd' when empty. NOTE(review): the
            parameter shadows the ``format`` builtin, but renaming it
            would break keyword callers.
        ignore_list: comma-separated job names to exclude, or ''/None.
        restart_list: when non-empty, restart those jobs and exit(0)
            instead of triaging.
        dispatcher_token / server_pool_id / add_pool_id: passed through
            to execute_restart().

    NOTE(review): ``failers`` is accumulated but not returned within
    this visible block — confirm whether the function continues past
    this chunk or relies on side effects.
    """
    # Default format flags when the caller passed nothing.
    fmt = format
    if format == '' or format is None:
        fmt = 'csd'
    job_name = ''
    ignore_jobs = ''

    # Stack-trace substrings that classify a failure as a connection issue.
    connection_error = [
        'ServerUnavailableException', 'unable to reach the host',
        'This test requires buckets',
        'ValueError: No JSON object could be decoded'
    ]

    # Restart mode: restart the listed jobs and stop — no triage.
    if restart_list is not None and restart_list != '':
        execute_restart(restart_list, dispatcher_token, os, build, component,
                        server_pool_id, add_pool_id)
        exit(0)

    # Build an optional "lower(name) in [...]" N1QL clause from job_list.
    if job_list is not None and job_list != '':
        job_arr = job_list.split(",")
        jobs_list = ''
        for j in job_arr:
            jobs_list += ("'" + j.lower() + "'" + ',')
        # The slice drops the trailing comma.
        job_name = ' and lower(name) in [' + jobs_list[:len(jobs_list) -
                                                       1] + '] '
    # Same for the exclusion clause.
    if ignore_list is not None and ignore_list != '':
        ignore_arr = ignore_list.split(",")
        ignore_jobs_list = ''
        for j in ignore_arr:
            ignore_jobs_list += ("'" + j.lower() + "'" + ',')

        ignore_jobs = ' and lower(name) not in [' + ignore_jobs_list[:len(
            ignore_jobs_list) - 1] + '] '

    cluster = Cluster('couchbase://172.23.99.54')
    authenticator = PasswordAuthenticator(username, password)
    cluster.authenticate(authenticator)
    cb = cluster.open_bucket('server')
    # SECURITY NOTE(review): the N1QL statement is built by string
    # concatenation from caller-supplied values (os, build, component,
    # job/ignore lists). If any of these can come from untrusted input,
    # this is injectable — prefer N1QLQuery placeholders/parameters.
    query = N1QLQuery("select * from server where os='" + os.upper() +
                      "' and `build`='" + build +
                      "' and result='UNSTABLE' and component='" +
                      component.upper() + "' " + job_name + ignore_jobs +
                      " order by name")
    # centos-query_covering_tokens centos-query_monitoring_moi centos-query_vset-ro-moi centos-query_clusterops_moi centos-query_tokens
    '''
        main data structure to store all jobs fails info. Format is:
        failers[
                {
                 'name': job name,
                 'ini_file': ini file name,
                 'fail_reasons': {
                                    'name': 'connection',
                                    'cases':[
                                             {
                                                name: case name,
                                                conf_file: config file name,
                                                stack_trace: stack trace,
                                                cmd: run command 
                                                dump_file: core panic downloaded file name
                                             }        
                                            ]
                                    },
                                    {
                                     'name': 'no_logs',
                                     'cases': []
                                    },
                                     'name': 'all_the_rest',
                                     'cases': []               
                 },
                 {
                 }
                ]
    '''
    failers = []

    for row in cb.n1ql_query(query):
        build_id = row['server']['build_id']
        # NOTE(review): this reuses the `job_name` variable that held the
        # N1QL clause above; harmless since the query is already built,
        # but confusing.
        job_name = row['server']['name']
        '''Constructing skeleton of main data structure'''
        job_dict = {}
        job_dict['name'] = job_name
        job_dict['fail_reasons'] = {}

        fail_reason_connection = dict()
        fail_reason_connection['name'] = 'connection'
        fail_reason_connection['cases'] = []
        job_dict['fail_reasons']['connection'] = fail_reason_connection

        fail_reason_no_logs = dict()
        fail_reason_no_logs['name'] = 'no_logs'
        fail_reason_no_logs['cases'] = []
        job_dict['fail_reasons']['no_logs'] = fail_reason_no_logs

        fail_reason_all_the_rest = dict()
        fail_reason_all_the_rest['name'] = 'all_the_rest'
        fail_reason_all_the_rest['cases'] = []
        job_dict['fail_reasons']['all_the_rest'] = fail_reason_all_the_rest
        '''Load json logs for last job call'''
        logs_url = "http://qa.sc.couchbase.com/job/test_suite_executor/" + str(
            build_id) + "/testReport/api/json"
        response = urllib.request.urlopen(logs_url)

        logs_data = {}
        try:
            logs_data = json.loads(response.read())
        except ValueError as ve:
            # Deliberate swallow: unparseable logs leave logs_data == {}
            # which the 'no_logs' checks below handle. (`a = 1` is a
            # no-op placeholder for `pass`.)
            a = 1

        # Crude empty-dict check via its string form; jobs with no
        # parseable report are classified as 'no_logs'.
        if (str(logs_data)) == '{}':
            job_dict['fail_reasons']['no_logs']['cases'].append('no_logs_case')
            failers.append(job_dict)
            continue
        if (len(logs_data['suites'])) == 0:
            job_dict['fail_reasons']['no_logs']['cases'].append('no_logs_case')
            failers.append(job_dict)
            continue
        suites = logs_data['suites']
        '''Load full text logs for last job call'''
        job_logs = []
        full_job_logs = []
        # 'd' flag: also download the full console logs to recover the
        # exact run commands and the ini filename.
        if 'd' in fmt:
            full_job_logs = download_job_logs(build_id)

            job_logs = extract_cmd_lines(full_job_logs)
            ini_filename = extract_ini_filename(full_job_logs)
            job_dict['ini_file'] = ini_filename

        for suite in suites:
            test_cases = suite['cases']
            '''Case run results parser'''
            for case in test_cases:
                if case['status'] == 'FAILED':
                    case_data = dict()
                    # Case name is comma-separated: "name,param:value,..."
                    case_attrs = str(case['name'].encode('utf-8')).split(",")
                    case_name = str(case_attrs[0])

                    case_conf_file = ''
                    for s in case_attrs:
                        if "conf_file:" in s:
                            case_conf_file = s.split(":")[1]
                    raw_stack_trace = str(case['errorStackTrace'])
                    new_stack_trace = format_stack_trace(raw_stack_trace)
                    '''Collecting case data'''
                    if 'd' in fmt:
                        json_params = collect_json_params(case_attrs)
                        case_cmd = extract_cmd(case_name, json_params,
                                               job_logs)
                        case_data['cmd_line'] = case_cmd
                        # Fetch the core dump artifact when the stack
                        # trace says one was collected.
                        if 'new core dump(s) were found and collected. Check testrunner logs folder.' in str(
                                case['errorStackTrace']):
                            core_dump_filename = find_and_download_core_dump(
                                case_cmd, full_job_logs)
                            case_data['dump_file'] = core_dump_filename

                    case_data['name'] = case_name
                    case_data['conf_file'] = case_conf_file
                    case_data['stack_trace'] = new_stack_trace
                    '''Identifying fail reason'''
                    # First matching connection-error substring wins;
                    # anything unmatched lands in 'all_the_rest'.
                    problem_not_identified = True
                    for err in connection_error:
                        if problem_not_identified and err in raw_stack_trace:
                            job_dict['fail_reasons']['connection'][
                                'cases'].append(case_data)
                            problem_not_identified = False
                            break

                    if problem_not_identified:
                        job_dict['fail_reasons']['all_the_rest'][
                            'cases'].append(case_data)

        failers.append(job_dict)
Example #5
0
def _manual_os_setup():
    """Prompt the user to type their OS and run the matching setup.

    Returns True when a setup routine was dispatched (callers should
    stop prompting); False when the input was rejected or matched no
    dispatchable entry (callers should prompt again). Mirrors the
    original inline logic exactly.
    """
    os = input(
        "Please enter your OS - use linux or linux2, win32, or darwin (OSX)"
    )
    os = os.upper()
    if os in osList:
        if os == "LINUX" or os == "LINUX2":
            setup_linux()
            return True
        if os == "WIN32":
            setup_windows()
            return True
        if os == "DARWIN":
            setup_osx()
            return True
        # In osList but not one of the three dispatchable names:
        # the original kept prompting without a message; preserve that.
        return False
    print("That is not a valid OS")
    return False


def setup3():
    """Detect the running platform, confirm it with the user, and
    dispatch to the matching setup routine (setup_linux /
    setup_windows / setup_osx).

    The three previously duplicated prompt blocks are collapsed into a
    dispatch table plus the _manual_os_setup helper. Also fixes a real
    bug: the unsupported-OS branch printed "Exiting script..." but
    never exited, looping (and printing) forever.
    """
    version = [
        sys.version_info.major, sys.version_info.minor, sys.version_info.micro
    ]
    print("Python {}.{}.{} detected".format(*version))

    # sys.platform value -> (confirmation prompt, setup routine).
    confirm_prompt = {
        "linux": ("Linux OS detected. Is this correct? (Y/N) [Default: Y] ",
                  setup_linux),
        "linux2": ("Linux OS detected. Is this correct? (Y/N) [Default: Y] ",
                   setup_linux),
        "win32": ("Windows OS detected. Is this correct? (Y/N) [Default: Y] ",
                  setup_windows),
        "darwin": ("Mac OSX detected. Is this correct? (Y/N) [Default: Y] ",
                   setup_osx),
    }

    prompt = True
    while prompt:
        if platform not in confirm_prompt:
            print("This OS is not supported. Exiting script...")
            # Bug fix: actually stop (the original looped here forever).
            return
        question, setup_fn = confirm_prompt[platform]
        answer = input(question).upper()
        if answer in yesList:
            prompt = False
            setup_fn()
        elif answer in noList:
            # User says detection is wrong: ask them for the OS directly.
            if _manual_os_setup():
                prompt = False
        else:
            print("That is not a valid input.")