Example #1
    def handle_project(self, project, **options):

        debug = options.get("debug", None)
        max_iterations = int(options.get("iterations", 50))

        ptm = PerformanceTestModel(project)

        sql_targets = {}

        cycle_iterations = max_iterations

        while cycle_iterations > 0:

            sql_targets = ptm.cycle_data(sql_targets)

            if debug:
                print "Iterations: {0}".format(str(cycle_iterations))
                print "sql_targets"
                print sql_targets

            cycle_iterations -= 1

            if sql_targets['total_count'] == 0:
                cycle_iterations = 0

        ptm.disconnect()
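The loop above runs until either the iteration budget is spent or cycle_data() reports a total_count of zero. Below is a minimal, self-contained sketch of that termination logic; the stub class and the counts fed to it are illustrative only and stand in for PerformanceTestModel.

class StubPerformanceTestModel(object):
    #Illustrative stand-in: each cycle_data() call pretends to purge one
    #batch and reports how many rows remain.
    def __init__(self, remaining_counts):
        self._remaining = list(remaining_counts)

    def cycle_data(self, sql_targets):
        count = self._remaining.pop(0) if self._remaining else 0
        return {'total_count': count}

    def disconnect(self):
        pass


def cycle_until_done(model, max_iterations=50):
    sql_targets = {}
    cycle_iterations = max_iterations
    while cycle_iterations > 0:
        sql_targets = model.cycle_data(sql_targets)
        cycle_iterations -= 1
        if sql_targets['total_count'] == 0:
            break
    model.disconnect()
    return sql_targets


#Stops after the third cycle because total_count reaches zero.
cycle_until_done(StubPerformanceTestModel([120, 40, 0]))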
Example #2
File: views.py  Project: hfeeki/datazilla
def graphs(request, project=""):

    ####
    #Load any signals provided in the page
    ####
    signals = []
    time_ranges = utils.get_time_ranges()

    for s in SIGNALS:
        if s in request.POST:
            signals.append( { 'value':urllib.unquote( request.POST[s] ),
                              'name':s } )
    ###
    #Get reference data
    ###
    ptm = PerformanceTestModel(project)
    json_data = ptm.get_test_reference_data()
    ptm.disconnect()

    time_key = 'days_30'

    data = { 'time_key':time_key,
             'reference_json':json_data,
             'signals':signals }

    ####
    #The caller has provided the view parent of the signals; load it into
    #the page.  This occurs when a data view is in its Pane form and is
    #detached to exist on its own page.
    ####
    parent_index_key = 'dv_parent_dview_index'
    if parent_index_key in request.POST:
        data[parent_index_key] = request.POST[parent_index_key]

    return render_to_response('graphs.views.html', data)
Example #3
def load_test_collection(project):

    ptm = PerformanceTestModel(project)

    products = ptm.get_products('id')

    product_names = {}

    for id in products:

        if products[ id ]['product'] and \
           products[ id ]['version'] and \
           products[ id ]['branch']:

            name = get_test_collection_name(
                products[ id ]['product'],
                products[ id ]['version'],
                products[ id ]['branch']
                )

            product_names[name] = id

    test_collection_names = ptm.get_test_collection_set()

    new_name_set = set(
        product_names.keys()
        ).difference( test_collection_names )

    for new_name in new_name_set:
        id = ptm.set_test_collection(new_name, "")
        ptm.set_test_collection_map(id, product_names[ new_name ])

    ptm.cache_ref_data()

    ptm.disconnect()
Example #4
def load_test_collection(project):

    ptm = PerformanceTestModel(project)

    products = ptm.get_products('id')

    product_names = {}

    for id in products:

        if products[ id ]['product'] and \
           products[ id ]['version'] and \
           products[ id ]['branch']:

            name = get_test_collection_name(products[id]['product'],
                                            products[id]['version'],
                                            products[id]['branch'])

            product_names[name] = id

    test_collection_names = ptm.get_test_collection_set()

    new_name_set = set(product_names.keys()).difference(test_collection_names)

    for new_name in new_name_set:
        id = ptm.set_test_collection(new_name, "")
        ptm.set_test_collection_map(id, product_names[new_name])

    ptm.cache_ref_data()

    ptm.disconnect()
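The set difference above is what decides which product/version/branch combinations still need a test collection created. A small illustration with hypothetical names:

product_names = {
    'firefox 14.0a1 mozilla-central': 1,
    'firefox 15.0a1 mozilla-inbound': 2,
}
existing_collections = set(['firefox 14.0a1 mozilla-central'])

#Only the combination that is not already a collection survives the difference,
#so it is the one passed to set_test_collection()/set_test_collection_map().
new_name_set = set(product_names.keys()).difference(existing_collections)
#new_name_set == set(['firefox 15.0a1 mozilla-inbound'])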
Example #5
def cache_test_summaries(project):

    ptm = PerformanceTestModel(project)

    ###
    #New reference data could be found in the cached data
    #summary structures. Update the cached reference data
    #every time the summary data is cached.
    ###
    ptm.cache_ref_data()
    ptm.cache_default_project()

    data_iter = ptm.get_all_summary_cache()

    key_test = []
    for d in data_iter:
        for data in d:
            key = utils.get_summary_cache_key(
                project,
                data['item_id'],
                data['item_data'],
            )
            rv = cache.set(key, zlib.compress(data['value']))
            if not rv:
                msg = "ERROR: Failed to store object in memcache: %s, %s\n" % \
                        ( str(data['item_id']), data['item_data'] )
                sys.stderr.write(msg)

    ptm.disconnect()
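Each summary blob is zlib-compressed before it goes into the cache, so a reader has to undo both steps. A minimal read-side sketch, assuming the same Django cache backend and the same utils.get_summary_cache_key helper used above are importable; the function name get_cached_summary is illustrative.

import zlib

from django.core.cache import cache


def get_cached_summary(project, item_id, item_data):
    #'utils' is assumed to be the same module used in the example above.
    key = utils.get_summary_cache_key(project, item_id, item_data)
    compressed = cache.get(key)
    if compressed is None:
        return None
    return zlib.decompress(compressed)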
Example #6
    def handle_project(self, project, **options):

        debug = options.get("debug", None)
        max_iterations = int(options.get("iterations", 50))

        ptm = PerformanceTestModel(project)

        sql_targets = {}

        cycle_iterations = max_iterations

        while cycle_iterations > 0:

            sql_targets = ptm.cycle_data(sql_targets)

            if debug:
                print "Iterations: {0}".format(str(cycle_iterations))
                print "sql_targets"
                print sql_targets

            cycle_iterations -= 1

            if sql_targets['total_count'] == 0:
                cycle_iterations = 0

        ptm.disconnect()
Example #7
File: views.py  Project: hfeeki/datazilla
def set_test_data(request, project=""):
    """
    Post a JSON blob of data for the specified project.

    Store the JSON in the objectstore where it will be held for
    later processing.

    """
    #####
    #This conditional provides backwards compatibility with
    #the talos production environment.  It should
    #be removed after the production environment
    #is uniformly using the new URL format.
    ####
    if project == 'views':
        project = 'talos'

    # default to bad request if the JSON is malformed or not present
    status = 400

    try:
        json_data = request.POST['data']
    except KeyError:
        result = {"status":"No POST data found"}
    else:
        unquoted_json_data = urllib.unquote(json_data)

        error = None

        try:
            json.loads( unquoted_json_data )
        except ValueError as e:
            error = "Malformed JSON: {0}".format(e.message)
            result = {"status": "Malformed JSON", "message": error}
        else:
            result = {
                "status": "well-formed JSON stored",
                "size": len(unquoted_json_data),
            }

        try:
            dm = PerformanceTestModel(project)
            id = dm.store_test_data(unquoted_json_data, error)
            dm.disconnect()
        except Exception as e:
            status = 500
            result = {"status": "Unknown error", "message": str(e)}
        else:

            location = "/{0}/refdata/objectstore/json_blob/{1}".format(
                project, str(id)
                )

            result['url'] = request.build_absolute_uri(location)

            if not error:
                status = 200


    return HttpResponse(json.dumps(result), mimetype=APP_JS, status=status)
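From the client's point of view, this view expects a single POST form field named data holding the URL-quoted JSON blob. A hedged submitter sketch: the host name and URL path are placeholders (the actual route is whatever datazilla's URL configuration maps to set_test_data) and the payload is purely illustrative.

import json
import urllib
import urllib2

payload = {'test_build': {'name': 'talos'}, 'results': {}}  #hypothetical blob

#The view reads request.POST['data'] and urllib.unquote()s it, so quote first.
body = urllib.urlencode({'data': urllib.quote(json.dumps(payload))})

#Placeholder URL: substitute the real host and project route.
response = urllib2.urlopen('https://datazilla.example.com/talos/api/load_test', body)
print response.getcode()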
Example #8
File: summary.py  Project: rniwa/datazilla
def cache_test_summaries(project):

    ptm = PerformanceTestModel(project)

    ###
    #New reference data could be found in the cached data
    #summary structures. Update the cached reference data
    #every time the summary data is cached.
    ###
    ptm.cache_ref_data()
    ptm.cache_default_project()

    data_iter = ptm.get_all_summary_cache()

    for d in data_iter:
        for data in d:
            key = utils.get_summary_cache_key(
                project,
                data['item_id'],
                data['item_data'],
                )

            rv = cache.set(key, zlib.compress( data['value'] ))
            if not rv:
                msg = "ERROR: Failed to store object in memcache: %s, %s\n" % \
                        ( str(data['item_id']), data['item_data'] )
                sys.stderr.write(msg)

    ptm.disconnect()
Example #9
    def handle_project(self, project, options):
        self.stdout.write("Processing project {0}\n".format(project))

        loadlimit = int(options.get("loadlimit", 1))

        dm = PerformanceTestModel(project)
        dm.process_objects(loadlimit)
        dm.disconnect()
Example #10
File: views.py  Project: hfeeki/datazilla
def dataview(request, project="", method=""):

    proc_path = "perftest.views."

    ##Full proc name including base path in json file##
    full_proc_path = "%s%s" % (proc_path, method)

    if settings.DEBUG:
        ###
        #Write IP address and datetime to log
        ###
        print "Client IP:%s" % (request.META['REMOTE_ADDR'])
        print "Request Datetime:%s" % (str(datetime.datetime.now()))

    json = ""
    if method in DATAVIEW_ADAPTERS:
        dm = PerformanceTestModel(project)
        pt_dhub = dm.sources["perftest"].dhub

        if 'adapter' in DATAVIEW_ADAPTERS[method]:
            json = DATAVIEW_ADAPTERS[method]['adapter'](project,
                                                        method,
                                                        request,
                                                        dm)
        else:
            if 'fields' in DATAVIEW_ADAPTERS[method]:
                fields = []
                for f in DATAVIEW_ADAPTERS[method]['fields']:
                    if f in request.GET:
                        fields.append( int( request.GET[f] ) )

                if len(fields) == len(DATAVIEW_ADAPTERS[method]['fields']):
                    json = pt_dhub.execute(proc=full_proc_path,
                                           debug_show=settings.DEBUG,
                                           placeholders=fields,
                                           return_type='table_json')

                else:
                    json = '{ "error":"{0} fields required, {1} provided" }'.format(
                        (str(len(DATAVIEW_ADAPTERS[method]['fields'])),
                        str(len(fields))) )

            else:

                json = pt_dhub.execute(proc=full_proc_path,
                                       debug_show=settings.DEBUG,
                                       return_type='table_json')

        dm.disconnect()

    else:
        json = '{ "error":"Data view name %s not recognized" }' % method

    return HttpResponse(json, mimetype=APP_JS)
Example #11
def dataview(request, project="", method=""):

    proc_path = "perftest.views."

    ##Full proc name including base path in json file##
    full_proc_path = "%s%s" % (proc_path, method)

    if settings.DEBUG:
        ###
        #Write IP address and datetime to log
        ###
        print "Client IP:%s" % (request.META['REMOTE_ADDR'])
        print "Request Datetime:%s" % (str(datetime.datetime.now()))

    json = ""
    if method in DATAVIEW_ADAPTERS:
        dm = PerformanceTestModel(project)
        pt_dhub = dm.sources["perftest"].dhub

        if 'adapter' in DATAVIEW_ADAPTERS[method]:
            json = DATAVIEW_ADAPTERS[method]['adapter'](project, method,
                                                        request, dm)
        else:
            if 'fields' in DATAVIEW_ADAPTERS[method]:
                fields = []
                for f in DATAVIEW_ADAPTERS[method]['fields']:
                    if f in request.GET:
                        fields.append(int(request.GET[f]))

                if len(fields) == len(DATAVIEW_ADAPTERS[method]['fields']):
                    json = pt_dhub.execute(proc=full_proc_path,
                                           debug_show=settings.DEBUG,
                                           placeholders=fields,
                                           return_type='table_json')

                else:
                    json = '{ "error":"{0} fields required, {1} provided" }'.format(
                        (str(len(DATAVIEW_ADAPTERS[method]['fields'])),
                         str(len(fields))))

            else:

                json = pt_dhub.execute(proc=full_proc_path,
                                       debug_show=settings.DEBUG,
                                       return_type='table_json')

        dm.disconnect()

    else:
        json = '{ "error":"Data view name %s not recognized" }' % method

    return HttpResponse(json, mimetype=APP_JS)
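The 'fields' branch above collects integer GET parameters in the order the adapter entry lists them and passes them to pt_dhub.execute() as placeholders; if any are missing, the count mismatch triggers the "fields required" error instead. Illustrative only, with a hypothetical adapter entry rather than one taken from datazilla's real DATAVIEW_ADAPTERS:

EXAMPLE_DATAVIEW_ADAPTERS = {
    'test_runs': {'fields': ['test_run_id', 'page_id']},
}

#A request such as  GET /<project>/dataview/test_runs?test_run_id=42&page_id=7
#yields placeholders [42, 7], which become the SQL placeholders for the
#perftest.views.test_runs procedure named by full_proc_path.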
Example #12
    def handle_project(self, project, **options):

        self.stdout.write("Processing project {0}\n".format(project))

        pushlog_project = options.get("pushlog_project", 'pushlog')
        loadlimit = int(options.get("loadlimit", 1))
        debug = options.get("debug", None)

        test_run_ids = []
        ptm = PerformanceTestModel(project)
        test_run_ids = ptm.process_objects(loadlimit)
        ptm.disconnect()

        """
        metrics_exclude_projects = set(['b2g', 'games', 'jetperf', 'marketapps', 'microperf', 'stoneridge', 'test', 'webpagetest'])
        if project not in metrics_exclude_projects:
            #minimum required number of replicates for
            #metrics processing
            replicate_min = 5
            compute_test_run_metrics(
                project, pushlog_project, debug, replicate_min, test_run_ids
                )
        """

        mtm = MetricsTestModel(project)
        revisions_without_push_data = mtm.load_test_data_all_dimensions(
            test_run_ids)

        if revisions_without_push_data:

            revision_nodes = {}
            plm = PushLogModel(pushlog_project)

            for revision in revisions_without_push_data:

                node = plm.get_node_from_revision(
                    revision, revisions_without_push_data[revision])

                revision_nodes[revision] = node

            plm.disconnect()
            mtm.set_push_data_all_dimensions(revision_nodes)

        mtm.disconnect()
Example #13
def build_test_summaries(project):

    ptm = PerformanceTestModel(project)

    time_ranges = utils.get_time_ranges()

    products = ptm.get_products()

    for product_name in products:

        for tr in ['days_7', 'days_30']:

            table = ptm.get_test_run_summary(str(time_ranges[tr]['start']),
                                             str(time_ranges[tr]['stop']),
                                             [products[product_name]], [], [])

            json_data = json.dumps(table)
            ptm.set_summary_cache(products[product_name], tr, json_data)

    ptm.disconnect()
Example #14
    def handle_project(self, project, **options):
        self.stdout.write("Processing project {0}\n".format(project))

        pushlog_project = options.get("pushlog_project", 'pushlog')
        loadlimit = int(options.get("loadlimit", 1))
        debug = options.get("debug", None)

        test_run_ids = []
        ptm = PerformanceTestModel(project)
        test_run_ids = ptm.process_objects(loadlimit)
        ptm.disconnect()

        metrics_exclude_projects = set(['b2g', 'stoneridge'])

        if project not in metrics_exclude_projects:
            #minimum required number of replicates for
            #metrics processing
            replicate_min = 5
            compute_test_run_metrics(
                project, pushlog_project, debug, replicate_min, test_run_ids
                )
Example #15
def graphs(request, project=""):

    ####
    #Load any signals provided in the page
    ####
    signals = []
    time_ranges = utils.get_time_ranges()

    for s in SIGNALS:
        if s in request.POST:
            signals.append({
                'value': urllib.unquote(request.POST[s]),
                'name': s
            })
    ###
    #Get reference data
    ###
    ptm = PerformanceTestModel(project)
    json_data = ptm.get_test_reference_data()
    ptm.disconnect()

    time_key = 'days_30'

    data = {
        'time_key': time_key,
        'reference_json': json_data,
        'signals': signals
    }

    ####
    #The caller has provided the view parent of the signals; load it into
    #the page.  This occurs when a data view is in its Pane form and is
    #detached to exist on its own page.
    ####
    parent_index_key = 'dv_parent_dview_index'
    if parent_index_key in request.POST:
        data[parent_index_key] = request.POST[parent_index_key]

    return render_to_response('graphs.views.html', data)
Example #16
def build_test_summaries(project):

    ptm = PerformanceTestModel(project)

    time_ranges = utils.get_time_ranges()

    products = ptm.get_products()

    for product_name in products:

        for tr in ['days_7', 'days_30']:

            table = ptm.get_test_run_summary(str(time_ranges[tr]['start']),
                                             str(time_ranges[tr]['stop']),
                                             [products[product_name]],
                                             [],
                                             [])

            json_data = json.dumps(table)
            ptm.set_summary_cache(products[product_name], tr, json_data)

    ptm.disconnect()
Example #17
    def handle(self, *args, **options):
        """ Transfer data to a development project based on the args value. """

        host = options.get("host")
        dev_project = options.get("dev_project")
        prod_project = options.get("prod_project")
        branch = options.get("branch")
        days_ago = options.get("days_ago")
        logfile = options.get("logfile")

        if not host:
            self.println("You must supply a host name to retrieve data from " +
                     "--host hostname")
            return

        if not dev_project:
            self.println("You must supply a dev_project name to load data in.")
            return

        if not branch:
            self.println("You must supply a branch name to retrieve data for.")
            return

        #Set timeout so we don't hang
        timeout = 120
        socket.setdefaulttimeout(timeout)

        revisions_uri = 'refdata/pushlog/list'
        params = 'days_ago={0}&branches={1}'.format(days_ago, branch)
        url = "https://{0}/{1}?{2}".format(host, revisions_uri, params)

        json_data = ""

        #Retrieve revisions to iterate over
        try:
            response = urllib.urlopen(url)
            json_data = response.read()
        except socket.timeout:
            self.stdout.write( "URL: {0}\nTimedout {1} seconds\n".format(
                url, timeout
                ) )
            sys.exit(0)

        data = json.loads(json_data)
        all_keys = data.keys()
        all_keys.sort()

        ##Keep track of revisions already loaded##
        #Open in append mode so earlier runs are not truncated, then rewind
        #to read the revisions that were already transferred.
        file_obj = open(logfile, 'a+')
        file_obj.seek(0)
        revisions_seen = set()
        for line in file_obj.readlines():
            revisions_seen.add(line.strip())

        revisions = []

        for key in all_keys:
            for revision in data[key]['revisions']:
                if revision not in revisions_seen:
                    revisions.append(revision)

        dm = PerformanceTestModel(dev_project)

        for revision in revisions:

            rawdata_uri = '{0}/testdata/raw/{1}/{2}/'.format(
                prod_project, branch, revision
                )
            rawdata_url = "https://{0}/{1}".format(host, rawdata_uri)

            raw_json_data = ""

            try:
                rawdata_response = urllib.urlopen(rawdata_url)
                raw_json_data = rawdata_response.read()
            except socket.timeout:
                self.stdout.write( "URL: {0}\nTimedout {1} seconds\n".format(
                    rawdata_url, timeout) )
                sys.exit(0)

            test_object_data = json.loads(raw_json_data)

            for test_object in test_object_data:
                id = dm.store_test_data( json.dumps(test_object), "" )
                self.stdout.write( "Revision:{0} Id:{1}\n".format(revision, str(id)))

            #Record the revision as loaded
            file_obj.write(revision + "\n")

        file_obj.close()
        dm.disconnect()
Example #18
def set_test_data(request, project=""):
    """
    Post a JSON blob of data for the specified project.

    Store the JSON in the objectstore where it will be held for
    later processing.

    """
    #####
    #This conditional provides backwards compatibility with
    #the talos production environment.  It should
    #be removed after the production environment
    #is uniformly using the new URL format.
    ####
    if project == 'views':
        project = 'talos'

    # default to bad request if the JSON is malformed or not present
    status = 400

    try:
        json_data = request.POST['data']
    except KeyError:
        result = {"status": "No POST data found"}
    else:
        unquoted_json_data = urllib.unquote(json_data)

        error = None
        deserialized_json = {}

        try:
            deserialized_json = json.loads(unquoted_json_data)
        except ValueError as e:
            error = "Malformed JSON: {0}".format(e.message)
            result = {"status": "Malformed JSON", "message": error}
        else:
            result = {
                "status": "well-formed JSON stored",
                "size": len(unquoted_json_data),
            }

        try:
            dm = PerformanceTestModel(project)

            dm.pre_process_data(unquoted_json_data, deserialized_json)

            id = dm.store_test_data(unquoted_json_data, error)

            dm.disconnect()
        except Exception as e:
            status = 500
            result = {"status": "Unknown error", "message": str(e)}
        else:

            location = "/{0}/refdata/objectstore/json_blob/{1}".format(
                project, str(id))

            result['url'] = request.build_absolute_uri(location)

            if not error:
                status = 200

    return HttpResponse(json.dumps(result), mimetype=APP_JS, status=status)
Example #19
    def handle(self, *args, **options):
        """ Transfer data to a development project based on the args value. """

        host = options.get("host")
        dev_project = options.get("dev_project")
        prod_project = options.get("prod_project")
        branch = options.get("branch")
        days_ago = options.get("days_ago")
        logfile = options.get("logfile")

        if not host:
            self.println("You must supply a host name to retrieve data from " +
                         "--host hostname")
            return

        if not dev_project:
            self.println("You must supply a dev_project name to load data in.")
            return

        if not branch:
            self.println("You must supply a branch name to retrieve data for.")
            return

        #Set timeout so we don't hang
        timeout = 120
        socket.setdefaulttimeout(timeout)

        revisions_uri = 'refdata/pushlog/list'
        params = 'days_ago={0}&branches={1}'.format(days_ago, branch)
        url = "https://{0}/{1}?{2}".format(host, revisions_uri, params)

        json_data = ""

        #Retrieve revisions to iterate over
        try:
            response = urllib.urlopen(url)
            json_data = response.read()
        except socket.timeout:
            self.stdout.write("URL: {0}\nTimedout {1} seconds\n".format(
                url, timeout))
            sys.exit(0)

        data = json.loads(json_data)
        all_keys = data.keys()
        all_keys.sort()

        ##Keep track of revisions already loaded##
        #Open in append mode so earlier runs are not truncated, then rewind
        #to read the revisions that were already transferred.
        file_obj = open(logfile, 'a+')
        file_obj.seek(0)
        revisions_seen = set()
        for line in file_obj.readlines():
            revisions_seen.add(line.strip())

        revisions = []

        for key in all_keys:
            for revision in data[key]['revisions']:
                if revision not in revisions_seen:
                    revisions.append(revision)

        dm = PerformanceTestModel(dev_project)

        for revision in revisions:

            rawdata_uri = '{0}/testdata/raw/{1}/{2}/'.format(
                prod_project, branch, revision)
            rawdata_url = "https://{0}/{1}".format(host, rawdata_uri)

            raw_json_data = ""

            try:
                rawdata_response = urllib.urlopen(rawdata_url)
                raw_json_data = rawdata_response.read()
            except socket.timeout:
                self.stdout.write("URL: {0}\nTimedout {1} seconds\n".format(
                    rawdata_url, timeout))
                sys.exit(0)

            test_object_data = json.loads(raw_json_data)

            for test_object in test_object_data:
                id = dm.store_test_data(json.dumps(test_object), "")
                self.stdout.write("Revision:{0} Id:{1}\n".format(
                    revision, str(id)))

            #Record the revision as loaded
            file_obj.write(revision + "\n")

        file_obj.close()
        dm.disconnect()
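The logfile bookkeeping in this command exists so that revisions already copied are skipped on later runs. A minimal, self-contained restatement of that bookkeeping; the log name and revision hashes below are hypothetical.

def load_seen_revisions(logfile):
    try:
        with open(logfile) as f:
            return set(line.strip() for line in f if line.strip())
    except IOError:
        return set()


def record_revision(logfile, revision):
    with open(logfile, 'a') as f:
        f.write(revision + '\n')


seen = load_seen_revisions('transfer_revisions.log')
for revision in ['3f1a9c0d2b7e', '77bd02aa91cf']:  #hypothetical revision hashes
    if revision in seen:
        continue
    #...transfer the revision's test data here, as in the command above...
    record_revision('transfer_revisions.log', revision)
    seen.add(revision)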