Example No. 1
    def run(self):
        (options, args) = self.parser.parse_args()
        self.options = options
        self.args = args
        runner1, runner2, profile1, profile2 = self.get_runners()

        if sys.platform == 'linux2':
            try:
                from Xlib import X, display
                d = display.Display()
                s = d.screen()
                root = s.root
                root.warp_pointer(0, 0)
                d.sync()
            except ImportError:
                print "Xlib is not installed. Mouse may interfere with screenshots."

        if options.test:
            all_sites = test_all_sites
            store = False
        else:
            all_sites = None
            store = True

        if options.report is None:
            db = couchquery.Database("http://127.0.0.1:5984/sitecompare")
        else:
            if '@' in options.report:
                # str.strip('http://') strips *characters*, not a prefix;
                # split on '://' instead to peel off the scheme safely
                user, password = options.report.split('://', 1)[1].split(
                    '@')[0].split(':')
                url = 'http://' + options.report.split('@')[1]
                http = httplib2.Http('.cache')
                http.add_credentials(user, password)
                db = couchquery.Database(url, http=http)
            else:
                db = couchquery.Database(options.report)

        c = CompareSites(runner1,
                         runner2,
                         options.report,
                         self.run_info,
                         all_sites=all_sites,
                         store=store,
                         db=db)
        c.start()

        sleep(2)
        c.stop()
        sleep(3)
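
Credential-bearing URLs such as the options.report value above can also be parsed with the Python 2 standard library; a minimal sketch, where split_credentials is a hypothetical helper rather than part of the original script:

from urlparse import urlparse  # Python 2 stdlib


def split_credentials(report_url):
    # e.g. 'http://user:secret@couch.example.org:5984/sitecompare'
    parsed = urlparse(report_url)
    host = parsed.netloc.rpartition('@')[2]  # drop credentials, keep any port
    bare_url = '%s://%s%s' % (parsed.scheme, host, parsed.path)
    return parsed.username, parsed.password, bare_url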
Example No. 2
def setup():
    from brasstacks import fennec
    olddb = couchquery.Database('http://localhost:5984/fennec_old')
    db = couchquery.Database('http://localhost:5984/fennec')
    db.sync_design_doc('fennec', fennec.design_doc)
    db.sync_design_doc('fennecFailures', fennec.failures_design_doc)
    a = Stub()
    a.add_resource('fennec', fennec.FennecApplication(db))
    global thread
    global httpd
    httpd = make_server('', 8888, a)
    thread = Thread(target=httpd.serve_forever)
    thread.start()
    sleep(1)
    return olddb, db
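
setup() leaves the WSGI server running in a background thread; a matching teardown is sketched below, assuming the module-level httpd and thread globals that setup() assigns:

def teardown():
    # counterpart to setup(): stop serve_forever() and join the server thread
    httpd.shutdown()
    thread.join()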
Example No. 3
def sync_cli(db=None):
    db = [i for i in sys.argv if i.startswith('http')]
    assert len(db) == 1
    dbname = db[0]
    db = couchquery.Database(dbname, cache=Cache())
    sys.argv.remove(dbname)

    sync(db, sys.argv)
Example No. 4
def setup_module(module):
    #     from brasstacks import crashtest
    crashdb = couchquery.Database(crashtestcouch + '/crashtest')
    resultdb = couchquery.Database(crashtestcouch + '/crashtest_results')
    #     crashdb.sync_design_doc("crashes", crashtest.crashes_design_doc)
    #     resultdb.sync_design_doc("jobs", crashtest.jobs_design_doc)
    #     resultdb.sync_design_doc("results", crashtest.results_design_doc)
    #     a = Stub()
    #     a.add_resource('crashtest', crashtest.CrashTestApplication(crashdb, resultdb))
    #     httpd = make_server('', 8888, a)
    #     thread = Thread(target=httpd.serve_forever)
    #     thread.start()
    #
    #     # Global these boys
    #     module.thread = thread
    #     module.httpd = httpd
    module.crashdb = crashdb
    module.resultdb = resultdb
Example No. 5
def main():

    document = 'sample_doc_for_testing_and_dev'
    products = ['Fennec', 'Firefox']
    platforms = ['Maemo-n810', 'WinMo']  #, 'Linux', 'Mac']
    testtypes = ['crashtests', 'mochitests']  #, 'xpcshell', 'reftests']
    build = timestamp = ''

    # db = couchquery.Database("http://pythonesque.org:5984/logcompare", cache=Cache())
    db = couchquery.Database(
        "http://*****:*****@happyhans.couch.io/logcompare",
        cache=Cache())
    # create documents
    doccount = random.randint(40, 70)
    for i in range(0, doccount):

        # create metadata
        buildstructure = {}

        buildstructure['build'] = random.randint(999000, 999999)
        buildstructure['product'] = random.choice(products)
        buildstructure['os'] = random.choice(platforms)
        buildstructure['testtype'] = random.choice(testtypes)
        buildstructure['timestamp'] = str(datetime.datetime.now())
        buildstructure['document'] = document
        buildstructure['tinderboxID'] = random.choice(
            [-1, str(datetime.datetime.now())])

        # create tests
        tests = {}
        offset = random.randrange(1, 5)
        testcount = random.randint(100, 500)
        for x in range(0, testcount):
            failcount = random.randint(0, 10)
            todocount = random.randint(0, 3)
            notes = []
            for y in range(0, (failcount + todocount)):
                # notes.append("This test should have returned TRUE but returned FALSE")
                notes.append("Message!")
            tests['test_' + str(offset + x) + '.js'] = {
                'pass': random.randint(0, 5),
                'fail': failcount,
                'todo': todocount,
                'note': notes
            }
        print json.dumps(buildstructure, indent=2)
        buildstructure['tests'] = tests

        # outputFile = "C:/_Code/python/outputdata" + str(i) + ".html"
        # outFile = open(outputFile, 'w')
        # outFile.write(json.dumps(buildstructure, indent=2))
        # outFile.close()

        db.create(buildstructure)

    print "done uploading results"
Example No. 6
    def __init__(self, uri, flag='c', writeback=False, raiseconflicts=False, http_cache=None):
        self._db = couchquery.Database(uri, cache=http_cache)
        if flag == 'n':
            couchquery.deletedb(self._db)
        if flag in ('n', 'c'):
            if not self._db.exists():
                couchquery.createdb(self._db)
        self._flag = flag
        self._writeback = writeback
        self._raiseconflicts = raiseconflicts
        self._cache = {}
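
Only __init__ is shown, so the enclosing class name is unknown; hypothetical usage of this shelf-style wrapper (CouchShelf is an assumed name): flag='c' creates the database when missing, while flag='n' deletes and recreates it:

shelf = CouchShelf('http://localhost:5984/scratch', flag='c')  # create if absent
fresh = CouchShelf('http://localhost:5984/scratch', flag='n')  # always start empty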
Example No. 7
def cli():
    db = [i for i in sys.argv if i.startswith('http')]
    assert len(db) == 1
    dbname = db[0]
    db = couchquery.Database(dbname, cache=Cache())
    sys.argv.remove(dbname)

    sync(db, sys.argv)
    a = get_application(db, sys.argv)
    httpd = make_server('', 8888, a)
    print "Serving on http://localhost:8888/"
    httpd.serve_forever()
Example No. 8
    def test_simple_set_get(self):
        d = shelve.open(URI)
        d['item1'] = []
        d.close()

        d = shelve.open(URI)
        self.assertEqual(d['item1'], [])
        d.close()

        # Make sure the data actually ended up in the database
        db = couchquery.Database(URI)
        doc = db.get('item1')
        value = pickle.loads(str(doc.value))
        self.assertEqual(value, [])
Example No. 9
    def test_conflict_behavior(self):
        # First prove that we will silently override conflicts
        d = shelve.open(URI)
        d['item1'] = []

        db = couchquery.Database(URI)
        doc = db.get('item1')
        doc.value = pickle.dumps("NotAList")
        db.save(doc)

        d['item1'] = [1]
        self.assertEqual(d['item1'], [1])

        d.close()
Example No. 10
def save(data):
    #TODO: make the db a global variable or parameter to function
    #    db = couchquery.Database("http://pythonesque.org:5984/fennec_test", cache=Cache())
    #    db = couchquery.Database("http://*****:*****@happyhans.couch.io/logcompare", cache=Cache())
    db = couchquery.Database("http://10.2.76.100:5984/fennec_alpha",
                             cache=Cache())
    saved = False
    try:
        starttime = datetime.datetime.now()
        print db.create(data)
        finishtime = datetime.datetime.now()
        print finishtime - starttime
        saved = True
    except CouchDBException, e:
        print "Error occurred while sending data :" + str(e)
Example No. 11
def get_application(db, names):
    a = Stub()
    a.add_resource('static', FileServerApplication(static_dir))
    if 'sitecompare' in names:
        from brasstacks.sitecompare import SiteCompareApplication
        a.add_resource('sitecompare', SiteCompareApplication(db))
    if 'logcompare' in names:
        from brasstacks.logcompare import LogCompareApplication
        logcompare_application = LogCompareApplication(db)
        a.add_resource('logcompare', logcompare_application)
    if 'users' in names or 'brasstacks' in names:
        from brasstacks.users import UsersApplication
        users_application = UsersApplication(db)
        a.add_resource('users', users_application)
    if 'tcm' in names:
        from brasstacks.tcm import TestCaseManagerApplication
        tcm_application = TestCaseManagerApplication(db)
        a.add_resource('tcm', tcm_application)
    if 'fennec' in names:
        from brasstacks.fennec import FennecApplication
        fennec_application = FennecApplication(db)
        a.add_resource('fennec', fennec_application)
    if 'mozmill' in names:
        from brasstacks.mozmill import MozmillApplication
        mozmill_application = MozmillApplication(db)
        a.add_resource('mozmill', mozmill_application)
    if 'crashtest' in names:
        resultdb = couchquery.Database(db.uri[:-1] + '_results')
        a.add_resource('crashtest',
                       crashtest.CrashTestApplication(db, resultdb))
    if 'firefox' in names:
        from brasstacks.firefox import FirefoxApplication
        firefox_application = FirefoxApplication(db)
        a.add_resource('firefox', firefox_application)

    return a
Example No. 12
def main():
    print "going"

    # define your CouchDB endpoints here
    TARGET_HTTP = '115.146.86.188'
    update_HTTP = '127.0.0.1'
    TARGET_DB = 'melbourne_suburb'
    weather_DB = 'weatherdb'
    updatedb = 'coastshop'
    COUCHDB_LINK = 'http://' + TARGET_HTTP + ':5984/' + TARGET_DB
    weather_LINK = 'http://' + TARGET_HTTP + ':5984/' + weather_DB
    updatedb_LINK = 'http://' + update_HTTP + ':5984/'

    #################################################################################
    # # to query the view for cities and weather
    db = couchquery.Database(COUCHDB_LINK)
    db_weather = couchquery.Database(weather_LINK)
    # db_update = couchquery.Database(updatedb_LINK)

    #################################################################################
    # # to update summary to the new db
    server = couchdb.Server(updatedb_LINK)
    db_update = server[updatedb]

    #################################################################################
    # # helper for building a nested (2D) dictionary with auto-created keys
    def tree():
        return collections.defaultdict(tree)
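    # e.g. t = tree(); t['01 Jan 2015']['coast'] = 3 needs no prior setup,
    # because every missing key materializes another nested defaultdict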

    #################################################################################
    # # sum the total tweet count per day per region
    rows = db.views.mapreduce.coastshop(group_level=2)
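    # group_level=2 groups the reduce output by the first two key parts, so
    # each row key here is (date, region) and row[1] holds the reduced value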

    NumPerday = tree()
    pnnDistri = tree()

    for row in rows.items():
        if (row[0][1] == 'coast'):
            NumPerday[row[0][0]]['coast'] = row[1][0]
        elif (row[0][1] == 'shop'):
            NumPerday[row[0][0]]['shop'] = row[1][0]
        else:
            NumPerday[row[0][0]]['other'] = row[1][0]

    #################################################################################
    # # calculate the positive/negative/neutral percentages for each day and region
    rows = db.views.mapreduce.coastshop(group_level=3)

    for row in rows.items():
        if row[0][0] in NumPerday:
            pnnDistri[row[0][0]][row[0][1]]['tweets'] = NumPerday[row[0][0]][
                row[0][1]]

            if row[0][2] == 1:

                # pnnDistri[(row[0][0],1)] = [float(row[1][0])/NumPerday[row[0][0]]]
                pnnDistri[row[0][0]][row[0][1]][1] = float(
                    row[1][0]) / NumPerday[row[0][0]][row[0][1]]
                # print pnnDistri[(row[0][0],1)]
            elif row[0][2] == 0:

                # pnnDistri[(row[0][0],0)] = [float(row[1][0])/NumPerday[row[0][0]]]
                pnnDistri[row[0][0]][row[0][1]][0] = float(
                    row[1][0]) / NumPerday[row[0][0]][row[0][1]]
                # print pnnDistri[(row[0][0],0)]
            else:
                # pnnDistri[(row[0][0],-1)] = [float(row[1][0])/NumPerday[row[0][0]]]
                pnnDistri[row[0][0]][row[0][1]][-1] = float(
                    row[1][0]) / NumPerday[row[0][0]][row[0][1]]
                # print pnnDistri[(row[0][0],-1)]

    # print pnnDistri

    #################################################################################
    # # pull weather from the weather db and merge it with the emotion percentages
    dateSuburb = tree()
    rows = db_weather.views.mapreduce.Melbourne()
    for row in rows.items():
        if row[0][1] in pnnDistri:
            for key, value in pnnDistri[row[0][1]].items():
                x = {}
                x['region'] = key
                x['date'] = row[0][1]
                x['condition'] = row[0][2]
                x['high'] = row[0][3]
                x['low'] = row[0][4]
                # if pnnDistri[row[0][1]][1] is not None:
                x['positive'] = pnnDistri[row[0][1]][key][1]
                # if pnnDistri[row[0][1]][1] is not None:
                x['netrual'] = pnnDistri[row[0][1]][key][0]
                # if pnnDistri[row[0][1]][1] is not None:
                x['negative'] = pnnDistri[row[0][1]][key][-1]
                # if pnnDistri[row[0][1]][1] is not None:
                x['tweets'] = pnnDistri[row[0][1]][key]['tweets']
                smallkey = (row[0][1], key)
                dateSuburb[smallkey] = x
                # print x

            # print x

    #################################################################################
    # # convert to a list so the rows can be reordered
    diclist = []
    for key, value in dateSuburb.iteritems():
        temp = [key, value]
        diclist.append(temp)
    # for key in diclist:
    # 	print key[0][0],time.strptime(key[0][0],"%d %b %Y")
    # 	timeobject = time.strptime(key[0][0],"%d %b %Y")
    # 	print (datetime.datetime(timeobject.tm_year,timeobject.tm_mon,timeobject.tm_mday)-datetime.datetime(1970,1,1)).total_seconds()
    # for key in diclist:
    # 	print key[0]
    diclist.sort(key=lambda date: time.strptime(date[0][0], "%d %b %Y"))
    # for key in diclist:
    # 	print key

    # #################################################################################
    # # # update to database
    if len(db_update) > 0:
        daterow = []
        for id in db_update:
            doc = db_update[id]
            if ('date' in doc.keys()):
                #################################################################################
                # # use this tuple as an index to identify the same day and region
                daterow.append([(doc['date'], doc['region']), id])
        # for key,value in daterow:
        # 	print key,value
        # print daterow
        # for id in db_update:
        # 	doc = db_update[id]
        # 	if('date' in doc.keys()):
        newIndex = [item[0] for item in daterow]

        for key, value in diclist:
            # print key,value
            if (key in newIndex):
                getid = [item[1] for item in daterow if item[0] == key]
                print getid, type(getid[0])
                newid = getid[0]
                doc = db_update[newid]
                if (value['positive'] != doc['positive']
                        or value['negative'] != doc['negative']
                        or value['netrual'] != doc['netrual']
                        or value['tweets'] != doc['tweets']):
                    doc['positive'] = value['positive']
                    doc['negative'] = value['negative']
                    doc['netrual'] = value['netrual']
                    # also write the tweet count back; the original comparison
                    # checked it but never updated it
                    doc['tweets'] = value['tweets']
                    db_update[newid] = doc
            if (key not in newIndex):
                print key
                db_update.save(value)
    else:
        for key, value in diclist:
            db_update.save(value)
Example No. 13
def writeTempTweetcsv():
    TARGET_HTTP = '115.146.86.188'
    TARGET_DB = 'citysummary'
    COUCHDB_LINK = 'http://' + TARGET_HTTP + ':5984/' + TARGET_DB

    #################################################################################
    # # to query the view for cities and weather
    db = couchquery.Database(COUCHDB_LINK)

    #################################################################################
    # # calculate the positive/negative/neutral percentages for each day and city
    citynamelist = [
        'Adelaide', 'Alice Springs', 'Brisbane', 'Canberra', 'Darwin',
        'Hobart', 'Melbourne', 'Perth', 'Sydney'
    ]
    for citynameitem in citynamelist:
        rows = db.views.mapreduce.condition_deal(key=citynameitem)
        rows_other = db.views.mapreduce.condition_deal(key="Adelaide")
        dataframelist = []
        dataframelist_other = []

        #################################################################################
        # # convert date to day month year
        for row, other in zip(rows, rows_other):
            # row[0] = time.strptime(row[0], "%d %m %Y %H %M %S")
            row[0] = datetime.datetime.strptime(row[0], "%d %b %Y")
            row[4] = float(row[4])
            row[5] = float(row[5])
            other[0] = datetime.datetime.strptime(other[0], "%d %b %Y")
            other[4] = float(other[4])
            other[5] = float(other[5])

            dataframelist.append(row)
            dataframelist_other.append(other)

        ##############################################################################
        # extract the corresponding data
        df = pd.DataFrame(dataframelist,
                          columns=[
                              'date', 'positive', 'netrual', 'negative',
                              'high', 'low', 'diff', 'tweets', 'condition'
                          ])
        indexeddf = df.set_index(['date'])

        df_other = pd.DataFrame(dataframelist_other,
                                columns=[
                                    'date', 'positive', 'netrual', 'negative',
                                    'high', 'low', 'diff', 'tweets',
                                    'condition'
                                ])
        indexeddf_other = df_other.set_index(['date'])

        newone = indexeddf.fillna(0)

        ##############################################################################
        # draw a cluster plot for each variable pair
        twitterinfo = ['positive', 'netrual', 'negative', 'tweets']
        weahterinfo = ['high', 'low', 'diff', 'condition']
        for axisx in twitterinfo:
            for axisy in weahterinfo:
                namelist = axisx + "_" + axisy + "_" + citynameitem
                print axisx, axisy
                drawwDBSCAN(newone[[axisx, axisy]].values, namelist)
Example No. 14
def writeTempTweetcsv():
    TARGET_HTTP = '115.146.86.188'
    TARGET_DB = 'citysummary'
    COUCHDB_LINK = 'http://' + TARGET_HTTP + ':5984/' + TARGET_DB

    #################################################################################
    # # to query the view for cities and weather
    db = couchquery.Database(COUCHDB_LINK)

    #################################################################################
    # # calculate the positive/negative/neutral percentages for each day and city
    citynamelist = [
        'Adelaide', 'Alice Springs', 'Brisbane', 'Canberra', 'Darwin',
        'Hobart', 'Melbourne', 'Perth', 'Sydney'
    ]
    for citynameitem in citynamelist:
        rows = db.views.mapreduce.condition_deal(key=citynameitem)
        rows_other = db.views.mapreduce.condition_deal(key="Adelaide")
        dataframelist = []
        dataframelist_other = []

        #################################################################################
        # # convert date to day month year
        for row, other in zip(rows, rows_other):
            # row[0] = time.strptime(row[0], "%d %m %Y %H %M %S")
            row[0] = datetime.datetime.strptime(row[0], "%d %b %Y")
            row[4] = float(row[4])
            row[5] = float(row[5])
            other[0] = datetime.datetime.strptime(other[0], "%d %b %Y")
            other[4] = float(other[4])
            other[5] = float(other[5])

            dataframelist.append(row)
            dataframelist_other.append(other)

        df = pd.DataFrame(dataframelist,
                          columns=[
                              'date', 'positive', 'netrual', 'negative',
                              'high', 'low', 'diff', 'tweets', 'condition'
                          ])
        indexeddf = df.set_index(['date'])

        # drawCLuster(df[['positive','temperature']].values,"coast")
        # drawGMM(df[['positive','temperature']].values,"coast")

        df_other = pd.DataFrame(dataframelist_other,
                                columns=[
                                    'date', 'positive', 'netrual', 'negative',
                                    'high', 'low', 'diff', 'tweets',
                                    'condition'
                                ])
        indexeddf_other = df_other.set_index(['date'])

        dflookup_coast = pd.DataFrame(indexeddf,
                                      columns=[
                                          'positive', 'netrual', 'negative',
                                          'high', 'low', 'diff', 'tweets',
                                          'condition'
                                      ])

        newone = indexeddf.fillna(0)
        twitterinfo = ['positive', 'netrual', 'negative', 'tweets']
        weahterinfo = ['high', 'low', 'diff', 'condition']

        ##############################################################################
        # set parameters for the Isomap embedding
        n_neighbors = 5
        n_components = 2
        namelist = "hour_dimention_weahterinfo_" + citynameitem

        ##############################################################################
        # cluster and draw Y against compareY after the Isomap embedding
        Y = manifold.Isomap(n_neighbors, n_components).fit_transform(
            newone[weahterinfo].values)
        compareY = manifold.Isomap(n_neighbors, n_components).fit_transform(
            newone[twitterinfo].values)
        drawwDBSCAN(Y, compareY, namelist)
Example No. 15
    def tearDown(self):
        # Make sure we leave with a clean slate
        db = couchquery.Database(URI)
        response = db.http.get('')
        if response.status == 200:
            couchquery.deletedb(db)
Example No. 16
    def setUp(self):
        # Make sure we start with a clean slate
        db = couchquery.Database(URI)
        response = db.http.get('')
        if response.status == 200:
            couchquery.deletedb(db)
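
The same clean-slate guard can be written with the exists() helper seen in Example No. 6; a sketch, assuming exists() reports whether the database is present:

    def setUp(self):
        # equivalent guard via exists() instead of checking a raw GET status
        db = couchquery.Database(URI)
        if db.exists():
            couchquery.deletedb(db)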
Example No. 17
def writeTempTweetcsv():
    TARGET_HTTP = '115.146.86.188'
    TARGET_DB = 'coastshop_hour'
    COUCHDB_LINK = 'http://' + TARGET_HTTP + ':5984/' + TARGET_DB

    #################################################################################
    # # to query the view for cities and weather
    db = couchquery.Database(COUCHDB_LINK)

    #################################################################################
    # # calculate the positive/negative/neutral percentages for each day and region
    regionlist = ['coast', 'shop', 'other']
    for region in regionlist:
        rows = db.views.mapreduce.total_condi(key=region)
        rows_other = db.views.mapreduce.total_condi(key="other")
        dataframelist = []
        dataframelist_other = []

        #################################################################################
        # # convert date to day month year hour minute second
        for row, other in zip(rows, rows_other):
            # row[0] = time.strptime(row[0], "%d %m %Y %H %M %S")
            row[0] = datetime.datetime.strptime(row[0], "%d %m %Y %H %M %S")
            row[6] = float(row[6])
            row[7] = float(row[7])
            row[8] = float(row[8])
            row[9] = float(row[9])
            other[0] = datetime.datetime.strptime(other[0],
                                                  "%d %m %Y %H %M %S")
            other[6] = float(other[6])
            other[7] = float(other[7])
            other[8] = float(other[8])
            other[9] = float(other[9])
            dataframelist.append(row)
            dataframelist_other.append(other)

        ##############################################################################
        # extract the corresponding data
        df = pd.DataFrame(dataframelist,
                          columns=[
                              'date', 'positive', 'netrual', 'negative',
                              'tweets', 'condition', 'humidity', 'pressure',
                              'temperature', 'windSpeed'
                          ])
        indexeddf = df.set_index(['date'])

        df_other = pd.DataFrame(dataframelist_other,
                                columns=[
                                    'date', 'positive', 'netrual', 'negative',
                                    'tweets', 'condition', 'humidity',
                                    'pressure', 'temperature', 'windSpeed'
                                ])
        indexeddf_other = df_other.set_index(['date'])

        ##############################################################################
        # draw a cluster plot for each variable pair
        twitterinfo = ['positive', 'netrual', 'negative', 'tweets']
        weahterinfo = [
            'condition', 'humidity', 'pressure', 'temperature', 'windSpeed'
        ]
        for axisx in twitterinfo:
            for axisy in weahterinfo:
                namelist = axisx + "_" + axisy + "_" + region
                drawwDBSCAN(indexeddf[[axisx, axisy]].values, namelist)
Example No. 18
def writeTempTweetcsv():
    TARGET_HTTP = '115.146.86.188'
    TARGET_DB = 'coastshop_hour'
    COUCHDB_LINK = 'http://' + TARGET_HTTP + ':5984/' + TARGET_DB

    #################################################################################
    # # to query the view for cities and weather
    db = couchquery.Database(COUCHDB_LINK)

    #################################################################################
    # # calculate the positive/negative/neutral percentages for each day and region
    regionlist = ['coast', 'shop', 'other']
    for region in regionlist:
        rows = db.views.mapreduce.total_condi(key=region)
        rows_other = db.views.mapreduce.total_condi(key="other")
        dataframelist = []
        dataframelist_other = []

        #################################################################################
        # # convert date to day month year hour minute second
        for row, other in zip(rows, rows_other):
            # row[0] = time.strptime(row[0], "%d %m %Y %H %M %S")
            row[0] = datetime.datetime.strptime(row[0], "%d %m %Y %H %M %S")
            row[6] = float(row[6])
            row[7] = float(row[7])
            row[8] = float(row[8])
            row[9] = float(row[9])
            other[0] = datetime.datetime.strptime(other[0],
                                                  "%d %m %Y %H %M %S")
            other[6] = float(other[6])
            other[7] = float(other[7])
            other[8] = float(other[8])
            other[9] = float(other[9])
            dataframelist.append(row)
            dataframelist_other.append(other)

        ##############################################################################
        # extract the corresponding data
        df = pd.DataFrame(dataframelist,
                          columns=[
                              'date', 'positive', 'netrual', 'negative',
                              'tweets', 'condition', 'humidity', 'pressure',
                              'temperature', 'windSpeed'
                          ])
        indexeddf = df.set_index(['date'])

        df_other = pd.DataFrame(dataframelist_other,
                                columns=[
                                    'date', 'positive', 'netrual', 'negative',
                                    'tweets', 'condition', 'humidity',
                                    'pressure', 'temperature', 'windSpeed'
                                ])
        indexeddf_other = df_other.set_index(['date'])

        twitterinfo = ['positive', 'netrual', 'negative', 'tweets']
        weahterinfo = [
            'condition', 'humidity', 'pressure', 'temperature', 'windSpeed'
        ]

        ##############################################################################
        # set parameters for the Isomap embedding
        n_neighbors = 10
        n_components = 2
        namelist = "hour_dimention_weahterinfo_" + region

        ##############################################################################
        # cluster and draw Y against compareY after the Isomap embedding
        Y = manifold.Isomap(n_neighbors, n_components).fit_transform(
            indexeddf[weahterinfo].values)
        compareY = manifold.Isomap(n_neighbors, n_components).fit_transform(
            indexeddf[twitterinfo].values)
        drawwDBSCAN(Y, compareY, namelist)
Example No. 19
        
    # if tc["branch_id"]["branch_id"] not in branch_map:
    #     print pk, 'is not in branch map.', tc["branch_id"]["branch_id"]
    #     return None
    
def pushterm(s):
    sys.stdout.write(s)
    sys.stdout.flush()    
    
if __name__ == "__main__":
    if sys.argv[-1].startswith("http"):
        uri = sys.argv[-1]
    else:
        uri = 'http://localhost:5984/tcm'
    print "Database is "+uri
    db = couchquery.Database(uri)
    db.sync_design_doc('tcm', tcm.design_doc)    
    
    check = db.views.tcm.casesByLitmus(descending=True, limit=1)
    if len(check) != 0:
        print "Latest litmus test imported was "+str(check.keys()[0])
        startkey = check.keys()[0]
    else:
        print "Litmus pk query returned "+str(len(check))
        startkey = 0  # assumed fallback so the loop below has a starting key
    
    for key in range(startkey, endkey):
        if key not in skip:
            result = None
            while result is None:
                try:
                    result = get_testcase(key, db)
Example No. 20
def getTinderboxIDfromDB():
    #TODO: make the db a global variable or parameter to function
    #    db = couchquery.Database("http://pythonesque.org:5984/fennec_test")
    #    db = couchquery.Database("http://*****:*****@happyhans.couch.io/logcompare")
    db = couchquery.Database("http://10.2.76.100:5984/fennec_alpha")
    return db.views.fennecResults.byTinderboxID()
Example No. 21
def writeTempTweetcsv():
    TARGET_HTTP = '115.146.86.188'
    TARGET_DB = 'citysummary'
    COUCHDB_LINK = 'http://' + TARGET_HTTP + ':5984/' + TARGET_DB

    #################################################################################
    # # to query the view for cities and weather
    db = couchquery.Database(COUCHDB_LINK)

    #################################################################################
    # # calculate the positive/negative/neutral percentages for each day and city
    rows = db.views.mapreduce.emotionTemp(key="Melbourne")
    rows_other = db.views.mapreduce.emotionTemp(key="Sydney")
    dataframelist = []
    dataframelist_other = []

    #################################################################################
    # # convert date to day month year
    for row, other in zip(rows, rows_other):
        # row[0] = time.strptime(row[0], "%d %m %Y %H %M %S")
        row[0] = datetime.datetime.strptime(row[0], "%d %b %Y")
        row[4] = float(row[4])
        row[5] = float(row[5])

        other[0] = datetime.datetime.strptime(other[0], "%d %b %Y")
        other[4] = float(other[4])
        other[5] = float(other[5])

        dataframelist.append(row)
        dataframelist_other.append(other)


    df = pd.DataFrame(dataframelist,
                      columns=[
                          'date', 'positive', 'netrual', 'negative', 'high',
                          'low', 'diff', 'tweets', 'condition'
                      ])
    indexeddf = df.set_index(['date'])

    df_other = pd.DataFrame(dataframelist_other,
                            columns=[
                                'date', 'positive', 'netrual', 'negative',
                                'high', 'low', 'diff', 'tweets', 'condition'
                            ])
    indexeddf_other = df_other.set_index(['date'])

    matplotlib.style.use('ggplot')

    # dflookup = pd.DataFrame(indexeddf, columns = ['positive', 'netrual', 'negative','tweets','humidity','pressure','temperature','windSpeed'])
    dflookup_coast = pd.DataFrame(indexeddf, columns=['positive', 'netrual', 'negative', 'high', 'low', 'diff', 'tweets'])

    dflookup_other = pd.DataFrame(indexeddf_other, columns=['positive', 'netrual', 'negative', 'high', 'low', 'diff', 'tweets'])
    # print dflookup_other
    # coast_other = dflookup_coast[['positive', 'netrual', 'negative','tweets']].subtract(dflookup_other[['positive', 'netrual', 'negative','tweets']], axis=0)
    # coast_other[['high','low','diff']] = dflookup_other[['high','low','diff']]

    ############################################################
    # # correlation and covariance matrices
    dflookup_coast.cov().to_csv("Sydney_day_cov.csv")
    dflookup_coast.corr().to_csv("Sydney_day_corr.csv")


    ################################################
    # # extract the corresponding columns
    dflookup_coast_tweet = pd.DataFrame(indexeddf_other, columns=['positive', 'netrual', 'negative', 'tweets'])
    dflookup_coast_temp = pd.DataFrame(indexeddf_other, columns=['positive', 'netrual', 'negative', 'high', 'low', 'diff'])
    dflookup_coast_tweet.plot(secondary_y=['tweets'])
    dflookup_coast_temp.plot(secondary_y=['high', 'low', 'diff'])

    ###########################################################################
    # # scatter matrix
    scatter_matrix(dflookup_coast, alpha=0.2, figsize=(6, 6), diagonal='kde')

    plt.show()
Example No. 22
def writeTempTweetcsv():
    TARGET_HTTP = '115.146.86.188'
    TARGET_DB = 'coastshop_hour'
    COUCHDB_LINK = 'http://' + TARGET_HTTP + ':5984/' + TARGET_DB

    #################################################################################
    # # to query the view for cities and weather
    db = couchquery.Database(COUCHDB_LINK)

    #################################################################################
    # # calculate the positive/negative/neutral percentages for each day and region
    rows = db.views.mapreduce.totalsum(key="coast")
    rows_other = db.views.mapreduce.totalsum(key="other")
    dataframelist = []
    dataframelist_other = []

    #################################################################################
    # # convert date to day month year hour minute second
    for row, other in zip(rows, rows_other):
        # row[0] = time.strptime(row[0], "%d %m %Y %H %M %S")
        row[0] = datetime.datetime.strptime(row[0], "%d %m %Y %H %M %S")
        row[6] = float(row[6])
        row[7] = float(row[7])
        row[8] = float(row[8])
        row[9] = float(row[9])
        other[0] = datetime.datetime.strptime(other[0], "%d %m %Y %H %M %S")
        other[6] = float(other[6])
        other[7] = float(other[7])
        other[8] = float(other[8])
        other[9] = float(other[9])
        dataframelist.append(row)
        dataframelist_other.append(other)

    df = pd.DataFrame(dataframelist,
                      columns=[
                          'date', 'positive', 'netrual', 'negative', 'tweets',
                          'condition', 'humidity', 'pressure', 'temperature',
                          'windSpeed'
                      ])
    indexeddf = df.set_index(['date'])

    df_other = pd.DataFrame(dataframelist_other,
                            columns=[
                                'date', 'positive', 'netrual', 'negative',
                                'tweets', 'condition', 'humidity', 'pressure',
                                'temperature', 'windSpeed'
                            ])
    indexeddf_other = df_other.set_index(['date'])

    matplotlib.style.use('ggplot')

    #############################################################
    # # extract the corresponding columns
    dflookup_coast = pd.DataFrame(indexeddf,
                                  columns=[
                                      'positive', 'netrual', 'negative',
                                      'tweets', 'humidity', 'pressure',
                                      'temperature', 'windSpeed'
                                  ])

    dflookup_other = pd.DataFrame(indexeddf_other,
                                  columns=[
                                      'positive', 'netrual', 'negative',
                                      'tweets', 'humidity', 'pressure',
                                      'temperature', 'windSpeed'
                                  ])
    # print dflookup_other
    coast_other = dflookup_coast[[
        'positive', 'netrual', 'negative', 'tweets'
    ]].subtract(dflookup_other[['positive', 'netrual', 'negative', 'tweets']],
                axis=0)
    coast_other[['humidity', 'pressure', 'temperature',
                 'windSpeed']] = dflookup_other[[
                     'humidity', 'pressure', 'temperature', 'windSpeed'
                 ]]

    ############################################################
    # # correlation and covariance matrices
    coast_other.cov().to_csv("coast_other_cov.csv")
    coast_other.corr().to_csv("coast_other_corr.csv")

    coast_other.plot()

    ###########################################################################
    # # scatter matrix
    scatter_matrix(coast_other, alpha=0.2, figsize=(6, 6), diagonal='kde')

    plt.show()
Example No. 23
import sys
import couchquery
import re

rePrivateNetworks = re.compile(
    r'https?://(localhost|127\.0\.0\.1|192\.168\.[0-9]+\.[0-9]+|172\.16\.[0-9]+\.[0-9]+|10\.[0-9]+\.[0-9]+\.[0-9]+)'
)
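# note: the RFC 1918 "172" block is 172.16.0.0/12 (172.16.x.x-172.31.x.x); a
# broader pattern for that octet range would be 172\.(1[6-9]|2[0-9]|3[01])\.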

db = couchquery.Database('http://localhost:5984/crashtest')

bad_lines = []
bad_results = []


def parse_line(line):
    try:
        # split the line into 16 variables. This will allow comments with
        # embedded tabs to be properly parsed.
        (
            signature,
            url,
            uuid_url,
            client_crash_date,
            date_processed,
            last_crash,
            product,
            version,
            build,
            branch,
            os_name,
            os_version,