Example #1
def settable(*args):
    if args:
        cache.update({ "table" : args[0] })
    with impalaopen(":".join(settings.IMPALA)) as curr:
        query = "select level, count(distinct source) from " + database + "." + args[0]  + "_good_graph group by level order by level desc limit 50;"
        curr.execute(query)
        graph_stat_string = ""
        num_levels = 2
        i = 0
        for line in curr:
            (level,nodes) = line
            if i == 0:
                num_levels = int(level)
            i = i + 1
            graph_stat_string = graph_stat_string + "Level: " + str(level) + ", " + str(nodes) + " nodes "
        
        query = "select min(dt), max(dt), min(cast(intersectx as double)), max(cast(intersectx as double)), min(cast(intersecty as double)), max(cast(intersecty as double)) from " + cache.get().get("database", "") + "." + cache.get().get("table", "") + "_tracks_comms_joined where track_id != 'ship(1.0)' and track_id != 'ais(3.0)'"
        curr.execute(query)
        for line in curr:
            (mindt,maxdt,minlat,maxlat,minlon,maxlon) = line
            cache.update({ "mindt" : mindt,
                           "maxdt" : maxdt,
                           "minlat" : minlat,
                           "minlon" : minlon,
                           "maxlat" : maxlat,
                           "maxlon" : maxlon })

        cache.update({ "graph_stat_string" : graph_stat_string + "" ,
                       "graph_num_levels" : num_levels,
                       "level" : str(num_levels), 
                       "community" : '-' })
    return "0"
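
Every example on this page calls into a handful of module-level names that are never shown: `settings.IMPALA` (a host/port pair), the shared `cache` object, the `database` name used in Example #1, and the `impalaopen` context manager that yields an Impala cursor. The stand-ins below are a minimal sketch, assuming the impyla client, of what those pieces could look like; the real projects define them elsewhere.

# Minimal sketch, not the projects' actual modules: illustrative stand-ins
# for the names the examples above assume (settings.IMPALA, database, cache,
# impalaopen).
from contextlib import contextmanager
from impala.dbapi import connect   # impyla; assumed client library

class settings(object):
    IMPALA = ("impala-host", "21050")   # joined with ":" by the callers above

database = "default"                    # module-level name used in Example #1

_store = {}

class cache(object):
    @staticmethod
    def update(values):
        _store.update(values)
        return _store

    @staticmethod
    def get():
        return _store

@contextmanager
def impalaopen(hostport):
    # Yields a DB-API cursor; Example #1 iterates it directly after execute().
    host, port = hostport.split(":")
    conn = connect(host=host, port=int(port))
    cursor = conn.cursor()
    try:
        yield cursor
    finally:
        cursor.close()
        conn.close()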
Example #2
def settable(*args):
    if args:
        cache.update({ "table" : args[0] })
    with impalaopen(":".join(settings.IMPALA)) as client:
        data = client.execute("select level, count(distinct source) from " + args[0]  + "_good_graph group by level order by level desc limit 50")
        data_result = data.get_data()
        graph_stat_string = ""
        num_levels = 2
        i = 0
        for line in data_result.split('\n'):
            level,nodes, = line.strip().split('\t')
            if i == 0:
                num_levels = int(level)
            i = i + 1
            graph_stat_string = graph_stat_string + "Level: " + level + ", " + nodes + " nodes "
        
        data = client.execute("select min(dt), max(dt), min(cast(intersectx as double)), max(cast(intersectx as double)), min(cast(intersecty as double)), max(cast(intersecty as double)) from " + cache.get().get("table", "") + "_tracks_comms_joined where track_id != 'ship(1.0)' and track_id != 'ais(3.0)'")
        for line in data.get_data().split('\n'):
            mindt,maxdt,minlat,maxlat,minlon,maxlon, = line.strip().split('\t')
            cache.update({ "mindt" : mindt,
                           "maxdt" : maxdt,
                           "minlat" : minlat,
                           "minlon" : minlon,
                           "maxlat" : maxlat,
                           "maxlon" : maxlon })

        cache.update({ "graph_stat_string" : graph_stat_string + "" ,
                       "graph_num_levels" : num_levels,
                       "level" : str(num_levels), 
                       "community" : '-' })
    return "0"
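
Example #2 targets an older client whose `execute()` returns a handle with `get_data()`, a single tab-separated string, rather than an iterable cursor. Splitting that string on `'\n'` will fail on a trailing blank line; a small defensive parse, written as a sketch rather than the project's code:

def parse_levels(data_result):
    # Sketch: skip blank lines before unpacking the tab-separated fields.
    levels = []
    for line in data_result.split('\n'):
        line = line.strip()
        if not line:
            continue
        level, nodes = line.split('\t')
        levels.append((int(level), int(nodes)))
    return levels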
Example #3
def setcomm(*args):
    if not args:
        return tangelo.HTTPStatusCode(400, "No community name")
   
    c = cache.update({ "community" : args[0], 
                       "level" : args[1] })

    comm = c["community"]
    level = c["level"]
    table = c["table"] + "_tracks_comms_joined"
    rows = []
    with impalaopen(":".join(settings.IMPALA)) as curr:
        query = "select count(*) from " + database + "." + table + " where comm_" + str(level) + " = '" + comm + "'"
        curr.execute(query)
        count = curr.fetchall()[0] 
        print count[0]
        query = "select intersectx, intersecty, dt, track_id from " + database + "." + table + " where comm_" + str(level) + " = '" + comm + "' order by track_id, dt asc limit " + str(count[0]) 
        curr.execute(query)
        rows = curr.fetchall()

    whens = {}
    coords = {}
    for i in rows:
        (latitude, longitude, dt, track) = i
        if whens.get(track) == None:
            whens[track] = "<when>" + dt.split('.')[0].replace(' ','T') + "</when>\n"
        else:
            whens[track] = whens[track] + "<when>" + dt.split('.')[0].replace(' ','T') + "</when>\n"

        if coords.get(track) == None:
            coords[track] = "<gx:coord>" + longitude + " " + latitude + " 0</gx:coord>\n"
        else:
            coords[track] = coords[track] + "<gx:coord>" + longitude + " " + latitude + " 0</gx:coord>\n"
    document = "\
<?xml version=\"1.0\" encoding=\"UTF-8\"?> \
<kml xmlns=\"http://www.opengis.net/kml/2.2\" xmlns:gx=\"http://www.google.com/kml/ext/2.2\"><Document>"
    
    for key in whens.keys():
        r = lambda: random.randint(0,255)
        color = '#FF' + ('%02X%02X%02X' % (r(),r(),r()))
        document = document + "<Style id=\"" + key.replace(' ','_') + "\"><IconStyle><color>" + color + "</color><Icon><href>http://maps.google.com/mapfiles/kml/shapes/placemark_circle.png</href></Icon></IconStyle><LabelStyle><scale>0</scale></LabelStyle><LineStyle><color>" + color + "</color><width>3</width></LineStyle></Style>"
        document = document + "<Placemark><name>"+key.replace(' ','_')+"</name><styleUrl>#" + key.replace(' ','_') + "</styleUrl><gx:Track><altitudeMode>relativeToGround</altitudeMode><extrude>1</extrude>"
        document = document + whens[key] + coords[key] + "</gx:Track></Placemark>"
    document = document + "</Document></kml>"
    spit(cherrypy.config.get("webroot") + "/session/output.kml", document, True)
    return "0"
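
`setcomm` builds both of its queries by concatenating `comm` and `level` straight into the SQL text, and it relies on the same module-level `database` as Example #1. If the cursor that `impalaopen` yields is DB-API compatible with parameter binding (impyla declares the pyformat style), the lookup can bind the community value instead. The helper below is a hedged sketch of that approach, with every name passed in explicitly; it also drops the separate count query, since `fetchall()` already returns every matching row.

def fetch_community_rows(curr, database, table, level, comm):
    # Sketch: bind the community value instead of concatenating it into SQL.
    # Assumes a DB-API cursor that accepts pyformat parameters; the column
    # name is kept safe by forcing the level to an integer.
    col = "comm_%d" % int(level)
    query = ("select intersectx, intersecty, dt, track_id from %s.%s "
             "where %s = %%(comm)s order by track_id, dt asc"
             % (database, table, col))
    curr.execute(query, {"comm": comm})
    return curr.fetchall()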
Example #4
 def _add(self, file, version):
     if not cache.update(CCFile(file, version)):
         return
     if [e for e in cfg.getExclude() if fnmatch(file, e)]:
         return
     toFile = path(join(GIT_DIR, file))
     mkdirs(toFile)
     removeFile(toFile)
     try:
         cc_exec(['get','-to', toFile, cc_file(file, version)])
     except:
         if len(file) < 200:
             raise
         debug("Ignoring %s as it may be related to https://github.com/charleso/git-cc/issues/9" % file)
     if not exists(toFile):
         git_exec(['checkout', 'HEAD', toFile])
     else:
         os.chmod(toFile, os.stat(toFile).st_mode | stat.S_IWRITE)
     git_exec(['add', '-f', file], errors=False)
Example #5
 def _add(self, file, version):
     if not cache.update(CCFile(file, version)):
         return
     if [e for e in cfg.getExclude() if fnmatch(file, e)]:
         return
     toFile = path(join(GIT_DIR, file))
     mkdirs(toFile)
     removeFile(toFile)
     try:
         cc_exec(['get', '-to', toFile, cc_file(file, version)])
     except:
         if len(file) < 200:
             raise
         debug(
             "Ignoring %s as it may be related to https://github.com/charleso/git-cc/issues/9"
             % file)
     if not exists(toFile):
         git_exec(['checkout', 'HEAD', toFile])
     else:
         os.chmod(toFile, os.stat(toFile).st_mode | stat.S_IWRITE)
     git_exec(['add', '-f', file], errors=False)
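
Examples #4 and #5 are the same `_add` method from git-cc: it pulls one file version out of ClearCase with a `cleartool get` wrapper and stages it in the Git work tree. The helpers it leans on (`mkdirs`, `removeFile`, `cc_file`) are defined elsewhere in that project; the sketch below is only an orientation aid, with names and semantics inferred from how they are called above.

import os
from os.path import dirname, exists

def mkdirs(file_path):
    # Sketch: ensure the parent directory of the target file exists.
    parent = dirname(file_path)
    if parent and not exists(parent):
        os.makedirs(parent)

def removeFile(file_path):
    # Sketch: drop any stale copy before 'cleartool get' writes the new one.
    if exists(file_path):
        os.remove(file_path)

def cc_file(filename, version):
    # Sketch: ClearCase extended pathname, '<file>@@<version>'.
    return '%s@@%s' % (filename, version)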
Example #6
def settable(*args):
    if args:
        cache.update({"table": args[0]})
    with impalaopen(":".join(settings.IMPALA)) as curr:
        query = "select level, count(distinct source) from " + database + "." + args[
            0] + "_good_graph group by level order by level desc limit 50;"
        curr.execute(query)
        graph_stat_string = ""
        num_levels = 2
        i = 0
        for line in curr:
            (level, nodes) = line
            if i == 0:
                num_levels = int(level)
            i = i + 1
            graph_stat_string = graph_stat_string + "Level: " + str(
                level) + ", " + str(nodes) + " nodes "

        query = "select min(dt), max(dt), min(cast(intersectx as double)), max(cast(intersectx as double)), min(cast(intersecty as double)), max(cast(intersecty as double)) from " + cache.get(
        ).get("database", "") + "." + cache.get().get(
            "table", ""
        ) + "_tracks_comms_joined where track_id != 'ship(1.0)' and track_id != 'ais(3.0)'"
        curr.execute(query)
        for line in curr:
            (mindt, maxdt, minlat, maxlat, minlon, maxlon) = line
            cache.update({
                "mindt": mindt,
                "maxdt": maxdt,
                "minlat": minlat,
                "minlon": minlon,
                "maxlat": maxlat,
                "maxlon": maxlon
            })

        cache.update({
            "graph_stat_string": graph_stat_string + "",
            "graph_num_levels": num_levels,
            "level": str(num_levels),
            "community": '-'
        })
    return "0"
Example #7
def run(database="default", table="", host=settings.IMPALA[0], port=settings.IMPALA[1], trackId=None, comm=None, lev=None, minlat=None, maxlat=None, minlon=None, maxlon=None, mintime=None, maxtime=None):
    if mintime != None or minlat != None:
        geo = {"min_lat":minlat,"max_lat":maxlat,"min_lon":minlon,"max_lon":maxlon}
        time = {"min_time":mintime,"max_time":maxtime}
        cache.update({ "level" : cache.get().get("graph_num_levels","") })
        return geoTimeQuery(comm,lev,host,port,geo,time)
    if comm == None:
        cache.update({ "level" : lev })
        return getWholeGephiGraph(comm,lev,host,port)
    response = {}
    table = cache.get().get("table", "") + "_tracks_comms_joined"
    query = "select * from %s" % (table)
        
    if trackId != None:
        query = query + " where track_id = %s" % (trackId)
    elif comm != None and lev != None:
        query = query + " where comm_" + str(lev.replace('"','')) + " = %s" % (comm)
    else:
        return response
        
    query = query + " order by track_id, dt limit 10000000"

    results = None
    with impalaopen(host + ':' + port) as client:
        qResults = client.execute(query)
        results = convert_results(qResults, "true")


    nodemap = {}
    bounds = { "north": -1,
               "south": -1,
               "east": -1,
               "west": -1
    }
    start = -1
    end = -1
    geoResults = []
    trackIndex = 0
        
    #convert table results into LineStrings
    for record in results:
        currentTrack = {}
    
        recordx = float(record["intersecty"])
        recordy = float(record["intersectx"])
            
        found = False
        for track in geoResults:
            if track["track_id"] == record["track_id"]:
                currentTrack = track
                found = True
                break
                
        if found == False:
            currentTrack = { "type": "LineString",
                             "track_id": record["track_id"],
                             "comm": record["comm_" + str(lev.replace('"',''))],
                             "index": trackIndex,
                             "coordinates": [],
                             "timestamps": []
            }
            nodemap[record["track_id"]] = trackIndex
            trackIndex = trackIndex + 1
            geoResults.append(currentTrack)
                
        coords = [ recordx, recordy ]
        currentTrack["coordinates"].append(coords)
        currentTrack["timestamps"].append(record["dt"])
            
        if bounds["north"] == -1 or bounds["north"] < recordy:
            bounds["north"] = float(recordy)
                
        if bounds["south"] == -1 or bounds["south"] > recordy:
            bounds["south"] = float(recordy)
                
        if bounds["east"] == -1 or bounds["east"] < recordx:
            bounds["east"] = float(recordx)
                
        if bounds["west"] == -1 or bounds["west"] > recordx:
            bounds["west"] = float(recordx)
            
        if start == -1 or record["dt"] < start:
            start = record["dt"]
                
        if end == -1 or record["dt"] > end:
            end = record["dt"]
        
    response["result"] = geoResults
    response["bounds"] = bounds
    response["start"] = start
    response["end"] = end
    edges = linkages(comm,lev,nodemap,host,port)
    gephinodes, gephigraph = subgraph(comm,lev,host,port)
    response["graph"] = edges
    response["gephinodes"] = gephinodes
    response["gephigraph"] = gephigraph

    cache.update({ "level" : lev })
    return response
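
`run` hands the executed query to `convert_results`, which the loop above expects to return one dict per row, keyed by column name (`record["intersectx"]`, `record["dt"]`, and so on). Example #7 passes the older client's result handle, while Example #12 below passes the cursor itself; the sketch here covers only the cursor form and is an assumption about the helper, not its actual definition.

def convert_results(curr, flag="true"):
    # Sketch only: turn each fetched row into {column_name: value} using
    # cursor.description.  The second argument exists because the callers
    # above pass "true"; its real meaning is not visible in these examples.
    columns = [d[0] for d in curr.description]
    return [dict(zip(columns, row)) for row in curr.fetchall()]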
Example #8
                    comment_author,
                    diff_author,
                    colorama.Fore.CYAN,
                    comment_text,
                )
            counts[comment_author] += 1
            if args.just_tally:
                sys.stdout.write('.')
                sys.stdout.flush()
    if args.just_tally:
        print ''
    print '=== Counts ==='
    for key, value in counts.items():
        print '%s: %s' % (key, value)
    print 'Total: %s comments on %s diffs' % (sum(counts.values()), total_diffs)


if __name__ == '__main__':
    colorama.init(autoreset=True)
    cache.load()
    parser = argparse.ArgumentParser(prog='differential-comments')
    parser.add_argument('--team', help='Which team from settings to use',
        **kwargs_or_default(settings.DEFAULT_TEAM))
    parser.add_argument('--days', help='How many days back to go', default=30)
    parser.add_argument('--comment-days', help='How many days back to go for the comments')
    parser.add_argument('--just-tally', help='Just print the final tally', action='store_true')
    parser.add_argument('--just-email', help='Just one user by email address')
    args = parser.parse_args()
    list(args)
    cache.update()
Example #9
def post(*pargs, **kwargs):
    body = json.loads(cherrypy.request.body.read())
    cache.update(body)
    path = '.'.join(pargs)
    return "0"
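
`post` is the write side of the shared cache used throughout these examples: whatever JSON object the client sends is merged into it via `cache.update`, while the dotted `path` built from `pargs` is computed but never used. A hypothetical call that would exercise it follows; the URL is illustrative only, and the payload keys are the ones the other examples read back out of the cache.

# Hypothetical client call; the endpoint URL is illustrative, the keys are
# the ones settable()/run() later read back out of the cache.
import json
import urllib2

payload = json.dumps({"database": "default", "table": "tracks"})
req = urllib2.Request("http://localhost:8080/cache",
                      payload,
                      {"Content-Type": "application/json"})
print urllib2.urlopen(req).read()   # the handler returns "0"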
Example #10
def setcomm(*args):
    if not args:
        return tangelo.HTTPStatusCode(400, "No community name")

    c = cache.update({"community": args[0], "level": args[1]})

    comm = c["community"]
    level = c["level"]
    table = c["table"] + "_tracks_comms_joined"
    rows = []
    with impalaopen(":".join(settings.IMPALA)) as curr:
        query = "select count(*) from " + database + "." + table + " where comm_" + str(
            level) + " = '" + comm + "'"
        curr.execute(query)
        count = curr.fetchall()[0]
        print count[0]
        query = "select intersectx, intersecty, dt, track_id from " + database + "." + table + " where comm_" + str(
            level
        ) + " = '" + comm + "' order by track_id, dt asc limit " + str(
            count[0])
        curr.execute(query)
        rows = curr.fetchall()

    whens = {}
    coords = {}
    for i in rows:
        (latitude, longitude, dt, track) = i
        if whens.get(track) == None:
            whens[track] = "<when>" + dt.split('.')[0].replace(
                ' ', 'T') + "</when>\n"
        else:
            whens[track] = whens[track] + "<when>" + dt.split('.')[0].replace(
                ' ', 'T') + "</when>\n"

        if coords.get(track) == None:
            coords[
                track] = "<gx:coord>" + longitude + " " + latitude + " 0</gx:coord>\n"
        else:
            coords[track] = coords[
                track] + "<gx:coord>" + longitude + " " + latitude + " 0</gx:coord>\n"
    document = "\
<?xml version=\"1.0\" encoding=\"UTF-8\"?> \
<kml xmlns=\"http://www.opengis.net/kml/2.2\" xmlns:gx=\"http://www.google.com/kml/ext/2.2\"><Document>"

    for key in whens.keys():
        r = lambda: random.randint(0, 255)
        color = '#FF' + ('%02X%02X%02X' % (r(), r(), r()))
        document = document + "<Style id=\"" + key.replace(
            ' ', '_'
        ) + "\"><IconStyle><color>" + color + "</color><Icon><href>http://maps.google.com/mapfiles/kml/shapes/placemark_circle.png</href></Icon></IconStyle><LabelStyle><scale>0</scale></LabelStyle><LineStyle><color>" + color + "</color><width>3</width></LineStyle></Style>"
        document = document + "<Placemark><name>" + key.replace(
            ' ', '_'
        ) + "</name><styleUrl>#" + key.replace(
            ' ', '_'
        ) + "</styleUrl><gx:Track><altitudeMode>relativeToGround</altitudeMode><extrude>1</extrude>"
        document = document + whens[key] + coords[
            key] + "</gx:Track></Placemark>"
    document = document + "</Document></kml>"
    spit(
        cherrypy.config.get("webroot") + "/session/output.kml", document, True)
    return "0"
Example #11
def post(*pargs, **kwargs):
    body = json.loads(cherrypy.request.body.read())
    cache.update(body)
    path = '.'.join(pargs)
    return "0"
Example #12
def run(database="default", table="", host=settings.IMPALA[0], port=settings.IMPALA[1], trackId=None, comm=None, lev=None, minlat=None, maxlat=None, minlon=None, maxlon=None, mintime=None, maxtime=None):
    if mintime != None or minlat != None:
        geo = {"min_lat":minlat,"max_lat":maxlat,"min_lon":minlon,"max_lon":maxlon}
        time = {"min_time":mintime,"max_time":maxtime}
        cache.update({ "level" : cache.get().get("graph_num_levels","") })
        return geoTimeQuery(comm,lev,host,port,geo,time)
    if comm == None:
        cache.update({ "level" : lev })
        return getWholeGephiGraph(comm,lev,host,port)

    response = {}
    database = cache.get().get("database", "")
    table = cache.get().get("table", "") + "_tracks_comms_joined"
    query = "select * from " + database + "." + table
        
    if trackId != None:
        query = query + " where track_id = %s" % (trackId)
    elif comm != None and lev != None:
        query = query + " where comm_" + str(lev.replace('"','')) + " = %s" % (comm)
    else:
        return response
        
    query = query + " order by track_id, dt limit 10000000"

    results = None
    with impalaopen(host + ':' + port) as curr:
        curr.execute(query)
        results = convert_results(curr, "true")

    nodemap = {}
    bounds = { "north": -1,
               "south": -1,
               "east": -1,
               "west": -1
    }
    start = -1
    end = -1
    geoResults = []
    trackIndex = 0
        
    #convert table results into LineStrings
    for record in results:
        currentTrack = {}
    
        if record["intersectx"] == "null" or record["intersecty"] == "null":
            continue
        recordx = float(record["intersecty"])
        recordy = float(record["intersectx"])
            
        found = False
        for track in geoResults:
            if track["track_id"] == record["track_id"]:
                currentTrack = track
                found = True
                break
                
        if found == False:
            currentTrack = { "type": "LineString",
                             "track_id": record["track_id"],
                             "comm": record["comm_" + str(lev.replace('"',''))],
                             "index": trackIndex,
                             "coordinates": [],
                             "timestamps": []
            }
            nodemap[record["track_id"]] = trackIndex
            trackIndex = trackIndex + 1
            geoResults.append(currentTrack)
                
        coords = [ recordx, recordy ]
        currentTrack["coordinates"].append(coords)
        currentTrack["timestamps"].append(record["dt"])
            
        if bounds["north"] == -1 or bounds["north"] < recordy:
            bounds["north"] = float(recordy)
                
        if bounds["south"] == -1 or bounds["south"] > recordy:
            bounds["south"] = float(recordy)
                
        if bounds["east"] == -1 or bounds["east"] < recordx:
            bounds["east"] = float(recordx)
                
        if bounds["west"] == -1 or bounds["west"] > recordx:
            bounds["west"] = float(recordx)
            
        if start == -1 or record["dt"] < start:
            start = record["dt"]
                
        if end == -1 or record["dt"] > end:
            end = record["dt"]
        
    response["result"] = geoResults
    response["bounds"] = bounds
    response["start"] = start
    response["end"] = end
    edges = linkages(comm,lev,nodemap,host,port)
    gephinodes, gephigraph = subgraph(comm,lev,host,port)
    response["graph"] = edges
    response["gephinodes"] = gephinodes
    response["gephigraph"] = gephigraph

    cache.update({ "level" : lev })
    return response
Example #13
    def POST(self):
        print '\nSomeone submitted a Query...'
        form = self.my_form()
        form.validates()
        print 'Form : ',form

        try:
            sz = form.value['data_tx']
            x = str(sz)
            print 'X : ',x
            # s = web.input(xtables = [])
            # dval = str(s.xtables)

            typeX = x.split('^')[0]

            if typeX == '0':
                print 'Type : ',typeX,' Saving'
                username = x.split('^')[1].split('|')[0][0:-2]
                [dval, q] = x.split('^')[1].split('|')[1].split('~')

                uid = str(getUserIndex(username))

                print "Loading Cache..."
                cache = loadCache(vpath+uid+'/cache.pickle')
                print "Saving Cache..."
                saveCache(cache,vpath+uid+'/cache.pickle')
                print "Cache Saved!"
                return json.dumps("Data Saved, "+username+"!")

            elif typeX == '1':
                print 'Type : ',typeX,' Query'
                username = x.split('^')[1].split('|')[0][0:-2]
                [dval, q] = x.split('^')[1].split('|')[1].split('~')

                print 'Username : ', username
                print 'Dropdown : ', dval
                print 'UserQuery : ', q

                onto_file = vpath+str(getUserIndex(username))+'/'+dval
                print 'onto_file_path : ',onto_file

                uid = str(getUserIndex(username))
                cache = loadCache(vpath+uid+'/cache.pickle')
                saveCache(cache,vpath+uid+'/cache.pickle')

                answer = getresult(cache,onto_file+q)

                if answer == None:
                    print "\n******Answer None!!!"
                    js = myQuery(q, onto_file)
                else:
                    print "\n******Answer NOTNone!!!"
                    js = answer
                print '\nAnswer : ',js

                cacheU = update(cache, onto_file+q, js)
                saveCache(cacheU, vpath+uid+'/cache.pickle')
                return json.dumps("Selected File : "+dval+"\nAnswer to Query : "+js)

            # elif typeX == '2':
            #     print 'Type : ',typeX,' Upload'
            #     username = x.split('^')[1].split('|')[0][0:-2]
            #
            #
            #     x = web.input(myfile={})
            #     print "\n\n^^^^^^^^^^^^^X : ",x
            #     filedir = '/Users/hellosaumil/Desktop/UF'                 # change this to the directory you want to store the file in.
            #     if 'myfile' in x:                                         # to check if the file-object is created
            #
            #         web.debug(x['myfile'].filename) # This is the filename
            #         web.debug(x['myfile'].value) # This is the file contents
            #
            #         filepath=x.myfile.filename.replace('\\','/')          # replaces the windows-style slashes with linux ones.
            #         print "\nFilename1 : ",filepath
            #
            #         filename=filepath.split('/')[-1]                      # splits the and chooses the last part (the filename with extension)
            #         print "\nFilename2 : ",filename
            #         fout = open(filedir +'/'+ filename,'w+')               # creates the file where the uploaded file should be stored
            #         fout.write(x.myfile.file.read())                      # writes the uploaded file to the newly created file.
            #         fout.close()                                          # closes the file, upload complete.
            #
            #     return json.dumps("File(s) Uploaded Successfully, "+username+"!")
            else:
                return json.dumps("Invalid Request, "+username+"!")

        except Exception as e:
            print "******** Exception : ", e

            typeX = '2'
            print 'Type : ',typeX,' Upload'
            # username = x.split('^')[1].split('|')[0][0:-2]


            x = web.input(myfile={})
            print "\n\n^^^^^^^^^^^^^X : ",x
            # filedir = '/Users/hellosaumil/Desktop/UF'                 # change this to the directory you want to store the file in.
            if 'myfile' in x:                                         # to check if the file-object is created


                print "File Contents : ", x['myfile'].value # This is the file contents
                # uid = x['user'].split(':')[1].split('/')[1][1:]
                uid = x['user'].split(':')[1].split('/')[2][1:]
                username = x['user'].split(':')[0].title()

                print "UID : "+uid+" - Username : "+username
                filedir = vpath+uid+"/uploads"
                print "\nFile-Dir : ",filedir

                filepath=x.myfile.filename.replace('\\','/')          # replaces the windows-style slashes with linux ones.
                print "\nFile-Path : ",filepath

                filename=filepath.split('/')[-1]                      # splits the and chooses the last part (the filename with extension)
                print "File-Name : ",filename
                fout = open(filedir +'/'+ filename,'w+')               # creates the file where the uploaded file should be stored
                fout.write(x.myfile.file.read())                      # writes the uploaded file to the newly created file.
                fout.close()                                          # closes the file, upload complete.

                print "File Uploaded Successfully at "+vpath+uid+", "+username+"!"

                root, dirs, files = os.walk(vpath+uid).next()
                print '\n\nPath $---',root
                print '\nDirs $---',dirs
                print '\nFiles $---',files

                for dirx in dirs:
                    rootd, dirsd, filesd = os.walk(vpath+uid+'/'+dirx).next()
                    print '\n\n\tPathd $x--',rootd
                    print '\n\tDirsd $x--',dirsd
                    print '\n\tFilesd $x--',filesd

                    files = files + ['*UPS*']+ filesd

                vxpath = 'static/Cloud/U' + uid
                filex = '^'.join(files)

                raise web.redirect('/pf?user=%s:%s:%s' % (username, vxpath, filex))
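
Example #13 persists per-user query results through `loadCache`, `saveCache`, `getresult`, and `update`, all defined elsewhere in that project. Judging from the call sites, the cache is a pickled dict keyed by ontology-file-plus-query strings; the helpers below are a hedged sketch of that behavior, not the project's own code.

import os
import pickle

def loadCache(pickle_path):
    # Sketch: return the pickled dict, or an empty cache if none exists yet.
    if os.path.exists(pickle_path):
        with open(pickle_path, 'rb') as f:
            return pickle.load(f)
    return {}

def saveCache(cache, pickle_path):
    with open(pickle_path, 'wb') as f:
        pickle.dump(cache, f)

def getresult(cache, key):
    # Sketch: None signals a cache miss, as the POST handler above expects.
    return cache.get(key)

def update(cache, key, value):
    cache[key] = value
    return cache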