Example #1
def scanPassbyList(passbys):
    #  average any pairs of obs (obs that are very close in time)
    # gp.condenseIfDtLessThan = minimum time allowable between successive observations in sec
    cnt = 0
    sec2day = 1 / (24 * 3600)  # converts seconds to fractional days (jDay units)
    for passby in passbys:
        #    print("passby=",passby)
        whale = passby[0]
        boats = passby[1]
        for boat in boats:  # each boat track observed during this passby
            priorJday = boat.jDay[0]
            idx99 = []
            for i in range(1, len(boat.jDay)):
                thisJday = boat.jDay[i]
                if abs(thisJday - priorJday) < gp.condenseIfDtLessThan * sec2day:
                    cnt += 1
                    print(cnt, "TrackID", boat.trackIDroberin, boat.trackID,
                          "delta t (sec) =",
                          int((thisJday - priorJday) / sec2day),
                          helpers.getDate(priorJday),
                          helpers.getDate(thisJday))
                    # average the two observations' positions into index i and mark the earlier obs's UTMs as -99 (i.e. not valid)
                    boat.utmE[i] = (boat.utmE[i] + boat.utmE[i - 1]) / 2.0
                    boat.utmN[i] = (boat.utmN[i] + boat.utmN[i - 1]) / 2.0
                    boat.utmE[i - 1] = -99
                    boat.utmN[i - 1] = -99  # temporary markers -- not used later, kept for debugging
                    idx99.append(i - 1)
                priorJday = thisJday
            # now condense out the -99 records
            if idx99:
                boat.jDay = np.delete(boat.jDay, idx99)
                boat.Xroberin = np.delete(boat.Xroberin, idx99)
                boat.Yroberin = np.delete(boat.Yroberin, idx99)
                boat.utmE = np.delete(boat.utmE, idx99)
                boat.utmN = np.delete(boat.utmN, idx99)
                boat.vE = np.delete(boat.vE, idx99)
                boat.vN = np.delete(boat.vN, idx99)
                boat.a = np.delete(boat.a, idx99)
                boat.v = np.delete(boat.v, idx99)
                boat.tortuosity = np.delete(boat.tortuosity, idx99)
                boat.latitude = np.delete(boat.latitude, idx99)
                boat.longitude = np.delete(boat.longitude, idx99)
                boat.Nobs = len(boat.jDay)
            boat.smoothKinematics()
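The condensation step above can be shown in isolation. A minimal sketch, assuming jDay is in fractional days and using an illustrative 30 s threshold in place of gp.condenseIfDtLessThan:

import numpy as np

def condense_close_obs(jday, utm_e, utm_n, min_dt_secs=30.0):
    # Average successive observations closer than min_dt_secs apart and drop the earlier one.
    sec2day = 1.0 / (24 * 3600)
    jday = np.asarray(jday, dtype=float)
    utm_e = np.asarray(utm_e, dtype=float).copy()
    utm_n = np.asarray(utm_n, dtype=float).copy()
    drop = []
    for i in range(1, len(jday)):
        if abs(jday[i] - jday[i - 1]) < min_dt_secs * sec2day:
            utm_e[i] = (utm_e[i] + utm_e[i - 1]) / 2.0
            utm_n[i] = (utm_n[i] + utm_n[i - 1]) / 2.0
            drop.append(i - 1)
    return np.delete(jday, drop), np.delete(utm_e, drop), np.delete(utm_n, drop)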
Example #2
def startNewAttendance():
    if request.method == 'POST':
        attendance = Attendance(attendanceCollection)

        data = request.get_json()
        courseCode = data['courseCode']

        date = helpers.getDate()

        attendanceExists = attendance.getAttendance(date, courseCode)
        attendanceExists = loads(attendanceExists)

        if len(attendanceExists) > 0:
            res = jsonify('Attendance has already been taken for today')
            return res

        student = Students(studentCollection)
        students = student.getStudentsForParticleCourse(courseCode)

        stdAttendance = loads(students)
        for student in stdAttendance:
            student['present'] = False

        classAttendance = {'allStudents': stdAttendance, 'unknownStudents': []}
        obj = {
            'courseCode': courseCode,
            'date': date,
            'classAttendance': classAttendance
        }

        att = attendance.addNewAttendance(obj)

        res = jsonify('New Attendance started.')
        return res
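The guard in this handler is a check-then-insert on (date, courseCode). A minimal sketch of that pattern against a pymongo-style collection (the document fields mirror the example; the collection object, date format, and helper name are assumptions):

from datetime import date

def start_attendance_once(collection, courseCode, today=None):
    # Refuse to create a second attendance document for the same course and day.
    today = today or date.today().isoformat()  # assumed date format
    if collection.find_one({'date': today, 'courseCode': courseCode}):
        return 'Attendance has already been taken for today'
    collection.insert_one({'courseCode': courseCode,
                           'date': today,
                           'classAttendance': {'allStudents': [], 'unknownStudents': []}})
    return 'New Attendance started.'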
Example #3
def save_CVS_format(whalePassbyList, boatsPassbyList):
    #  write out tab delimited text file for all the whale data
    debug = 0
    whaleFile = open(gp.whaleCVSfileName, "w")
    header = "classtype\ttrackID\ttrackIDroberin\tsite\twhaleID\tage\tyear\tmonth\tday\thr\tminute\tsec\tjDay\twCalf\tactivityCode\tActivityState\tXroberin\tYroberin\tlatitude\tlongitude\tutmE\tutmN\tvE\tvN\tv\ta\ttortuosity\n"
    whaleFile.write(header)
    for w in whalePassbyList:
        fileline = "%s\t%d\t%d\t%s\t%s\t%d" % (
            w.classType, w.trackID, w.trackIDroberin, w.site, w.whaleID, w.age)
        for i in range(w.Nobs):
            theDate = helpers.getDate(w.jDay[i])
            fileline2 = "\t%d\t%d\t%d\t%d\t%d\t%d\t%0.7f\t%s\t%d\t%s\t%d\t%d\t%0.6f\t%0.6f\t%d\t%d" % (
                theDate[0], theDate[1], theDate[2], theDate[3], theDate[4],
                theDate[5], w.jDay[i], helpers.bytes2str(w.wCalf[i]),
                w.activityCode[i], helpers.bytes2str(
                    w.activityState[i]), w.Xroberin[i], w.Yroberin[i],
                w.latitude[i], w.longitude[i], w.utmE[i], w.utmN[i])
            fileline3 = "\t%0.3f\t%0.3f\t%0.3f\t%0.3f\t%0.3f\n" % (
                w.vE[i], w.vN[i], w.v[i], w.a[i], w.tortuosity[i])

            whaleFile.write(fileline + fileline2 + fileline3)
        if debug == 1:
            debug = 0
#      input("kkkk")
    whaleFile.close()

    boatFile = open(gp.boatCVSfileName, "w")
    header = "classtype\ttrackID\ttrackIDroberin\tsite\tboatID\tboatCode\tboatCodeDefinition\tJASCO_boatType\tyear\tmonth\tday\thr\tminute\tsec\tjDay\tXroberin\tYroberin\tlatitude\tlongitude\tutmE\tutmN\tvE\tvN\tv\ta\ttortuosity\\n"
    boatFile.write(header)
    for boats in boatsPassbyList:  # each entry is a LIST of boat observations in current passby
        for b in boats:
            fileline = "%s\t%d\t%d\t%s\t%s\t%s\t%s\t%s" % (
                b.classType, b.trackID, b.trackIDroberin, b.site, b.boatID,
                b.boatCode, b.boatDefinition, b.JASCOcode)
            for i in range(b.Nobs):
                theDate = helpers.getDate(b.jDay[i])

                fileline2 = "\t%d\t%d\t%d\t%d\t%d\t%d\t%0.7f\t%d\t%d\t%0.6f\t%0.6f\t%d\t%d" % (
                    theDate[0], theDate[1], theDate[2], theDate[3], theDate[4],
                    theDate[5], b.jDay[i], b.Xroberin[i], b.Yroberin[i],
                    b.latitude[i], b.longitude[i], b.utmE[i], b.utmN[i])
                fileline3 = "\t%0.3f\t%0.3f\t%0.3f\t%0.3f\t%0.3f\n" % (
                    b.vE[i], b.vN[i], b.v[i], b.a[i], b.tortuosity[i])
                boatFile.write(fileline + fileline2 + fileline3)
    boatFile.close()
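For tab-delimited output the standard csv module is a less error-prone alternative to hand-built format strings. A minimal sketch (the reduced column set is illustrative, not the project's full schema):

import csv

def write_whale_rows(path, whales):
    # csv.writer with a tab delimiter handles field separation and line endings.
    with open(path, 'w', newline='') as f:
        writer = csv.writer(f, delimiter='\t')
        writer.writerow(['classtype', 'trackID', 'jDay', 'latitude', 'longitude'])
        for w in whales:
            for i in range(w.Nobs):
                writer.writerow([w.classType, w.trackID,
                                 '%0.7f' % w.jDay[i],
                                 '%0.6f' % w.latitude[i],
                                 '%0.6f' % w.longitude[i]])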
Example #4
def getUpdateAttendance():
    if request.method == 'POST':
        attendance = Attendance(attendanceCollection)

        data = request.get_json()
        studentId = data['studentId']
        courseCode = data['courseCode']
        attendanceId = data['attendanceId']

        date = helpers.getDate()
        res = attendance.updateOne(courseCode, attendanceId, studentId, date)
        resObj = loads(res)

        for student in resObj[0]['classAttendance']['allStudents']:
            student.pop('encodings')

        return dumps(resObj)
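Both this handler and findFaces below strip the stored face encodings before serializing, since those vectors are large and not needed by the client. A minimal sketch of that scrubbing step (the document shape follows the example; the helper itself is hypothetical):

def strip_encodings(attendance_doc):
    # Drop the 'encodings' field from every student record before returning JSON.
    for student in attendance_doc.get('classAttendance', {}).get('allStudents', []):
        student.pop('encodings', None)
    return attendance_doc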
Example #5
def predictWpositions(w):
  # use utms and observation time intervals to interpolate positions, speeds and tortuosity (tau)
  jDayStart = w.jDay[0]      #use whale's start and stop time to set up time axis
  tStartSecs = (jDayStart - int(jDayStart))*24*3600
  jDayStop = w.jDay[-1]
  tStopSecs  = (jDayStop  - int(jDayStop))*24*3600
  passbySecs = int(tStopSecs - tStartSecs)   # length of observation in seconds
  Npts = int(passbySecs/gp.modelDeltaSecs)
  
  day2sec = 24*3600
  idx = 0
  for i in range(Npts-1):
    tMod = w.jDay[0] + i * gp.modelDeltaSecs/day2sec     # setup time axis for model whale
    while tMod > w.jDay[idx+1] and idx < w.Nobs-2:
      idx += 1
    dt = tMod - w.jDay[idx]
    if w.jDay[idx+1] == w.jDay[idx]:
      print(w.trackID, w.jDay[idx], idx, i, tMod, len(w.jDay), helpers.getDate(w.jDay[idx]))
      input("Likely a row was repeated here by error -- press Enter to continue")
      frac = 0
    else:  
      frac = dt/(w.jDay[idx+1] - w.jDay[idx])
    dx = w.utmE[idx + 1] - w.utmE[idx]
    dy = w.utmN[idx + 1] - w.utmN[idx]
    w.tModSecs.append(tMod)
    w.xMod.append(w.utmE[idx] + frac * dx)
    w.yMod.append(w.utmN[idx] + frac * dy)

    if frac <= 0.5:
#      deltat = dt*frac
      arg = -(tMod - w.jDay[idx])/((w.jDay[idx+1] - w.jDay[idx])/gp.tortDamping)
      tortuosity = w.tortuosity[idx-1] * np.exp(arg)  # exponential damping of tortuosity interpolation
    else:
#      deltat = -dt*(1-frac)
      arg = -(w.jDay[idx+1] - tMod)/((w.jDay[idx+1] - w.jDay[idx])/gp.tortDamping)
      tortuosity =  w.tortuosity[idx]*np.exp(arg)
    w.tauMod.append(tortuosity)
    
    #  label as long dive or not  --  project forward the value of w.dive from the beginning of this obs interval (idx)
    w.deepdive.append(w.dive[idx])
    w.vxMod.append(w.vE[idx])
    w.vyMod.append(w.vN[idx])
    w.vMod.append(w.v[idx])
    w.aMod.append(w.a[idx])
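The position interpolation above is piecewise linear in time, which np.interp can reproduce per coordinate as a cross-check. A sketch that resamples a track onto a regular grid (the 5 s default step stands in for gp.modelDeltaSecs):

import numpy as np

def interp_track(jday, utm_e, utm_n, delta_secs=5.0):
    # Resample positions onto a regular time axis by linear interpolation.
    day2sec = 24 * 3600
    t_obs = (np.asarray(jday) - jday[0]) * day2sec   # seconds since the first observation
    t_mod = np.arange(0.0, t_obs[-1], delta_secs)    # regular model time axis
    x_mod = np.interp(t_mod, t_obs, utm_e)
    y_mod = np.interp(t_mod, t_obs, utm_n)
    return t_mod, x_mod, y_mod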
Example #6
def findFaces():
    if request.method == 'POST':
        attendance = Attendance(attendanceCollection)

        data = request.get_json()

        # img_data = data['image']
        courseCode = data['courseCode']

        path = 'api/currentFrame/imageFrame'
        # helpers.base64toImg(img_data, path)

        helpers.single_frame(path)

        date = helpers.getDate()  # today's date
        currentClassAttendance = attendance.getAttendance(date, courseCode)
        currentClassAttendanceObj = loads(currentClassAttendance)

        markedAttendance = helpers.findFaces(path, currentClassAttendanceObj)
        attendance.updateStudentAttendance(currentClassAttendanceObj[0]['_id'],
                                           markedAttendance)

        if os.path.exists(f'{path}.jpg'):
            os.remove(f'{path}.jpg')
        else:
            print("The file does not exist")

        completeAttendance = attendance.getAttendanceById(
            currentClassAttendanceObj[0]['_id'])
        completeAttendanceObj = loads(completeAttendance)

        for student in completeAttendanceObj['classAttendance']['allStudents']:
            student.pop('encodings')

        return dumps(completeAttendanceObj)
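The temporary frame is only deleted when the happy path completes; wrapping the recognition step in try/finally guarantees cleanup even on errors. A minimal sketch (run_recognition stands in for the helpers.findFaces and update steps):

import os

def process_frame(path, run_recognition):
    # Always remove the captured image, even if recognition raises.
    try:
        return run_recognition(path)
    finally:
        if os.path.exists(f'{path}.jpg'):
            os.remove(f'{path}.jpg')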
Example #7
def process_request(request, page, num_per_page):
	
	# Process the form query
	wheres = []
	ewheres = []
	wheres_args = []
	ewheres_args = []
			
	post = request.GET
	if post.get('issue_id'):
		ids = post['issue_id'].split()
		id_n_sql = []
		id_e_sql = []
		for word in ids:
			
			# TODO: Be able to use >, <, >=, <=
			op = '='			
			
			if word.isdigit():
				id_n_sql.append('n.issue=%s')
				id_e_sql.append('e.issue=%s')
				wheres_args.append(word)
				ewheres_args.append(word)
		wheres.append(' OR '.join(id_n_sql))
		ewheres.append(' OR '.join(id_e_sql))
		
	if post.get('change_date_start'):
		cds = helpers.getDate(post['change_date_start'])
		wheres.append('n.entry_date >= %s')
		wheres_args.append(cds.strftime('%Y-%m-%d 00:00'))
		ewheres.append('e.add_date >= %s')
		ewheres_args.append(cds.strftime('%Y-%m-%d 00:00'))
		
	if post.get('change_date_end'):
		from datetime import timedelta
		cde = helpers.getDate(post['change_date_end']) + timedelta(days=1)
		wheres.append('n.entry_date < %s')
		wheres_args.append(cde.strftime('%Y-%m-%d 00:00'))
		ewheres.append('e.add_date < %s')
		ewheres_args.append(cde.strftime('%Y-%m-%d 00:00'))
		
	if post.get('issue_type'):
		it = get_object_or_404(IssueType, pk=post['issue_type'])
		wheres.append('n.note ILIKE %s')
		wheres_args.append('%%type%%to %%%s%%' % it.type)
		
	if post.get('issue_disposition'):
		id = get_object_or_404(IssueDisposition, pk=post['issue_disposition'])
		wheres.append('n.note ILIKE %s')
		wheres_args.append('%%disposition%%to %%%s%%' % id.disposition)
		
	if post.get('project'):
		p = get_object_or_404(IssueProject, pk=post['project'])
		wheres.append('n.note ILIKE %s')
		wheres_args.append('%%project%%to %%%s%%' % p.name)
				
	rtr = post.get('related_to_relation')
	if rtr and rtr.isdigit():		
		rt = get_object_or_404(Person, pk=rtr)
		wheres.append('n.note ILIKE %s')
		ewheres.append('(r.reply_type=1 AND ea.person=%s)')
		rt_name = '%s %s' % (rt.first_name, rt.last_name)
		wheres_args.append('%%reported by%%to %%%s%%' % rt_name)
		ewheres_args.append(int(rtr))
		
	total_issues = 0
	issues = []
	if post.get('page'):
		# Only show issues after a search has been done. This will help
		# with the page's initial load time.
		issues, total_issues = get_historicalized_notes_and_emails(wheres, ewheres,
				wheres_args, ewheres_args, page, num_per_page)
				
	return issues, total_issues
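The handler accumulates SQL fragments with %s placeholders and keeps the values in the parallel *_args lists so the database driver performs the parameter substitution. A minimal sketch of how such fragments can be combined (table and column names are illustrative):

def build_where(wheres, wheres_args):
    # Join collected fragments into one WHERE clause; values stay separate for safe execution.
    if not wheres:
        return '', []
    clause = ' WHERE (' + ') AND ('.join(wheres) + ')'
    return clause, list(wheres_args)

# usage with a DB-API cursor (names are illustrative):
# clause, args = build_where(wheres, wheres_args)
# cursor.execute('SELECT n.* FROM notes n' + clause, args)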