Example 1
def ANMIvrForOverdueServices_Exotel(mode=1, target=1):
    #mode = 1 Due
    #mode = 2 OverDue
    #mode = 3 Both
    if target == 1:
        benef_type = ANCBenef
    elif target == 2:
        benef_type = IMMBenef

    timezone = 'Asia/Kolkata'
    tz = pytz.timezone(timezone)
    today = utcnow_aware().replace(tzinfo=tz)
    date_then = today.replace(hour=12, minute=0, day=1, second=0).date()
    from sms.sender import SendVoiceCall

    #Voice call for ANC
    if mode == 1:
        benefs = benef_type.objects.filter(due_events__date=date_then).distinct()
    elif mode == 2:
        benefs = benef_type.objects.filter(odue_events__date=date_then).distinct()
    else:
        benefs = benef_type.objects.filter(odue_events__date=date_then).distinct() & ANCBenef.objects.filter(due_events__date=date_then).distinct()
    anms = CareProvider.objects.filter(beneficiaries__in=benefs).distinct()
    
    callerid = ""
    app_url = settings.EXOTEL_APP_URL
    for anm in anms:
        try:
            connect_customer_to_app(customer_no=anm.notify_number, callerid=callerid, app_url=app_url)
        except:
            print(sys.exc_info()[0])
Example 2
File: server.py Project: mplang/p2p
def server(listen_port):
    try:
        r.start_server(listen_port)
        stdin_thread = threading.Thread(target=stdin_listener, args=())
        stdin_thread.start()
        while True:
            # Server loop
            curr_time = time.time()
            for activity in activity_tracker:
                if curr_time - activity_tracker[activity] > 3600:
                    # Clean database of hosts with activity more than one hour ago.
                    with stdout_lock:
                        print("Remove host from database: {}".format(activity[0]))
                    database_remove_host(activity[0])
            try:
                message = r.receive_data()
                process_message(message)
            except queue.Empty:
                continue
            except:
                with stdout_lock:
                    print("Unexpected error:", sys.exec_info()[0])
    except KeyboardInterrupt:
        with stdout_lock:
            print("Goodbye!")
    except:
        with stdout_lock:
            print("Unexpected error:", sys.exec_info()[0])
    finally:
        r.close()
        os._exit(1)
def getAll(IP=''):
    global myCursor, myConnection
    opType = "GET_ALL"
    allData = []
    count = 0
    retFlag = -1
    if not myCursor:
        startConnection()
    value = ''
    try:
        sqlSmt = "SELECT * FROM " + DBTABLE
        #msg(0, opType, "SQL: " + sqlSmt, IP)
        for row in myCursor.execute(sqlSmt):
            allData.append((row[0], row[1]))
            count = count + 1
        msg(0, opType, str(count) + " row(s) fetched", IP)
        if count == 0:
            retFlag = 1
        else:
            retFlag = 0
    except:
        retFlag = -1
        print sys.exc_info()
        msg(1, opType, "getting all key-value pairs failed!")
    return retFlag, count, allData
Example 4
 def reboot(self, id):
     """Reboot the instance."""
     dbaas = common.get_client()
     try:
         result = dbaas.management.reboot(id)
     except:
         print sys.exc_info()[1]
Example 5
def receiveData(s):
	data = ""
	try:
		data = s.recvfrom(65565)
	except timeout:
		data = ""
	except:
		print("an error occurred")
		print(sys.exc_info())
	return data[0]
Example 6
 def transfer(self, filename, servername, remotedir, userinfo):
     try:
         self.do_transfer(filename, servername, remotedir, userinfo)
         print("%s of %s successful" % (self.mode, filename))
     except:
         print("%s of %s has failed" % (self.mode, filename), end=" ")
         print(sys.exc_info()[0], sys.exc_info()[1])
     self.mutex.acquire()
     self.threads -= 1
     self.mutex.release()
def ReadWebContent(UrlLink):

    try:

        ContentObj = urllib2.urlopen(UrlLink, timeout=120)

        WebContent = ContentObj.read()
    except:

        print 'Unable to open ' + UrlLink
        print sys.exc_info()
        WebContent = ''

    return WebContent
Example 8
 def sendSuccess(self, resp, command, data, prepend=None):
     logger.debug("SUCCESS! "+command+":"+data)
     #logger.debug("response: '%s'" % (resp,))
     if prepend:
         w = "%s:%s %s:%s\r\n" % (prepend, command, fencode(resp), data)
     else:
         w = "%s:%s:%s\r\n" % (command, fencode(resp), data)
     self.transport.write(w)
     self.commands[command][CONCURR] -= 1
     try:
         self.serviceQueue(command)
     except:
         print sys.exc_info()
     return resp
Example 9
def convertvideo(video):
    if video is None:
        return "Kein Video im Upload gefunden"
    filename = video.videoupload
    print "Konvertiere Quelldatei: %s" + filename
    if filename is None:
        return "Video mit unbekanntem Dateinamen"
    sourcefile = "%s%s" % (settings.MEDIA_ROOT, filename)
    flvfilename = "%s.flv" % video.id
    thumbnailfilename = "%svideos/flv/%s.png" % (settings.MEDIA_ROOT, video.id)
    targetfile = "%svideos/flv/%s" % (settings.MEDIA_ROOT, flvfilename)
    ffmpeg = "ffmpeg -y -i %s -acodec mp3 -ar 22050 -ab 32 -f flv -s 320x240 %s" % (sourcefile, thumbnailfilename)
    grabimage = "ffmpeg -y -i %s -vframs 1 -ss 00:00:02 -an -vcodec png -f rawvideo -s 320x240 %s" % (sourcefile, thumbnailfilename)
    flvtool = "flvtool2 -U %s" % targetfile
    print ("Source: %s" % sourcefile)
    print ("Target: %s" % targetfile)
    print ("FFMPEG: %s" % ffmpeg)
    print ("FLVTOOL: %s" % flvtool)
    try:
        ffmpegresult = commands.getoutput(ffmpeg)
        print "-----------------FFMPEG------------------"
        print ffmpegresult
        # Check if file exists and is > 0 Bytes
        try:
            s = os.stat (targetfile)
            print s
            fsize = s.st_size
            if (fsize == 0):
                print "File is 0 Bytes gross"
                os.remove (targetfile)
                return ffmpegresult
            print "Dateigroesse ist %i" %fsize
        except:
            print sys.exc_info()
            print "File %s scheint nicht zu existieren" % targetfile
            return ffmpegresult
        flvresult = commands.getoutput(flvtool)
        print "--------------FLVTOOL----------------"
        print flvresult
        grab = commands.getoutput (grabimage)
        print "--------------GRAB IMAGE----------------"
        print grab
    except:
        print sys.exc_info()
        return sys.exc_info()[1]
    video.flvfilename = flvfilename
    video.save()
    return None
            
Example 10
def copy_lldbpy_file_to_lldb_pkg_dir( vDictArgs, vstrFrameworkPythonDir, vstrCfgBldDir ):
	dbg = utilsDebug.CDebugFnVerbose( "Python script copy_lldbpy_file_to_lldb_pkg_dir()" );
	bOk = True;
	bDbg = vDictArgs.has_key( "-d" );
	strMsg = "";
	
	strSrc = vstrCfgBldDir + "/lldb.py";
	strSrc = os.path.normcase( strSrc );
	strDst = vstrFrameworkPythonDir + "/__init__.py";
	strDst = os.path.normcase( strDst );
	
	if not os.path.exists( strSrc ):
		strMsg = strErrMsgLLDBPyFileNotNotFound % strSrc;
		return (bOk, strMsg);
	
	try:
		if bDbg:
			print(strMsgCopyLLDBPy % (strSrc, strDst));
		shutil.copyfile( strSrc, strDst );
	except IOError as e:
		bOk = False;
		strMsg = "I/O error( %d ): %s %s" % (e.errno, e.strerror, strErrMsgCpLldbpy);
		if e.errno == 2:
			strMsg += " Src:'%s' Dst:'%s'" % (strSrc, strDst);
	except:
		bOk = False;
		strMsg = strErrMsgUnexpected % sys.exc_info()[0];
	
	return (bOk, strMsg);
Example 11
	def create_log_files(self):
		try:
			self.csvFile = open(self.csvFilename, 'w')
			self.rawFile = open(self.rawFilename, 'w')
		except:
			print "Unexpected file opening error:",sys.exec_info()[0]
			raise
Example 12
	def write_raw(self):
		try:
			""" writes each condition in the rawFile in the following format
				Filename(line x):
					Condition: condition text
					Type: type of condition(IF,WHILE ...)
					#Branches: Number of branches in condition
					Multicond: 0 - not a part of a multiple condition
							   1 - a part of a multiple condition
					Expected: 0 - condition is EXPECTED to happen
							  1 - condition is UNEXPECTED to happen
							  2 - unknown expectation
					Branches:
							# : branch name - probability% (x times executed)
			"""
			for cond in self.execConditions :
				out = cond.to_string()
				self.rawFile.write(out)

			self.rawFile.write("\n\n---- Never Executed ----\n\n")
			for cond in self.neverConditions :
				out = cond.to_string()
				self.rawFile.write(out)
		except:
			print "Unexpected file writing error:",sys.exec_info()[0]
			raise

		try:
			self.rawFile.close()
		except:
			print "Error on closing file"
			raise
def ftp_down(oftp, fd_save, range_start, range_end, n_thread):
    ''' thread function: create ftp data connections, download FTP target with REST and RETR request '''

    try:
        fd_ftp = ftp_connect_login(oftp)

        fd_ftp.voidcmd('TYPE I')
        fd_ftp.sendcmd('REST %s' % range_start)
        fd_ftpdata = fd_ftp.transfercmd(
            'RETR %s' % oftp.ftp_path)  # ftp data fd

        offset = range_start
        while offset < range_end + 1:
            if offset > range_end + 1 - BUFFSIZE:
                content_block = fd_ftpdata.recv(range_end + 1 - offset)
            else:
                content_block = fd_ftpdata.recv(BUFFSIZE)
            global rlock_file
            with rlock_file:
                fd_save.seek(offset)
                fd_save.write(content_block)
                global size_down
                size_down += len(content_block)
                # print "Thread %d piece %d done: %d-%d" % (n_thread, i, offset,
                # offset+len(content_block)-1)
            offset += len(content_block)
        print "Thread %d all done: %d-%d" % (n_thread, range_start, range_end)

        fd_ftpdata.close()
        ftp_disconnect(fd_ftp)
        return 0
    except Exception as error:
        print error
        return traceback.format_exception(*sys.exc_info())
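The docstring in ftp_down describes the threading scheme: each worker seeks to its own byte range and fetches it with REST/RETR. As a hedged illustration of how such workers might be driven, here is a minimal sketch that reuses ftp_down and the oftp descriptor from the example above; split_ranges and download are hypothetical names, and the output file is pre-allocated so every thread can seek and write its own slice under the shared lock.

import threading

def split_ranges(total_size, n_threads):
    # Hypothetical helper: divide [0, total_size) into contiguous byte ranges.
    step = total_size // n_threads
    ranges = []
    for i in range(n_threads):
        start = i * step
        end = total_size - 1 if i == n_threads - 1 else (i + 1) * step - 1
        ranges.append((start, end))
    return ranges

def download(oftp, total_size, save_path, n_threads=4):
    # Pre-allocate the target file so each worker can seek/write its own slice.
    fd_save = open(save_path, 'wb')
    fd_save.truncate(total_size)
    workers = []
    for n, (start, end) in enumerate(split_ranges(total_size, n_threads)):
        t = threading.Thread(target=ftp_down, args=(oftp, fd_save, start, end, n))
        t.start()
        workers.append(t)
    for t in workers:
        t.join()
    fd_save.close()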
Example 14
def status_wifite(sWhat,sValue):
    if 'rab' in aToDisplay:
        del aToDisplay['rab']
    if sWhat=='Cracked':
        try:
            # sValue is the AP's ssid
            sKey=str(client.read('/cracked/from_reboot/'+sValue).value)
        except KeyError:
            print 'No key for AP "'+sValue+'"'
        except:
            print 'Unexpected error:', sys.exc_info()[0]
        else:
            lcd.backlight(ON)
            aToDisplay[sValue]=">"+sValue+':'+sKey+"\n"
            if 'wifite' in aToDisplay:
                del aToDisplay['wifite']
    elif sWhat == 'Attacking':
        aToDisplay['wifite']="Attacking:\n"+sValue+"\n"
    elif sWhat == 'Start cracking':
        aWStatus=string.split(str(client.read('/wifite/status').value),':')
        aToDisplay['wifite']="Cracking:\n"+sValue+"\n"+aWStatus[1]+"\n"
    else:
        # Wifite status
        aToDisplay['wifite']=sValue+"\n"
    return aToDisplay
Example 15
File: tv.py Project: otfbot/otfbot
 def processUpdatedData(self):
     del self.tv
     if self.source == "otr":
         complete = True
         for i in range(self.days):
             try:
                 if not self.is_complete(datadir + "/" + time.strftime("epg_%Y_%m_%d.csv",time.gmtime(time.time()+86400*i))):
                     complete = False
             except OSError: #FileNotFound
                 complete = False
         if not complete:
             self.tv = None
             self.bot.root.getServiceNamed('scheduler').callLater(30,self.processUpdatedData)
             self.bot.logger.info("tvdata is not loaded completely yet. TV-Plugin will be aviable as it's loading is done.")
         else:
             try:
                 self.tv = tv_otr(datadir, self.days, self.bot)
             except:
                 self.bot.logger.info(sys.exc_info())
     elif self.source == "xmltv":
         try:
             self.tv = tv_xmltv(self.xmltvfile)
             #reload data tomorrow
             self.bot.root.getServiceNamed('scheduler').callLater(86400, self.download_data)
         except IOError:
             self.logger.info("xmltv-file is not loaded completely yet. TV-Plugin will be aviable as it's loading is done.")
             #retry 30 seconds later
             self.bot.root.getServiceNamed('scheduler').callLater(30,self.processUpdatedData)
Example 16
def copy_lldbpy_file_to_lldb_pkg_dir(
        vDictArgs,
        vstrFrameworkPythonDir,
        vstrCfgBldDir):
    dbg = utilsDebug.CDebugFnVerbose(
        "Python script copy_lldbpy_file_to_lldb_pkg_dir()")
    bOk = True
    bDbg = "-d" in vDictArgs
    strMsg = ""

    strSrc = os.path.join(vstrCfgBldDir, "lldb.py")
    strSrc = os.path.normcase(strSrc)
    strDst = os.path.join(vstrFrameworkPythonDir, "__init__.py")
    strDst = os.path.normcase(strDst)

    if not os.path.exists(strSrc):
        strMsg = strErrMsgLLDBPyFileNotNotFound % strSrc
        return (bOk, strMsg)

    try:
        if bDbg:
            print((strMsgCopyLLDBPy % (strSrc, strDst)))
        shutil.copyfile(strSrc, strDst)
    except IOError as e:
        bOk = False
        strMsg = "I/O error(%d): %s %s" % (e.errno,
                                           e.strerror, strErrMsgCpLldbpy)
        if e.errno == 2:
            strMsg += " Src:'%s' Dst:'%s'" % (strSrc, strDst)
    except:
        bOk = False
        strMsg = strErrMsgUnexpected % sys.exc_info()[0]

    return (bOk, strMsg)
Example 17
def buildWget(in_file, out_file):

    try:
        ifile = open(in_file, 'r')
        ofile = open(out_file, 'w')

        comd = 'wget -P ./pdfs '
        comURL = 'http://gain.fas.usda.gov/Recent GAIN Publications/'

        ofile.write("#!/bin/bash" + os.linesep)

        for line in ifile:
            # a line is formatted: title \t date & time
            # split and just use the title
            nl1, nl2 = line.split('\t')

            ofile.write(comd + " \"" + comURL + nl1 + '.pdf\"' + os.linesep)


        ifile.close()
        ofile.close()
        return 0
    except:
        print "{}".format(sys.exec_info()[0])
        return 1
Example 18
def report_event_api(request):
    if not request.method == 'POST':
        return HttpResponse('{} Not Allowed'.format(request.method), status=405)
    
    print('Started view.')
    device = request.POST.get('device', '')
    event_type = request.POST.get('type', '')
    region_id = request.POST.get('region', None)
    event = models.FishingEvent(device=device, event_type=event_type, region_id=region_id, timestamp=now())
    # Optional stuff
    event.latitude = request.POST.get('latitude', None)
    event.longitude = request.POST.get('longitude', None)
    event.species = request.POST.get('species', None)
    event.size = request.POST.get('size', '')
    event.weight = request.POST.get('weight', '')
    event.notes = request.POST.get('notes', '')
    print('Built Event.')

    try:
        event.save()
    except Exception as ex:
        print(sys.exc_info())
        return HttpResponse('Data was missing or improperly formatted.', status=400)

    print('Saved Event.')

    return event_detail_api(request, event.pk)
Example 19
def DeleteZone():
	Values = request.json
	ZoneName = Values["Zone"]
	output = []
	i = 0
	#Delete the zone name from the configuration file ("/etc/named.conf"):
	try:
		with open("/etc/named.conf", "r") as f:
			start = "zone " + ZoneName
			for line in f:
				if not line.startswith(start) and (i == 0 or i > 5):
					output.append(line)
				else:
					i += 1
		f = open("/etc/named.conf", "w")
		f.writelines(output)
		f.close()
		print "Deleted from configuration file"
	except:
		return {"Message": str(sys.exc_info()[0])}

	#Connection to database
	cursor = Connection.cursor()
	#Delete from Database
	try:
		Querry = "drop table " + get_db_name(ZoneName) + ";"
		cursor.execute(Querry)
		cursor.close()
	except MySQLdb.Error, e:
		return {"Error" + str(e.args[0]) + " ": e.args[1]}
Example 20
def genCfg():
    # init a cfg obj
    try:
        cfg = ConfigParser.ConfigParser()
        cfg.read('../conf/crawler.ini')
        return cfg
    except Exception, e:

        print red(sys.exc_info())
Example 21
def main():
	try:
		filename = sys.argv
	except:
		print("Improper input") #don't think this line can be encountered
		sys.exit(0)
	try:
		f = open(filename[1], 'rb')
	except IndexError:
		print("No input was given")
		sys.exit(0)
	except IOError:
		print(filename[1] + " does not exist")
		sys.exit(0)
	except:
		print("Unknown Error", sys.exec_info()[0])
		sys.exit(0)
		
	
	counter = 0 #number of bytes read so far
	try:	
		line = f.read(16)
	except:
		print("No data could be read") #don't think this line can be encountered
	try:
		while(len(line) > 0):
			print(format(counter, 'x').zfill(8), end="  ") #print bytes read with at least 8 digits
		
			#iterate byte hex values
			for x in range (0,len(line)):	
				print(hex(line[x])[2:].zfill(2), end=" ")
				if (x == 7): #awkward space
					print(" ", end="")
				counter += 1 #increment amount of bytes read
	
			#adjust spaces on last line
			if(len(line) < 16):
				for x in range (0, 16-len(line)):
					print("   ", end="")
				if(len(line) < 8): #awkward space
					print(" ", end="")
	
			print(" |", end="")

			for x in range (0,len(line)):
				if(line[x] > 31 and line[x] < 127): #print character if ascii
					print(chr(line[x]), end="")
				else:
					print(".", end="")	#print . if not ascii
			print("|")
	
			line = f.read(16)
		f.close()
		print(format(counter, 'x').zfill(8)) #print total bytes
	except:
		print("Unknown error", sys.exc_info()[0])
		sys.exit(0)
Example 22
 def _convert_exceptions(func, *args, **kwargs):
     try:
         return func(*args, **kwargs)
     except catch_types or () as e:
         exc = sys.exc_info()
     except Exception as e:
         if catch_types is not None:
             raise
         exc = sys.exc_info()
     six.reraise(convert_type, convert_type(exc[1]), exc[2])
Example 23
def find():
    print "find, reporting for duty"
    query = {}
    try:
        cursor = scores.find(query).sort('student_id', pymongo.ASCENDING).skip(4).limit(1)
    except:
        print "Unexpected error:", sys.exc_info()[0]

    for doc in cursor:
        print doc
Example 24
def sync(sock, operation, filename):
	#print filename
	try:
		fp=open("userlog",'r')
		for line in fp:
			userinfo = line
		data = operation+'^'+filename
	except:
		print "file missing"
	try:
		string = userinfo+'^'+data
		fp.close()
		sock.send('5'+string)
		received = sock.recv(1024)
	except:
		print "Unexpected error:", sys.exc_info()[0]
	if operation ==  "add" or operation =="modify":
		#received = sock.recv(1024)
		hashlist1=received.split()
		fp=open("Sync-n-Share"+filename,'r')
		filelist=[]
		hashlist2=[]
		for line in fp:
			hashlist2.append(str(hash(line)))
			filelist.append(line)
		print hashlist1
		print hashlist2
	#	time.sleep(20)
		C = LCS(hashlist1, hashlist2) 
		createDiff(C, hashlist1, hashlist2,len(hashlist1),len(hashlist2),filelist)
		content=''.join(diff)
		#print "content is:"+content
		del diff[0:len(diff)]
		if content=='' and received!=' ':
			print "there were no updations"
		else:
			#print "content is:"+content+"over"
			try:
				sock.send('6'+userinfo+'^'+filename+'^'+content)
			except:
				print "Unexpected error:", sys.exec_info()[0]
			temp = sock.recv(512)
Example 25
def readFile(filename):
	lines=None
	try:
		with open(filename) as f:
			lines = f.readlines()
		f.close()
		return(lines)
	except:
		e = sys.exc_info()[0]
		print("Error: " + str(e))
		return(0)
	def renameFiles( self ):
		cntr = self._counter
		new_file_name = None
		# Rename all files that are contained in the file list
		for f in self._file_list:
			cntr = cntr + 1
			new_file_name = self._buildNewFileName( f, cntr )
			print "Old: " + f + " >> " + "New: " + new_file_name
			try:
				os.rename( f, new_file_name )
			except:
				print "Error while renaming file: ", sys.exec_info()[0]
    def __init__(self):
        logging.basicConfig(level=logging.DEBUG, format = '%(asctime)s %(name)-12s %(levelname)-8s %(message)s', datefmt='%m-%d %H:%M', filename=node_config.PATH_TO_LOG_FILE)
	print "Initializing Node Plugin.."
        server = SimpleXMLRPCServer((node_config.LISTEN_IP, node_config.LISTEN_PORT), logRequests=True)
        server.register_function(self.do_default)
        server.register_function(self.get_vms)
        server.register_function(self.get_monitoring_data)
        try:
            server.serve_forever()
        except:
            e = sys.exc_info()[1]
            logging.error("server execution error: %s"%e)
Example 28
def geoJSONAttributes_processor(entity):
    """ Takes the properties/attributes of a GeoJSON document and returns a CSV representation of the data"""
    try:
        jsonout = json.loads(entity)
    except ValueError:
        logging.error('Had a problem converting json to python, is it well formed?')
        logging.error(sys.exc_info())
    try:
        df = pandas.DataFrame([ item['properties'] for item in jsonout ])
    except PandasError, e:
        logging.error('%s' % e )
        logging.error(sys.exc_info())
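The docstring promises a CSV representation, but the snippet stops after building the DataFrame. A minimal continuation sketch, assuming the df built above, could hand the CSV text back with pandas' to_csv:

    # Continuation sketch (assumed, not part of the original snippet):
    # serialize the properties DataFrame to CSV text and return it.
    return df.to_csv(index=False)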
Example 29
def disconnectMdsip(fifodir):
    #print "Disconnecting"
    try:
        sys.stdout.flush()
        fifo_out=open(fifodir+'/in','r+b')
        fifo_out.write('bye'.ljust(48,'-'))
        fifo_out.flush()
        fifo_out.close()
    except Exception:
        print('got exception shutting down:', sys.exc_info())
        sys.stdout.flush()
    raise Exception('mdsip disconnect')
Example 30
        def _inner():
            try:
                while self._running:
                    self.func(*self.args, **self.kwargs)
                    """It is important to check `self._running`'s value, if it is False, break."""
                    if not self._running:
                        break

                    greenthread.sleep(interval)
            except Exception:
                LOG.error("Periodic task %s running failed..." %
                          self.func.__name__)
                self.done.send_exception(*sys.exc_info())
def phoneComm(cPhone_conn, s, ser):
    import os
    import RPi.GPIO as GPIO
    pid = os.getpid()
    cPhone_conn.send(pid)
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(2, GPIO.OUT)
    GPIO.output(2, GPIO.LOW)
    GPIO.setup(11, GPIO.OUT)
    GPIO.output(11, GPIO.LOW)
    power = 1
    auto = False
    try:
        while True:
            s.listen(5)
            print("socket is listening here")

            #accept
            conn, addr = s.accept()
            #cPhone_conn.send(msg)
            try:
                i = 0
                dataString = ""
                while True:
                    #added print, each run the addr changes
                    #print (str(conn))
                    #print("Run : " + str(i))
                    print("Waiting for command"
                          )  #: {}".format(str(time.clock())))
                    data = conn.recv(1024)
                    print(
                        "Command received")  #: {}".format(str(time.clock())))

                    dataString = data.decode(('utf-8'), 'replace')
                    dataString = dataString.rstrip("\r\n")
                    print("Input: {}".format(dataString))
                    GPIO.output(2, GPIO.HIGH)
                    if not data:
                        #print('breaking')
                        conn.sendall("DC".encode("UTF-8"))
                        cPhone_conn.send("DC")
                        conn.close()
                        break
                    if (dataString == "Off"):
                        #print('Client' + str(addr)+ ' sent: ' + dataString)
                        #print("Powering down...")
                        conn.sendall(bytes("Off\n", "UTF-8"))
                        cPhone_conn.send("Off")
                        power = 0
                        break
                    if (dataString == "DC"):
                        #print('Client' + str(addr)+ ' sent: ' + dataString)
                        #print("Successful Disconnection")
                        ser.write('s'.encode('utf-8'))
                        conn.sendall(bytes("DC\n", "UTF-8"))
                        cPhone_conn.send("DC")
                        break
                    elif (dataString == "C"):
                        #print('Client' + str(addr)+ 'sent: ' + dataString)
                        #print("Successful Connection")
                        cPhone_conn.send("C")
                        conn.sendall(bytes("c\n", "UTF-8"))
                        i = i + 1
                    elif (dataString == "f"):
                        #print("Moving Forward")
                        #do_forward(ser)
                        print("Forward received: {}".format(str(time.clock())))
                        ser.write('f'.encode('utf-8'))
                        conn.sendall(bytes("F\n", "UTF-8"))
                        print("Forward processed: {}".format(str(
                            time.clock())))
                    elif (dataString == "l"):
                        #print("Left")
                        #do_goleft(ser)
                        ser.write('l'.encode('utf-8'))
                        conn.sendall(bytes("L\n", "UTF-8"))
                    elif (dataString == "r"):
                        #print("Right")
                        #do_goright(ser)
                        ser.write('r'.encode('utf-8'))
                        conn.sendall(bytes("R\n", "UTF-8"))
                    elif (dataString == "b"):
                        #print("back")
                        #do_goback(ser)
                        ser.write('b'.encode('utf-8'))
                        conn.sendall("B\n".encode("UTF-8"))
                    elif (dataString == "end"):
                        ##                        print("ending")
                        #do_stopcar(ser)
                        print("end received: {}".format(str(time.clock())))
                        ser.write('s'.encode('utf-8'))
                        conn.sendall(bytes("End\n", "UTF-8"))
                        print("end p: {}".format(str(time.clock())))
                    elif (dataString == "s"):
                        #print("STOP")
                        #do_stopcar(ser)
                        ser.write('s'.encode('utf-8'))
                        cPhone_conn.send("s")
                        GPIO.output(11, GPIO.HIGH)
                        conn.sendall(bytes("Stop\n", "UTF-8"))
                    elif (dataString == "m"):
                        #print("m")
                        cPhone_conn.send("m")
                        conn.sendall(bytes("m\n", "UTF-8"))
                    elif (dataString == "a"):
                        #print("a")
                        auto = True
                        cPhone_conn.send("a")
                        conn.sendall(bytes("a\n", "UTF-8"))
                    elif (dataString == "done"):
                        #print("Done")
                        conn.sendall(bytes("Done\n", "UTF-8"))
                    else:
                        if auto:
                            cPhone_conn.send(dataString)
                            conn.sendall(bytes("received\n", "UTF-8"))
                        else:
                            print("UC")
                            conn.sendall(bytes("UC\n", "UTF-8"))

                print('closed')
                conn.close()

                if (power == 0):
                    break
            except socket.error as e:
                print(e)
            except IOError as e:
                if e.errno == errno.EPIPE:
                    cPhone_conn.send("Disconnect")
                else:
                    print(e)
            except:
                print("a FATAL error has occured: ", sys.exec_info()[0])
    finally:
        s.close()
        GPIO.cleanup()
Example 32
async def on_error(event):
    logger.error(f"Event {event} errored.", exc_info=sys.exec_info())
Example 33
                        '--temporal',
                        dest='temporal',
                        action='store_true',
                        help=temporal_help)

    args = parser.parse_args()

    # Load yml db configuration file (path temporally harcoded)
    db_config = None

    with open(args.db_config_path, 'r') as db_config_file:
        try:
            db_config = yaml.load(db_config_file)
            db_config_file.close()
        except:
            print('ERROR: ', sys.exc_info()[0])
            db_config_file.close()
            sys.exit()

    # Check that db configuration is well defined
    if (("PG_HOST" not in db_config) or ("PG_NAME" not in db_config)
            or ("PG_PORT" not in db_config) or ("PG_USER" not in db_config)
            or ("PG_PASSWORD" not in db_config)
            or ("MATERIALIZED_VIEW_NAME" not in db_config)):
        print((
            "ERROR: Database is not properly defined in '{0}', please refer to README.md"
            .format(args.db_config_path)))
        sys.exit()

    # Connect to database
    db = connect(
Example 34
def main(argv):
    logLevel = logging.WARN
    configureLogging(log, logLevel)
    _fixencoding()
    try:
        optlist, args = getopt.getopt(argv[1:], 'hVd:e:k:l:f:m:inpr', [
            'help', 'version', 'debug_port', 'key', 'log_level', 'log_file',
            'preload', 'modules', 'interactive', 'nodebug', 'nostdin'
        ])
    except getopt.GetoptError:
        msg = sys.exc_info()[1]
        sys.stderr.write("pydbgp: error: %s\n" % str(msg))
        sys.stderr.write("See 'pydbgp --help'.\n")
        return 1

    import locale
    codeset = locale.getdefaultlocale()[1]
    idekey = getenv('USER', getenv('USERNAME', ''))
    if is_v2:
        try:
            if codeset:
                idekey = idekey.decode(codeset)
            else:
                idekey = idekey.decode()
        except (UnicodeDecodeError, LookupError):
            log.warn("unable to decode idekey %r" % idekey)
            pass  # nothing we can do if defaultlocale is wrong
    host = '127.0.0.1'
    port = 9000
    preloadScript = None
    ignoreModules = []
    profiling = 0
    interactive = 0
    nodebug = 0
    redirect = 1
    logFile = None
    for opt, optarg in optlist:
        if optarg and is_v2:
            try:
                if codeset:
                    optarg = optarg.decode(codeset)
                else:
                    optarg = optarg.decode()
            except (UnicodeDecodeError, LookupError):
                log.warn("unable to decode argument %s = %r" % (opt, optarg))
                pass  # nothing we can do if defaultlocale is wrong
        if opt in ('-h', '--help'):
            sys.stdout.write(__doc__)
            return 0
        elif opt in ('-V', '--version'):
            import re
            kw = re.findall('\$(\w+):\s(.*?)\s\$', __revision__)
            sys.stderr.write("pydbgp Version %s %s %s %s %s\n"\
                             % ('.'.join([str(i) for i in __version__]),
                                kw[0][0], kw[0][1], kw[1][0], kw[1][1]))
            return 0
        elif opt in ('-d', '--debug_port'):
            if optarg.find(':') >= 0:
                host, port = optarg.split(':')
                port = int(port)
            else:
                host = '127.0.0.1'
                port = int(optarg)
        elif opt in ('-k', '--key'):
            idekey = optarg
        elif opt in ('-n', '--nodebug'):
            nodebug = 1
        elif opt in ('-l', '--log_level'):
            level_names = dict([
                (logging.getLevelName(lvl), lvl)
                for lvl in range(logging.NOTSET, logging.CRITICAL + 1, 10)
            ])
            # Add the levels that have multiple names.
            level_names['WARN'] = logging.WARNING
            level_names['FATAL'] = logging.FATAL
            try:
                logLevel = level_names[optarg]
            except KeyError:
                sys.stderr.write("pydbgp: error: Invalid log level\n")
                sys.stderr.write("See 'pydbgp --help'.\n")
                return 1
        elif opt in ('-f', '--log_file'):
            logFile = optarg
        elif opt in ('-e', '--preload'):
            preloadScript = optarg
        elif opt in ('-m', '--modules'):
            ignoreModules = optarg.split(',')
        elif opt in ('-p', '--profile', '--profiling'):
            profiling = 1
        elif opt in ('-i', '--interactive'):
            interactive = 1
        elif opt in ('-r', '--nostdin'):
            redirect = 0

    if not port:
        sys.stderr.write("pydbgp: error: IDE Port not provided\n")
        sys.stderr.write("See 'pydbgp --help'.\n")
        return 1

    if interactive:
        if not args:
            args = ['interactive']
            if sys.path[0] != '' and os.getcwd() not in sys.path:
                sys.path.insert(0, os.getcwd())

    if not args:
        sys.stderr.write("pydbgp: error: scriptname not provided\n")
        sys.stderr.write("See 'pydbgp --help'.\n")
        return 1

    # handle ~ paths
    if not interactive:
        args[0] = os.path.expanduser(args[0])
        args[0] = os.path.realpath(args[0])
        if not os.path.exists(args[0]):
            sys.stderr.write("pydbgp: error: scriptname %s does not exist\n" %
                             (args[0], ))
            sys.stderr.write("See 'pydbgp --help'.\n")
            return 1

    if nodebug:
        dbgp.client.runWithoutDebug(args, interactive, host, port, idekey,
                                    logLevel)
    elif profiling:
        dbgp.client.runWithProfiling(args, host, port, idekey, logLevel)
    else:
        if logFile:
            log.addHandler(logging.FileHandler(logFile))
            # Does not remove the existing default stderr handler.
        log.setLevel(logLevel)
        dbgp.client.set_thread_support(dbgp.client.backendCmd.debug_threads)
        client = dbgp.client.backendCmd(idekey,
                                        preloadScript,
                                        ignoreModules,
                                        module=h_main())
        client.stdin_enabled = redirect
        try:
            client.connect(host, port, '__main__', args)
        except socket.error:
            return 1
        if interactive and args[0] == 'interactive':
            cprt = 'Type "copyright", "credits" or "license" for more information.'
            sys.stdout.write("Python %s on %s\n%s\n" %
                             (sys.version, sys.platform, cprt))
            # wait until exit
            client.runInteractive()
        else:
            client.runMain(args, interactive)
    return 0
Example 35
# that is pointed to by the caret symbol in the syntax error.

# The try clause is executed. If no exception occurs, the except clause is
# skipped. If one occurs, then if there is a matching except, this is
# executed, and execution returns to the next line after the try/except
# block. If one occurs and is not handled in an except clause, execution
# stops and we have an unhandled exception

while True:
    try:
        x = int(input("Please enter a number: "))
        break
    except (RuntimeError, ValueError):  # Handle multiple exceptions
        print("That wasn't a valid number")
    except:
        print("Unexpected error: ", sys.exec_info()[0])  # Catch all other exc
        raise  # Re-raises the original exception
    else:
        print("Executed only if the try clause does not raise an exception")
    finally:
        print("This is always executed")

try:
    raise Exception("arg1", "arg2")
except Exception as exc:
    print(exc.args)
    print(exc)  # __str__ is defined for all exceptions, so can be printed
    # directly
    x, y = exc.args  # Exception arguments can be accessed

# When defining our own exceptions, typically have one class called
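The truncated comment above is heading toward the usual convention: user-defined exceptions derive from Exception, typically via one module-level base class that more specific errors subclass. A minimal sketch of that pattern (class and attribute names here are illustrative, not from the original text):

class Error(Exception):
    """Base class for exceptions raised by this module."""

class InputError(Error):
    """Raised when an input value is invalid."""
    def __init__(self, expression, message):
        super().__init__(message)
        self.expression = expression  # the input that triggered the error
        self.message = message        # human-readable explanation

try:
    raise InputError("x < 0", "negative values are not allowed")
except Error as exc:
    print("handled:", exc.message)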
Example 36
def main():
    logging.basicConfig(level=logging.INFO)

    inputs = io_helper.fetch_data()

    # Dependent variable for tsne this might be the labels - this is optional
    labels = None
    dependent = inputs["data"].get("dependent", [])
    indep_vars = inputs["data"]["independent"]  # For tsne the data dimensions



    if not data_types_in_allowed(indep_vars, ["integer", "real"]):
        logging.warning("Independent variables should be continuous !")
        return None
    #
    data = format_independent_data(inputs["data"])
    df = pd.DataFrame.from_dict(data)
    source_dimensions = df.shape[1] # number of columns
    num_points = df.shape[0]   # number of samples/points

    convdf = df.apply(lambda x: pd.to_numeric(x))
    # Write the data to a temporary file
    f = tempfile.NamedTemporaryFile(delete=False)
    input = convdf.values.astype(np.float32)
    logging.debug('input {}'.format(input))

    # Get the parameters (optional)
    perplexity = 30
    theta = 0.5
    target_dimensions = 2
    iterations = 1000
    do_zscore = True
    dependent_is_label = True

    try:
        perplexity = get_parameter(inputs['parameters'], 'perplexity', perplexity)
        theta = get_parameter(inputs['parameters'], 'theta', theta)
        target_dimensions = get_parameter(inputs['parameters'], 'target_dimensions', target_dimensions)
        iterations = get_parameter(inputs['parameters'], 'iterations', iterations)
        do_zscore_str = get_parameter(inputs['parameters'], 'do_zscore', str(do_zscore))
        if do_zscore_str == 'True':
            do_zscore = True
        elif do_zscore_str == 'False':
            do_zscore = False
        else:
            raise ValueError
        dependent_is_label_str = get_parameter(inputs['parameters'], 'dependent_is_label', str(dependent_is_label))
        if dependent_is_label_str == 'True':
            dependent_is_label = True
        elif dependent_is_label_str == 'False':
            dependent_is_label = False
        else:
            raise ValueError

    except ValueError as e:
        logging.error("Could not convert supplied parameter to value, error: %s", e)
        raise
    except:
        logging.error("Unexpected error: %s", sys.exc_info()[0])
        raise
    # Compute results

    if do_zscore:
        input = scipy.stats.zscore(input)

    if len(dependent) > 0 and dependent_is_label:
        dep_var = dependent[0]
        labels = dep_var["series"]

    inputFilePath = f.name
    input.tofile(inputFilePath)
    f.close()

    f = tempfile.NamedTemporaryFile(delete=False)
    outputFilePath = f.name
    f.close()
    output = a_tsne(inputFilePath, outputFilePath, num_points,
                     source_dimensions, target_dimensions, perplexity,
                     theta, iterations)

    logging.debug('output shape {}'.format(output.shape))
    logging.debug('output {}'.format(output))
    chart = generate_scatterchart(output, indep_vars, labels, perplexity, theta, iterations)

    error = ''
    shape = 'application/highcharts+json'

    logging.debug("Highchart: %s", chart)
    io_helper.save_results(chart, error, shape)
    logging.info("Highchart output saved to database.")
Example 37
        print('      Finished')
    except sp.CalledProcessError as e:
        print(e.output)
    except:
        print("Unexpected error:", sys.exc_info()[0])

else:

    try:
        flag = 0
        print('Checking for gfortran...', end='')
        sys.stdout.flush()
        sp.call(['gfortran', '--version'])
        print('     Finished')
    except:
        print("Unexpected Error: ", sys.exec_info()[0])
        flag = 1
        print("Trying to install gfortran for you... ")
        try:
            sp.call(['brew', 'cask', 'install', 'gfortran'])
            print('     Finished')
        except:
            print('Unexpected Error: ', sys.exc_info()[0])
            print("Maybe you don't have Homebrew? Go install gfortran")

    if flag:
        try:
            print('Checking for gfortran again...', end='')
            sys.stdout.flush()
            sp.call(['gfortran', '--version'])
            print('     we did it lol')
Example 38
                '//*[@id="timelineId"]/div[1]/table/tbody/')
            for elem in range(table_xpath):
                print("for loop")
            table_id = chromedriver.find_element(By.ID, 'table_id')
            rows = table_id.find_elements(
                By.TAG_NAME, "tr")  # get all of the rows in the table
            for row in rows:
                # Get the columns (all the column 2)
                col = row.find_elements(
                    By.TAG_NAME,
                    "td")[1]  #note: index start from 0, 1 is col 2
                print(col.text)  #prints text from the element
            # second_result = wait.until(presence_of_element_located((By.)))
            print("")
        except:
            print(sys.exc_info())

        # time.sleep(5)
        # first_result = wait.until(presence_of_element_located((By.CSS_SELECTOR, "h3>div")))
        # print(first_result.get_attribute("textContent"))

        # print(search.text)
    except:
        chromedriver.quit()

#This example requires Selenium WebDriver 3.13 or newer
# with webdriver.Firefox() as driver:
#     wait = WebDriverWait(driver, 10)
#     driver.get("https://google.com/ncr")
#     driver.find_element(By.NAME, "q").send_keys("cheese" + Keys.RETURN)
#     first_result = wait.until(presence_of_element_located((By.CSS_SELECTOR, "h3>div")))
Example 39
 def run_p(self):
     global userChoices, enzymes, fastaRead
     sh.log("\nstart run_p")
     sh.click()
     self.restrictResults.clear()
     self.numeralResults.clear()
     if not fastaRead:
         self.restrictResults.setPlainText(
             "You must select a fasta file first")
         return
     if len(userChoices) <= 0:
         self.restrictResults.setPlainText(
             "You must select R.Enzymes first")
         return
     self.detectPushButton.setEnabled(False)  # can't run twice
     try:
         linear = self.linearCheckBox.isChecked()
         analysis = Analysis(userChoices, self.sequence, linear=linear)
     except:
         sh.log("analysis failed " + sys.exc_info()[0])
     # print each enzyme with a list of it's matching sites
     cutSites = str(
         analysis.format_output(
             dct=None,
             title='',
             s1='\n  Enzymes which do not cut the sequence\n'))
     self.restrictResults.setPlainText(cutSites)
     # ------------------------------- FIND PALINDROME HIT COUNTS -----------------------------------------------
     try:
         endMarker = "END"
         enzymes.append(endMarker)
         # Extract enzymes and the index of their cutSites from cutSites
         palin = cutSites[:cutSites.find("Enzymes")].replace(
             '.', "").replace(':', "").split()
         palin.append(endMarker)
         sh.log("palin: " + str(palin))
     except:
         sh.log("palin NG " + sys.exec_info()[0])
     try:
         # Calculate and display the number of matching sites for each enzyme
         # enzPosn initally has a list of lists.  Each sublist has the enzyme name
         #   and the index of the enzyme in palin
         # enzPosn sublist later has the enzyme name and the number of matches.
         enzPosn = []
         enzNone = []
         sh.log("len palin " + str(len(palin)))
         sh.log("user choices " + str(userChoices))
         allChoices = userChoices
         allChoices.append(endMarker)  # matches last name in palin
         sh.log("allChoices " + str(allChoices))
         for enz in allChoices:
             if enz in palin:
                 enzPosn.append([enz, palin.index(enz)])
             else:
                 sh.log(enz + " not in palin")
                 enzNone.append(enz)
         sh.log("enzPosn = " + str(enzPosn))
         enzPosn.sort(key=lambda x: x[1])  # sort on index of name in palin
         for i in range(len(enzPosn) - 1):  # Replace the index with the
             enzPosn[i][1] = enzPosn[
                 i + 1][1] - enzPosn[i][1] - 1  # length of palin entry
         del enzPosn[-1]  # delete endMarker
         for enz in enzNone:
             enzPosn.append([enz,
                             0])  # add in enzymes not found; length = 0
         enzPosn.sort(key=lambda x: x[0])  # sort on name
         sh.log("enzPosn = " + str(enzPosn))
         for i in range(len(
                 enzPosn)):  # show the number of matches for each enzyme
             matchStr = "{0:7,d} : {1:s}\n\n".format(
                 enzPosn[i][1], enzPosn[i][0])
             self.numeralResults.insertPlainText(matchStr)
     except:
         sh.log('I cannot do that. ' + str(sys.exc_info()[0]))
     self.detectPushButton.setEnabled(False)
     self.nPosPushButton.setEnabled(True)
def main():

    #Define global variables
    global imgWidthDriver
    global imgHeightDriver
    global imgWidthVision
    global imgHeightVision
    global framesPerSecond

    #Define local variables
    driverCameraBrightness = 50
    visionCameraBrightness = 0

    #Define local flags
    networkTablesConnected = False
    driverCameraConnected = False
    visionCameraConnected = False
    foundBall = False
    foundTape = False
    foundVisionTarget = False

    #Get current time as a string
    currentTime = time.localtime(time.time())
    timeString = str(currentTime.tm_year) + str(currentTime.tm_mon) + str(
        currentTime.tm_mday) + str(currentTime.tm_hour) + str(
            currentTime.tm_min)

    #Open a log file
    logFilename = '/data/Logs/Run_Log_' + timeString + '.txt'
    log_file = open(logFilename, 'w')
    log_file.write('run started on %s.\n' % datetime.datetime.now())
    log_file.write('')

    #Load VMX module
    vmxpi = imp.load_source('vmxpi_hal_python',
                            '/usr/local/lib/vmxpi/vmxpi_hal_python.py')
    vmx = vmxpi.VMXPi(False, 50)
    if vmx.IsOpen() is False:
        log_file.write('Error:  Unable to open VMX Client.\n')
        log_file.write('\n')
        log_file.write(
            '        - Is pigpio (or the system resources it requires) in use by another process?\n'
        )
        log_file.write('        - Does this application have root privileges?')
        log_file.close()
        sys.exit(0)

    #Connect NetworkTables
    try:
        NetworkTables.initialize(server='10.41.21.2')
        visionTable = NetworkTables.getTable("vision")
        navxTable = NetworkTables.getTable("navx")
        smartDash = NetworkTables.getTable("SmartDashboard")
        networkTablesConnected = True
        log_file.write('Connected to Networktables on 10.41.21.2 \n')
    except:
        log_file.write('Error:  Unable to connect to Network tables.\n')
        log_file.write('Error message: %s\n' % str(sys.exc_info()[0]))
        log_file.write('\n')

    #Navx configuration
    navxTable.putNumber("ZeroGyro", 0)
    #navxTable.putNumber("ZeroDisplace", 0)

    #Reset yaw gyro
    vmx.getAHRS().Reset()
    vmx.getAHRS().ZeroYaw()

    #Reset displacement
    vmx.getAHRS().ResetDisplacement()

    #Set up a camera server
    camserv = CameraServer.getInstance()
    camserv.enableLogging

    #Start capturing webcam videos
    try:
        driverCameraPath = '/dev/v4l/by-path/platform-3f980000.usb-usb-0:1.5:1.0-video-index0'
        driverCamera = camserv.startAutomaticCapture(name="DriverCamera",
                                                     path=driverCameraPath)
        driverCamera.setResolution(imgWidthDriver, imgHeightDriver)
        driverCamera.setBrightness(driverCameraBrightness)
        driverCameraConnected = True
        log_file.write('Connected to driver camera on ID = 0.\n')
    except:
        log_file.write('Error:  Unable to connect to driver camera.\n')
        log_file.write('Error message: %s\n' % str(sys.exc_info()[0]))
        log_file.write('\n')

    try:
        visionCameraPath = '/dev/v4l/by-path/platform-3f980000.usb-usb-0:1.4:1.0-video-index0'
        visionCamera = camserv.startAutomaticCapture(name="VisionCamera",
                                                     path=visionCameraPath)
        visionCamera.setResolution(imgWidthVision, imgHeightVision)
        visionCamera.setBrightness(visionCameraBrightness)
        visionCameraConnected = True
    except:
        log_file.write('Error:  Unable to connect to vision camera.\n')
        log_file.write('Error message: %s\n' % str(sys.exc_info()[0]))
        log_file.write('\n')

    #Define video sink
    if driverCameraConnected == True:
        driverSink = camserv.getVideo(name='DriverCamera')
    if visionCameraConnected == True:
        visionSink = camserv.getVideo(name='VisionCamera')

    #Create an output video stream
    driverOutputStream = camserv.putVideo("DriveCamera", imgWidthDriver,
                                          imgHeightDriver)

    #Set video codec and create VideoWriter
    fourcc = cv.VideoWriter_fourcc(*'XVID')
    videoFilename = '/data/Match_Videos/RobotVisionCam-' + timeString + '.avi'
    visionImageOut = cv.VideoWriter(videoFilename, fourcc, 20.0,
                                    (imgWidthVision, imgHeightVision))

    #Create blank image
    imgDriver = np.zeros(shape=(imgWidthDriver, imgHeightDriver, 3),
                         dtype=np.uint8)
    imgVision = np.zeros(shape=(imgWidthVision, imgHeightVision, 3),
                         dtype=np.uint8)

    #Start main processing loop
    while (True):

        #Read in an image from 2019 Vision Images (for testing)
        #img = cv.imread('RetroreflectiveTapeImages2019/CargoStraightDark90in.jpg')
        #if img is None:
        #    break

        #Initialize video time stamps
        driverVideoTimestamp = 0
        visionVideoTimestamp = 0

        #Grab frames from the web cameras
        if driverCameraConnected == True:
            driverVideoTimestamp, imgDriver = driverSink.grabFrame(imgDriver)
        if visionCameraConnected == True:
            visionVideoTimestamp, imgVision = visionSink.grabFrame(imgVision)

        #Check for frame errors
        visionFrameGood = True
        if (driverVideoTimestamp == 0) or (visionVideoTimestamp == 0):
            print(str(driverVideoTimestamp))
            if (driverVideoTimestamp == 0) and (driverCameraConnected == True):
                log_file.write('Driver video error: \n')
                log_file.write(driverSink.getError())
                log_file.write('\n')
            if (visionVideoTimestamp == 0) and (visionCameraConnected == True):
                log_file.write('Vision video error: \n')
                log_file.write(visionSink.getError())
                log_file.write('\n')
                visionFrameGood = False
            sleep(float(framesPerSecond * 2) / 1000.0)
            continue

        if (visionFrameGood == True):

            #Call detection methods
            ballX, ballY, ballRadius, ballDistance, ballAngle, ballOffset, ballScreenPercent, foundBall = detect_ball_target(
                imgDriver)
            #tapeX, tapeY, tapeW, tapeH, tapeOffset, foundTape = detect_floor_tape(imgVision)
            visionTargetX, visionTargetY, visionTargetW, visionTargetH, visionTargetDistance, visionTargetAngle, visionTargetOffset, foundVisionTarget = detect_vision_targets(
                imgVision)

            #Update networktables and log file
            if networkTablesConnected == True:

                visionTable.putNumber("RobotStop", 0)
                visionTable.putBoolean("WriteVideo", writeVideo)

                visionTable.putNumber("BallX", round(ballX, 2))
                visionTable.putNumber("BallY", round(ballY, 2))
                visionTable.putNumber("BallRadius", round(ballRadius, 2))
                visionTable.putNumber("BallDistance", round(ballDistance, 2))
                visionTable.putNumber("BallAngle", round(ballAngle, 2))
                visionTable.putNumber("BallOffset", round(ballOffset, 2))
                visionTable.putNumber("BallScreenPercent",
                                      round(ballScreenPercent, 2))
                visionTable.putBoolean("FoundBall", foundBall)
                log_file.write('Cargo found at %s.\n' %
                               datetime.datetime.now())
                log_file.write('  Ball distance: %.2f \n' %
                               round(ballDistance, 2))
                log_file.write('  Ball angle: %.2f \n' % round(ballAngle, 2))
                log_file.write('  Ball offset: %.2f \n' % round(ballOffset, 2))
                log_file.write('\n')

                ##                visionTable.putNumber("TapeX", round(tapeX, 2))
                ##                visionTable.putNumber("TapeY", round(tapeY, 2))
                ##                visionTable.putNumber("TapeW", round(tapeW, 2))
                ##                visionTable.putNumber("TapeH", round(tapeH, 2))
                ##                visionTable.putNumber("TapeOffset", round(tapeOffset, 2))
                ##                visionTable.putBoolean("FoundTape", foundTape)
                ##                log_file.write('Floor tape found at %s.\n' % datetime.datetime.now())
                ##                log_file.write('  Tape offset: %.2f \n' % round(tapeOffset, 2))
                ##                log_file.write('\n')

                visionTable.putNumber("VisionTargetX", round(visionTargetX, 2))
                visionTable.putNumber("VisionTargetY", round(visionTargetY, 2))
                visionTable.putNumber("VisionTargetW", round(visionTargetW, 2))
                visionTable.putNumber("VisionTargetH", round(visionTargetH, 2))
                visionTable.putNumber("VisionTargetDistance",
                                      round(visionTargetDistance, 2))
                visionTable.putNumber("VisionTargetAngle",
                                      round(visionTargetAngle, 2))
                visionTable.putNumber("VisionTargetOffset",
                                      round(visionTargetOffset, 2))
                visionTable.putBoolean("FoundVisionTarget", foundVisionTarget)
                log_file.write('Vision target found at %s.\n' %
                               datetime.datetime.now())
                log_file.write('  Vision target distance: %.2f \n' %
                               round(visionTargetDistance, 2))
                log_file.write('  Vision target angle: %.2f \n' %
                               round(visionTargetAngle, 2))
                log_file.write('  Vision target offset: %.2f \n' %
                               round(visionTargetOffset, 2))
                log_file.write('\n')

            #Draw various contours on the image
            if foundBall == True:
                cv.circle(imgDriver, (int(ballX), int(ballY)), int(ballRadius),
                          (0, 255, 0), 2)  #ball
            #    cv.putText(imgVision, 'Distance to Ball: %.2f' %ballDistance, (320, 400), cv.FONT_HERSHEY_SIMPLEX, .75,(0, 0, 255), 2)
            #    cv.putText(imgVision, 'Angle to Ball: %.2f' %ballAngle, (320, 440), cv.FONT_HERSHEY_SIMPLEX, .75,(0, 0, 255), 2)
            if foundTape == True:
                cv.rectangle(imgVision, (tapeX, tapeY),
                             (tapeX + tapeW, tapeY + tapeH), (100, 0, 255),
                             1)  #floor tape
            if foundVisionTarget == True:
                cv.rectangle(imgVision, (visionTargetX, visionTargetY),
                             (visionTargetX + visionTargetW,
                              visionTargetY + visionTargetH), (0, 255, 0),
                             2)  #vision targets
                cv.putText(imgVision,
                           'Distance to Vision: %.2f' % visionTargetDistance,
                           (10, 400), cv.FONT_HERSHEY_SIMPLEX, .75,
                           (0, 255, 0), 2)
                cv.putText(imgVision,
                           'Angle to Vision: %.2f' % visionTargetAngle,
                           (10, 440), cv.FONT_HERSHEY_SIMPLEX, .75,
                           (0, 255, 0), 2)

            #Put timestamp on image
            cv.putText(imgVision, str(datetime.datetime.now()), (10, 30),
                       cv.FONT_HERSHEY_SIMPLEX, .5, (0, 0, 255), 2)

        #Update navx network table
        if networkTablesConnected == True:
            navxTable.putNumber("GyroAngle", round(vmx.getAHRS().GetAngle(),
                                                   2))
            navxTable.putNumber("GyroYaw", round(vmx.getAHRS().GetYaw(), 2))
            navxTable.putNumber("GyroPitch", round(vmx.getAHRS().GetPitch(),
                                                   2))
            navxTable.putNumber("YVelocity",
                                round(vmx.getAHRS().GetVelocityY(), 4))
            navxTable.putNumber("XVelocity",
                                round(vmx.getAHRS().GetVelocityX(), 4))
            navxTable.putNumber("YDisplacement",
                                round(vmx.getAHRS().GetDisplacementY(), 4))
            navxTable.putNumber("XDisplacement",
                                round(vmx.getAHRS().GetDisplacementX(), 4))
            navxTable.putNumber("YVelocity",
                                round(vmx.getAHRS().GetVelocityY(), 4))
            navxTable.putNumber("XVelocity",
                                round(vmx.getAHRS().GetVelocityX(), 4))
            navxTable.putNumber("YAccel",
                                round(vmx.getAHRS().GetWorldLinearAccelY(), 4))
            navxTable.putNumber("XAccel",
                                round(vmx.getAHRS().GetWorldLinearAccelX(), 4))

        #Add crosshairs to driver screen
##        if driverCameraConnected == True:
##            lineLength = 30
##            cv.line(imgDriver, (int(imgWidthDriver/2), int(imgHeightDriver/2 - lineLength)), (int(imgWidthDriver/2), int(imgHeightDriver/2 + lineLength)), (0, 0, 0), 1)
##            cv.line(imgDriver, (int(imgWidthDriver/2 - lineLength), int(imgHeightDriver/2)), (int(imgWidthDriver/2 + lineLength), int(imgHeightDriver/2)), (0, 0, 0), 1)
##            cv.circle(imgDriver, (int(imgWidthDriver/2), int(imgHeightDriver/2)), 10, (0, 0, 0), 1)

        #Send driver camera to dashboard
        if driverCameraConnected == True:
            driverOutputStream.putFrame(imgDriver)

        #Write processed image to file
        if (writeVideo == True) and (visionCameraConnected == True):
            visionImageOut.write(imgVision)

        #Display the two camera streams (for testing only)
        cv.imshow("Vision", imgVision)
        cv.imshow("Driver", imgDriver)

        #Check for gyro re-zero
        gyroInit = navxTable.getNumber("ZeroGyro", 0)
        if gyroInit == 1:
            vmx.getAHRS().Reset()
            vmx.getAHRS().ZeroYaw()
            navxTable.putNumber("ZeroGyro", 0)

        #Check for displacement zero
        #dispInit = navxTable.getNumber("ZeroDisplace", 0)
        #if dispInit == 1:
        #    vmx.getAHRS().ResetDisplacement()
        #    navxTable.putNumber("ZeroDisplace", 0)

        #Check for stop code from robot or keyboard (for testing)
        if cv.waitKey(1) == 27:
            break
        robotStop = visionTable.getNumber("RobotStop", 0)
        if (robotStop == 1) or (driverCameraConnected == False) or (
                visionCameraConnected == False) or (networkTablesConnected
                                                    == False):
            break

    #Close all open windows (for testing)
    #cv.destroyAllWindows()

    #Close video file
    visionImageOut.release()

    #Close the log file
    log_file.write('Run stopped on %s.' % datetime.datetime.now())
    log_file.close()
Esempio n. 41
0
 def process_management(self, ts):
     try:
         self.parent.management_frame_received(self.frame, ts)
     except Exception as e:
         self.logger.warning(sys.exc_info())
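Several snippets in this collection read the active exception through sys.exc_info(), which returns the (type, value, traceback) triple inside an except block. A minimal standalone sketch of that pattern, assuming only the standard library:

import sys
import traceback

def risky():
    return 1 / 0

try:
    risky()
except Exception:
    etype, evalue, tb = sys.exc_info()
    print("caught %s: %s" % (etype.__name__, evalue))
    traceback.print_tb(tb)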
Esempio n. 42
0
    def update(self, obs):
        # process air data
        try:
            tm = obs[0][0]  # ts
            p = obs[0][1]  # pressure
            t = obs[0][2]  # temp
            h = obs[0][3]  # humidity
            ls = obs[0][4]  # strikes
            ld = obs[0][5]  # distance
            bv = obs[0][6]  # battery

            sl = derived.toSeaLevel(p, self.elevation)
            trend = derived.updateTrend(p, self.trend)

            try:
                fl = derived.ApparentTemp(t, self.windspeed / 3.6, h)
                dp = derived.Dewpoint(t, h)
                hi = derived.Heatindex(t, h)
                wc = derived.Windchill(t, self.windspeed)
            except Exception as e:
                LOGGER.error('Failure to calculate Air temps: ' + str(e))

        except Exception as e:
            (t, v, tb) = sys.exc_info()
            LOGGER.error('Failure in processing AIR data: ' + str(e))
            LOGGER.error('  At: ' + str(tb.tb_lineno))

        # temperatures t, fl, dp, hi, wc  (conversions)
        if self.units['temperature'] != 'c':
            t = round((t * 1.8) + 32, 2)  # convert to F
            fl = round((fl * 1.8) + 32, 2)  # convert to F
            dp = round((dp * 1.8) + 32, 2)  # convert to F
            hi = round((hi * 1.8) + 32, 2)  # convert to F
            wc = round((wc * 1.8) + 32, 2)  # convert to F
            uom = 17
        else:
            uom = 4
        self.setDriver('CLITEMP', t, uom=uom)
        self.setDriver('GV0', fl, uom=uom)
        self.setDriver('DEWPT', dp, uom=uom)
        self.setDriver('HEATIX', hi, uom=uom)
        self.setDriver('WINDCH', wc, uom=uom)

        # pressures p, sl  (conversions)
        if self.units['pressure'] == 'inhg':
            p = round(p * 0.02952998751, 3)
            sl = round(sl * 0.02952998751, 3)
            uom = 23
        elif self.units['pressure'] == 'hpa':
            uom = 118
        else:
            uom = 117
        self.setDriver('ATMPRES', sl, uom=uom)
        self.setDriver('BARPRES', p, uom=uom)

        # distance ld  (conversions)
        if self.units['distance'] == 'mi':
            ld = round(ld / 1.609344, 1)
            uom = 116
        else:
            uom = 83
        self.setDriver('DISTANC', ld, uom=uom)

        # humidity h, strikes ls, battery bv, and trend (no conversions)
        self.setDriver('CLIHUM', h)
        self.setDriver('BATLVL', bv)
        self.setDriver('GV1', trend)
        self.setDriver('GV2', ls)
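A quick sanity check of the conversions above, using plain arithmetic rather than project data: 25 °C maps to (25 * 1.8) + 32 = 77.0 °F, and 1013.25 mb maps to 1013.25 * 0.02952998751 ≈ 29.92 inHg.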
Esempio n. 43
0
def writePin(status):
    print(status)
    if status == 'yes':
        response = displayAround()
    else:
        response = displayNotAround()
    return response



@app.route("/getCurrentLightValue")
def getCurrentLightValue():
    global currentLightValue
    msg = str(currentLightValue)
    return msg



if __name__ == '__main__':
    try:
        app.debug = True
        http_server = WSGIServer(('0.0.0.0', 8001), app)
        print('Server waiting for requests')
        http_server.serve_forever()
    except:
        print("Exception")
        import sys
        print(sys.exc_info()[0])
        print(sys.exc_info()[1])
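A minimal client sketch for the route above, assuming the server is reachable on localhost port 8001 (the urllib call is illustrative and not part of the original app):

from urllib.request import urlopen

with urlopen('http://localhost:8001/getCurrentLightValue') as resp:
    print(resp.read().decode())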
    
Esempio n. 44
0
def process_psl(sample, contigs, psl):
    """
    Iterate over a PSL file, pull out the highest scoring hit and do a number of other
    calculations. TODO: Break this into a couple of functions to clean up the logic a bit.
    """
    try:
        sequences = SeqIO.index("./finished_" + sample + "/" + contigs,
                                "fasta")  # how should i avoid doing this?
        blatfinal = {}
        for line in open(psl, 'r'):
            line = line.strip().split()
            matches = int(line[0])
            strand = line[8]
            qname = line[9]
            qsize = int(line[10])
            qstart = int(line[11]) + 1
            qend = int(line[12])
            tname = line[13]
            if len(tname.split('_:_')) > 1:
                tnamesplit = tname.split('_:_')[1]
            elif len(tname.split('_:_')) == 1:
                tnamesplit = tname
            tsize = int(line[14])
            tstart = int(line[15]) + 1
            tend = int(line[16])
            blocksizes = [int(m) for m in line[18].split(",")[0:-1]]
            qstarts = [int(m) + 1 for m in line[19].split(",")[0:-1]]
            tstarts = [int(m) + 1 for m in line[20].split(",")[0:-1]]
            if tstart == 1 and qstart > 1:
                hsubsize = (qstart - 1)
            elif tend == tsize and qend < qsize:
                hsubsize = (qsize - qend)
            else:
                hsubsize = 0
            #calculate percent id
            id = float(matches) / (qsize - hsubsize)
            #calculate cigar
            cigar = []
            cigarstring = str()
            if strand == "+":
                sqstart = qstart
                sqend = qend
            elif strand == "-":
                sqstart = (qsize - qend) + 1
                sqend = (qsize - qstart) + 1
            if sqstart > 1:
                L = sqstart - 1
                cigar.append(str(L) + "S")
            if len(blocksizes) == 1:
                cigar.append(str(blocksizes[0]) + "M")
            elif len(blocksizes) > 1:
                for i in range(len(blocksizes[0:-1])):
                    cigar.append(str(blocksizes[i]) + "M")
                    if qstarts[i] + blocksizes[i] != qstarts[i + 1]:
                        L = qstarts[i + 1] - (qstarts[i] + blocksizes[i])
                        cigar.append(str(L) + "I")
                    if tstarts[i] + blocksizes[i] != tstarts[i + 1]:
                        L = tstarts[i + 1] - (tstarts[i] + blocksizes[i])
                        cigar.append(str(L) + "D")
                cigar.append(str(blocksizes[-1]) + "M")
            if qsize > sqend:
                L = qsize - sqend
                cigar.append(str(L) + "S")
            for i in cigar:
                cigarstring = cigarstring + i
            #pull out sequence
            qseq = sequences[qname].seq
            #only keep if it hits the right target
            if qname.split('_:_')[1] == tnamesplit:
                #if hit doesn't match something in blatfinal already, append
                if qname not in blatfinal.keys():
                    blatfinal.update({
                        qname:
                        [tname, tstart, tend, strand, cigarstring, qseq, id]
                    })
                #if hit matches something in blatfinal, replace if id is higher
                elif qname in blatfinal.keys():
                    if id > blatfinal[qname][6]:
                        blatfinal.update({
                            qname: [
                                tname, tstart, tend, strand, cigarstring, qseq,
                                id
                            ]
                        })
        return blatfinal
    except Exception as e:
        print "EXCEPTION:", e
        print "".join(traceback.format_exception(*sys.exec_info()))
Esempio n. 45
0
import MySQLdb
import csv
import sys
from datetime import datetime

dbserver = 'localhost'
dbuser = '******'
dbpass = '******'
dbname = 'VM'
errorfile = 'historic_error.log'

db = MySQLdb.connect(dbserver, dbuser, dbpass, dbname)
cursor = db.cursor()
with open('parsed.csv', 'r') as csvfile:
    reader = csv.reader(csvfile, delimiter=',', quotechar='"')
    for rec in reader:
        query = 'insert into historic values("' + rec[0] + '","' + rec[
            1] + '","' + rec[2] + '","' + rec[3] + '","' + rec[4] + '","' + rec[
                5] + '","' + rec[6] + '","' + rec[7] + '","' + rec[8] + '")'
        cursor.execute(query)
try:
    db.commit()
except:
    db.rollback()
    etype, evalue, etraceb = sys.exc_info()
    with open(errorfile, 'a') as ferr:
        ferr.write(str(datetime.now()) + ' ' + str(evalue) + '\n')

db.close()
csvfile.close()
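A minimal sketch of the same insert written with parameterized placeholders, so MySQLdb escapes the values itself instead of relying on string concatenation (the nine-column layout is assumed from the query above):

query = 'INSERT INTO historic VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)'
for rec in reader:
    cursor.execute(query, tuple(rec[:9]))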
Esempio n. 46
0
timeStamp = dt.strftime("%y%b%d%H%M")

try:
    #remove old tar files if something went wrong
    fileList = glob.glob(folder + '/static/images/images.*.tar.gz')
    for filePath in fileList:
        os.remove(filePath)

    #wrapper for preparing a tarball of imagefolder
    #folders hardcoded for now
    tardir(folder + '/static/images',
           folder + '/static/images/images.' + timeStamp + '.tar.gz')

    #looks like the process ended successfully
    f = open(folder + "/logs/status.txt", "w+")
    f.write("container_ready")
    f.close()

    #looks like the process ended successfully
    f = open('/media/ramdisk/status.txt', 'w+')
    f.write("container_ready")
    f.close()

except:
    #record exception
    error = sys.exc_info()[0]
    f = open(folder + "/logs/error.txt", "a+")
    f.write("error during tarball creation\n")
    f.write(str(error) + "\n")
    f.close()
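The tardir() helper called above is defined elsewhere in the project; a minimal sketch of such a helper using the standard tarfile module, with the signature assumed from the call site:

import os
import tarfile

def tardir(path, tar_name):
    #pack everything under path into a gzipped tarball at tar_name
    #(a real helper would likely skip the tarball itself when it lives inside path)
    with tarfile.open(tar_name, "w:gz") as tar:
        for root, dirs, files in os.walk(path):
            for name in files:
                tar.add(os.path.join(root, name))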
Esempio n. 47
0
    fenster.geometry("800x350")
    fenster.title("Be A Pro!: Kuhn Munkres Algorithmus Solver")
    rahmen = Frame(fenster, relief="ridge", borderwidth=5)
    rahmen.pack(fill="both", expand = 1)
    button1 = Anzeigen(rahmen,text="Personen Anzeigen", width = 20, height = 5)
    button1.config(font=("Arial", 12, "bold"))
    button1["command"] = button1.anzeigen
    button1.place(x = 50, y = 50)
    button2 = Hinzufuegen(rahmen,text="Mitarbeiter hinzufügen", width = 20, height = 5)
    button2.config(font=("Arial", 12, "bold"))
    button2["command"] = button2.hinzufuegen
    button2.place(x = 430, y = 50)
    button3 = Entfernen(rahmen,text="Mitarbeiter entfernen", width = 20, height = 5)
    button3.config(font=("Arial", 12, "bold"))
    button3["command"] = button3.entfernen
    button3.place(x = 50, y = 200)
    button4 = MyButton(rahmen,text="Matching erzeugen", width = 20, height = 5)
    button4.config(font=("Arial", 12, "bold"))
    button4["command"] = button4.aktion
    button4.place(x = 430, y = 200)

    fenster.mainloop()

except:
  print("folgender Fehler ist aufgetreten: ", sys.exec_info()[0])
#finally:
   # outfile.close()
    #f.close()

Esempio n. 48
0
    def find(self, epsilon=0.001, M=1000, tolerance=1E-6):

        #solving the TIC-finding problem using the information from the initialization
        #Unconstrained variables
        #   objective value
        #
        #Constrainted discrete variables - note that the (j) indicates that these should be defined for each reaction
        #   v_j - flux rate for reaction j in the current solution, constrained to values between -1 and -epsilon and epsilon and 1
        #
        #Binary variables - note that the (j) indicates that these should be defined for each reaction
        #   eta(j) - value of 1 if reaction j participates in TIC
        #   alpha(j) - value of 1 if v_j is positive and eta is 1
        #   beta(j) - value of 1 if v_j is negative and eta is 1
        #   gamma(p) - used in integer cuts to prevent duplicate solutions (where p is the set of past solutions)
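        #For reference, the constraints assembled further down can be summarized as follows
        #(UB_j and LB_j denote the reaction bounds set in the loop below; this summary is
        # inferred from that code, not from an external formulation):
        #   const_1:   v_j <= (1 - beta_j)*UB_j - epsilon*beta_j
        #   const_2:   v_j <= eta_j*UB_j
        #   const_3:   v_j >= beta_j*LB_j + epsilon*eta_j
        #   const_4:   v_j >= eta_j*LB_j
        #   const_6:   sum_j eta_j == phi
        #   const_7:   sum over database reactions of eta_j >= 1
        #   const_8-10: alpha_j <= eta_j, alpha_j <= 1 - beta_j, alpha_j >= eta_j + (1 - beta_j) - 1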

        #the max phi value will be defined on the number of reactions
        max_phi = 0

        #counter for the number of solutions
        solution_number = 0

        #helps keeps track of the time to solve the full algorithm
        alg_start = time.time()

        #create a file to write the output to
        output = open('TFP_results.txt', 'w', buffering=1)
        output.write("TIC-Finding Problem Solutions\n")
        output.write("Model: " + self.original_model.name + "\n")
        output.write("Database: " + self.database.name + "\n\n")

        #need to start by turning off all exchange reactions
        for rxn in self.combined_model.reactions:

            #note that max_phi should also be the index of the reaction in the exchange_rxns and reactions lists
            #so long as we are not in the list of reactions originating in the database

            #if the reaction is exchange, then fix it as having zero flux
            if rxn.id in self.exchange_rxns:

                #if it is an exchange reaction, force both bounds to zero
                self.combined_model.reactions[max_phi].lower_bound = 0.0
                self.combined_model.reactions[max_phi].upper_bound = 0.0

            #check if irreversible backwards
            elif (rxn.reversibility == True) and (rxn.upper_bound == 0):

                #if here an irreversible forward non-exchange reaction
                self.combined_model.reactions[max_phi].lower_bound = -1.0
                self.combined_model.reactions[max_phi].upper_bound = 0.0

            #check if irreversible forward
            elif rxn.reversibility == False:

                #if here an irreversible forward non-exchange reaction
                self.combined_model.reactions[max_phi].lower_bound = 0.0
                self.combined_model.reactions[max_phi].upper_bound = 1.0

            #otherwise must be reversible
            else:

                #otherwise not an exchange reaction
                #for all other reactions (e.g. reversible non-exchange), force bounds to be -1 to +1
                #the epsilon part will be taken care of by constraints
                self.combined_model.reactions[max_phi].lower_bound = -1.0
                self.combined_model.reactions[max_phi].upper_bound = 1.0

            #by this point have defined reaction bounds as used in OptFill
            #could increment a value of max phi
            max_phi = max_phi + 1

        #initialize a list of constraints
        constraints = list()

        #dummy objective: maximize # of participating reactions
        #objective is summation of eta
        self.combined_model.objective = self.combined_model.problem.Objective(
            Zero, direction='max')

        #force number of etas to be at most phi
        const_6 = self.combined_model.problem.Constraint(Zero,
                                                         lb=0,
                                                         ub=0,
                                                         name="const_6",
                                                         sloppy=False)
        self.combined_model.add_cons_vars([const_6], sloppy=False)
        self.combined_model.solver.update()

        #constraint to force number of etas from the database to be at least one
        #ensures that we are not simply finding inherent TICs
        const_7 = self.combined_model.problem.Constraint(Zero,
                                                         lb=1,
                                                         name="const_7",
                                                         sloppy=False)
        self.combined_model.add_cons_vars([const_7], sloppy=False)
        self.combined_model.solver.update()

        #build a list of coefficients for the integer cuts that will be used?

        #need to iterate constraint definition over reactions
        for rxn in self.combined_model.reactions:

            #add variables
            alpha = self.combined_model.problem.Variable(
                name='alpha_{}'.format(rxn.id), lb=0, ub=1, type='binary')
            beta = self.combined_model.problem.Variable(name='beta_{}'.format(
                rxn.id),
                                                        lb=0,
                                                        ub=1,
                                                        type='binary')
            eta = self.combined_model.problem.Variable(name='eta_{}'.format(
                rxn.id),
                                                       lb=0,
                                                       ub=1,
                                                       type='binary')

            self.combined_model.add_cons_vars([alpha, beta, eta], sloppy=False)

            #define a way to reference the variables
            alpha.rxn_id = rxn.id
            beta.rxn_id = rxn.id
            eta.rxn_id = rxn.id

            #define constraints on the combined model

            #upper bound of flux based on beta
            #if I understand things right, self.combined_model.reactions.flux is the same as v_j
            const_1 = self.combined_model.problem.Constraint(
                rxn.flux_expression -
                ((1 - beta) * rxn.upper_bound - epsilon * beta),
                ub=0,
                name='const_1_{}'.format(rxn.id))

            #upper bound of flux based on eta
            const_2 = self.combined_model.problem.Constraint(
                rxn.flux_expression - (eta * rxn.upper_bound),
                ub=0,
                name='const_2_{}'.format(rxn.id))

            #lower bound of flux based on beta
            const_3 = self.combined_model.problem.Constraint(
                rxn.flux_expression - (beta * rxn.lower_bound + epsilon * eta),
                lb=0,
                name='const_3_{}'.format(rxn.id))

            #lower bound of flux based on eta
            const_4 = self.combined_model.problem.Constraint(
                rxn.flux_expression - (eta * rxn.lower_bound),
                lb=0,
                name='const_4_{}'.format(rxn.id))

            #mass balance should be automatically built into the model, this is constraint 5 (const_5)

            #define alpha based on eta
            const_8 = self.combined_model.problem.Constraint(
                alpha - eta, ub=0, name='const_8_{}'.format(rxn.id))

            #define alpha based on beta
            const_9 = self.combined_model.problem.Constraint(
                alpha - (1 - beta), ub=0, name='const_9_{}'.format(rxn.id))

            #define alpha based on eta and beta
            const_10 = self.combined_model.problem.Constraint(
                alpha - (eta + (1 - beta) - 1),
                lb=0,
                name='const_10_{}'.format(rxn.id))

            #add the new constraints to the list of constraints
            constraints.extend([
                const_1, const_2, const_3, const_4, const_8, const_9, const_10
            ])

            #set coefficients for constraint equations
            const_6.set_linear_coefficients({eta: 1})
            self.combined_model.objective.set_linear_coefficients({eta: 1})

            #since const_7 is a sum across database reactions, need to check if in database
            if bool(re.search('^database$', rxn.origin)):

                #give this eta a coefficient of 1
                const_7.set_linear_coefficients({eta: 1})

            else:

                #otherwise give this eta a coefficient of 0
                const_7.set_linear_coefficients({eta: 0})

            self.alphas.update({alpha.name: alpha})
            self.betas.update({beta.name: beta})

        #add constraints to the model
        self.combined_model.add_cons_vars(constraints, sloppy=False)

        #give the problem a name
        self.combined_model.problem.name = "TIC Finding Problem (TFP)"

        #write out the problem expression for debugging purposes
        #output.write("problem: "+str(const_7.problem)+"\n")

        #try to make sure that the model is good to go for solving
        self.combined_model.solver.update()
        self.combined_model.repair()

        #iterate over each phi, solving
        for phi in range(max_phi):

            #since multiple solutions possible at each value of phi, have while loop to try to capture each
            found_all = False

            #update the bounds on const_6 so that exactly phi reactions participate
            const_6.ub = int(phi)
            const_6.lb = int(phi)
            print("Working on phi = " + str(phi))

            self.combined_model.tolerance = tolerance
            self.combined_model.solver.update()
            self.combined_model.repair()

            while not found_all:

                #wrap in try: the solver may raise an error if the problem is infeasible
                try:

                    #ignore warnings since "infeasible" is a warning but a normal part of this tool
                    with warnings.catch_warnings():

                        #ignores "infeasible" warnings since will probably happen frequently
                        warnings.simplefilter('ignore', category=UserWarning)

                        #attempt to find a solution
                        self.combined_model.repair()
                        self.combined_model.solver.update()

                        print("attempting to solve TFP...")

                        #time how long the solution takes
                        start_time = time.time()

                        tfp_solution = self.combined_model.optimize()

                        end_time = time.time()

                        total_time = end_time - start_time

                        print("complete\n")

                except:

                    #output to command line that error occurred
                    print("error occurred: " + str(sys.exec_info()[0]))
                    print("End of solutions for phi = " + str(phi) + "\n\n")

                    #some kind of error got thrown in solution, move to next phi
                    output.write("End of solutions for phi = " + str(phi) +
                                 "\n")
                    output.write("Solution time: " + str(total_time) +
                                 " s\n\n")

                    #if solution is not optimal, then
                    found_all = True

                    #define an empty solution so errors don't get thrown later
                    tfp_solution = cobra.core.Solution(objective_value=0,
                                                       status=INFEASIBLE,
                                                       fluxes=None)

                #tfp_solution will be a solution object, which has property "status" anything but infeasible is good enough
                if tfp_solution.status == INFEASIBLE:

                    #state that no more solutions to be had at this value of phi
                    output.write("End of solutions for phi = " + str(phi) +
                                 "\n")
                    output.write("Solution time: " + str(total_time) +
                                 " s\n\n")
                    print("End of solutions for phi = " + str(phi) + "\n")

                    #if solution is not optimal, then
                    found_all = True

                else:

                    #need to add results to the class properties
                    #looks like the command is ".append()"
                    self.solution_numbers.append(solution_number)
                    self.objective_values.append(tfp_solution.objective_value)

                    #found all is still so far false
                    found_all = False

                    print("Solution #" + str(solution_number) +
                          " identified\n")

                    #write the solution
                    output.write("TFP SOLUTION #" + str(solution_number) +
                                 "\n")
                    output.write("PHI = " + str(phi) + "\n")
                    output.write("OBJECTIVE VALUE: " +
                                 str(tfp_solution.objective_value) + "\n")
                    output.write("REACTION\tSOURCE\t\tDIR\t\tFLUX\n")
                    output.write(
                        "-------------------------------------------------------------\n"
                    )

                    #need to make sum of alphas and betas
                    sum_alpha = 0
                    sum_beta = 0

                    for rxn in self.combined_model.reactions:

                        #report the optimal results
                        if self.combined_model.solver.primal_values.get(
                                'alpha_{}'.format(rxn.id)) == 1.0:

                            output.write(
                                str(rxn.id) + "\t" + str(rxn.origin) +
                                "\t->\t\t" + str(rxn.flux) + "\n")
                            sum_alpha = sum_alpha + 1

                        elif self.combined_model.solver.primal_values.get(
                                'beta_{}'.format(rxn.id)) == 1.0:

                            output.write(
                                str(rxn.id) + "\t" + str(rxn.origin) +
                                "\t<-\t\t" + str(rxn.flux) + "\n")
                            sum_beta = sum_beta + 1

                        elif self.combined_model.solver.primal_values.get(
                                'eta_{}'.format(rxn.id)) == 1.0:

                            #if there is an eta value yet not an alpha or beta something went wrong
                            #hence the question marks in the report
                            output.write(
                                str(rxn.id) + "\t" + str(rxn.origin) +
                                "\t??\t\t" + str(rxn.flux) + "\n")

                        #also need to save the solutions for later referencing
                        #will use nested dictonaries to call first solution number then reaction id
                        #note that we round because the solver values are sometimes imprecise; rounding to the nearest integer avoids issues later
                        self.prev_alphas.update({
                            "{}_{}".format(solution_number, rxn.id):
                            round(
                                self.combined_model.solver.primal_values.get(
                                    'alpha_{}'.format(rxn.id)))
                        })
                        self.prev_betas.update({
                            "{}_{}".format(solution_number, rxn.id):
                            round(
                                self.combined_model.solver.primal_values.get(
                                    'beta_{}'.format(rxn.id)))
                        })

                    #if a solution has been found, need to add an integer cut precluding that
                    #solution from being found in future
                    #some constraints also iterate over solutions

                    #the binary variable gamma ensures that at minimum one of the integer cuts defined below applies
                    gamma = self.combined_model.problem.Variable(
                        name='gamma_{}'.format(solution_number),
                        lb=0,
                        ub=1,
                        type='binary')
                    gamma.soln_id = solution_number

                    self.combined_model.add_cons_vars([gamma], sloppy=False)

                    #integer cut 1: ensure that all the alphas in a following solution are not the same as in any previous solution
                    #need to define constraint first
                    int_cut_1 = self.combined_model.problem.Constraint(
                        Zero, ub=sum_alpha)
                    self.combined_model.add_cons_vars([int_cut_1],
                                                      sloppy=False)
                    self.combined_model.solver.update()
                    int_cut_1.set_linear_coefficients({gamma: 1})

                    #OR

                    #integer cut 2: ensure that all the betas are not the same as in any previous solution
                    int_cut_2 = self.combined_model.problem.Constraint(
                        Zero, ub=(sum_beta - 1))
                    self.combined_model.add_cons_vars([int_cut_2],
                                                      sloppy=False)
                    self.combined_model.solver.update()
                    int_cut_2.set_linear_coefficients({gamma: -1})

                    #need to loop over all reactions to define coefficients for summation
                    for rxn in self.combined_model.reactions:

                        #check to see if there is a coefficient for the
                        if self.combined_model.solver.primal_values.get(
                                'alpha_{}'.format(rxn.id)) == 1:

                            #if there is a previous value of alpha then need to have a coefficient of 1
                            int_cut_1.set_linear_coefficients(
                                {self.alphas['alpha_{}'.format(rxn.id)]: 1})

                        #check to see if there is a coefficient for the
                        if self.combined_model.solver.primal_values.get(
                                'beta_{}'.format(rxn.id)) == 1:

                            #if there is a previous value of alpha then need to have a coefficient of 1
                            int_cut_2.set_linear_coefficients(
                                {self.betas['beta_{}'.format(rxn.id)]: 1})

                    #write the time to get the solution
                    output.write("time to get solution: " + str(total_time) +
                                 " s\n")

                    #update the solution number
                    solution_number = solution_number + 1

                    #add another newline to space reported solutions
                    output.write("\n")

            #if here, then phi will be updated

        alg_end = time.time()
        total_alg_time = alg_end - alg_start
        output.write("total TFP runtime: " + str(total_alg_time) + " s")

        #return the object itself so the caller can access the recorded solutions
        return self
 def Clear(self):
     try:
         for (key, value) in self.device.items():
             value.Clear()
     except:
         logger.error('%r', sys.exc_info())
Esempio n. 50
0
def gen_measurement_matrix(seed=0, N=1000, m=100, group_size=4, max_tests_per_individual=4, verbose=False,
                           graph_gen_method='no_multiple', plotting=False, saving=True, run_ID='debugging'):
    """
    Function to generate and return the group testing matrix based on near-doubly-regular design 

    Parameters:
        seed (int): Seed for random number generation
        N (int): Population size
        m (int): Number of group tests
        group_size (int): Size of the groups
        max_tests_per_individual (int): Maximum number of tests allowed per individual
        verbose (bool): Flag for turning on debugging print statements
        graph_gen_method (str): Method for igraph to use in generating the graph
        plotting (bool): Flag for turning on plotting 
        saving (bool): Flag for turning on saving the result
        run_ID (str): String for specifying name of run

    Returns:
        A (binary numpy array): The group testing matrix
    """

    # set the seed used for graph generation to the options seed
    random.seed(seed)
    np_random = np.random.RandomState(seed)

    # compute tests per individual needed to satisfy
    #           N*tests_per_individual == m*group_size
    avg_test_split = math.floor(m * group_size / N)
    if verbose:
        print('number of tests per individual needed to satisfy ' \
              + '(N*tests_per_individual == m*group_size) = ' + str(avg_test_split))
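    # worked example with the defaults above: N=1000, m=100, group_size=4 gives
    # floor(100*4/1000) = 0, so tests_per_individual below becomes max(0, 1) = 1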

    # verify the given parameters can lead to a valid testing scheme:
    #   if floor(m*group_size/N) > max_tests_per_individual, we would need to 
    #   test each individual more times than the maximum allowed to satisfy 
    #   the group size constraint
    try:
        # ensure that we're not violating the maximum
        assert avg_test_split <= max_tests_per_individual
    except AssertionError:
        errstr = ('Assertion Failed: With group_size = ' + str(group_size) + ', since m = ' + str(m) +
                  ' and N = ' + str(N) + ' we have floor(m*group_size/N) = ' + str(avg_test_split) +
                  'which exceeds max_tests_per_individual = ' + str(max_tests_per_individual))
        print(errstr)
        sys.exit()

    # compute the actual tests per individual
    # note: we may end up with individuals being tested less than the maximum
    #   allowed number of tests, but this is not a problem (only the opposite is)
    tests_per_individual = max(avg_test_split, 1)

    if verbose:
        print("tests_per_individual = " + str(tests_per_individual))

    # In this model, the first N elements of the degree sequence correspond
    # to the population, while the last m elements correspond to the groups

    # in-degree corresponds to the individuals, here we set the first N 
    # entries of the in-degree sequence to specify the individuals 
    indeg = np.zeros(N + m)
    indeg[0:N] = tests_per_individual
    if N - m*group_size > 0:
        indeg[np_random.choice(np.arange(0, N), N - m*group_size, replace=False)] = 0

    if m*group_size - N*tests_per_individual > 0:
        temp = np_random.choice(np.arange(0, N), m*group_size - N*tests_per_individual, replace=False)
        indeg[temp] = indeg[temp] + 1

    # out-degree corresponds to the group tests, here we set the last m 
    # entries of the out-degree sequence to specify the groups
    outdeg = np.zeros(N + m)
    outdeg[N:(N + m)] = group_size

    # keep track of vertex types for final graph vertex coloring
    vtypes = np.zeros(N + m)
    vtypes[0:N] = 1

    # output the sum of indeg and outdeg if checking conditions
    if verbose:
        print("out degree sequence: {}".format(outdeg.tolist()))
        print("in degree sequence:  {}".format(indeg.tolist()))
        print("sum outdeg (groups) = {}".format(np.sum(outdeg)))
        print("sum indeg (individ) = {}".format(np.sum(indeg)))

    try:
        assert np.sum(outdeg) == np.sum(indeg)
    except AssertionError:
        errstr = ("Assertion Failed: Require sum(outdeg) = " + str(np.sum(outdeg)) + " == " \
                    + str(np.sum(indeg)) + " = sum(indeg)")
        print(errstr)
        print("out degree sequence: {}".format(outdeg.tolist()))
        print("in degree sequence: {}".format(indeg.tolist()))
        # sys.exit()

    # generate the graph
    try:
        assert igraph._igraph.is_graphical_degree_sequence(outdeg.tolist(), indeg.tolist())
        g = igraph.Graph.Degree_Sequence(outdeg.tolist(), indeg.tolist(), graph_gen_method)
        g.vs['vertex_type'] = vtypes
        assert np.sum(outdeg) == len(g.get_edgelist())
    except AssertionError:
        errstr = ("Assertion Failed: Require [sum(outdeg) = " + str(np.sum(outdeg)) + "] == [" \
                  + str(np.sum(indeg)) + " = sum(indeg)] == [ |E(G)| = " + str(len(g.get_edgelist())) + "]")
    except igraph._igraph.InternalError as err:
        print("igraph InternalError (likely invalid outdeg or indeg sequence): {0}".format(err))
        print("out degree sequence: {}".format(outdeg.tolist()))
        print("in degree sequence: {}".format(indeg.tolist()))
        sys.exit()
    except:
        print("Unexpected error:", sys.exec_info()[0])
        sys.exit()
    else:
        # get the adjacency matrix corresponding to the nodes of the graph
        A = np.array(g.get_adjacency()._get_data())
        if verbose:
            # print(g)
            # print(A)
            # print("before resizing")
            # print(A.shape)
            print("row sum {}".format(np.sum(A, axis=1)))
            print("column sum {}".format(np.sum(A, axis=0)))

        # the generated matrix has nonzeros in the bottom left with zeros
        # everywhere else, so resize it to m x N
        A = A[N:m + N, 0:N]
        # A = np.minimum(A, 1)

        # check if the graph corresponds to a bipartite graph
        check_bipartite = g.is_bipartite()

        # save the row and column sums
        row_sum = np.sum(A, axis=1)
        col_sum = np.sum(A, axis=0)

        # display properties of A and graph g
        if verbose:
            # print(A)
            # print("after resizing")
            # print(A.shape)
            print("row sum {}".format(row_sum))
            print("column sum {}".format(col_sum))
            print("max row sum {}".format(max(row_sum)))
            print("max column sum {}".format(max(col_sum)))
            print("min row sum {}".format(min(row_sum)))
            print("min column sum {}".format(min(col_sum)))
            print("g is bipartite: {}".format(check_bipartite))

        # set options and plot corresponding graph
        if plotting:
            layout = g.layout("auto")
            color_dict = {1: "blue", 0: "red"}
            g.vs['color'] = [color_dict[vertex_type] for vertex_type in g.vs['vertex_type']]
            B = g.vs.select(vertex_type='B')
            C = g.vs.select(vertex_type='C')
            visual_style = {}
            visual_style['vertex_size'] = 10
            visual_style['layout'] = layout
            visual_style['edge_width'] = 0.5
            visual_style['edge_arrow_width'] = 0.2
            visual_style['bbox'] = (1200, 1200)
            igraph.drawing.plot(g, **visual_style)

        # save data to a MATLAB ".mat" file
        data_filename = run_ID + '_generate_groups_output.mat'
        # if saving:
        #     data = {}
        #     # data['A'] = A
        #     data['bipartite'] = check_bipartite
        #     data['indeg'] = indeg
        #     data['outdeg'] = outdeg
        #     data['min_col_sum'] = min(col_sum)
        #     data['min_row_sum'] = min(row_sum)
        #     data['max_col_sum'] = max(col_sum)
        #     data['max_row_sum'] = max(row_sum)
        #     data['opts'] = opts
        #     sio.savemat(opts['data_filename'], data)

    # return the adjacency matrix of the graph
    return A
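A minimal usage sketch for the generator above, assuming numpy and python-igraph are installed (the values mirror the function defaults):

A = gen_measurement_matrix(seed=0, N=1000, m=100, group_size=4,
                           max_tests_per_individual=4, verbose=True,
                           plotting=False, saving=False)
print(A.shape)   # expected: (100, 1000)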
Esempio n. 51
0
                print("off\n")

        # closed = sensor blocked - possible score!
        else:
            if (time.perf_counter() - activation_time > MS_BOUNCE_TIME):
                score_active = True
                GPIO.output(LED_OUTPUT_PIN, True)
            if PRINT_STATE_CHANGE:
                print("on\n")

    # add a modest 50ms bounce time via the GPIO lib - the problem is that both the down and up edges are
    # absorbed by the library, so we miss the up edge when it happens; this leaves roughly 51ms up to
    # MS_BOUNCE_TIME ms to clear the sensor
    GPIO.add_event_detect(IR_INPUT_PIN,
                          GPIO.BOTH,
                          callback=send_osc_on_change,
                          bouncetime=50)

    while True:
        osc_process()
        time.sleep(0.01)
except KeyboardInterrupt:
    print("\nExiting")
except:
    e = sys.exc_info()[0]
    write_to_page("Error: %s" % e)
    print("\nExiting for other reason")
finally:
    osc_terminate()
    GPIO.cleanup()
Esempio n. 52
0
    def tempest_data(self, data, st_tm):
        try:
            LOGGER.debug(data)

            tm = data['obs'][0][0]  # ts
            # convert wind speed from m/s to kph
            if (data['obs'][0][1] is not None):
                wl = data['obs'][0][1] * (18 / 5)  # wind lull
            else:
                wl = 0
            if (data['obs'][0][2] is not None):
                ws = data['obs'][0][2] * (18 / 5)  # wind speed
            else:
                ws = 0
            if (data['obs'][0][3] is not None):
                wg = data['obs'][0][3] * (18 / 5)  # wind gust
            else:
                wg = 0
            wd = data['obs'][0][4]  # wind direction
            p = data['obs'][0][6]  # pressure
            t = data['obs'][0][7]  # temp
            h = data['obs'][0][8]  # humidity
            il = data['obs'][0][9]  # Illumination
            uv = data['obs'][0][10]  # UV Index
            sr = data['obs'][0][11]  # solar radiation
            ra = float(data['obs'][0][12])  # rain
            ls = data['obs'][0][14]  # strikes
            ld = data['obs'][0][15]  # distance
            it = data['obs'][0][17]  # reporting interval

            if st_tm == tm:
                LOGGER.debug('Duplicate Tempest observations, ignoring')
                return st_tm

            st_tm = tm

            el = float(self.params.get('Elevation')) + float(
                self.params.get('AGL'))
            sl = self.nodes['pressure'].toSeaLevel(p, el)
            trend = self.nodes['pressure'].updateTrend(p)
            fl = self.nodes['temperature'].ApparentTemp(t, ws, h)
            dp = self.nodes['temperature'].Dewpoint(t, h)
            hi = self.nodes['temperature'].Heatindex(t, h)
            wc = self.nodes['temperature'].Windchill(t, ws)

            self.nodes['pressure'].update(p, sl, trend)
            self.nodes['temperature'].update(t, fl, dp, hi, wc)
            self.nodes['humidity'].update(h)
            self.nodes['lightning'].update(ls, ld)
            self.nodes['wind'].update(ws, wd, wg, wl)
            self.nodes['light'].update(uv, sr, il)

            self.update_rain(ra, it)

            # battery voltage
            self.nodes['hub'].update(data['obs'][0][16], None)
            #self.setDriver('GV0', data['obs'][0][16], report=True, force=True)

        except Exception as e:
            (t, v, tb) = sys.exc_info()
            LOGGER.error('Failure in TEMPEST data: ' + str(e))
            LOGGER.error('  At: ' + str(tb.tb_lineno))

        return st_tm
    def update(self, obs):
        # process air data
        try:
            tm = obs[0][0]  # ts
            wd = obs[0][4]  # wind direction
            p = obs[0][6]  # pressure
            t = obs[0][7]  # temp
            h = obs[0][8]  # humidity
            il = obs[0][9]  # Illumination
            uv = obs[0][10]  # UV Index
            sr = obs[0][11]  # solar radiation
            ra = float(obs[0][12])  # rain
            ls = obs[0][14]  # strikes
            ld = obs[0][15]  # distance
            bv = obs[0][16]  # battery
            it = obs[0][17]  # reporting interval

            # convert wind speed from m/s to kph
            if (obs[0][1] is not None):
                wl = obs[0][1] * (18 / 5)  # wind lull
            else:
                wl = 0
            if (obs[0][2] is not None):
                ws = obs[0][2] * (18 / 5)  # wind speed
            else:
                ws = 0
            if (obs[0][3] is not None):
                wg = obs[0][3] * (18 / 5)  # wind gust
            else:
                wg = 0

            sl = derived.toSeaLevel(p, self.elevation)
            trend = derived.updateTrend(p, self.trend)

            try:
                fl = derived.ApparentTemp(t, ws / 3.6, h)
                dp = derived.Dewpoint(t, h)
                hi = derived.Heatindex(t, h)
                wc = derived.Windchill(t, ws)
            except Exception as e:
                LOGGER.error('Failure to calculate Air temps: ' + str(e))

        except Exception as e:
            (t, v, tb) = sys.exc_info()
            LOGGER.error('Failure in processing AIR data: ' + str(e))
            LOGGER.error('  At: ' + str(tb.tb_lineno))

        # temperatures t, fl, dp, hi, wc  (conversions)
        if self.units['temperature'] != 'c':
            t = round((t * 1.8) + 32, 2)  # convert to F
            fl = round((fl * 1.8) + 32, 2)  # convert to F
            dp = round((dp * 1.8) + 32, 2)  # convert to F
            hi = round((hi * 1.8) + 32, 2)  # convert to F
            wc = round((wc * 1.8) + 32, 2)  # convert to F
            uom = 17
        else:
            uom = 4
        self.setDriver('CLITEMP', t, uom=uom)
        self.setDriver('GV0', fl, uom=uom)
        self.setDriver('DEWPT', dp, uom=uom)
        self.setDriver('HEATIX', hi, uom=uom)
        self.setDriver('WINDCH', wc, uom=uom)

        # pressures p, sl  (conversions)
        if self.units['pressure'] == 'inhg':
            p = round(p * 0.02952998751, 3)
            sl = round(sl * 0.02952998751, 3)
            uom = 23
        elif self.units['pressure'] == 'hpa':
            uom = 118
        else:
            uom = 117
        self.setDriver('ATMPRES', sl, uom=uom)
        self.setDriver('BARPRES', p, uom=uom)

        # distance ld  (conversions)
        if self.units['distance'] == 'mi':
            ld = round(ld / 1.609344, 1)
            uom = 116
        else:
            uom = 83
        self.setDriver('DISTANC', ld, uom=uom)

        # humidity h, strikes ls, battery bv, and trend (no conversions)
        self.setDriver('CLIHUM', h)
        self.setDriver('BATLVL', bv)
        self.setDriver('GV1', trend)
        self.setDriver('GV2', ls)

        # ra == mm/minute (or interval)  (conversion necessary)
        if self.units['rain'] == 'in':
            uom = 24  # in/hr
            ra = round(ra * 0.03937, 2) * 60
        else:
            uom = 46  # mm/hr
            ra = ra * 60
        self.setDriver('RAINRT', ra)
        self.rain_update(ra)

        # ws, wl, wg (conversion)
        if self.units['wind'] == 'mph':
            ws = round(ws / 1.609344, 2)
            wl = round(wl / 1.609344, 2)
            wg = round(wg / 1.609344, 2)
            uom = 48
        elif self.units['wind'] == 'kph':
            uom = 32
        else:  # m/s
            ws = round(ws * 5 / 18, 2)
            wl = round(wl * 5 / 18, 2)
            wg = round(wg * 5 / 18, 2)
            uom = 40
        self.setDriver('SPEED', ws, uom=uom)
        self.setDriver('GV4', wl, uom=uom)
        self.setDriver('GUST', wg, uom=uom)

        # il, uv, sr, wd (no conversion)
        self.setDriver('LUMIN', il)
        self.setDriver('UV', uv)
        self.setDriver('SOLRAD', sr)
        self.setDriver('WINDDIR', wd)
        self.setDriver('GV3', wd)
        self.setDriver('BATLVL', bv)
Esempio n. 54
0
#exc_info

from sys import exc_info
from traceback import print_tb
try:
    print "Hi"

except:
    print exc_info()
    print_tb(exc_info()[2])

def aggregate_logs(log_directory, output_csv_file):
    log_files = []

    # get all the log files from log directory
    log_files = [
        os.path.join(log_directory, file) for file in os.listdir(log_directory)
        if os.path.isfile(os.path.join(log_directory, file))
    ]

    with open(output_csv_file, 'w') as csv_file:
        fieldnames = [
            'file', 'algorithm', 'partitions', 'ingress', 'rf',
            'total_ingress', 'compute_imbalance', 'li_max', 'li_min', 'li_25',
            'li_50', 'li_75', 'total_time', 'total_network'
        ]
        writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
        writer.writeheader()

        # start processing the files
        for log_file in log_files:
            # variables that we try to extract from the log
            algorithm = ""
            file = ""
            nparts = 0
            ingress = ""
            rf = 0
            ingress_time = 0
            finalize_time = 0
            iterations = 0
            compute_balance = [1]
            li_max = 0
            li_min = 0
            li_25 = 0
            li_50 = 0
            li_75 = 0
            gather_call = 0
            apply_call = 0
            scatter_call = 0
            execution_time = 0
            breakdownx = 0
            breakdownr = 0
            breakdowng = 0
            breakdowna = 0
            breakdowns = 0
            bytes_sent = 0
            bytes_received = 0
            calls_sent = 0
            calls_received = 0
            network_sent = 0

            print "Parsing {}".format(log_file)
            file = log_file.split("/")[-1]
            algorithm = file.split("-")[0]

            try:

                with open(log_file, 'r') as logs:
                    for log in logs:
                        # now we need to check for each occurrence of parameters that we try to parse
                        match = re.search("Cluster of (.*) instances", log)
                        if match != None:
                            nparts = int(match.group(1))
                        # match for ingress method
                        match = re.search("ingress = (.*)", log)
                        if match != None:
                            ingress = match.group(1)
                        # replication factor
                        match = re.search("replication factor: (.*)", log)
                        if match != None:
                            rf = float(match.group(1))
                        # ingress time
                        match = re.search("Final Ingress \(second\): (.*)",
                                          log)
                        if match != None:
                            ingress_time = float(match.group(1))
                        # finalize time
                        match = re.search("Finalizing graph. Finished in (.*)",
                                          log)
                        if match != None:
                            finalize_time = float(match.group(1))
                        # iterations
                        match = re.search(": (\w*) iterations completed", log)
                        if match != None:
                            iterations = int(match.group(1))
                        # compute balance array
                        match = re.search("Compute Balance: (.*)", log)
                        if match != None:
                            compute_balance = map(float,
                                                  match.group(1).split())
                        # gas calls
                        match = re.search(" Total Calls\(G\|A\|S\): (.*)", log)
                        if match != None:
                            [gather_call, apply_call,
                             scatter_call] = map(float,
                                                 match.group(1).split("|"))
                        # execution time
                        match = re.search("Execution Time: (.*)", log)
                        if match != None:
                            execution_time = float(match.group(1))
                        # Breakdown of timing
                        match = re.search("Breakdown\(X\|R\|G\|A\|S\): (.*)",
                                          log)
                        if match != None:
                            [
                                breakdownx, breakdownr, breakdowng, breakdowna,
                                breakdowns
                            ] = map(float,
                                    match.group(1).split("|"))
                        # bytes sent
                        match = re.search("Bytes Sent: (.*)", log)
                        if match != None:
                            bytes_sent += int(match.group(1))
                        # calls sent
                        match = re.search("Calls Sent: (.*)", log)
                        if match != None:
                            calls_sent += int(match.group(1))
                        # bytes received
                        match = re.search("Bytes Received: (.*)", log)
                        if match != None:
                            bytes_received += int(match.group(1))
                        # calls received
                        match = re.search("Calls Received: (.*)", log)
                        if match != None:
                            calls_received += int(match.group(1))
                        # network sent
                        match = re.search("Network Sent: (.*)", log)
                        if match != None:
                            network_sent += int(match.group(1))

                # write result into csv
                writer.writerow({
                    'file':
                    file,
                    'algorithm':
                    algorithm,
                    'partitions':
                    str(nparts),
                    'ingress':
                    ingress,
                    'rf':
                    str(rf),
                    'total_ingress':
                    str(ingress_time + finalize_time),
                    'compute_imbalance':
                    str(max(compute_balance) / mean(compute_balance)),
                    'li_max':
                    numpy.percentile(compute_balance, 100),
                    'li_min':
                    numpy.percentile(compute_balance, 0),
                    'li_25':
                    numpy.percentile(compute_balance, 25),
                    'li_50':
                    numpy.percentile(compute_balance, 50),
                    'li_75':
                    numpy.percentile(compute_balance, 75),
                    'total_time':
                    str(execution_time),
                    'total_network':
                    str(network_sent)
                })
                print "!!! Done parsing {}".format(log_file)

            except (IOError, ValueError, ZeroDivisionError) as e:
                print "Could not parse: {}, skipping entry".format(log_file)
                # write result into csv
                writer.writerow({
                    'file': file,
                    'algorithm': algorithm,
                    'partitions': str(nparts),
                    'ingress': ingress,
                    'rf': "-",
                    'total_ingress': "-",
                    'compute_imbalance': "-",
                    'total_time': "-",
                    'total_network': "-"
                })
            except:
                print "Unexpected error: {}".format(sys.exec_info()[0])
                raise
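A minimal usage sketch for the parser above; the directory and output file names are illustrative:

aggregate_logs("logs/", "aggregated_results.csv")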
Esempio n. 56
0
def safe(func, *pargs, **kargs):
    try:
        func(*pargs, **kargs)
    except:
        traceback.print_exc()
        print('Got %s %s' % (sys.exc_info()[0], sys.exc_info()[1]))
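A short usage sketch: any exception raised by the wrapped call is printed rather than propagated.

safe(int, "not a number")   # prints the traceback plus the exception type and value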
Esempio n. 57
0
File: greens.py Progetto: jimsrc/vq
    def plot_greens_hist(self,
                         shear_normal='shear',
                         greens_ary=None,
                         fnum=0,
                         do_clf=True,
                         n_bins=1000,
                         do_fit=False,
                         **hist_kwargs):
        '''
		# plot_greens_hist: plot a histogram of greens function values. besides giving a histogram, as opposed to a cumulative type dist.,
		# this might be useful for really really big data sets that don't plot in memory.
		# params (in order): (shear_normal='shear', greens_ary=None, fnum=0, do_clf=True, n_bins=1000, do_fit=False, **hist_kwargs)
		# use hist_kwargs to provide other hist() arguments (range, normed, weights, cumulative, bottom, histtype, align, log, ...)
		#   note that some hist_kwargs will be allocated by default (bins=n_bins=1000, log=True, histtype='step')
		'''
        #
        if greens_ary is None:
            shear_normal = shear_normal_aliases(shear_normal)
            print "shear_normal (translated): ", shear_normal
            if shear_normal == 'greens_shear': greens_ary = self.get_shear()
            if shear_normal == 'greens_normal': greens_ary = self.get_normal()
        #
        greens_ary = numpy.array(greens_ary)
        #
        #n_bins = hist_kwargs.get('bins', n_bins)
        hist_kwargs['bins'] = hist_kwargs.get('bins', n_bins)
        hist_kwargs['log'] = hist_kwargs.get('log', True)
        hist_kwargs['histtype'] = hist_kwargs.get('histtype', 'step')
        hist_kwargs['normed'] = hist_kwargs.get('normed', False)
        #print hist_kwargs
        #
        sh_0 = greens_ary.shape
        greens_ary.shape = (1, greens_ary.size)
        #
        #n_bins = min(n_bins, greens_ary.size/2)
        print "Some stats:"
        gr_val_mean = numpy.mean(greens_ary[0])
        gr_val_stdev = numpy.std(greens_ary[0])
        gr_val_median = numpy.median(greens_ary[0])
        gr_val_max, gr_val_min = max(greens_ary[0]), min(greens_ary[0])
        gr_val_max_abs, gr_val_min_abs = max(abs(greens_ary[0])), min(abs(greens_ary[0]))
        print "mean(greens): ", gr_val_mean
        print "median(greens): ", gr_val_median
        print "stdev(greens): ", gr_val_stdev
        print "max/min: ", gr_val_max, gr_val_min
        print "max/min abs: ", gr_val_max_abs, gr_val_min_abs
        #
        #
        plt.figure(fnum)
        #plt.ion()
        ax = plt.gca()
        if do_clf: plt.clf()
        gr_hist = plt.hist(greens_ary[0], **hist_kwargs)
        #
        # now (optionally), get a gaussian fit (actually, gaussian fit to logarithms, so log-normal fit)
        #
        if do_fit:
            try:
                print "begin (try()ing to) fitting to gauss model..."
                bin_edges = gr_hist[1]  # contains the left bin edges plus the right edge of the final bin.
                bin_centers = (bin_edges[:-1] + bin_edges[1:]) / 2.
                #
                x_hist, y_hist = zip(*[[x, math.log10(y)]
                                       for x, y in zip(bin_centers, gr_hist[0])
                                       if y > 0])
                #
                #plt.figure(fnum+1)
                #plt.clf()
                #plt.plot(x_hist, y_hist, '-')

                #for j in xrange(len(x_hist)): print "[%f, %f]" % (x_hist[j], y_hist[j])
                #return x_hist, y_hist
                #plt.figure(0)
                gauss_p0 = [math.log10(max(y_hist)), 0., 1.0]  # we treat A as 10**log(A) for linearization, so note this is log(log(y))...
                # now, guess sigma:
                for j, y in enumerate(y_hist):
                    if y > .5 * gauss_p0[0] and x_hist[j] != gauss_p0[1]:
                        gauss_p0[2] = x_hist[j]
                        break
                # maybe another guess here?
                #
                print "begin fit: A, mu, sigma = ", gauss_p0
                coeff, var_matrix = scipy.optimize.curve_fit(gauss_pdf,
                                                             x_hist,
                                                             y_hist,
                                                             p0=gauss_p0)
                #
                print "fit complete: A, mu, sigma = ", coeff, gauss_p0
                #
                x_hist_fit = numpy.arange(
                    min(x_hist), max(x_hist),
                    .5 * (max(x_hist) - min(x_hist)) / float(n_bins))
                hist_fit = gauss_pdf(x_hist_fit, *coeff)
                #
                # let's have a go at the original figure:
                plt.figure(fnum)
                plt.plot(x_hist_fit,
                         numpy.power(10., hist_fit),
                         'r-',
                         lw=1.5,
                         alpha=.7,
                         label='gauss fit: $A=%f$, $\\mu=%f$, $\\sigma=%f$' %
                         (coeff[0], coeff[1], coeff[2]))
                #for jw in numpy.arange(1.,3.):
                for jw in [1., 2., 2.5, 3.]:
                    my_x = numpy.array(
                        [coeff[1] - jw * coeff[2], coeff[1] + jw * coeff[2]])
                    print "Greens range for %d sigma (mu=%f): x=%s, log(y)=%s" % (
                        int(jw), coeff[1], my_x, gauss_pdf(my_x, *coeff))
                    plt.plot(my_x,
                             numpy.power(10., gauss_pdf(my_x, *coeff)),
                             'r.--',
                             label='$x_%d=[%f, %f]$' %
                             (int(jw), my_x[0], my_x[1]))
                #
            except:
                try:
                    print "fitting attempt failed.: %s" % sys.exec_info()[0]
                except:
                    print "fitting attempt failed for an un-printed reason."
        #
        plt.legend(loc=0, numpoints=1)
        #
        # return to original shape.
        greens_ary.shape = sh_0
        #
        return gr_hist
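
gauss_pdf is called by curve_fit above but is not defined in this snippet. A plausible definition, consistent with the initial guess [A, mu, sigma] and with the numpy.power(10., ...) re-exponentiation at plot time, might look like the sketch below; this is an assumption for illustration, not the project's actual code:

import numpy

def gauss_pdf(x, A, mu, sigma):
    # Gaussian evaluated in log10(count) space; the caller raises 10 to the
    # fitted values before plotting them against the histogram.
    x = numpy.asarray(x, dtype=float)
    return A * numpy.exp(-((x - mu) ** 2) / (2.0 * sigma ** 2))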
Example no. 58
0
           " deterministic transform)" )


if __name__ == '__main__':
    input_fname = None
    output_fname = None
    deterministic = False

    argv = sys.argv[1:]

    # Parse command line options
    try:
        opts, args = getopt.getopt( argv, "h", ["input=", "output=", \
                                                 "hash", "help"] )
    except getopt.GetoptError:
        print sys.exc_info()
        usage()
        sys.exit(2)

    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit(0)
        elif opt == "--input":
            input_fname = arg
        elif opt == "--output":
            output_fname = arg
        # Use a deterministic transform instead of a uuid
        elif opt == "--hash":
            deterministic = True
        else: