Example #1
def test():
	print("Test mode!")
	import logSetup
	import WebMirror.rules
	import WebMirror.Engine
	import multiprocessing
	logSetup.initLogging()

	c_lok = cookie_lock = multiprocessing.Lock()
	engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok)



	url = 'http://taulsn.wordpress.com/feed/'

	job = testJobFromUrl(url)
	engine.dispatchRequest(job)


	url = 'http://turb0translation.blogspot.com/feeds/posts/default'
	job = testJobFromUrl(url)
	engine.dispatchRequest(job)


	url = 'http://www.w3schools.com/xml/note.xml'
	job = testJobFromUrl(url)
	engine.dispatchRequest(job)
Example #2
def logSweeps(dataQueue, ctrlNs, printQueue, test=False):
	log = logging.getLogger("Main.LogProcess")
	logSetup.initLogging(printQ = printQueue)

	log.info("Logging thread starting")

	# The size of the acquisition array can vary, so we wait for the acq thread to send
	# a message containing the array size before allocating the HDF5 array.
	if not test:
		while 1:

			if dataQueue.empty():
				time.sleep(0.005)
			else:
				tmp = dataQueue.get()
				if "arrSize" in tmp:
					log.info("Have array size for acquisition. Creating HDF5 file and starting logging.")
					arrWidth = tmp["arrSize"]
					break
	else:
		arrWidth = 20


	while ctrlNs.acqRunning:
		logIter(dataQueue, ctrlNs, printQueue, arrWidth, test)

	log.info("Log-thread closing queues!")
	dataQueue.close()
	dataQueue.join_thread()
	log.info("Log-thread exiting!")
	printQueue.close()
	printQueue.join_thread()
Example #3
def printer(printQueue, ctrlNs):


	log = logging.getLogger("Main.Printer")
	logSetup.initLogging()

	while 1:
		if not printQueue.empty():
			print(printQueue.get())

		if not ctrlNs.acqRunning and not ctrlNs.apiRunning:
			print("Stopping Printing-thread!")
			break

		time.sleep(0.001)


	print("Print-thread exiting!")
	printQueue.close()
	printQueue.join_thread()
	print("Print-thread exited!")
Example #4
def testSetup(startObservers=False):

    import runStatus

    runStatus.preloadDicts = False

    import logSetup
    import signal
    import nameTools as nt

    logSetup.initLogging(logToDb=True)

    def signal_handler(dummy_signal, dummy_frame):
        if runStatus.run:
            runStatus.run = False
            print("Telling threads to stop")
        else:
            print("Multiple keyboard interrupts. Raising")
            raise KeyboardInterrupt

    signal.signal(signal.SIGINT, signal_handler)

    nt.dirNameProxy.startDirObservers(useObservers=startObservers)

    yield

    if startObservers:
        nt.dirNameProxy.stop()
Example #5
def test():
	print("Test mode!")
	import webFunctions
	import logSetup
	logSetup.initLogging()

	import TextScrape.RelinkLookup

	relinkable = TextScrape.RelinkLookup.getRelinkable()

	wg = webFunctions.WebGetRobust()

	tests = [
		('http://jawztranslations.blogspot.com/2015/03/LMS-V22-C07.html', 'http://jawztranslations.blogspot.com/'),
		('http://skythewood.blogspot.sg/2015/03/G26.html', 'http://skythewood.blogspot.sg/'),
	]

	for testurl, context in tests:
		content = wg.getpage(testurl)
		scraper = HtmlPageProcessor([context], testurl, content, 'Main.Test', tld=['com', 'net', 'sg'], relinkable=relinkable)
		extr = scraper.extractContent()
		# print(scraper)

	print()
	print()
	print()
	print(extr)
Example #6
def test():
	print("Test mode!")
	import logSetup
	import WebMirror.rules
	import WebMirror.Engine
	import WebMirror.Runner
	import multiprocessing
	logSetup.initLogging()

	crawler = WebMirror.Runner.Crawler()
	crawler.start_aggregator()


	c_lok = cookie_lock = multiprocessing.Lock()
	engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok, response_queue=crawler.agg_queue)



	engine.dispatchRequest(testJobFromUrl('http://royalroadl.com/fiction/3333'))
	# engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fiction/2850'))
	# engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/latest-updates/'))

	# engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/best-rated/'))
	# engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/latest-updates/'))
	# engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/active-top-50/'))
	# engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/weekly-views-top-50/'))
	# engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/newest/'))

	crawler.join_aggregator()
Example #7
	def __init__(self, ctrlNs, printQueue, rawDataRingBuf, fftDataRingBuf):

		self.log = logging.getLogger("Main.FFTWorker")
		logSetup.initLogging(printQ = printQueue)

		self.log.info("FFT Worker Starting up")

		self.ctrlNs         = ctrlNs
		self.printQueue     = printQueue
		self.rawDataRingBuf = rawDataRingBuf
		self.fftDataRingBuf = fftDataRingBuf

		self.fftChunkSize = s.FFT_CHUNK_SIZE
		self.outputSize = self.fftChunkSize//2 + 1
		self.chunksPerAcq = int(SignalHound.rawSweepArrSize/self.fftChunkSize)
		self.overlap = s.FFT_OVERLAP
		self.window = np.hamming(self.fftChunkSize)
		inArr = pyfftw.n_byte_align_empty(self.fftChunkSize, 16, dtype=np.float32)
		outArr = pyfftw.n_byte_align_empty(self.outputSize, 16, dtype=np.complex64)

		self.log.info("Choosing maximally optimized transform")
		self.fftFunc = pyfftw.FFTW(inArr, outArr, flags=('FFTW_PATIENT', "FFTW_DESTROY_INPUT"))
		self.log.info("Optimized transform selected. Run starting")

		self.run()
Example #8
def test():
	print("Test mode!")
	import logSetup
	import WebMirror.rules
	import WebMirror.Engine
	import WebMirror.Runner
	import multiprocessing
	logSetup.initLogging()

	crawler = WebMirror.Runner.Crawler()
	crawler.start_aggregator()


	c_lok = cookie_lock = multiprocessing.Lock()
	engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok, response_queue=crawler.agg_queue)



	engine.dispatchRequest(testJobFromUrl('http://www.novelupdates.com/series/sendai-yuusha-wa-inkyou-shitai'))
	engine.dispatchRequest(testJobFromUrl('http://www.novelupdates.com/series/when-he-comes-close-your-eyes'))
	engine.dispatchRequest(testJobFromUrl('http://www.novelupdates.com/series/kenkyo-kenjitsu-o-motto-ni-ikite-orimasu'))
	engine.dispatchRequest(testJobFromUrl('http://www.novelupdates.com/series/night-ranger/'))
	engine.dispatchRequest(testJobFromUrl('http://www.novelupdates.com/series/mythical-tyrant/'))
	engine.dispatchRequest(testJobFromUrl('http://www.novelupdates.com/series/kenkyo-kenjitsu-o-motto-ni-ikite-orimasu/'))


	crawler.join_aggregator()
Example #9
def test():
	import logSetup
	logSetup.initLogging()
	print("Wat")
	# truncate_url_history('http://royalroadl.com/fiction/4293')
	proc = DbFlattener()
	# proc.wat()
	proc.fix_missing_history()
Example #10
def test():

	signal.signal(signal.SIGINT, customHandler)

	logSetup.initLogging(logLevel=logging.DEBUG)

	runner = ScrapePlugins.BuMonitor.Run.Runner()
	runner.go()
Example #11
	def __init__(self, printQueue):
		self.printQueue = printQueue
		logSetup.initLogging(printQ=printQueue)

		self.calcScanBands()

		if ACQ_TYPE != "real-time-sweeping":
			raise ValueError("internalSweep module only supports 'real-time-sweeping' mode! Configured mode = {mode}".format(mode=ACQ_TYPE))
Example #12
def preflight():
	logSetup.initLogging(logToDb=True)

	# runStatus.notq = UploadPlugins.Madokami.notifier.start_notifier()
	runStatus.notq = None

	schemaUpdater.schemaRevisioner.updateDatabaseSchema()
	statusManager.resetAllRunningFlags()

	nt.dirNameProxy.startDirObservers()
Example #13
def main():
	logSetup.initLogging()

	remote = rpyc.connect("localhost", 12345, config = rpyc.core.protocol.DEFAULT_CONFIG)


	raw_job = buildjob(
		module         = 'WebRequest',
		call           = 'getItem',
		dispatchKey    = "fetcher",
		jobid          = -1,
		args           = ['http://www.google.com'],
		kwargs         = {},
		additionalData = {'mode' : 'fetch'},
		postDelay      = 0
	)

	print(remote)
	print(remote.root.putJob('wat', raw_job))

	while 1:
		try:
			j = remote.root.getJob("wat")
			print("Got job!")
		except queue.Empty:
			time.sleep(1)
			print("No message")
		except rpyc.core.vinegar.GenericException as e:
			# this is horrible
			if 'queue.Empty' in rpyc.core.vinegar._generic_exceptions_cache:
				if isinstance(e, rpyc.core.vinegar._generic_exceptions_cache['queue.Empty']):
					print("Empty exception")
					continue

			print("type", type(e))
			print("instance", issubclass(type(e), queue.Empty))

			import inspect
			print(inspect.getmro(type(e)))
			# extp = rpyc.core.vinegar._get_exception_class(queue.Empty)
			# print(extp)
			# print("instance", isinstance(e, extp))
			# print("instance", isinstance(type(e), extp))
			# print("type", type(extp()))
			fakemodule = {"__module__" : "%s/%s" % ("rpyc.core.vinegar", "queue")}
			extp = type("queue.Empty", (rpyc.core.vinegar.GenericException,), fakemodule)
			print(extp)
			print(isinstance(e, extp))
			print(isinstance(e, rpyc.core.vinegar.GenericException))


			print(rpyc.core.vinegar._generic_exceptions_cache)
			raise e

	remote.close()
Example #14
def test():
	print("Test mode!")
	import logSetup
	import WebMirror.rules
	import WebMirror.Engine
	import multiprocessing
	logSetup.initLogging()

	c_lok = cookie_lock = multiprocessing.Lock()
	engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok)
	engine.dispatchRequest(testJobFromUrl('https://twitter.com/Baka_Tsuki'))
Example #15
def test():
	print("Test mode!")
	import logSetup
	import WebMirror.rules
	import WebMirror.Engine
	import multiprocessing
	logSetup.initLogging()

	c_lok = cookie_lock = multiprocessing.Lock()
	engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok)
	engine.dispatchRequest(testJobFromUrl('http://japtem.com/fanfic.php'))
Example #16
def test():
	print("Test mode!")
	import logSetup
	import WebMirror.rules
	import WebMirror.Engine
	import multiprocessing
	logSetup.initLogging()

	c_lok = cookie_lock = multiprocessing.Lock()
	engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok)

	# engine.dispatchRequest(testJobFromUrl('http://www.novelupdates.com/'))

	for x in range(0, 180):
		engine.dispatchRequest(testJobFromUrl('http://www.novelupdates.com/?pg={num}'.format(num=x)))
Example #17
def test():
    print("Test mode!")
    import logSetup
    import WebMirror.Engine
    import multiprocessing

    logSetup.initLogging()

    c_lok = cookie_lock = multiprocessing.Lock()
    engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok)

    engine.dispatchRequest(testJobFromUrl("http://lndb.info/light_novel/Gamers!"))
    engine.dispatchRequest(testJobFromUrl("http://lndb.info/light_novel/AntiMagic_Academy_The_35th_Test_Platoon"))
    engine.dispatchRequest(testJobFromUrl("http://lndb.info/light_novel/Madan_no_Ou_to_Vanadis"))
    engine.dispatchRequest(testJobFromUrl("http://lndb.info/light_novel/Aru_Hi,_Bakudan_ga_Ochitekite"))
Example #18
def preflight():
	logSetup.initLogging(logToDb=True)

	# A side effect of get_plugins() is to validate there are no database key conflicts.
	# This has been an issue in the past.
	utilities.runPlugin.get_plugins()

	firstRun.checkInitTables()

	# runStatus.notq = UploadPlugins.Madokami.notifier.start_notifier()
	runStatus.notq = None

	schemaUpdater.schemaRevisioner.updateDatabaseSchema()
	statusManager.resetAllRunningFlags()

	nt.dirNameProxy.startDirObservers()
Example #19
def test():
	print("Test mode!")
	import webFunctions
	import logSetup
	logSetup.initLogging()

	scraper = GDriveDirProcessor('https://drive.google.com/folderview?id=0B_mXfd95yvDfQWQ1ajNWZTJFRkk&usp=drive_web', 'Main.Test')
	print(scraper)
	extr = scraper.extractContent()
	print('Plain Links:')
	for link in extr['plainLinks']:
		print(link)
	print()
	print()
	print('Resource files:')
	for link in extr['rsrcLinks']:
		print(link)
	print(extr['contents'])
Example #20
def go():



	if len(sys.argv) <= 1:
		print "You must enter a test mode!"
		printUsage()
		sys.exit()

	funcs = {
		'radio'      : audioTest,
		'status'     : testDeviceStatusQueries,
		'raw-pipe'   : testRawPipeMode,
		'callback'   : testCallback,
		'traces'     : testSweeps,
		'int-traces' : interruptedSweeping,
		'gps'        : testGpsSweeps,
		'reset'      : resetDevice,
		'iq'         : testIqStreaming
	}

	if sys.argv[1] in funcs:

		logSetup.initLogging()

		sh = SignalHound()
		# sh.preset()

		# testDeviceStatusQueries(sh)
		# testRawPipeMode(sh)
		if len(sys.argv) == 2:
			funcs[sys.argv[1]](sh)
		if len(sys.argv) == 3:
			funcs[sys.argv[1]](sh, float(sys.argv[2]))

		# testCallback(sh)

		sh.closeDevice()

	else:
		print "Error! You must enter a valid test-mode!"
		printUsage()
Example #21
def test2():
	import time
	import logSetup
	import WebMirror.rules
	import WebMirror.SpecialCase
	logSetup.initLogging()

	specialcase = WebMirror.rules.load_special_case_sites()
	WebMirror.SpecialCase.pushSpecialCase(specialcase, 0, "http://www.novelupdates.com/1", "www.novelupdates.com")
	WebMirror.SpecialCase.pushSpecialCase(specialcase, 0, "http://www.novelupdates.com/2", "www.novelupdates.com")



	for x in range(30):
		print("Sleeping, ", x)
		time.sleep(1)
		ret = WebMirror.SpecialCase.getSpecialCase(specialcase)
		print("Return: ", ret)
		if x == 15:
			WebMirror.SpecialCase.pushSpecialCase(specialcase, 0, "http://www.novelupdates.com/3", "www.novelupdates.com")
			WebMirror.SpecialCase.pushSpecialCase(specialcase, 0, "http://www.novelupdates.com/4", "www.novelupdates.com")

	print("Done!")
Example #22
def test():
    print("Test mode!")
    import webFunctions
    import logSetup

    logSetup.initLogging()

    scraper = GDriveDirProcessor(
        "https://drive.google.com/folderview?id=0B_mXfd95yvDfQWQ1ajNWZTJFRkk&usp=drive_web", "Main.Test"
    )
    print(scraper)
    extr = scraper.extractContent()
    print("Plain Links:")
    for link in extr["plainLinks"]:
        print(link)
    print()
    print()
    print("Resource files:")
    for link in extr["rsrcLinks"]:
        print(link)
    print(extr["contents"])
Example #23
def test():
	print("Test mode!")
	import webFunctions
	import logSetup
	logSetup.initLogging()

	wg = webFunctions.WebGetRobust()
	# content = wg.getpage('http://www.arstechnica.com')
	scraper = GdocPageProcessor('https://docs.google.com/document/d/1atXMtCutHRpcHwSRS5UyMAC58_gQjMPR2dDVn1LCD3E', 'Main.Test', 'testinating')
	print(scraper)
	extr, rsc = scraper.extractContent()
	print('Plain Links:')
	for link in extr['plainLinks']:
		print(link)
	print()
	print()
	print('Resource files:')
	# for link in extr['rsrcLinks']:
	# 	print(link)

	for fName, mimeType, content, pseudoUrl in rsc:
		print(fName, mimeType, pseudoUrl)
Example #24
def test2():
	import time
	import queue
	import logSetup
	import pprint
	logSetup.initLogging()

	agg = RawJobAggregator()
	outq = agg.get_queues()
	for x in range(20):
		print("Sleeping, ", x)
		time.sleep(1)
		try:
			j = outq.get_nowait()
			print("Received job! %s", len(j))
			with open("jobs.txt", "a") as fp:
				fp.write("\n\n\n")
				fp.write(pprint.pformat(j))
			print(j)
		except queue.Empty:
			pass
	print("Joining on the aggregator")
	agg.join_proc()
	print("Joined.")
Example #25
def main():
	import time
	import queue
	import logSetup
	from WebMirror.NewJobQueue import buildjob
	logSetup.initLogging()

	raw_job = buildjob(
		module         = 'WebRequest',
		call           = 'getItem',
		dispatchKey    = "fetcher",
		jobid          = -1,
		args           = ['http://www.google.com'],
		kwargs         = {},
		additionalData = {'mode' : 'fetch'},
		postDelay      = 0
	)

	rint = RemoteJobInterface("wat")
	print(rint.put_job(raw_job))
	print(rint)
	while 1:
		try:
			j = rint.get_job()
			if j:
				print("Got job!", j)
		except queue.Empty:
			time.sleep(1)
			print("No message")
		except Exception as e:
		# except pyjsonrpc.JsonRpcError as err:
			print("type", type(e))
			print("instance", issubclass(type(e), queue.Empty))

			import inspect
			print(inspect.getmro(type(e)))

			raise e

	remote.close()  # Note: unreachable as written (the loop never breaks), and 'remote' is undefined in this function -- likely left over from the rpyc variant above.
Example #26
def run():

	interface_dict = {}

	logSetup.initLogging()

	# Make sure the socket does not already exist
	try:
		os.unlink(sock_path)
	except OSError:
		if os.path.exists(sock_path):
			raise

	initialize_manager(interface_dict)
	FetchAgent.AmqpInterface.startup_interface(interface_dict)
	try:
		run_server(interface_dict)
	except KeyboardInterrupt:
		pass

	FetchAgent.AmqpInterface.shutdown_interface(interface_dict)

	os.unlink(sock_path)
Example #27
def test():
	print("Test mode!")
	import logSetup
	import WebMirror.rules
	import WebMirror.Engine
	import multiprocessing
	logSetup.initLogging()

	c_lok = cookie_lock = multiprocessing.Lock()
	engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok)





	# engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fiction/3021'))
	# engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/latest-updates/'))

	# engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/best-rated/'))
	engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/latest-updates/'))
	engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/active-top-50/'))
	engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/weekly-views-top-50/'))
	engine.dispatchRequest(testJobFromUrl('http://www.royalroadl.com/fictions/newest/'))
Example #28
def printer(printQueue, ctrlNs):


	log = logging.getLogger("Main.Printer")
	logSetup.initLogging()

	while 1:
		if not printQueue.empty():
			print(printQueue.get())

		if not ctrlNs.printerRun:
			log.info("Stopping Printing-thread!")
			break

		time.sleep(0.001)


	log.info("Log-thread exiting!")
	printQueue.close()
	printQueue.join_thread()
Example #29
def test():
	print("Test mode!")
	import logSetup
	import WebMirror.rules
	import WebMirror.Engine
	import multiprocessing
	logSetup.initLogging()

	c_lok = cookie_lock = multiprocessing.Lock()
	engine = WebMirror.Engine.SiteArchiver(cookie_lock=c_lok)



	job = testJobFromUrl(r'https://www.wattpad.com/api/v3/stories?fields%3Dstories%28id%2Ctitle%2Curl%2Cdescription%29%2Ctotal%2CnextUrl&limit=50&offset=0')
	engine.dispatchRequest(job)

	job = testJobFromUrl(r'https://www.wattpad.com/api/v3/stories?fields%3Dstories%28id%2Ctitle%2Curl%2Cdescription%29%2Ctotal%2CnextUrl&limit=50&offset=1490')
	engine.dispatchRequest(job)

	job = testJobFromUrl(r'https://www.wattpad.com/api/v3/stories?fields%3Dstories%28id%2Ctitle%2Curl%2Cdescription%29%2Ctotal%2CnextUrl&limit=50&offset=1500')
	engine.dispatchRequest(job)

	job = testJobFromUrl(r'https://www.wattpad.com/api/v3/stories?fields%3Dstories%28id%2Ctitle%2Curl%2Cdescription%29%2Ctotal%2CnextUrl&limit=50&offset=1550')
	engine.dispatchRequest(job)
Example #30
0
def dotest():
    print("Starting test")
    import Queue  # Python 2 stdlib name; on Python 3 this module is "queue"
    logSetup.initLogging()
    startGpsLog((Queue.Queue(), Queue.Queue()), None, Queue.Queue())
Example #31
def preflight():
	logSetup.initLogging(logToDb=True)
	schemaUpdater.schemaRevisioner.updateDatabaseSchema()
	statusManager.resetAllRunningFlags()

	nt.dirNameProxy.startDirObservers()
Example #32
def test():
    import logSetup
    logSetup.initLogging()
    scrp = Runner()
    scrp.go()
Example #33
def logSweeps(statusMessageQueue, fftDataRingBuf, ctrlNs, printQueue):

    log = logging.getLogger("Main.LogProcess")
    logSetup.initLogging(printQ=printQueue)
    loop_timer = time.time()

    logName = time.strftime("Datalog - %Y %m %d, %a, %H-%M-%S.h5",
                            time.localtime())
    logPath = time.strftime("./Data/%Y/%m/%d/", time.localtime())

    if not os.path.exists(logPath):
        os.makedirs(logPath)

    logFQPath = os.path.join(logPath, logName)

    log.info("Logging data to %s", logFQPath)

    if not NO_WRITE_MODE:
        out = h5py.File(logFQPath, "w")

        arrWidth = 16384 + 1  # FFT Array is 16384 items wide, +1 for time-stamp

        # Main dataset - compressed, chunked, checksummed.
        dset = out.create_dataset('Spectrum_Data', (0, arrWidth),
                                  maxshape=(None, arrWidth),
                                  chunks=True,
                                  compression="gzip",
                                  fletcher32=True)

        # Cal and system status log dataset.
        calset = out.create_dataset('Acq_info', (0, 1),
                                    maxshape=(None, None),
                                    dtype=h5py.new_vlen(str))

    items = []
    loopCounter = 0
    while ctrlNs.procRunning:

        if statusMessageQueue.empty() and fftDataRingBuf.getItemsNum() == 0:
            time.sleep(0.005)
        else:
            loopCounter += 1

            if NO_WRITE_MODE:
                if not statusMessageQueue.empty():
                    statusMessageQueue.get()

                if fftDataRingBuf.getItemsNum() != 0:
                    ret = fftDataRingBuf.getOldest()
                    if ret is not False:
                        pass

            else:
                tmp = statusMessageQueue.get()
                if "max" in tmp:
                    items.append(tmp["max"])
                elif "settings" in tmp or "status" in tmp:

                    if "settings" in tmp:
                        tmp["settings"]["averaging-interval"] = NUM_AVERAGE

                    data = [time.time(), tmp]

                    dataPik = cPickle.dumps(data)

                    calSz = calset.shape[0]
                    calset.resize([calSz + 1, 1])
                    calset[calSz, ...] = dataPik

                    log.info("Status message - %s.", tmp)
                    log.info("StatusTable size = %s", calset.shape)
                else:
                    log.error("WAT? Unknown packet!")
                    log.error(tmp)

                if len(items) == NUM_AVERAGE:

                    arr = np.array(items)
                    # print "Array shape = ", arr.shape
                    arr = np.average(arr, axis=0)
                    # print arr.shape

                    dat = np.concatenate(([time.time()], arr))

                    curSize = dset.shape[0]
                    dset.resize(curSize + 1, axis=0)
                    dset[curSize, :] = dat

                    out.flush()  # Flush early, flush often
                    # (probably a bad idea without an SSD)

                    items = []

            now = time.time()
            delta = (now - loop_timer)
            if delta > 1:
                updateInterval = delta / loopCounter
                freq = 1 / updateInterval
                log.info("Elapsed Time = %0.5f, Frequency = %s", delta, freq)
                log.info("Processed FFT queue size = %s",
                         fftDataRingBuf.getItemsNum())
                loop_timer = now
                loopCounter = 0

    if not NO_WRITE_MODE:
        out.close()

    log.info("Log-thread exiting!")
Example #34
import logSetup
if __name__ == "__main__":
	logSetup.initLogging()



import runStatus
runStatus.preloadDicts = False


import UploadPlugins.Madokami.uploader as up
import os
import os.path

TESTPATH = "/media/Storage/MP/Koalove [++]/"
def test():
	con = up.MkUploader()
	con.checkInitDirs()

	con.migrateTempDirContents()

	# for fileN in os.listdir(TESTPATH):
	# 	fqPath = os.path.join(TESTPATH, fileN)
	# 	con.uploadFile("Koalove", fqPath)


if __name__ == "__main__":
	test()
Example #35
def dotest():
    print("Starting test")
    import Queue  # Python 2 stdlib name; on Python 3 this module is "queue"
    logSetup.initLogging()
    logSweeps(Queue.Queue(), None, Queue.Queue(), test=True)
Example #36
        print("Current URL:", cr.get_current_url())
        print(cr.get_page_url_title())
        # cr.close()


def test_rendered_fetch():

    crbin = "google-chrome"
    cr = ChromeController.ChromeRemoteDebugInterface(binary=crbin)

    resp = cr.blocking_navigate(
        "https://www.catatopatch.com/appraise-chapter-15", timeout=10)
    print("Current URL:", cr.get_current_url())
    rcnt = cr.get_rendered_page_source()
    print("content:", type(rcnt))
    # cr.close()


if __name__ == '__main__':
    import logSetup
    logSetup.initLogging(logging.DEBUG)
    # test()
    test_delete_cookies()
    # test_title()
    # test_tabs()
    # test_cycle()
    # test_rendered_fetch()

    # test_url()
    # docstring_dbg()
Example #37
        master=True,
        synchronous=False,
        flush_queues=False,
        prefetch=25,
        durable=True,
        heartbeat=240,
        task_exchange_type='direct',
        poll_rate=1.0 / 100,
        task_queue=settings["task_queue_name"],
        response_queue=settings["response_queue_name"],
    )

    log.info("Connected AMQP Interface: %s", connector)
    log.info("Connection parameters: %s, %s, %s, %s", settings["userid"],
             settings["password"], settings["host"], settings["virtual_host"])

    wait_time = 120
    for x in range(wait_time):
        time.sleep(1)
        print("Sleeping: ", wait_time - x)

    connector.stop()


if __name__ == "__main__":
    logSetup.initLogging(logLevel=logging.DEBUG)

    test_basic()
    # test_disconnect()
    # test_direct()
Example #38
def sweepSource(dataQueues, ctrlNs, printQueue):

    dataQueue, plotQueue = dataQueues

    from SignalHound import SignalHound

    logSetup.initLogging(printQ=printQueue)
    loop_timer = time.time()
    print "Starting sweep-logger!"
    log = logging.getLogger("Main.AcqProcess")

    loop_timer = time.time()
    loops = 0

    sh = SignalHound()
    startAcquisition(sh, dataQueue, plotQueue)

    # Send the trace size to the log thread so it can properly set up the data-log file
    numPoints = sh.queryTraceInfo()["arr-size"]
    dataQueue.put({"arrSize": numPoints})

    temperature = sh.getDeviceDiagnostics()["temperature"]

    runningSum = np.array(())
    runningSumItems = 0
    startFreq = 0

    while 1:
        try:
            trace = sh.fetchTrace()
            traceInfo = sh.queryTraceInfo()
            dataDict = {"info": traceInfo, "data": trace}

            acqInfo = dataDict["info"]
            if runningSum.shape != dataDict["data"]["max"].shape:
                runningSum = np.zeros_like(dataDict["data"]["max"])
                runningSumItems = 0
                startFreq = acqInfo["ret-start-freq"]
                binSize = acqInfo["arr-bin-size"]
                log.info(
                    "Running average array size changed! Either the system just started, or something is seriously wrong!"
                )

            changed = False
            if startFreq != acqInfo["ret-start-freq"]:
                changed = True

            else:
                runningSum += dataDict["data"]["max"]
                runningSumItems += 1

            # If we've reached the number of averaged items per output array, or the
            # frequency has changed (requiring an early dump of the spectra data):
            if runningSumItems == NUM_AVERAGE or changed:

                # Divide down to the average
                arr = runningSum / runningSumItems

                # Build array to write out.
                saveTime = time.time()
                # log.info("Saving data record with timestamp %f", saveTime)

                # Only write out to the file if we actually have data
                if runningSumItems != 0:

                    dataQueue.put({
                        "row":
                        (saveTime, startFreq, binSize, runningSumItems, arr)
                    })
                    if plotQueue:
                        plotQueue.put({
                            "row": (saveTime, startFreq, binSize,
                                    runningSumItems, arr)
                        })

                    del trace

                    runningSum = np.zeros_like(runningSum)
                    log.info("Estimated items in processing queue %s",
                             dataQueue.qsize())
                    log.info("Running sum shape = %s, items = %s",
                             runningSum.shape, runningSumItems)
                    runningSumItems = 0

                # now = time.time()
                # delta = now-loop_timer
                # freq = 1 / (delta)
                # log.info("Elapsed Time = %0.5f, Frequency = %s", delta, freq)
                # loop_timer = now

                # If we wrote the output because the current spectra has changed, we need to update the running acq info variables with the new frequencies.
                if changed:
                    log.info("Retuned! Old freq = %s, new freq = %s",
                             startFreq, acqInfo["ret-start-freq"])

                    runningSum += dataDict["data"]["max"]
                    startFreq = acqInfo["ret-start-freq"]
                    binSize = acqInfo["arr-bin-size"]
                    runningSumItems = 1

        except Exception:
            log.error("IOError in Acquisition Thread!")
            log.error(traceback.format_exc())

            dataQueue.put(
                {"status": "Error: Device interface crashed. Reinitializing"})
            log.error("Resetting hardware!")
            # sh.preset()
            sh.forceClose()
            try:
                while 1:
                    log.warning("Freeing python device handle")
                    del sh
            except UnboundLocalError:
                pass

            log.error(
                "Hardware shut down, completely re-initializing device interface!"
            )
            # sys.exit()
            sh = SignalHound()
            startAcquisition(sh, dataQueue, plotQueue)

        if loops % PRINT_LOOP_CNT == 0:
            now = time.time()
            delta = now - loop_timer
            freq = 1 / (delta / PRINT_LOOP_CNT)
            # log.info("Elapsed Time = %0.5f, Frequency = %s", delta, freq)
            loop_timer = now

        if loops % CAL_CHK_LOOP_CNT == 0:
            diags = sh.getDeviceDiagnostics()
            dataQueue.put({"status": diags})

            temptmp = diags["temperature"]
            # Temperature deviations of > 2 °C cause IF shifts, so re-cal when one is detected.
            if abs(temperature - temptmp) > 2.0:
                dataQueue.put({"status": "Recalibrating IF"})
                sh.selfCal()
                startAcquisition(sh, dataQueue, plotQueue)
                log.warning(
                    "Temperature changed > 2.0 C. Delta is %f. Recalibrated!",
                    abs(temperature - temptmp))
                temperature = temptmp
            else:
                log.info(
                    "Temperature deviation = %f. Not doing recal, since drift < 2C",
                    abs(temperature - temptmp))

        loops += 1

        if not ctrlNs.run:
            log.info("Stopping Acq-thread!")
            break

    sh.abort()
    sh.closeDevice()

    del sh

    log.info("Acq-thread closing dataQueue!")
    dataQueue.close()
    dataQueue.join_thread()
    if plotQueue:
        plotQueue.close()
        plotQueue.cancel_join_thread()

    ctrlNs.acqRunning = False

    log.info("Acq-thread exiting!")
    printQueue.close()
    printQueue.join_thread()
Example #39
    update_nl = True
    if "no_namelist" in sys.argv:
        update_nl = False
    if "drain" in sys.argv:
        update_nl = False
    if not update_nl:
        print("Not fetching new names from site!")

    # ins.go(ctrlNamespace=flags.namespace, update_namelist=True)
    ins.go(ctrlNamespace=flags.namespace,
           update_namelist=update_nl,
           local=True)


if __name__ == '__main__':

    import sys
    import logSetup
    logSetup.initLogging()

    if 'local' in sys.argv:
        run_local()
    elif len(sys.argv) == 1 or 'drain' in sys.argv or 'no_namelist' in sys.argv:
        run_remote()
    elif 'test_get_filename' in sys.argv:
        ins = GetYp()
        ins.go_test()
    else:
        local_test()
Example #40
def go(logGps=False, gpsTest=False):

    plotQueue = mp.Queue()
    dataQueue = mp.Queue()
    printQueue = mp.Queue()
    ctrlManager = mp.Manager()

    logSetup.initLogging(printQ=printQueue)
    log = logging.getLogger("Main.Main")

    ctrlNs = ctrlManager.Namespace()
    ctrlNs.run = True
    ctrlNs.acqRunning = True
    ctrlNs.apiRunning = True
    ctrlNs.logRunning = True
    ctrlNs.stopped = False

    if not settings.GPS_COM_PORT:
        print("WARNING: No GPS port specified. GPS mode can not work.")

    if not gpsTest:
        if settings.ACQ_TYPE == "real-time-sweeping":
            print("Importing real-time-sweeping module!")
            acqProc = mp.Process(
                target=internalSweepSpectraAcqThread.sweepSource,
                name="AcqThread",
                args=((dataQueue, plotQueue), ctrlNs, printQueue))
        else:
            print("Importing real-time module!")
            acqProc = mp.Process(target=spectraAcqThread.sweepSource,
                                 name="AcqThread",
                                 args=((dataQueue, plotQueue), ctrlNs,
                                       printQueue))

        acqProc.start()

    if logGps and settings.GPS_COM_PORT:
        import gpsLogThread
        gpsProc = mp.Process(target=gpsLogThread.startGpsLog,
                             name="GpsThread",
                             args=((dataQueue, plotQueue), ctrlNs, printQueue))
        gpsProc.start()

    logProc = mp.Process(target=spectraLogThread.logSweeps,
                         name="LogThread",
                         args=(dataQueue, ctrlNs, printQueue, gpsTest))
    logProc.start()

    if not gpsTest:
        plotProc = mp.Process(target=spectraPlotApiThread.startApiServer,
                              name="PlotApiThread",
                              args=(plotQueue, ctrlNs, printQueue))
        plotProc.start()

    # A separate process for printing, which allows nice easy non-blocking printing.
    printProc = mp.Process(target=printThread.printer,
                           name="PrintArbiter",
                           args=(printQueue, ctrlNs))
    printProc.daemon = True
    printProc.start()

    try:
        while 1:
            inStr = raw_input()
            print(inStr)
            if inStr == "q":
                break

    except KeyboardInterrupt:
        pass

    log.info("Stopping Processes!")

    ctrlNs.run = False

    # You have to empty the queue for everything to exit properly
    log.info("Emptying Queue")

    # Sometimes the last few queue items take a little while to trickle in.
    # Therefore, we both poll the queue for items and try to join() the thread. That way,
    # as soon as the queue is *actually* empty, we exit immediately.
    # - - -
    # This was a f*****g nightmare to track down.

    if not gpsTest:
        log.info("Joining on AcqProc")
        while acqProc.is_alive():
            acqProc.join(0.1)
    if logGps and settings.GPS_COM_PORT:
        log.info("Joining on GpsProc")
        while gpsProc.is_alive():
            gpsProc.join(0.1)

        if gpsTest:
            print("Faking halt signals")
            ctrlNs.acqRunning = False

        # print("acqProc.is_alive()", acqProc.is_alive(), "logProc.is_alive()", logProc.is_alive(), "plotProc.is_alive()", plotProc.is_alive())
    log.info("Joining on LogProc")
    while logProc.is_alive():
        logProc.join(0.1)
        # print("acqProc.is_alive()", acqProc.is_alive(), "logProc.is_alive()", logProc.is_alive(), "plotProc.is_alive()", plotProc.is_alive())

    if not gpsTest:
        log.info("Joining on PlotProc")
        while plotProc.is_alive():
            plotProc.join(0.1)
            # print("acqProc.is_alive()", acqProc.is_alive(), "logProc.is_alive()", logProc.is_alive(), "plotProc.is_alive()", plotProc.is_alive())

    if gpsTest:
        print("Faking halt signals")
        ctrlNs.apiRunning = False

    print("Joining on PrintProc")
    while printProc.is_alive():
        printProc.join(0.05)
        print("wating on printProc")

    print("Threads stopped.")
    print("Stopping Shared Memory Manager.")
    ctrlManager.shutdown()

    print("Shutdown complete. Exiting.")

    sys.exit()
Example #41
def test():
	print("Test mode!")
	import webFunctions
	import logSetup
	logSetup.initLogging()
Example #42
def test():
    print("Test mode!")
    import WebRequest
    import logSetup
    logSetup.initLogging()
Example #43
def test():
    print("Test mode!")
    import logSetup
    from WebMirror.Engine import SiteArchiver
    import common.database as db

    logSetup.initLogging()

    def fetch(url):
        with db.session_context() as sess:
            archiver = SiteArchiver(cookie_lock=None,
                                    db_interface=sess,
                                    new_job_queue=None)
            archiver.synchronousJobRequest(url, ignore_cache=True, debug=True)

    for url in [
        'https://www.scribblehub.com/series/112220/the-dragonkin-and-the-succubus/',
        'https://www.scribblehub.com/series/107977/bookworld-online-marsh-man/',
        'https://www.scribblehub.com/series/100965/reincarnation-of-a-worthless-man/',
        'https://www.scribblehub.com/series/106548/i-am-an-eggplant-bl/',
        'https://www.scribblehub.com/series/81596/the-broken-system-what-bred-a-king/',
        'https://www.scribblehub.com/series/82656/the-th-demon-lord/',
        'https://www.scribblehub.com/series/66899/the-trials-path-toward-godhood-warning-mature-content/',
        'https://www.scribblehub.com/series/106712/lust-knight/',
        'https://www.scribblehub.com/series/111453/the-forgotten-character/',
        'https://www.scribblehub.com/series/69064/morbid/',
        'https://www.scribblehub.com/series/34196/the-legend-of-the-fake-hero/',
        'https://www.scribblehub.com/series/58245/a-reincarnated-demons-tales-of-wonder/',
        'https://www.scribblehub.com/series/86103/the-demon-lords-successor/',
        'https://www.scribblehub.com/series/93826/waking-up-as-a-spaceship-whats-a-ship-girl-supposed-to-do-now/',
        'https://www.scribblehub.com/series/94224/the-man-who-killed-the-first-monster/',
        'https://www.scribblehub.com/series/110849/monster-parade/',
        'https://www.scribblehub.com/series/40636/falling-over/',
        'https://www.scribblehub.com/series/94576/psionic-goddess-and-the-akashic-system/',
        'https://www.scribblehub.com/series/98089/the-creed-of-an-avenger-an-arifureta-fanfic/',
        'https://www.scribblehub.com/series/51635/eh-where-did-my-pen-pen-go/',
        'https://www.scribblehub.com/series/81242/summoned-again/',
        'https://www.scribblehub.com/series/62217/ultimate-fruit/',
        'https://www.scribblehub.com/series/108367/the-queen-of-darkness-does-not-want-to-be-the-villain/',
        'https://www.scribblehub.com/series/101250/reborn-as-batmans-little-brother/',
        'https://www.scribblehub.com/series/10442/world-keeper/',
        'https://www.scribblehub.com/series/83275/nero-my-existence-is-perfect/',
    ]:
        fetch(url)