Example #1
def on_text(msg):
    # Store an uplink status report carried as JSON in the message text.
    try:
        u = json.loads(msg['text'])
        if 'dbm' in u:
            Datastore.db_store_uplink(msg['time'], u['dbm'], u['ip'], u['up'],
                                      u['rat'], u['sig'], u['net'])
    except ValueError:
        # Ignore messages whose text is not valid JSON.
        pass
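A hedged invocation sketch for the handler above: the message layout is inferred from the keys on_text() reads, the payload values are invented, and it assumes the module's json and Datastore imports are in scope.

import json

# Hypothetical message; keys mirror what on_text() reads.
sample_msg = {
    'time': 1700000000,
    'text': json.dumps({'dbm': -71, 'ip': '10.0.0.2', 'up': 3600,
                        'rat': 'LTE', 'sig': 17, 'net': 'example-net'}),
}
on_text(sample_msg)  # one uplink row stored via Datastore.db_store_uplink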
Example #2
	def Delete(self, datasegment=None, tagslist=None):
		if datasegment is not None or tagslist is not None:
			if datasegment is None:
				Datastore.DeleteTag(self.cur, tagslist)
			elif tagslist is None:
				Datastore.DeleteValue(self.cur, datasegment)
			else:
				Datastore.DeleteValueByTags(self.cur, datasegment, tagslist)
		else:
			return -1
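A usage sketch, assuming a hypothetical instance named store of the class that defines Delete():

store.Delete(tagslist=['obsolete'])                    # remove tags only
store.Delete(datasegment='old value')                  # remove a stored value only
store.Delete(datasegment='old value', tagslist=['obsolete'])  # remove value(s) matching tags
assert store.Delete() == -1                            # nothing supplied: error code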
Example #3
	def store_by_tags_list(self, data_string, tag_list):
		if len(data_string) == 0:
			msg = {"status": -1, "message": "Data string is empty"}
			return json.dumps(msg)

		if tag_list is None:
			msg = {"status": -2, "message": "No tags in supplied data"}
			return json.dumps(msg)

		# Drop empty or whitespace-only tags before storing.
		tag_list = [tag.strip() for tag in tag_list if len(tag.strip()) != 0]

		Datastore.update_tag_value(self.cur, data_string, tag_list)
Example #4
	def store_by_tags_string(self, data_string, tags_string, delimiter=" "):
		if len(data_string) == 0:
			msg = {"status": -1, "message": "Data string is empty"}
			return json.dumps(msg)

		data_string, tag_list = Parser.parse_data_tags(data_string, tags_string, delimiter)

		if tag_list is None:
			msg = {"status": -2, "message": "No tags in supplied data"}
			return json.dumps(msg)

		Datastore.update_tag_value(self.cur, data_string, tag_list)
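A usage sketch for store_by_tags_string(), assuming a hypothetical wrapper instance named store; the tag string and its space delimiter are illustrative, and the method only returns a JSON payload on its error paths.

import json

reply = store.store_by_tags_string("buy milk", "errands shopping", delimiter=" ")
if reply is not None:
    # An error was reported as JSON by one of the early returns above.
    print("store failed:", json.loads(reply)["message"])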
Example #5
    def get(self):
        # self.response.headers['Content-Type'] = 'text/plain'
        # self.response.write('starting parse')

        day = datetime.datetime.today().weekday()  # Monday == 0
        if day == 2 or day == 4:  # Wednesday or Friday
            script.update_current_semester()
            script.update_current_year()
        if day == 3:  # Thursday
            try:
                Datastore.delete_aggr()
            except Exception:
                pass
            script.update_aggregates(1)
        if day == 5:  # Saturday
            script.update_aggregates(2)
Example #6
	def storeByTagsString(self, data_string, tags_string, delimiter=" "):
		if len(data_string) == 0:
			msg = {"status": -1, "message": "Data string is empty"}
			return json.dumps(msg)

		data_string, tag_list = Parser.parse_data_tags(data_string, tags_string, delimiter)

		if tag_list is None:
			msg = {"status": -2, "message": "No tags in supplied data"}
			return json.dumps(msg)

		Datastore.UpdateTagValue(self.cur, data_string, tag_list)
Example #7
    def __init__(self, db_file):
        """
        Initializer for the class model. It needs to know which file holds the db,
        then provides a connection to that database and allows for some ORM
        operations, such as insert and select.

        db_file :str

        Example Usage:
        cm = ClassModel.ClassModel('testing.db')
        """
        # init db connection using the datastore
        self.conn = Datastore.DB(db_file).ret().conn
Example #8
	def storeByTagsList(self, data_string, tag_list):
		if len(data_string) == 0:
			msg = {"status": -1, "message": "Data string is empty"}
			return json.dumps(msg)

		if tag_list is None:
			msg = {"status": -2, "message": "No tags in supplied data"}
			return json.dumps(msg)

		# Drop empty or whitespace-only tags before storing.
		tag_list = [tag.strip() for tag in tag_list if len(tag.strip()) != 0]
		Datastore.UpdateTagValue(self.cur, data_string, tag_list)
		print()
		print("*************")
		return tag_list
Example #9
    def __init__(self, db_file):
        """
        Initializer for the road maps toggles model. It needs to know which file holds
        the db, then provides a connection to that database and allows for some ORM
        operations, such as insert and select.

        :param db_file: str

        Example Usage:
        tm = TogglesModel.TogglesModel('testing.db')
        """
        # init db connection using the datastore
        self.conn = Datastore.DB(db_file).ret().conn
        """INSERT INTO "main"."roadmap_toggles"("id","requirements_id","students_id","highlight","created_at",
Example #10
	def delete(self, data_segment=None, tagslist=None):
		if data_segment is not None or tagslist is not None:

			if data_segment is None:  # delete tag(s)
				Datastore.delete_tag(self.cur, tagslist)

			elif tagslist is None:  # delete a particular data element
				if isinstance(data_segment, list):
					for segment in data_segment:
						Datastore.delete_value(self.cur, segment)
				else:
					Datastore.delete_value(self.cur, data_segment)

			else:  # delete value(s) matching the supplied tags
				Datastore.delete_value_by_tags(self.cur, data_segment, tagslist)

		else:
			return -1
Example #11
	def store(self, data_string):
		if len(data_string) == 0:
			msg = {"status": -3, "message": "Data string is empty"}
			return json.dumps(msg)

		data_string, tag_list = Parser.parse_tags(data_string)

		if tag_list is None:
			msg = {"status": -2, "message": "No tags in supplied data"}
			return json.dumps(msg)

		result = Datastore.update_tag_value(self.cur, data_string, tag_list)

		if result == -1:
			msg = {"status": -1, "message": "error"}
			return json.dumps(msg)

		elif result == 1:
			msg = {"status": 1, "message": "success"}
			return json.dumps(msg)
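A hedged usage sketch for store(): the instance name and input string are invented, the tag syntax depends on Parser.parse_tags, the status codes come from the method above, and it assumes Datastore.update_tag_value reports 1 or -1.

import json

# store is a hypothetical instance of the class defining store().
reply = json.loads(store.store("water the plants #garden"))
if reply["status"] == 1:
    print("stored successfully")
else:
    print("store failed with status", reply["status"], ":", reply["message"])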
Example #12
def on_image(msg):
    if msg['type'] == Messaging.ImageMessage.TYPE_PERIODICAL:
        filename = Datastore.add_image(msg['src'], msg['time'], msg['data'])
        Datastore.db_store_image(msg['src'], msg['time'], filename, len(msg['data']))
    elif msg['type'] == Messaging.ImageMessage.TYPE_MOVEMENT:
        filename = Datastore.add_image_movement(msg['src'], msg['time'], msg['uuid'], msg['data'])
        Datastore.db_store_image_movement(msg['src'], msg['time'], filename, msg['uuid'], len(msg['data']))
        # send only the first picture belonging to a group of pictures from a source. uuid is the group identifier
        if msg['src'] not in email_alert or email_alert[msg['src']] != msg['uuid']:
            email_alert[msg['src']] = msg['uuid']

            if not (datetime.time(8, 0) < datetime.datetime.now().time() < datetime.time(15, 0)):
                if calendar.timegm(time.gmtime()) > email_alert['last'] + 3600:
                    email_alert['last'] = calendar.timegm(time.gmtime())
                    gmail.send('Activity from cam %i' % msg['src'], 'See attachment.', filename)
                else:
                    main_logger.info('skip email alert due to grace period, last alert %u s ago' %
                                     (calendar.timegm(time.gmtime()) - email_alert['last']))
            else:
                main_logger.info('skip email alert during day')
    elif msg['type'] == Messaging.ImageMessage.TYPE_TEST:
        filename = Datastore.add_test_image(msg['src'], msg['time'], msg['data'])
        main_logger.info('wrote {}'.format(filename))
Example #13
def cam_on_any(msg):
    Datastore.set_variable(msg['src'], 'uptime', msg['uptime'])
    main_logger.info('cameras -> local %s' % Messaging.Message.msg_info(msg))
    local_messaging.send(msg, serialize=False)
Example #14
def on_light_control(msg):
    Datastore.db_store_light_control(msg['src'], msg['time'], msg['state'], msg['uuid'])
Example #15
def on_movement(msg):
    Datastore.db_store_movement(msg['src'], msg['time'], msg['detector'], msg['state'], msg['uuid'])
Example #16
def on_variable(msg):
    if msg['name'] == 'temperature':
        Datastore.db_store_temperature(msg['src'], msg['time'], msg['value'])
    else:
        Datastore.set_variable(msg['src'], msg['name'], msg['value'])
Example #17
import Datastore
import datetime

now = datetime.datetime.now()

Datastore.store_image_meta(now, 2, '/data/image.jpg', 12312)
Example #18
import threading

import Datastore as data

obj = data.Datastore()

# Exercise the Datastore concurrently: create, then read, then delete.
threads = []
for _ in range(1):
    t = threading.Thread(target=obj.create,
                         args=('tirumala', '{name:tirumala,age:21}', 1000))
    t.start()
    threads.append(t)
for thread in threads:
    thread.join()

threads = []
for _ in range(1):
    t = threading.Thread(target=obj.read, args=('tirumala',))
    t.start()
    threads.append(t)
for thread in threads:
    thread.join()

threads = []
for _ in range(3):
    t = threading.Thread(target=obj.delete, args=('tirumala',))
    t.start()
    threads.append(t)
for thread in threads:
    thread.join()
Example #19
Samuel Lundquist

Priority credit to:
Ryan Gurnick - 3/4/20  Creation

"""
import os

import ClassParser
import Datastore

DB = 'seed.db'

if not os.path.exists(DB):
    # set up the datastore
    ds = Datastore.DB(DB)
    # generate the default tables
    ds.generateTables()

# all of the subject codes
subject_codes = ["AAAP", "AAD", "ACTG", "AEIS", "AFR", "AIM", "ANTH", "ANTM", "ARB", "ARCH", "ARH", "ART", "ARTC",
                 "ARTD", "ARTF", "ARTM", "ARTO", "ARTP", "ARTR", "ARTS", "ASIA", "ASL", "ASTR", "BA", "BI", "BIKC",
                 "BIOE", "BLST", "CARC", "CAS", "CDS", "CFT", "CH", "CHKC", "CHN", "CHNF", "CINE", "CIS", "CIT", "CLAS",
                 "COLT", "CPSY", "CRDG", "CRES", "CRWR", "CSCH", "DAN", "DANC", "DANE", "DIST", "DSGN", "EALL", "EC",
                 "ECE", "EDLD", "EDST", "EDUC", "ENG", "ENVS", "ERTH", "ES", "ESC", "EURO", "FHS", "FIN", "FINN", "FLR",
                 "FR", "GEOG", "GEOL", "GER", "GRK", "GRST", "GSAE", "GSCL", "GSGE", "GSST", "HBRW", "HC", "HIST",
                 "HPHY", "HUM", "IARC", "ICH", "INTL", "IST", "ITAL", "J", "JDST", "JGS", "JPN", "KC", "KRN", "LA",
                 "LAS", "LAT", "LAW", "LEAD", "LERC", "LIB", "LING", "LT", "MATH", "MDVL", "MENA", "MGMT", "MIL",
                 "MKTG", "MUE", "MUJ", "MUP", "MUS", "NAS", "NORW", "OBA", "OIMB", "OLIS", "PD", "PDX", "PE", "PEAQ",
                 "PEAS", "PEC", "PEF", "PEI", "PEIA", "PEL", "PEMA", "PEMB", "PEO", "PERS", "PERU", "PETS", "PEW",
                 "PHIL", "PHKC", "PHYS", "PORT", "PPPM", "PREV", "PS", "PSY", "QST", "REES", "REL", "RL", "RUSS",
Example #20
File: main.py  Project: sfhbarnett/UNet
def main(mainpath, load=False, training=True, weights=False, rgb=0):

    torch.cuda.device(0)
    plt.ion()

    # If data is multi or single channel
    if rgb:
        tforms = transforms.Compose([transforms.ToTensor(),
                                     transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
        net = UNet(n_channels=3, n_classes=1)
    else:
        tforms = transforms.Compose([transforms.ToTensor(), transforms.Normalize(0.5, 0.5)])
        net = UNet(n_channels=1, n_classes=1)

    if training:
        trainpath = os.path.join(mainpath, 'image')
        filelist = os.listdir(trainpath)
        trainmasks = os.path.join(mainpath, 'label')
        masklist = os.listdir(trainmasks)

        # Pixel-weight maps are optional; default to an empty list so the
        # Datastore constructor below always receives a weightslist.
        weightslist = []
        if weights:
            weightspath = os.path.join(mainpath, 'weights')
            if not os.path.isdir(weightspath):
                os.mkdir(weightspath)
                print("generating weights")
                for file in masklist:
                    img = Image.open(os.path.join(mainpath, 'label', file))
                    weightmap = Datastore.generateWeights(img)
                    weightmap = Image.fromarray(weightmap)
                    weightmap.save(os.path.join(weightspath, file[:-4] + '.tif'))
                print("generated weights")
            weightslist = os.listdir(weightspath)

        dataset = Datastore.Datastore(filelist, masklist, weightslist, mainpath, transforms=tforms)
        batch_N = 1
        trainloader = torch.utils.data.DataLoader(dataset, batch_size=batch_N, shuffle=True, num_workers=0)
        N_train = len(dataset)
        gpu = 0
        startepoch = 0

        if gpu == 1:
            gpu = torch.device("cuda:0")
            print("Connected to device: ", gpu)
            net = net.to(gpu)

        epochs = 50
        lr = 0.001
        val_percent = 0.05
        optimizer = optim.SGD(net.parameters(),
                              lr=lr,
                              momentum=0.9)
        criterion = nn.BCEWithLogitsLoss()
        fig = plt.figure(figsize=(18, 5), dpi=80, facecolor='w', edgecolor='k')
        fig.tight_layout()

        # Load in previous model
        if load:
            try:
                checkpoint = torch.load('model2.pt')
                net.load_state_dict(checkpoint['model_state_dict'])
                optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
                startepoch = checkpoint['epoch'] + 1
                loss = checkpoint['loss']
            except FileNotFoundError:
                print("No checkpoint file model2.pt found, starting from epoch 0")

        train(net, optimizer, criterion, trainloader, startepoch, epochs, gpu, batch_N, N_train, mainpath)
    else:
        checkpoint = torch.load('model2.pt')
        net.load_state_dict(checkpoint['model_state_dict'])
        predict(net, mainpath)
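A hedged launch sketch for main(): the dataset root is made up, while the expected 'image' and 'label' subfolders and the keyword arguments come from the function above.

if __name__ == '__main__':
    # Hypothetical dataset root containing 'image' and 'label' subfolders
    # (and, with weights=True, a generated 'weights' folder).
    main('/data/unet_dataset', load=False, training=True, weights=True, rgb=0)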