def ingest_geo_feature(path, kind):
    log.info("ingest_geo_feature %s" % path)
    t_protect = model.get_protect(kind)
    sightings = []
    headings = {}
    with open(path) as f:
        rows = csv.reader(f)
        for r, row in enumerate(rows):
            if r == 0:
                for i, item in enumerate(row):
                    headings[item] = i
                continue
            try:
                dt = util.parse_date("%s %s" % (row[headings['Date']], row[headings['Time']]), tz=config['local_tz'], dayfirst=True)
                t = util.timestamp(dt)
                if t <= t_protect:
                    log.warning("Protected t, skipping...")
                    continue                
                try:
                    coordinates = strings.as_numeric(row[headings['Longitude']]), strings.as_numeric(row[headings['Latitude']]), strings.as_numeric(row[headings['Altitude']])
                except Exception as e:
                    log.error("Missing coordinates! Skipping...")
                    continue
                properties = {'DateTime': dt.strftime("%Y-%m-%dT%H:%M:%S%z"), 't_utc': t, 'ContentType': kind}
                for heading in headings:
                    if heading not in ['Date', 'Time', 'Latitude', 'Longitude', 'Altitude']:
                        try:
                            properties[heading] = strings.as_numeric(row[headings[heading]])
                        except IndexError:
                            pass
                feature = geojson.Feature(geometry={'type': "Point", 'coordinates': coordinates}, properties=properties)
                model.insert_feature(kind, t, geojson.dumps(feature))
            except Exception as e:
                log.error("Row failed: " + log.exc(e))
                continue
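
The header-indexing pattern above (headings[item] = i) can also be expressed with the standard library's csv.DictReader, which builds the same lookup from the first row. A minimal sketch, assuming the same Date/Time/Latitude/Longitude/Altitude columns (the file name is hypothetical):

import csv

with open("sightings.csv") as f:                  # hypothetical path
    for row in csv.DictReader(f):                 # each row is a dict keyed by the header line
        lon, lat, alt = row['Longitude'], row['Latitude'], row['Altitude']
        extras = {k: v for k, v in row.items()
                  if k not in ('Date', 'Time', 'Latitude', 'Longitude', 'Altitude')}
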
 def traverse(pd):
     log.info("--> checking %s" % pd)
     for i, filename in enumerate(os.listdir(pd)):
         if filename[0] == ".":
             continue
         elif os.path.isdir(os.path.join(pd, filename)):
             traverse(os.path.join(pd, filename))
         elif kind == 'ambit' and filename[-3:] == "xml":
             ingest_ambit(os.path.join(pd, filename), t_protect)
         elif kind == 'image' and filename[-3:] == "jpg":
             ingest_image(os.path.join(pd, filename), i, t_protect)
         elif kind == 'audio' and filename[-3:] == "mp3":
             ingest_audio(os.path.join(pd, filename), i, t_protect)
         else:
             log.warning("--> unknown file type %s, skipping..." % filename)
Example #3
 def purge_dups(material):
     dups = []
     for phrase in material:
         matches = []
         for p, phrase_ in enumerate(material):
             if phrase == phrase_:
                 matches.append(p)
         dups.extend(matches[1:])
     material = [
         phrase for (p, phrase) in enumerate(material) if (p not in dups)
     ]
     if len(material) < 32:
         log.warning("WARNING: insufficient encoder length (%s)" %
                     len(material))
     return material
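
For reference, the same de-duplication can be done in a single pass while preserving order; a sketch (hypothetical name, assuming each phrase is either hashable or a flat list of hashable items):

def purge_dups_ordered(material):
    # keep the first occurrence of each phrase, preserving order;
    # tuple(...) makes list-valued phrases hashable
    seen = set()
    result = []
    for phrase in material:
        key = tuple(phrase) if isinstance(phrase, list) else phrase
        if key not in seen:
            seen.add(key)
            result.append(phrase)
    return result
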
Example #4
 def on_close(self):
     log.info("//////////// WebSocket.on_close")
     user_id = None
     for uid, instance in WebSocket.sockets.items():
         if instance == self:
             user_id = uid
     log.info("--> closing user_id %s" % user_id)                
     if user_id is None:
         log.warning("socket for %s not found" % user_id)
         return
     if user_id in WebSocket.users:
         del WebSocket.users[user_id]
     for uid, partner_id in WebSocket.users.items():
         if partner_id == user_id:
             WebSocket.send(uid, "unentangled")
             WebSocket.users[uid] = None   
     log.debug("--> users %s" % WebSocket.users)
     log.info("--> complete")
Example #5
def process_image(path, member=None, t_utc=None):
    # try to get EXIF data
    log.info("process_image %s..." % path)
    data = {}
    if member is not None:
        data['Member'] = member
    if t_utc is not None:
        data['t_utc'] = t_utc
    try:    
        image = Image.open(path)  
        width, height = image.size
        data['Dimensions'] = width, height
        try:
            exif = {ExifTags.TAGS[k]: v for (k, v) in image._getexif().items() if k in ExifTags.TAGS}
        except Exception as e:
            log.warning("--> no EXIF data in image: %s" % e)            
            if 't_utc' not in data:
                log.warning("--> substituting current time for t_utc")
                data['t_utc'] = util.timestamp()
        else:
            # log.debug(json.dumps(exif, indent=4, default=lambda x: str(x)))
            date_field = exif['DateTimeOriginal'] if 'DateTimeOriginal' in exif else exif['DateTime']
            if date_field[4] == ":" and date_field[7] == ":":
                date_field = list(date_field)
                date_field[4] = "-"
                date_field[7] = "-"
                date_field = ''.join(date_field)
            date = util.parse_date(date_field, tz=config['local_tz'])
            data['t_utc'] = util.timestamp(date)                            ## careful about this overriding
            data['DateTime'] = util.datestring(data['t_utc'], tz=config['local_tz'])    
            data['Make'] = exif['Make'].replace("\u0000", '').strip() if 'Make' in exif else None
            data['Model'] = exif['Model'].replace("\u0000", '').strip() if 'Model' in exif else None
        filename = "%s_%s.jpg" % (data['t_utc'], str(uuid.uuid4()))
        new_path = os.path.join(os.path.dirname(__file__), "..", "static", "data", "images", filename)
        shutil.copy(path, new_path)
        data['Url'] = "/static/data/images/%s" % filename
    except Exception as e:
        log.error(log.exc(e))
        return None
    return data
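
A minimal usage sketch (path and member name are hypothetical; the function returns None if the image can't be processed):

data = process_image("uploads/IMG_1234.jpg", member="Alice")
if data is not None:
    print(data['Url'], data['t_utc'], data['Dimensions'])
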
Example #6
 def on_message(self, data):
     log.info("//////////// WebSocket.on_message %s" % data)
     try:
         data = json.loads(data)
         url = data['url']
         user_id = data['user_id']
     except Exception as e:
         log.error(log.exc(e))
         return
     log.info("--> user_id %s" % user_id)            
     if user_id not in WebSocket.users:
         log.warning("--> %s (originator) not in WebSocket.users" % user_id)
         return
     partner_id = WebSocket.users[user_id]
     if partner_id is None:
         log.info("--> no partner")
         return
     if partner_id not in WebSocket.sockets:
         log.warning("--> %s (partner) not in WebSocket.users" % partner_id)
         return
     log.info("--> %s sent %s to %s" % (user_id, partner_id, url))
     WebSocket.send(partner_id, url)
     WebSocket.send(user_id, "OK")
Example #7
data = []
with open(FILENAME) as f:
    for l, line in enumerate(f):
        try:
            line = line.strip()
            if not len(line):
                continue
            line = line.split(',')
            if l == 0:
                fields = line
                continue
            line = [strings.as_numeric(field.strip('"')) for field in line]
            entry = dict(zip(fields, line))
            del entry['uid']
            entry['user_id'] = entry['i']
            del entry['i']
            point = Point((entry['lon'], entry['lat'], entry['alt']))
            entry['location'] = point
            del entry['lon']
            del entry['lat']
            del entry['alt']
            dt = timeutil.string_to_dt(entry['date'])
            entry['t'] = timeutil.timestamp(dt)
            print(json.dumps(entry, indent=4))
            db.entries.insert(entry)
        except Exception as e:
            log.warning(log.exc(e))


## NOTE: this ends up labeling all local times as UTC without conversion
## that's intended, because then we can just work naively
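
In stdlib terms, that note means the local wall-clock string is parsed naively and then simply labeled UTC, with no offset applied; a rough sketch of the idea (timeutil is housepy's helper, so this only illustrates the labeling, not its implementation):

from datetime import datetime, timezone

local_string = "2016-07-04 14:30:00"                       # hypothetical local wall-clock time
dt = datetime.strptime(local_string, "%Y-%m-%d %H:%M:%S")   # naive datetime, no conversion
dt = dt.replace(tzinfo=timezone.utc)                        # label it UTC as-is
t = dt.timestamp()                                          # epoch seconds, treating local time as UTC
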
Example #8
    ts.append(t)
    try:
        heartrate = float(trackpoint.findtext("%sHeartRateBpm/%sValue" % tuple([ns]*2)))
    except Exception:
        heartrate = heartrates[-1] if len(heartrates) else 0.0  # carry over; heart rate doesn't go to 0 (hopefully)
    heartrates.append(heartrate)
    try:
        cadence = float(trackpoint.findtext("%sCadence" % ns))
    except Exception as e:
        cadence = 0.0   # drop to 0, probably stopped
    cadences.append(cadence * 2)    # two feet!
log.info("DATA START TIME %s UTC" % datetime.datetime.utcfromtimestamp(start_t).strftime("%Y-%m-%d %H:%M:%S"))
num_samples = len(ts)
log.info("NUM DATA SAMPLES %s" % num_samples)
if len(ts) != int(ts[-1]):
    log.warning("%s != %s" % (util.format_time(len(ts)), util.format_time(ts[-1])))
log.info("DURATION %s" % (util.format_time(ts[-1])))

log.info("CONVERTING AND SAMPLING")

# clean data
cadences = science.filter_deviations(cadences, positive_only=True)
# heartrates = science.filter_deviations(heartrates)

# normalize data
cadences_norm = science.normalize(cadences)
heartrates_norm = science.normalize(heartrates)

# show
ctx = drawing.Context(2000, 250, relative=True, flip=True)
ctx.line([(float(i) / num_samples, cadences_norm[i]) for i in range(num_samples)], stroke=(0, 0, 255), thickness=2)
log.info("Using %s to connect to %s..." % (adapter, address))

while True:
    try:
        c = MetaWearClient(address, 'pygatt', debug=False, adapter=adapter)
        log.info("--> MetaWear initialized: {0}".format(c))
        log.info(c.accelerometer)
        ## setup
        # c.accelerometer.set_settings(data_rate=100.0, data_range=2.0)
        # c.soft_reset()
        # c.disconnect()
        # time.sleep(4)
        # exit()
    except Exception as e:
        log.error(log.exc(e))
        log.warning("Retrying...")
    else:
        break

log.info("Blinking 10 times...")
pattern = c.led.load_preset_pattern('blink', repeat_count=10)
c.led.write_pattern(pattern, 'g')
c.led.play()
time.sleep(5)
log.info("--> ready")
rater.start()


def on_data(data):
    if config['debug']:
        log.debug("Epoch time: [{0}] - X: {1}, Y: {2}, Z: {3}".format(
Example #10
def estimate_geometry(data, db):
    """Estimate the location of a geotagged object for a new feature that's missing it"""
    """For data tagged to a Member, find something else that's geotagged with that Member, best case ambit_geo, worst case take the beacon if they are core, otherwise fail"""
    """For non-member data, just tag it to the beacon"""
    log.info("Estimating geometry...")
    t = data['properties']['t_utc']
    feature_type = data['properties']['FeatureType']
    log.info("--> t is %s (%s)" % (t, util.datestring(t, tz=config['local_tz'])))
    try:

        # find geodata from this Member
        ## bh16 restrict this to ambit_geo. why wasnt it before?
        member_closest_before = None
        member_closest_after = None
        if 'Member' in data['properties'] and data['properties']['Member'] is not None:
            member = data['properties']['Member']
            log.info("--> member is %s" % member)
            try:
                # member_closest_before = list(db.features.find({'properties.Member': member, 'geometry': {'$ne': None}, 'properties.t_utc': {'$lte': t}, 'properties.EstimatedGeometry': {'$exists': False}}).sort('properties.t_utc', -1).limit(1))[0]
                # member_closest_after =  list(db.features.find({'properties.Member': member, 'geometry': {'$ne': None}, 'properties.t_utc': {'$gte': t}, 'properties.EstimatedGeometry': {'$exists': False}}).sort('properties.t_utc', 1).limit(1))[0]
                member_closest_before = list(db.features.find({'properties.Member': member, 'properties.FeatureType': "ambit_geo", 'properties.t_utc': {'$lte': t}}).sort('properties.t_utc', -1).limit(1))[0]
                member_closest_after =  list(db.features.find({'properties.Member': member, 'properties.FeatureType': "ambit_geo", 'properties.t_utc': {'$gte': t}}).sort('properties.t_utc', 1).limit(1))[0]
            except IndexError:
                pass

        # # core?   # eliminate, bh16
        # if 'CoreExpedition' in data['properties']:
        #     core = data['properties']['CoreExpedition']
        # else:   # there should never be an else
        #     log.warning("--> no CoreExpedition for estimator")
        #     core = False
        # log.info("--> core is %s" % core)

        # find geodata from the nearest beacon
        # but only do it if there is no Member (always core, unless overridden), or the Member is/was core at that point
        core_sat = config['satellites'][0] # first satellite is core expedition
        beacon_closest_before = None
        beacon_closest_after = None

        # bh16 temporarily ignoring satellites
        #
        # if core and not feature_type == "ambit":  ## don't let ambit readings pop to beacons
        #     try:
        #         beacon_closest_before = list(db.features.find({'$or': [{'properties.t_utc': {'$lte': t}, 'properties.FeatureType': 'beacon', 'properties.Satellite': {'$exists': False}}, {'properties.t_utc': {'$lte': t}, 'properties.FeatureType': 'beacon', 'properties.Satellite': {'$eq': core_sat}}]}).sort('properties.t_utc', -1).limit(1))[0]
        #         beacon_closest_after = list(db.features.find({'$or': [{'properties.t_utc': {'$gte': t}, 'properties.FeatureType': 'beacon', 'properties.Satellite': {'$exists': False}}, {'properties.t_utc': {'$gte': t}, 'properties.FeatureType': 'beacon', 'properties.Satellite': {'$eq': core_sat}}]}).sort('properties.t_utc', 1).limit(1))[0]
        #     except IndexError:
        #         pass

        # pick the best ones
        if member_closest_before is not None and beacon_closest_before is not None:
            closest_before = beacon_closest_before if beacon_closest_before['properties']['t_utc'] > member_closest_before['properties']['t_utc'] else member_closest_before
        elif member_closest_before is not None:
            closest_before = member_closest_before
        else:
            closest_before = beacon_closest_before

        if member_closest_after is not None and beacon_closest_after is not None:
            closest_after = beacon_closest_after if beacon_closest_after['properties']['t_utc'] < member_closest_after['properties']['t_utc'] else member_closest_after
        elif member_closest_after is not None:
            closest_after = member_closest_after
        else:
            closest_after = beacon_closest_after

        if closest_before is None or closest_after is None:
            data['properties']['EstimatedGeometry'] = None
            log.warning("--> closest not found")
            return data

        # average the times and positions: this is naive calculation not taking projection into account
        data['geometry'] = closest_before['geometry'] # make sure the fields are there (if theres an error it will default to this assignment)
        if len(data['geometry']['coordinates']) == 2:  # add altitude if it's missing
            data['geometry']['coordinates'].append(None)
        data['properties']['EstimatedGeometry'] = closest_before['properties']['FeatureType']   # note what we used

        t1 = closest_before['properties']['t_utc']
        t2 = closest_after['properties']['t_utc']
        if t1 != t2:
            p = (t - t1) / (t2 - t1)
            data['geometry']['coordinates'][0] = (closest_before['geometry']['coordinates'][0] * (1 - p)) + (closest_after['geometry']['coordinates'][0] * p)
            data['geometry']['coordinates'][1] = (closest_before['geometry']['coordinates'][1] * (1 - p)) + (closest_after['geometry']['coordinates'][1] * p)
            try:
                data['geometry']['coordinates'][2] = (closest_before['geometry']['coordinates'][2] * (1 - p)) + (closest_after['geometry']['coordinates'][2] * p)            
            except (TypeError, IndexError):  # altitude missing or None in one of the fixes
                data['geometry']['coordinates'][2] = None
        log.debug(data['geometry']['coordinates'])

        log.info("--> derived from %s" % data['properties']['EstimatedGeometry'])

    except Exception as e:
        log.error(log.exc(e))
    return data
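
The interpolation block above amounts to a time-weighted linear blend of the two bracketing fixes; a standalone sketch (hypothetical helper name, taking the same before/after feature dicts):

def lerp_position(t, before, after):
    # Naive blend of raw lon/lat (and altitude if present), without projecting,
    # matching the "naive calculation" note in estimate_geometry above.
    t1, t2 = before['properties']['t_utc'], after['properties']['t_utc']
    c1, c2 = before['geometry']['coordinates'], after['geometry']['coordinates']
    if t1 == t2:
        return list(c1)
    p = (t - t1) / (t2 - t1)
    coords = [c1[0] * (1 - p) + c2[0] * p, c1[1] * (1 - p) + c2[1] * p]
    if len(c1) > 2 and len(c2) > 2 and c1[2] is not None and c2[2] is not None:
        coords.append(c1[2] * (1 - p) + c2[2] * p)
    else:
        coords.append(None)
    return coords
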
Example #11
 def traverse(pd):
     log.info("Checking %s..." % pd)
     for i, filename in enumerate(os.listdir(pd)):
         if filename[0] == ".":
             continue
         elif os.path.isdir(os.path.join(pd, filename)):
             traverse(os.path.join(pd, filename))
         elif filename[-3:] == "sml":
             try:
                 log.info("Reading %s..." % os.path.join(pd, filename))
                 with open(os.path.join(pd, filename)) as f:
                     content = f.read()        
             except Exception as e:
                 log.error("Could not read file: %s" % log.exc(e))
             else:
                 try:
                     log.info("Parsing...")
                     data = xmltodict.parse(content)
                     # log.debug(json.dumps(data, indent=4))
                     serial_number = str(data['sml']['DeviceLog']['Device']['SerialNumber'])
                     try:
                         member = config['ambits'][serial_number]
                     except KeyError:
                         log.warning("Ambit serial number not linked to a Member")
                         log.debug(serial_number)
                         log.debug(config['ambits'])
                         continue
                     log.info("Member: %s" % member)
                     samples = data['sml']['DeviceLog']['Samples']['Sample']
                     start_t = None                                            
                     for s, sample in enumerate(samples):  
                         if s == 0:
                             dt = util.parse_date(sample['UTC']) # these are marked UTC in the data
                             start_t = util.timestamp(dt)                
                         sample['Member'] = member
                         if 'Satellites' in sample:  # ingest satellite location data                    
                             try:
                                 url = "%s/ingest/ambit_geo" % config['url']
                                 log.info("Sending to %s..." % url)
                                 response = net.read(url, str(json.dumps(sample)).encode('utf-8'))
                                 log.info("--> %s" % response)                                                        
                             except Exception as e:
                                 log.error(log.exc(e))
                         else: # ingest energy data sample 
                             ## bh16
                             # this data is not interesting, and mucks up the estimating
                             continue
                             # try:
                             #     url = "%s/ingest/ambit" % config['url']
                             #     log.info("Sending to %s..." % url)
                             #     response = net.read(url, str(json.dumps(sample)).encode('utf-8'))
                             #     log.info("--> %s" % response)
                             # except Exception as e:
                             #     log.error(log.exc(e))
                     try:
                         beats = [strings.as_numeric(beat) for beat in data['sml']['DeviceLog']['R-R']['Data'].split()]
                         d = {'Member': member, 't_utc': start_t, 'Beats': beats}
                         url = "%s/ingest/ambit_hr" % config['url']
                         log.info("Sending to %s..." % url)
                         response = net.read(url, str(json.dumps(d)).encode('utf-8'))
                         log.info("--> %s" % response)
                     except Exception as e:
                         log.error(log.exc(e))                                                        
                 except Exception as e:
                     log.error("Parsing error: %s" % log.exc(e))
         else:
             log.warning("--> unknown file type %s, skipping..." % filename)
Example #12
def main():    
    log.info("Checking email...")
    messages = emailer.fetch()
    log.info("--> found %s new messages" % len(messages))
    for m, message in enumerate(messages):
        log.info("Processing message %s..." % m)
        if message['from'] not in config['allowed_senders']:
            log.warning("Received bunk email from %s" % message['from'])
            continue
        subject = message['subject'].lower().strip()
        log.info("--> subject: %s" % subject)
        def check_satellites(subject):
            for satellite in config['satellites']:
                if satellite.lower() in subject:            
                    return True
            return False
        if check_satellites(subject):
            # relay a beacon (body post)
            url = "%s/ingest/beacon" % config['url']
            log.info("Sending to %s..." % url)
            try:
                response = net.read(url, ("%s\n%s" % (str(subject), str(message['body']))).encode('utf-8'))
                log.info("--> %s" % response)
            except Exception as e:
                log.error("--> main server error: %s" % log.exc(e))
                continue
        else:
            # unpack the ambit zip and post each sample (to ambit or to ambit_geo)
            log.info("--> %s attachments" % len(message['attachments']))
            for attachment in message['attachments']:
                try:
                    path = os.path.join(os.path.dirname(__file__), "..", "uploads", "%s-%s_%s" % (util.timestamp(), m, attachment['filename'].lower()))
                    def write_file():
                        with open(path, 'wb') as f:
                            f.write(attachment['data'])                    
                    if path[-3:] != "zip":
                        log.info("--> skipping non-zip file %s" % path)
                        continue
                    write_file()
                    if zipfile.is_zipfile(path) is False:
                        log.warning("--> zip file is corrupt %s" % path)
                        continue
                    p = path[:-4]
                    os.mkdir(p)
                    with zipfile.ZipFile(path, 'r') as archive:
                        archive.extractall(p)
                        log.info("--> zip file extracted")
                        def traverse(pd):
                            log.info("Checking %s..." % pd)
                            for i, filename in enumerate(os.listdir(pd)):
                                if filename[0] == ".":
                                    continue
                                elif os.path.isdir(os.path.join(pd, filename)):
                                    traverse(os.path.join(pd, filename))
                                elif filename[-3:] == "sml":
                                    try:
                                        log.info("Reading %s..." % os.path.join(pd, filename))
                                        with open(os.path.join(pd, filename)) as f:
                                            content = f.read()        
                                    except Exception as e:
                                        log.error("Could not read file: %s" % log.exc(e))
                                    else:
                                        try:
                                            log.info("Parsing...")
                                            data = xmltodict.parse(content)
                                            # log.debug(json.dumps(data, indent=4))
                                            serial_number = str(data['sml']['DeviceLog']['Device']['SerialNumber'])
                                            try:
                                                member = config['ambits'][serial_number]
                                            except KeyError:
                                                log.warning("Ambit serial number not linked to a Member")
                                                log.debug(serial_number)
                                                log.debug(config['ambits'])
                                                continue
                                            log.info("Member: %s" % member)
                                            samples = data['sml']['DeviceLog']['Samples']['Sample']
                                            start_t = None                                            
                                            for s, sample in enumerate(samples):  
                                                if s == 0:
                                                    dt = util.parse_date(sample['UTC']) # these are marked UTC in the data
                                                    start_t = util.timestamp(dt)                
                                                sample['Member'] = member
                                                if 'Satellites' in sample:  # ingest satellite location data                    
                                                    try:
                                                        url = "%s/ingest/ambit_geo" % config['url']
                                                        log.info("Sending to %s..." % url)
                                                        response = net.read(url, str(json.dumps(sample)).encode('utf-8'))
                                                        log.info("--> %s" % response)                                                        
                                                    except Exception as e:
                                                        log.error(log.exc(e))
                                                else: # ingest energy data sample 
                                                    ## bh16
                                                    # this data is not interesting, and mucks up the estimating
                                                    continue
                                                    # try:
                                                    #     url = "%s/ingest/ambit" % config['url']
                                                    #     log.info("Sending to %s..." % url)
                                                    #     response = net.read(url, str(json.dumps(sample)).encode('utf-8'))
                                                    #     log.info("--> %s" % response)
                                                    # except Exception as e:
                                                    #     log.error(log.exc(e))
                                            try:
                                                beats = [strings.as_numeric(beat) for beat in data['sml']['DeviceLog']['R-R']['Data'].split()]
                                                d = {'Member': member, 't_utc': start_t, 'Beats': beats}
                                                url = "%s/ingest/ambit_hr" % config['url']
                                                log.info("Sending to %s..." % url)
                                                response = net.read(url, str(json.dumps(d)).encode('utf-8'))
                                                log.info("--> %s" % response)
                                            except Exception as e:
                                                log.error(log.exc(e))                                                        
                                        except Exception as e:
                                            log.error("Parsing error: %s" % log.exc(e))
                                else:
                                    log.warning("--> unknown file type %s, skipping..." % filename)
                        traverse(p)

                except Exception as e:
                    log.error(log.exc(e))
Example #13

def main():
    log.info("Checking email...")
    messages = emailer.fetch()
    log.info("Found %s new messages..." % len(messages))
    for m, message in enumerate(messages):
        if message['from'] not in config['incoming']:
            log.warning("Received bunk email from %s" % message['from'])
            continue
        subject = message['subject'].lower().strip()
        log.info("Subject: %s" % subject)
        kind = None
        kinds = 'ambit', 'sighting', 'breadcrumb', 'image', 'audio'
        for k in kinds:        
            if util.lev_distance(k, subject) <= 2:
                kind = k
                break
        if kind is None and config['satellite'].lower() in subject:
            kind = 'beacon'
        if kind is None and "message sent from mobile number" in subject:
            kind = 'hydrosensor'
        if kind is None:
            log.error("subject not recognized")
        else:
            log.info("--> kind: %s" % kind)
        if kind == 'beacon':
            ingest_beacon(message['body'])
        elif kind == 'hydrosensor':
            hydrosensor_id = subject.strip()[-4:]
            ingest_hydrosensor(hydrosensor_id, message['body'], message['date'])
        else:
            log.info("--> %s attachments" % len(message['attachments']))
            for attachment in message['attachments']:

                try:
                    path = os.path.join(os.path.dirname(__file__), "data", "%s-%s_%s" % (util.timestamp(), m, attachment['filename'].lower()))
                    def write_file():
                        with open(path, 'wb') as f:
                            f.write(attachment['data'])

                    if kind in ('sighting', 'breadcrumb'):
                        if path[-3:] != "csv":
                            log.warning("--> expected csv file, got %s" % path)
                            continue
                        write_file()
                        ingest_geo_feature(path, kind)
                        break

                    elif kind in ('ambit', 'image', 'audio'): 
                        t_protect = model.get_protect(kind)
                        if path[-3:] != "zip":
                            log.warning("--> expected zip file, got %s" % path)
                            continue
                        write_file()            
                        if zipfile.is_zipfile(path) is False:
                            log.warning("--> zip file is corrupt %s" % path)
                            continue
                        p = path[:-4]
                        os.mkdir(p)
                        with zipfile.ZipFile(path, 'r') as archive:
                            archive.extractall(p)
                            def traverse(pd):
                                log.info("--> checking %s" % pd)
                                for i, filename in enumerate(os.listdir(pd)):
                                    if filename[0] == ".":
                                        continue
                                    elif os.path.isdir(os.path.join(pd, filename)):
                                        traverse(os.path.join(pd, filename))
                                    elif kind == 'ambit' and filename[-3:] == "xml":
                                        ingest_ambit(os.path.join(pd, filename), t_protect)
                                    elif kind == 'image' and filename[-3:] == "jpg":
                                        ingest_image(os.path.join(pd, filename), i, t_protect)
                                    elif kind == 'audio' and filename[-3:] == "mp3":
                                        ingest_audio(os.path.join(pd, filename), i, t_protect)
                                    else:
                                        log.warning("--> unknown file type %s, skipping..." % filename)
                            traverse(p)

                except Exception as e:
                    log.error(log.exc(e))
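
The kind matching above relies on housepy's util.lev_distance; a self-contained sketch of an equivalent edit-distance check, with the name chosen to mirror that call:

def lev_distance(a, b):
    # Classic dynamic-programming edit distance; a stand-in for housepy's util.lev_distance,
    # which the loop above uses to forgive small typos in the email subject.
    prev = list(range(len(b) + 1))
    for i, ca in enumerate(a, 1):
        curr = [i]
        for j, cb in enumerate(b, 1):
            curr.append(min(prev[j] + 1,                 # deletion from a
                            curr[j - 1] + 1,             # insertion into a
                            prev[j - 1] + (ca != cb)))   # substitution (or match)
        prev = curr
    return prev[-1]

# e.g. lev_distance("sigthing", "sighting") == 2, so a slightly mistyped subject still maps to 'sighting'
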
Example #14
import random, datetime, json, math, requests
import numpy as np
from housepy import geo, config, log, util, timeutil
from sklearn.cluster import Birch
from mongo import db
import drawer

PERIOD_SIZE = config['period_size']
PERIODS = int(1440 / PERIOD_SIZE)
LOCATION_SIZE = config['location_size']
try:
    geohashes = util.load(config['locations'])
    LOCATIONS = len(geohashes)
except FileNotFoundError as e:
    log.warning(e)

LON_1, LAT_1 = config['bounds']['NW']
LON_2, LAT_2 = config['bounds']['SE']
MIN_X, MAX_Y = geo.project((LON_1, LAT_1))
MAX_X, MIN_Y = geo.project((LON_2, LAT_2))
RATIO = (MAX_X - MIN_X) / (MAX_Y - MIN_Y)


class Point():
    def __init__(self, lon, lat, t):
        self.lon = lon
        self.lat = lat
        x, y = geo.project((self.lon, self.lat))
        self.x = (x - MIN_X) / (MAX_X - MIN_X)
        self.y = (y - MIN_Y) / (MAX_Y - MIN_Y)
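
Given how the bounds are projected above, Point.x and Point.y normalize to the [0, 1] range for coordinates inside config['bounds']; a hypothetical usage sketch:

p = Point(-73.97, 40.78, 1467640800)              # lon, lat, epoch seconds (made-up values)
assert 0.0 <= p.x <= 1.0 and 0.0 <= p.y <= 1.0    # holds only for points inside config['bounds']
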
Example #15
            continue
        params = line.split('\t')
        if fields is None:        
            fields = params
            for f, field in enumerate(fields):
                for (key, label) in config['labels'].items():
                    if key in field and "_cd" not in field:
                        fields[f] = label
            # log.warning("--> grabbed params: %s" % fields)            
            continue
        if nop is None:
            nop = params
            # log.warning("--> nop")
            continue
        data = {fields[f]: param for (f, param) in enumerate(params)}
        data = {(key if key != 'site_no' else 'site'): (strings.as_numeric(value.strip()) if key != 'site_no' else value) for (key, value) in data.items()}
        data = {key: value for (key, value) in data.items() if "_cd" not in key and (type(value) != str or (key == 'site' or key == 'datetime'))}
        if 'datetime' not in data:
            log.warning("datetime missing")
            continue
        data['t_utc'] = timeutil.t_utc(timeutil.string_to_dt(data['datetime'], tz=config['tz']))
        log.info(json.dumps(data, indent=4))
        try:
            entry_id = db.entries.insert_one(data).inserted_id
            log.info("INSERT %s" % entry_id)
        except DuplicateKeyError:
            log.warning("DUPLICATE")
        except Exception as e:
            log.error(log.exc(e))

Example #16
#!/usr/bin/env python3

import sys, time, os
from housepy import osc, config, log, process
log.info("Starting up...")
try:
    import RPi.GPIO as GPIO
except ImportError:
    log.warning("--> using fake GPIO")
    class GPIO(object):
        BCM = 303
        OUT = "out"
        HIGH, LOW = 1, 0
        PUD_UP = None
        IN = 1
        def setmode(n):
            pass
        def setup(n, m, pull_up_down=None):
            pass
        def output(n, m):
            pass
        def cleanup():
            pass
        def input(pin):
            return None

process.secure_pid(os.path.abspath(os.path.join(os.path.dirname(__file__), "run")))  

# https://www.raspberrypi.org/documentation/usage/gpio/
log.info("Setting up pins...")
GPIO.setmode(GPIO.BCM)
Example #17
 def not_found(self):
     log.warning("404: Page not found")
     raise tornado.web.HTTPError(404)        
Example #18
 def play(self, notes):
     if self.playing:
         log.warning("Already playing")
         return
     self.queue.put(notes)