def addKeyword(uuid):
    """Attach a keyword to the media object identified by *uuid*.

    The keyword is read from the ``key`` GET parameter of the current
    request, filtered, merged (deduplicated) into the media's existing
    ``keywords`` custom tag, and recorded in the key database.

    Returns a JSON-serializable dict:
        {"uuid": uuid, "error": 0|1, "msg": <status text>}
    """
    key = request.GET.get('key', None)
    if key is None:
        msg = 'addKeyword: "key" argument required but missing'
        log.error("/ws/addKeyword: " + msg)
        return {"uuid": uuid, "error": 1, "msg": msg}
    # Strip characters that could break quoting/paths downstream.
    key = helper.strfilter(key, "'/\. ")
    # repr() instead of the Py2-only backtick form.
    log.debug('addKeyword request for: %s with key: %s' % (uuid, repr(key)))
    try:
        media = mediafactory.MediaFactory()
        media.loadFromFile(uuid)
        # "in" instead of dict.has_key() (removed in Python 3).
        if "keywords" in media.customtags:
            keys = media.customtags["keywords"]
            log.debug("Previous keywords " + repr(keys))
            keys = keys + "," + key
            # `set' removes duplicate keywords
            keys = ','.join(set(keys.split(',')))
        else:
            keys = key
        media.update({'keywords': keys}, group="Custom")
        db.insert_key(key)
        db.connect_key(media.uuid, key)
    except ExifException as e:
        msg = str(e)
        log.debug("Exception: " + msg)
        return {"uuid": uuid, "error": 1, "msg": msg}
    except Exception as e:
        # Best-effort catch-all so the web service always returns JSON.
        return {"error": 1, "msg": str(e)}
    return {"uuid": uuid, "error": 0, "msg": "Keyword saved!"}
# NOTE(review): this is a second definition of addKeyword in the same file;
# it shadows the earlier one at import time. Likely a paste/reformat
# artifact — confirm which copy is intended and remove the other.
def addKeyword(uuid):
    """Attach a keyword to the media object identified by *uuid*.

    The keyword is read from the ``key`` GET parameter, filtered, merged
    (deduplicated) into the media's ``keywords`` custom tag, and recorded
    in the key database.

    Returns a JSON-serializable dict:
        {"uuid": uuid, "error": 0|1, "msg": <status text>}
    """
    key = request.GET.get('key', None)
    if key is None:
        msg = 'addKeyword: "key" argument required but missing'
        log.error("/ws/addKeyword: " + msg)
        return {"uuid": uuid, "error": 1, "msg": msg}
    # Strip characters that could break quoting/paths downstream.
    key = helper.strfilter(key, "'/\. ")
    # repr() instead of the Py2-only backtick form.
    log.debug('addKeyword request for: %s with key: %s' % (uuid, repr(key)))
    try:
        media = mediafactory.MediaFactory()
        media.loadFromFile(uuid)
        # "in" instead of dict.has_key() (removed in Python 3).
        if "keywords" in media.customtags:
            keys = media.customtags["keywords"]
            log.debug("Previous keywords " + repr(keys))
            keys = keys + "," + key
            # `set' removes duplicate keywords
            keys = ','.join(set(keys.split(',')))
        else:
            keys = key
        media.update({'keywords': keys}, group="Custom")
        db.insert_key(key)
        db.connect_key(media.uuid, key)
    except ExifException as e:
        msg = str(e)
        log.debug("Exception: " + msg)
        return {"uuid": uuid, "error": 1, "msg": msg}
    except Exception as e:
        # Best-effort catch-all so the web service always returns JSON.
        return {"error": 1, "msg": str(e)}
    return {"uuid": uuid, "error": 0, "msg": "Keyword saved!"}
def update(self, new_md, group="ALL"):
    """Update an existing media object + media metadata on the file using
    externally provided json metadata.

    Before updating you must load the old media file to update, e.g.:
        old = mediafactory.loadFromFile('4214131214f2')
        old.update(newmetadata)

    Before updating, a check is performed for writability of the media
    file. If the file is not writable then only custom tags are used
    (i.e. tags stored in a separate file and exported as a sidecar XMP).
    """
    self.mtime = time.strftime('%d%m%y-%H:%M:%S')
    if not self.is_writable():
        log.debug('WARNING: Requested write to group ' + group +
                  ' but filetype is not writable. Switching to group "Custom"')
        group = "Custom"
    if group == "Custom":
        #custom tags are saved separately in sessions.js
        self.customtags.update(new_md)
        #delete empty tags from the whole after applying the diff (this is also done by exiftool backend)
        # Iterate over a snapshot of the keys: deleting from the dict while
        # iterating its live view raises RuntimeError on Python 3.
        for k in list(self.customtags.keys()):
            if self.customtags[k] == "":
                del self.customtags[k]
                log.debug("deleted: " + k)
        # save keywords in database ("in" replaces the Py2-only has_key()).
        if "keywords" in self.customtags:
            log.debug("saving keywords: " + self.customtags["keywords"])
            for key in self.customtags["keywords"].split(','):
                db.insert_key(key)
                db.connect_key(self.uuid, key)
        self.saveconf()  #update mtime and customtags
    else:
        #save mediamd
        self._savedata(new_md)
        #if customtags has coordinates then update db
        if "GPSLatitude" in new_md and 'GPSLongitude' in new_md:
            # anonymise function sets these to '' so ignore!
            if new_md['GPSLongitude'] == "" or new_md["GPSLatitude"] == "":
                return
            # invariant: uuid MUST EXIST in sessions table
            log.debug("geocoding in DB! Newmd is: " + logtool.pp(new_md))
            res = db.geotag_uuid(self.uuid, new_md['GPSLongitude'], new_md["GPSLatitude"])
            log.debug(repr(res))  # repr() replaces the Py2-only backtick form
            self.saveconf()  #update mtime only
def pubring_process(ping_name, content):
    """Scan every line of a pubring.mix file and register each recognised
    key header in the keys database via insert_key().

    A line is considered a key header when its first two space-separated
    fields look like a remailer name and address. The remaining optional
    fields (key, mixmaster version, valid-from date, expiry date) are
    validated individually and stored as None when absent or malformed.
    The fifth field (capstring) is currently ignored.
    """
    def _field(fields, idx, pattern):
        # fields[idx] when it exists and matches pattern, otherwise None.
        if len(fields) > idx and pattern.match(fields[idx]):
            return fields[idx]
        return None

    for line in content:
        # Every line of the file is split on spaces, not just headers.
        fields = line.split(' ')
        if len(fields) < 2:
            continue
        name = _field(fields, 0, name_re)
        addy = _field(fields, 1, addy_re)
        # Without both a name and an address the line cannot be a header.
        if not (name and addy):
            continue
        key = _field(fields, 2, key_re)
        ver = _field(fields, 3, ver_re)
        # fields[4] would be the capstring; unused during key checking.
        valid = _field(fields, 5, date_re)
        expire = _field(fields, 6, date_re)
        # Header fully analysed: insert it into the keys database.
        insert_key(ping_name, name, addy, key, ver, valid, expire)