def execute_many(dbo, sql, params, override_lock = False):
    """
    Runs the action query given with a list of tuples that contain
    substitution parameters. Eg:
    "INSERT INTO table (field1, field2) VALUES (%s, %s)", [ ( "val1", "val2" ), ( "val3", "val4" ) ]
    Returns rows affected
    override_lock: if this is set to False and dbo.locked = True,
    we don't do anything. This makes it easy to lock the database
    for writes, but keep databases up to date.
    """
    if not override_lock and dbo.locked: return
    try:
        c, s = connect_cursor_open(dbo)
        s.executemany(sql, params)
        rv = s.rowcount
        c.commit()
        connect_cursor_close(dbo, c, s)
        return rv
    except Exception, err:
        al.error(str(err), "db.execute_many", dbo, sys.exc_info())
        try:
            # An error can leave a connection in unusable state,
            # rollback any attempted changes.
            c.rollback()
        except:
            pass
        raise err
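# Illustrative sketch only (not part of the original module): shows how
# execute_many above is typically driven with a parameter list. The table
# and column names are made up for the example.
def _example_bulk_insert(dbo):
    rows = [ ( "val1", "val2" ), ( "val3", "val4" ) ]
    return execute_many(dbo, "INSERT INTO sometable (field1, field2) VALUES (%s, %s)", rows)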
def auto_rotate_image(dbo, imagedata):
    """
    Automatically rotate an image according to the orientation of the
    image in the EXIF data.
    """
    try:
        inputd = StringIO(imagedata)
        im = Image.open(inputd)
        for orientation in ExifTags.TAGS.keys():
            if ExifTags.TAGS[orientation] == "Orientation":
                break
        if not hasattr(im, "_getexif") or im._getexif() is None:
            al.debug("image has no EXIF data, abandoning rotate", "media.auto_rotate_image", dbo)
            return imagedata
        exif = dict(im._getexif().items())
        if exif[orientation] == 3:
            im = im.transpose(Image.ROTATE_180)
        elif exif[orientation] == 6:
            im = im.transpose(Image.ROTATE_270)
        elif exif[orientation] == 8:
            im = im.transpose(Image.ROTATE_90)
        output = StringIO()
        im.save(output, "JPEG")
        rotated_data = output.getvalue()
        output.close()
        return rotated_data
    except Exception, err:
        al.error("failed rotating image: %s" % str(err), "media.auto_rotate_image", dbo)
        return imagedata
def get_string(dbo, name, path=""):
    """
    Gets DBFS file contents as a string. Returns
    an empty string if the file is not found. If no path
    is supplied, just finds the first file with that name
    in the dbfs (useful for media files, which have unique names)
    """
    s = ""
    if path != "":
        r = db.query_tuple(dbo, "SELECT Content FROM dbfs WHERE Name = '%s' AND Path = '%s'" % (name, path))
        if len(r) > 0 and len(r[0]) > 0:
            s = r[0][0]
    else:
        r = db.query_tuple(dbo, "SELECT Content FROM dbfs WHERE Name = '%s'" % name)
        if len(r) > 0 and len(r[0]) > 0:
            s = r[0][0]
    if s != "":
        try:
            s = base64.b64decode(s)
        except:
            em = str(sys.exc_info()[0])
            al.error("Failed unpacking path=%s, name=%s: %s" % (path, name, em), "dbfs.get_string", dbo)
            s = ""
    return s
def query_generator(dbo, sql):
    """
    Runs the query given and returns the resultset
    as a list of dictionaries. All fieldnames are
    uppercased when returned.
    generator function version that uses a forward cursor.
    """
    try:
        c, s = connect_cursor_open(dbo, timeout=True)
        # Run the query and retrieve all rows
        s.execute(sql)
        c.commit()
        cols = []
        # Get the list of column names
        for i in s.description:
            cols.append(i[0].upper())
        row = s.fetchone()
        while row:
            # Initialise a map for each row
            rowmap = {}
            for i in xrange(0, len(row)):
                v = encode_str(dbo, row[i])
                rowmap[cols[i]] = v
            yield rowmap
            row = s.fetchone()
        connect_cursor_close(dbo, c, s)
    except Exception, err:
        al.error(str(err), "db.query", dbo, sys.exc_info())
        raise err
def connection(dbo):
    """
    Creates a connection to the database and returns it
    """
    try:
        if dbo.dbtype == "MYSQL":
            if dbo.password != "":
                return MySQLdb.connect(host=dbo.host, port=dbo.port, user=dbo.username, passwd=dbo.password, db=dbo.database, charset="utf8", use_unicode=True)
            else:
                return MySQLdb.connect(host=dbo.host, port=dbo.port, user=dbo.username, db=dbo.database, charset="utf8", use_unicode=True)
        if dbo.dbtype == "POSTGRESQL":
            c = psycopg2.connect(host=dbo.host, port=dbo.port, user=dbo.username, password=dbo.password, database=dbo.database)
            c.set_client_encoding("UTF8")
            return c
        if dbo.dbtype == "SQLITE":
            return sqlite3.connect(dbo.database, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
    except Exception, err:
        al.error(str(err), "db.connection", dbo, sys.exc_info())
def post_animal_facebook(dbo, user, oauth_code, oauth_state):
    """
    Post an animal to Facebook
    oauth_code: Provided by FB redirect, code to obtain user's access token
    oauth_state: Provided by FB redirect, from our original link the animal ID to post
    """
    # Request an access token for the logged in Facebook user from the
    # oauth code we were given.
    try:
        client_id = FACEBOOK_CLIENT_ID
        client_secret = FACEBOOK_CLIENT_SECRET
        redirect_uri = urllib.quote(BASE_URL + "/animal_facebook")
        fb_url = "https://graph.facebook.com/oauth/access_token?client_id=%s&redirect_uri=%s&client_secret=%s&code=%s" % \
            (client_id, redirect_uri, client_secret, oauth_code)
        al.debug("FB access token request: " + fb_url, "social.post_animal_facebook", dbo)
        access_token = urllib2.urlopen(fb_url).read()
        al.debug("FB access token response: " + access_token, "social.post_animal_facebook", dbo)
    except urllib2.HTTPError, herr:
        em = str(herr.read())
        al.error("Failed getting facebook access token: %s" % em, "social.post_animal_facebook", dbo)
        raise utils.ASMValidationError("Failed getting Facebook access token.")
def save_values_for_link(dbo, post, linkid, linktype = "animal"):
    """
    Saves incoming additional field values from a form, clearing any
    existing values first.
    """
    delete_values_for_link(dbo, linkid, linktype)
    af = get_field_definitions(dbo, linktype)
    l = dbo.locale
    for f in af:
        key = "a." + str(f["MANDATORY"]) + "." + str(f["ID"])
        if post.has_key(key):
            val = post[key]
            if f["FIELDTYPE"] == YESNO:
                val = str(post.boolean(key))
            elif f["FIELDTYPE"] == MONEY:
                val = str(post.integer(key))
            elif f["FIELDTYPE"] == DATE:
                if len(val.strip()) > 0 and post.date(key) == None:
                    raise utils.ASMValidationError(_("Additional date field '{0}' contains an invalid date.", l).format(f["FIELDNAME"]))
                val = python2display(dbo.locale, post.date(key))
            sql = db.make_insert_sql("additional", (
                ( "LinkType", db.di(f["LINKTYPE"]) ),
                ( "LinkID", db.di(int(linkid)) ),
                ( "AdditionalFieldID", db.di(f["ID"]) ),
                ( "Value", db.ds(val) )
            ))
            try:
                db.execute(dbo, sql)
            except Exception, err:
                al.error("Failed saving additional field: %s" % str(err), "animal.update_animal_from_form", dbo, sys.exc_info())
def query_tuple(self, sql, params=None, limit=0):
    """ Runs the query given and returns the resultset as a tuple of tuples. """
    try:
        c, s = self.cursor_open()
        # Add limit clause if set
        if limit > 0:
            sql = "%s %s" % (sql, self.sql_limit(limit))
        # Run the query and retrieve all rows
        if params:
            sql = self.switch_param_placeholder(sql)
            s.execute(sql, params)
        else:
            s.execute(sql)
        d = s.fetchall()
        c.commit()
        self.cursor_close(c, s)
        return d
    except Exception as err:
        al.error(str(err), "Database.query_tuple", self, sys.exc_info())
        al.error("failing sql: %s %s" % (sql, params), "Database.query_tuple", self)
        raise err
    finally:
        try:
            self.cursor_close(c, s)
        except:
            pass
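# Illustrative sketch only: assuming "dbo" is an instance of the Database
# class this method belongs to, a parameterised query is written with "?"
# placeholders, which switch_param_placeholder converts to the driver's
# paramstyle before execution. The path value is a hypothetical example.
def _example_query_tuple(dbo):
    return dbo.query_tuple("SELECT ID, Name FROM dbfs WHERE Path = ?", ["/reports"], limit=10)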
def scale_image(imagedata, resizespec):
    """
    Produce a scaled version of an image.
    imagedata - The image to scale
    resizespec - a string in WxH format
    returns the scaled image data
    """
    try:
        # Turn the scalespec into a tuple of the largest side
        ws, hs = resizespec.split("x")
        w = int(ws)
        h = int(hs)
        size = w, w
        if h > w: size = h, h
        # Load the image data into a StringIO object and scale it
        file_data = StringIO(imagedata)
        im = Image.open(file_data)
        im.thumbnail(size, Image.ANTIALIAS)
        # Save the scaled down image data into another string for return
        output = StringIO()
        im.save(output, "JPEG")
        scaled_data = output.getvalue()
        output.close()
        return scaled_data
    except Exception, err:
        al.error("failed scaling image: %s" % str(err), "media.scale_image")
        return imagedata
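# Illustrative sketch only: scale_image above uses Image.thumbnail, which
# preserves aspect ratio, so a "640x640" resizespec bounds the longest side
# of the output at 640 pixels. The spec value here is just an example.
def _example_scale(imagedata):
    return scale_image(imagedata, "640x640")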
def query_columns(self, sql, params=None):
    """
    Runs the query given and returns the column names as
    a list in the order they appeared in the query
    """
    try:
        c, s = self.cursor_open()
        # Run the query and retrieve all rows
        if params:
            sql = self.switch_param_placeholder(sql)
            s.execute(sql, params)
        else:
            s.execute(sql)
        c.commit()
        # Build a list of the column names
        cn = []
        for col in s.description:
            cn.append(col[0].upper())
        self.cursor_close(c, s)
        return cn
    except Exception as err:
        al.error(str(err), "Database.query_columns", self, sys.exc_info())
        al.error("failing sql: %s %s" % (sql, params), "Database.query_columns", self)
        raise err
    finally:
        try:
            self.cursor_close(c, s)
        except:
            pass
def get(key):
    """
    Retrieves a value from our disk cache. Returns None if the
    value is not found or has expired.
    """
    f = None
    try:
        fname = _getfilename(key)
        # No cache entry found, bail
        if not os.path.exists(fname): return None
        # Pull the entry out
        f = open(fname, "r")
        o = pickle.load(f)
        # Has the entry expired?
        if o["expires"] < int(time.time()):
            delete(key)
            return None
        return o["value"]
    except Exception, err:
        al.error(str(err), "cachedisk.get")
        return None
    finally:
        # Always close the file handle if it was opened
        if f is not None: f.close()
def scale_all_odt(dbo):
    """
    Goes through all odt files attached to records in the database
    and scales them down (throws away images and objects so only
    the text remains to save space)
    """
    mo = dbo.query("SELECT ID, MediaName FROM media WHERE MediaMimeType = 'application/vnd.oasis.opendocument.text'")
    total = 0
    for i, m in enumerate(mo):
        name = str(m.MEDIANAME)
        al.debug("scaling %s (%d of %d)" % (name, i, len(mo)), "media.scale_all_odt", dbo)
        odata = dbfs.get_string(dbo, name)
        if odata == "":
            al.error("file %s does not exist" % name, "media.scale_all_odt", dbo)
            continue
        path = dbo.query_string("SELECT Path FROM dbfs WHERE Name = ?", [name])
        ndata = scale_odt(odata)
        if len(ndata) < 512:
            al.error("scaled odt %s came back at %d bytes, abandoning" % (name, len(ndata)), "scale_all_odt", dbo)
        else:
            dbfs.put_string(dbo, name, path, ndata)
            dbo.update("media", m.ID, { "MediaSize": len(ndata) })
            total += 1
    al.debug("scaled %d of %d odts" % (total, len(mo)), "media.scale_all_odt", dbo)
def html_to_pdf(htmldata, baseurl = "", account = ""):
    """
    Converts HTML content to PDF and returns the PDF file data.
    Uses pisa for the conversion.
    """
    try:
        import sx.pisa3 as pisa
    except:
        al.error("trying to convert html to pdf, pisa not found.", "utils.html_to_pdf", None)
        return ""
    header = "<!DOCTYPE HTML>\n<html><head><style type='text/css'>\n\n</style>"
    header += '<meta http-equiv="content-type" content="text/html; charset=utf-8">\n'
    header += "</head><body>"
    footer = "</body></html>"
    htmldata = htmldata.replace("font-size: xx-small", "font-size: 6pt")
    htmldata = htmldata.replace("font-size: x-small", "font-size: 8pt")
    htmldata = htmldata.replace("font-size: small", "font-size: 10pt")
    htmldata = htmldata.replace("font-size: medium", "font-size: 14pt")
    htmldata = htmldata.replace("font-size: large", "font-size: 18pt")
    htmldata = htmldata.replace("font-size: x-large", "font-size: 24pt")
    htmldata = htmldata.replace("font-size: xx-large", "font-size: 36pt")
    htmldata = fix_relative_document_uris(htmldata, baseurl, account)
    fin = StringIO(header + str(htmldata) + footer)
    fout = StringIO()
    pdf = pisa.CreatePDF(fin, fout)
    if pdf.err:
        al.error("errors found converting html to pdf.", "utils.html_to_pdf", None)
    else:
        return fout.getvalue()
def scale_pdf_file(inputfile, outputfile):
    """
    Scale a PDF file using the command line. There are different
    approaches to this and gs, imagemagick and pdftk (among others)
    can be used.
    Returns True for success or False for failure.
    """
    KNOWN_ERRORS = [
        # GS produces this with out of date libpoppler and Microsoft Print PDF
        "Can't find CMap Identity-UTF16-H building a CIDDecoding resource."
    ]
    code, output = utils.cmd(SCALE_PDF_CMD % { "output": outputfile, "input": inputfile })
    for e in KNOWN_ERRORS:
        # Any known errors in the output should return failure
        if output.find(e) != -1:
            al.error("Abandon PDF scaling - found known error: %s" % e, "media.scale_pdf_file")
            return False
    # A nonzero exit code is a failure
    if code > 0:
        al.error("Abandon PDF scaling - nonzero exit code (%s)" % code, "media.scale_pdf_file")
        return False
    return True
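# A minimal sketch of a SCALE_PDF_CMD value for the function above. The
# actual command is site configuration and this ghostscript invocation is an
# assumption for illustration, not the project's shipped default; any tool
# works as long as it accepts the %(input)s and %(output)s placeholders
# used by scale_pdf_file.
SCALE_PDF_CMD_EXAMPLE = "gs -sDEVICE=pdfwrite -dCompatibilityLevel=1.4 -dPDFSETTINGS=/screen " \
    "-dNOPAUSE -dQUIET -dBATCH -sOutputFile=%(output)s %(input)s"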
def scale_pdf(filedata):
    """
    Scales the given PDF filedata down and returns the compressed PDF data.
    """
    # If there are more than 50 pages, it's going to take forever to scale -
    # don't even bother trying.
    pagecount = utils.pdf_count_pages(filedata)
    if pagecount > 50:
        al.error("Abandon PDF scaling - has > 50 pages (%s found)" % pagecount, "media.scale_pdf")
        return filedata
    inputfile = tempfile.NamedTemporaryFile(suffix=".pdf", delete=False)
    outputfile = tempfile.NamedTemporaryFile(suffix=".pdf", delete=False)
    inputfile.write(filedata)
    inputfile.flush()
    inputfile.close()
    outputfile.close()
    # If something went wrong during the scaling, use the original data
    if not scale_pdf_file(inputfile.name, outputfile.name):
        return filedata
    compressed = utils.read_binary_file(outputfile.name)
    os.unlink(inputfile.name)
    os.unlink(outputfile.name)
    # If something has gone wrong and the scaled one has no size, return the original
    if len(compressed) == 0:
        return filedata
    # If the original is smaller than the scaled one, return the original
    if len(compressed) > len(filedata):
        return filedata
    return compressed
def maint_db_dump_dbfs_base64(dbo):
    try:
        for x in dbupdate.dump_dbfs_base64(dbo):
            print(utils.cunicode(x).encode("utf-8"))
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_db_dump_dbfs_base64: %s" % em, "cron.maint_db_dump_dbfs_base64", dbo, sys.exc_info())
def maint_variable_data(dbo):
    try:
        configuration.set_variable_data_updated_blank(dbo)
        animal.update_all_variable_animal_data(dbo, True)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_variable_data: %s" % em, "cron.maint_variable_data", dbo, sys.exc_info())
def execute(dbo, sql, override_lock=False):
    """
    Runs the action query given and returns rows affected
    override_lock: if this is set to False and dbo.locked = True,
    we don't do anything. This makes it easy to lock the database
    for writes, but keep databases up to date.
    """
    if not override_lock and dbo.locked: return
    try:
        c, s = connect_cursor_open(dbo)
        s.execute(sql)
        rv = s.rowcount
        c.commit()
        connect_cursor_close(dbo, c, s)
        return rv
    except Exception, err:
        al.error(str(err), "db.execute", dbo, sys.exc_info())
        try:
            # An error can leave a connection in unusable state,
            # rollback any attempted changes.
            c.rollback()
        except:
            pass
        raise err
def get_database_info(alias):
    """
    Returns the dbo object for a sheltermanager.com account or alias.
    Also returns a dbo with a database property of "DISABLED" for a
    disabled account, "FAIL" for a problem or "WRONGSERVER" to indicate
    that the database does not exist on this server.
    """
    alias = re.sub(INVALID_REMOVE, '', alias).lower()
    dbo = db.get_dbo("POSTGRESQL")
    dbo.host = "/var/run/postgresql/" # use socket dir to connect to local pgbouncer over UNIX sockets
    dbo.port = 6432
    dbo.dbtype = "POSTGRESQL"
    dbo.alias = alias
    a = get_account(alias)
    if a is None:
        dbo.database = "FAIL"
        return dbo
    dbo.database = str(a["user"])
    dbo.username = dbo.database
    dbo.password = dbo.database
    # Is this sm.com account disabled or removed from the server?
    if a["expired"] or a["archived"]:
        dbo.database = "DISABLED"
    # Is this the wrong server?
    if smcom_client.get_this_server() != a["server"]:
        dbo.database = "WRONGSERVER"
        al.error("failed login, wrong server: %s not present in %s" % (a["server"], smcom_client.get_this_server()))
    return dbo
def touch(key, ttlremaining=0, newttl=0):
    """
    Retrieves a value from our disk cache and updates its ttl if there is
    less than ttlremaining until expiry. This can be used to make our timed
    expiry cache into a sort of hybrid with LRU.
    Returns None if the value is not found or has expired.
    """
    f = None
    try:
        fname = _getfilename(key)
        # No cache entry found, bail
        if not os.path.exists(fname): return None
        # Pull the entry out
        with open(fname, "r") as f:
            o = pickle.load(f)
        # Has the entry expired?
        now = time.time()
        if o["expires"] < now:
            delete(key)
            return None
        # Is there less than ttlremaining to expiry? If so update it to newttl
        if o["expires"] - now < ttlremaining:
            o["expires"] = now + newttl
            with open(fname, "w") as f:
                pickle.dump(o, f)
        return o["value"]
    except Exception as err:
        al.error(str(err), "cachedisk.touch")
        return None
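# Illustrative sketch only: a caller can use touch() instead of get() so that
# entries read close to expiry have their lifetime extended, giving the timed
# cache LRU-like behaviour. The key and TTL values are examples, not
# configuration shipped with the module.
def _example_touch(key):
    # extend the entry by an hour if it has less than ten minutes left
    return touch(key, ttlremaining=600, newttl=3600)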
def get(key):
    """
    Retrieves a value from our disk cache. Returns None if the
    value is not found or has expired.
    """
    f = None
    try:
        fname = _getfilename(key)
        # No cache entry found, bail
        if not os.path.exists(fname): return None
        # Pull the entry out
        with open(fname, "r") as f:
            o = pickle.load(f)
        # Has the entry expired?
        if o["expires"] < time.time():
            delete(key)
            return None
        return o["value"]
    except Exception as err:
        al.error(str(err), "cachedisk.get")
        return None
def maint_variable_data(dbo):
    try:
        configuration.set_variable_data_updated_blank(dbo)
        animal.update_all_variable_animal_data(dbo)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_variable_data: %s" % em, "cron.maint_variable_data", dbo, sys.exc_info())
def maint_db_dump_smcom(dbo):
    try:
        for x in dbupdate.dump_smcom(dbo):
            print unicode(x).encode("utf-8")
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_db_dump: %s" % em, "cron.maint_db_dump_smcom", dbo, sys.exc_info())
def switch_storage(dbo):
    """ Goes through all files in dbfs and swaps them into the current storage scheme """
    rows = dbo.query("SELECT ID, Name, Path, URL FROM dbfs WHERE Name LIKE '%.%' ORDER BY ID")
    for i, r in enumerate(rows):
        al.debug("Storage transfer %s/%s (%d of %d)" % (r.path, r.name, i, len(rows)), "dbfs.switch_storage", dbo)
        source = DBFSStorage(dbo, r.url)
        target = DBFSStorage(dbo)
        # Don't bother if the file is already stored in the target format
        if source.url_prefix() == target.url_prefix():
            al.debug("source is already %s, skipping" % source.url_prefix(), "dbfs.switch_storage", dbo)
            continue
        try:
            filedata = source.get(r.id, r.url)
            target.put(r.id, r.name, filedata)
            # Update the media size while we're switching in case it wasn't set previously
            dbo.execute("UPDATE media SET MediaSize=? WHERE DBFSID=?", (len(filedata), r.id))
        except Exception as err:
            al.error("Error reading, skipping: %s" % str(err), "dbfs.switch_storage", dbo)
    # smcom only - perform postgresql full vacuum after switching
    if smcom.active():
        smcom.vacuum_full(dbo)
def get_database_info(alias):
    """
    Returns the dbo object for a sheltermanager.com account or alias.
    Also returns a dbo with a database property of "DISABLED" for a
    disabled account, "FAIL" for a problem or "WRONGSERVER" to indicate
    that the database does not exist on this server.
    """
    alias = re.sub(INVALID_REMOVE, '', alias).lower()
    dbo = db.DatabaseInfo()
    dbo.host = "/var/run/postgresql/" # use socket dir to connect to local pgbouncer over UNIX sockets
    dbo.port = 6432
    dbo.dbtype = "POSTGRESQL"
    dbo.alias = alias
    a = _get_account_info(alias)
    if a is None or not a.has_key("user"):
        dbo.database = "FAIL"
        return dbo
    dbo.database = str(a["user"])
    dbo.username = dbo.database
    dbo.password = dbo.database
    # Is this sm.com account disabled?
    if a["expired"]:
        dbo.database = "DISABLED"
    # Is this the wrong server?
    if smcom_client.get_this_server() != a["server"]:
        dbo.database = "WRONGSERVER"
        al.error("failed login, wrong server: %s not present in %s" % (a["server"], smcom_client.get_this_server()))
    return dbo
def maint_recode_shelter(dbo):
    try:
        animal.maintenance_reassign_shelter_codes(dbo)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_recode_shelter: %s" % em, "cron.maint_recode_shelter", dbo, sys.exc_info())
def maint_db_dump_dbfs(dbo):
    try:
        dbupdate.dump_dbfs_stdout(dbo)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_db_dump: %s" % em, "cron.maint_db_dump", dbo, sys.exc_info())
def maint_disk_cache(dbo):
    try:
        cachedisk.remove_expired()
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running remove_expired: %s" % em, "cron.maint_disk_cache", dbo, sys.exc_info())
def maint_scale_pdfs(dbo):
    try:
        media.scale_all_pdf(dbo)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_scale_pdfs: %s" % em, "cron.maint_scale_pdfs", dbo, sys.exc_info())
def maint_db_reset(dbo):
    try:
        dbupdate.reset_db(dbo)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_db_reset: %s" % em, "cron.maint_db_reset", dbo, sys.exc_info())
def auto_rotate_image(imagedata):
    """
    Automatically rotate an image according to the orientation of the
    image in the EXIF data.
    """
    try:
        inputd = StringIO(imagedata)
        im = Image.open(inputd)
        for orientation in ExifTags.TAGS.keys():
            if ExifTags.TAGS[orientation] == "Orientation":
                break
        if not hasattr(im, "_getexif") or im._getexif() is None:
            al.debug("image has no EXIF data, abandoning rotate", "media.auto_rotate_image")
            return imagedata
        exif = dict(im._getexif().items())
        if exif[orientation] == 3:
            im = im.transpose(Image.ROTATE_180)
        elif exif[orientation] == 6:
            im = im.transpose(Image.ROTATE_270)
        elif exif[orientation] == 8:
            im = im.transpose(Image.ROTATE_90)
        output = StringIO()
        im.save(output, "JPEG")
        rotated_data = output.getvalue()
        output.close()
        return rotated_data
    except Exception, err:
        al.error("failed rotating image: %s" % str(err), "media.auto_rotate_image")
        return imagedata
def execute_many(self, sql, params=(), override_lock=False):
    """
    Runs the action query given with a list of tuples that contain
    substitution parameters. Eg:
    "INSERT INTO table (field1, field2) VALUES (%s, %s)", [ ( "val1", "val2" ), ( "val3", "val4" ) ]
    Returns rows affected
    override_lock: if this is set to False and dbo.locked = True,
    we don't do anything. This makes it easy to lock the database
    for writes, but keep databases up to date.
    """
    if not override_lock and self.locked:
        return
    if sql is None or sql.strip() == "":
        return 0
    try:
        c, s = self.cursor_open()
        sql = self.switch_param_placeholder(sql)
        s.executemany(sql, params)
        rv = s.rowcount
        c.commit()
        self.cursor_close(c, s)
        return rv
    except Exception as err:
        al.error(str(err), "Database.execute_many", self, sys.exc_info())
        al.error("failing sql: %s" % sql, "Database.execute_many", self)
        try:
            # An error can leave a connection in unusable state,
            # rollback any attempted changes.
            c.rollback()
        except:
            pass
        raise err
    finally:
        try:
            self.cursor_close(c, s)
        except:
            pass
def maint_db_reinstall(dbo):
    try:
        dbupdate.reinstall_default_data(dbo)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_db_reinstall: %s" % em, "cron.maint_db_reinstall", dbo, sys.exc_info())
def maint_db_diagnostic(dbo):
    try:
        move, vacc, med, medt = dbupdate.diagnostic(dbo)
        print "Removed:\n%d orphaned movements,\n%d orphaned vaccinations\n%d orphaned medical records\n%d orphaned treatments" % (move, vacc, med, medt)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_db_diagnostic: %s" % em, "cron.maint_db_diagnostic", dbo, sys.exc_info())
def publish_html(dbo):
    try:
        if configuration.publishers_enabled(dbo).find("html") != -1:
            publish.start_publisher(dbo, "html", user="******", async=False)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running html publisher: %s" % em, "cron.publish_html", dbo, sys.exc_info())
def query_tuple_columns(self, sql, params=None, limit=0):
    """
    Runs the query given and returns the resultset
    as a grid of tuples and a list of column names
    """
    try:
        c, s = self.cursor_open()
        # Add limit clause if set
        if limit > 0:
            sql = "%s %s" % (sql, self.sql_limit(limit))
        # Run the query and retrieve all rows
        if params:
            sql = self.switch_param_placeholder(sql)
            s.execute(sql, params)
        else:
            s.execute(sql)
        d = s.fetchall()
        c.commit()
        # Build a list of the column names
        cn = []
        for col in s.description:
            cn.append(col[0].upper())
        self.cursor_close(c, s)
        return (d, cn)
    except Exception as err:
        al.error(str(err), "Database.query_tuple_columns", self, sys.exc_info())
        al.error("failing sql: %s %s" % (sql, params), "Database.query_tuple_columns", self)
        raise err
    finally:
        try:
            self.cursor_close(c, s)
        except:
            pass
def maint_db_dump_smcom(dbo):
    try:
        print unicode(dbupdate.dump_smcom(dbo)).encode("utf-8")
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_db_dump: %s" % em, "cron.maint_db_dump_smcom", dbo, sys.exc_info())
def maint_db_dump_merge(dbo):
    try:
        print(utils.cunicode(dbupdate.dump_merge(dbo)).encode("utf-8"))
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_db_dump_merge: %s" % em, "cron.maint_db_dump_merge", dbo, sys.exc_info())
def get_lat_long(dbo, address, town, county, postcode, country = None):
    """
    Looks up a latitude and longitude from an address using GEOCODE_URL
    and returns them as lat,long,(first 3 chars of address)
    Returns None if no results were found.
    NB: dbo is only used for contextual reference in logging, no database
    calls are made by any of this code.
    """
    if address.strip() == "":
        return None
    try:
        # Synchronise this process to a single thread to prevent
        # abusing our geo provider and concurrent requests for the
        # same address when opening an animal with the same
        # original/brought in by owner, etc.
        lat_long_lock.acquire()
        url = ""
        if country is None:
            country = LOCALE_COUNTRY_NAME_MAP[dbo.locale]
        if BULK_GEO_PROVIDER == "cloudmade":
            q = normalise_cloudmade(address, town, county, postcode, country)
            url = CLOUDMADE_URL.replace("{key}", BULK_GEO_PROVIDER_KEY).replace("{q}", q)
        elif BULK_GEO_PROVIDER == "nominatim":
            q = normalise_nominatim(address, town, county, postcode, country)
            url = NOMINATIM_URL.replace("{q}", q)
        else:
            al.error("unrecognised geo provider: %s" % BULK_GEO_PROVIDER, "geo.get_lat_long", dbo)
            return None
        al.debug("looking up geocode for address: %s" % q, "geo.get_lat_long", dbo)
        key = "nom:" + q
        if cache.available():
            v = cache.get(key)
            if v is not None:
                al.debug("cache hit for address: %s = %s" % (q, v), "geo.get_lat_long", dbo)
                return v
        jr = urllib2.urlopen(url, timeout = GEO_LOOKUP_TIMEOUT).read()
        j = json.loads(jr)
        latlon = None
        if BULK_GEO_PROVIDER == "cloudmade":
            latlon = parse_cloudmade(dbo, jr, j, q)
        elif BULK_GEO_PROVIDER == "nominatim":
            latlon = parse_nominatim(dbo, jr, j, q)
        # Cache this address/geocode response for an hour
        if cache.available() and latlon is not None:
            cache.put(key, latlon, 3600)
        return latlon
    except Exception, err:
        al.error(str(err), "geo.get_lat_long", dbo)
        return None
    finally:
        # Always release the lock, otherwise the next lookup would block forever
        lat_long_lock.release()
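# A plausible value for the NOMINATIM_URL template used above - an assumption
# for illustration, not necessarily the project's configured default. The {q}
# placeholder is replaced with the normalised, URL-encoded address, and the
# format=json response is a list whose entries carry "lat"/"lon" keys, which
# is what parse_nominatim below expects.
NOMINATIM_URL_EXAMPLE = "https://nominatim.openstreetmap.org/search?format=json&q={q}"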
def maint_animal_figures(dbo):
    try:
        animal.update_all_animal_statuses(dbo)
        animal.update_all_variable_animal_data(dbo)
        animal.maintenance_animal_figures(dbo, includeMonths = True, includeAnnual = True)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_animal_figures: %s" % em, "cron.maint_animal_figures", dbo, sys.exc_info())
def maint_db_diagnostic(dbo):
    try:
        d = dbupdate.diagnostic(dbo)
        for k, v in d.iteritems():
            print "%s: %s" % (k, v)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_db_diagnostic: %s" % em, "cron.maint_db_diagnostic", dbo, sys.exc_info())
def set_last_connected(dbo):
    """ Sets the last connected date on a database to today """
    al.debug("Setting last connected to now for %s" % dbo.database, "smcom.set_last_connected", dbo)
    response = smcom_client.update_last_connected(dbo.database)
    if response != "OK":
        al.error("Failed setting last connection: %s" % response, "smcom.set_last_connected", dbo)
def delete(key):
    """
    Removes a value from our disk cache.
    """
    try:
        fname = _getfilename(key)
        os.unlink(fname)
    except Exception, err:
        al.error(str(err), "cachedisk.delete")
def maint_reinstall_default_media(dbo):
    try:
        dbupdate.install_default_media(dbo, True)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_reinstall_default_media: %s" % em, "cron.maint_reinstall_default_media", dbo, sys.exc_info())
def query_json(dbo, sql):
    """
    Runs the query given and returns the resultset as a JSON array
    with column names. This is more efficient than having query() marshall
    into dictionaries and then iterating those, so if you're querying to
    get JSON, use this instead of json(query(dbo, "SQL"))
    """
    try:
        c, s = connect_cursor_open(dbo)
        # Run the query
        s.execute(sql)
        c.commit()
        # Loop round the rows
        rows = ""
        while 1:
            d = s.fetchone()
            if d is None: break
            row = "{"
            for i in xrange(0, len(d)):
                if row != "{": row += ", "
                # if it's null
                if d[i] is None:
                    value = "null"
                # if it's numeric
                elif is_number(d[i]):
                    value = str(d[i])
                # if it's a string
                else:
                    value = '"' + str(d[i]).replace("\n", "\\n").replace("\r", "\\r").replace("\t", "\\t").replace("`", "'").replace('"', '\\"') + '"'
                row += '"%s" : %s' % (s.description[i][0].upper(), value)
            row += "}"
            if rows != "": rows += ",\n"
            rows += row
        json = "[\n" + rows + "\n]"
        connect_cursor_close(dbo, c, s)
        return json
    except Exception, err:
        al.error(str(err), "db.query_json", dbo, sys.exc_info())
        return ""
def email_uncompleted_upto_today(dbo):
    """
    Goes through all system users and emails them their diary for the
    day - unless the option is turned off.
    """
    if not configuration.email_diary_notes(dbo):
        return
    l = dbo.locale
    try:
        allusers = users.get_users(dbo)
    except Exception, err:
        al.error("failed getting list of users: %s" % str(err), "diary.email_uncompleted_upto_today", dbo)
def parse_nominatim(dbo, jr, j, q):
    if len(j) == 0:
        al.debug("no response from nominatim for %s (response %s)" % (q, str(jr)), "geo.parse_nominatim", dbo)
        return None
    try:
        latlon = "%s,%s,%s" % (str(utils.strip_unicode(j[0]["lat"])), str(utils.strip_unicode(j[0]["lon"])), "na")
        al.debug("contacted nominatim to get geocode for %s = %s" % (q, latlon), "geo.parse_nominatim", dbo)
        return latlon
    except Exception, err:
        al.error("couldn't find geocode in nominatim response: %s, %s" % (str(err), jr), "geo.parse_nominatim", dbo)
        return None
def maint_scale_animal_images(dbo):
    try:
        media.scale_animal_images(dbo)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_scale_animal_images: %s" % em, "cron.maint_scale_animal_images", dbo, sys.exc_info())
def maint_db_dump_personcsv(dbo):
    try:
        print utils.csv(person.get_person_find_simple(dbo, "", "all", True, 0))
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_db_dump_personcsv: %s" % em, "cron.maint_db_dump_personcsv", dbo, sys.exc_info())
def parse_google(dbo, jr, j, q):
    if len(j) == 0:
        al.debug("no response from google for %s (response %s)" % (q, str(jr)), "geo.parse_google", dbo)
        return None
    try:
        loc = j["results"][0]["geometry"]["location"]
        latlon = "%s,%s,%s" % (str(loc["lat"]), str(loc["lng"]), "na")
        al.debug("contacted google to get geocode for %s = %s" % (q, latlon), "geo.parse_google", dbo)
        return latlon
    except Exception, err:
        al.error("couldn't find geocode in google response. Status was %s: %s, %s" % (j["status"], str(err), jr), "geo.parse_google", dbo)
        return None
def reports_email(dbo):
    """
    Batch email reports
    """
    al.info("start batch reports_email", "cron.reports_email", dbo)
    try:
        # Email any daily reports for local time of now
        extreports.email_daily_reports(dbo, i18n.now(dbo.timezone))
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: running daily email of reports_email: %s" % em, "cron.reports_email", dbo, sys.exc_info())
    al.info("end batch reports_email", "cron.reports_email", dbo)
def parse_cloudmade(dbo, jr, j, q):
    if not j.has_key("found") or j["found"] == "0":
        al.debug("no response from cloudmade for %s (response %s)" % (q, str(jr)), "geo.parse_cloudmade", dbo)
        return None
    try:
        point = j["features"][0]["centroid"]["coordinates"]
        latlon = "%s,%s,%s" % (str(point[0]), str(point[1]), "na")
        al.debug("contacted cloudmade to get geocode for %s = %s" % (q, latlon), "geo.parse_cloudmade", dbo)
        return latlon
    except Exception, err:
        al.error("couldn't find geocode in response: %s, %s" % (str(err), jr), "geo.parse_cloudmade", dbo)
        return None
def publish_html(dbo):
    try:
        pc = publish.PublishCriteria(configuration.publisher_presets(dbo))
        publishers = configuration.publishers_enabled(dbo)
        if publishers.find("html") != -1:
            al.info("start html publisher", "cron.publish_html", dbo)
            h = publish.HTMLPublisher(dbo, pc, "cron")
            h.run()
            al.info("end html publisher", "cron.publish_html", dbo)
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running html publisher: %s" % em, "cron.publish_html", dbo, sys.exc_info())
def maint_db_dump_animalcsv(dbo):
    try:
        print utils.csv(animal.get_animal_find_advanced(dbo, { "logicallocation": "all", "includedeceased": "true", "includenonshelter": "true" }))
    except:
        em = str(sys.exc_info()[0])
        al.error("FAIL: uncaught error running maint_db_dump_animalcsv: %s" % em, "cron.maint_db_dump_animalcsv", dbo, sys.exc_info())