def highboost():
    url = req.args.get('link')
    if not url:
        return render_template('standard.html')
    try:
        # download the image from the url
        res = requests.get(url)
        # open the image using PIL
        im = img.open(sIO(res.content))
        # convert the PIL image to a numpy array and turn it into a newt image
        pic = newt(array(im, dtype=double))
        # do a convolution with a 17x17 disk
        pic.highboost('d 17')
        # revert to PIL format
        pic = pic.pic - min(pic.pic)
        pic = 255*pic/max(pic)
        im = img.fromarray(pic.astype('uint8'))
        # save the new image
        buff = sIO()
        im.save(buff, 'JPEG', quality=90)
        buff.seek(0)
        return send_file(buff, mimetype='image/jpeg')
    except:
        return redirect(url)
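# A minimal sketch (plain numpy, hypothetical helper name) of the normalisation
# pattern used throughout these view functions before handing an array back to
# PIL: shift the array so its minimum is 0, scale its maximum to 255, cast to uint8.
import numpy as np

def to_uint8(arr):
    """Rescale an arbitrary float array into the 0-255 range for PIL."""
    arr = arr - arr.min()
    peak = arr.max()
    if peak > 0:                      # avoid dividing by zero on flat images
        arr = 255 * arr / peak
    return arr.astype('uint8')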
def srtfilt():
    url = req.args.get('link')
    if not url:
        return render_template('standard.html')
    try:
        # download the image from the url
        res = requests.get(url)
        # open the image using PIL
        im = img.open(sIO(res.content))
        # shrink very large images
        im.thumbnail((512, 512), img.ANTIALIAS)
        # convert the PIL image to a numpy array and turn it into a newt image
        pic = newt(array(im, dtype=complex))
        # apply the srtfilt operation
        pic.srtfilt()
        # revert to PIL format
        #pic = pic.pic - min(pic.pic)
        #pic = 255*pic/max(pic)
        im = img.fromarray(pic.pic.astype('uint8'))
        # save the new image
        buff = sIO()
        im.save(buff, 'JPEG', quality=90)
        buff.seek(0)
        return send_file(buff, mimetype='image/jpeg')
    except:
        return redirect(url)
def getHistoricalRates(indexSymbol):
    req = urllib2.urlopen(source + indexSymbol)
    rates = req.read()
    dReader = csv.DictReader(sIO(rates))
    histRates = {}
    for row in dReader:
        # convert the quoted annualised percentage into a simple daily decimal rate
        histRates[dt.datetime.strptime(row['Date'], dateFormat).date()] = (float(row['Adj Close'])/100.0)/365
    return histRates
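# Hedged usage sketch (assumes `source` points at a Yahoo-style CSV endpoint and
# `dateFormat` matches the 'Date' column; both are module-level settings defined
# elsewhere in this file; the symbol below is only illustrative):
#
#   rates = getHistoricalRates('^IRX')      # hypothetical rate-index symbol
#   daily = rates[dt.date(2014, 1, 2)]      # e.g. an Adj Close of 5.00 (5% p.a.)
#                                           #   -> (5.00/100.0)/365 ~ 0.000137/day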
def boxesmap():
    siz = double(req.args.get('size'))
    siz = int(min([max([siz, 50]), 200]))   # clamp and cast so randint gets an integer size
    pic = random.randint(-1, 2, size=(siz, siz))
    temp = zeros(pic.shape)
    value = 1

    def pos(x, y):
        # wrap indices around the edges (toroidal boundary)
        if x < 0:
            x = pic.shape[0]-1
        elif x > pic.shape[0]-1:
            x = 0
        if y < 0:
            y = pic.shape[1]-1
        elif y > pic.shape[1]-1:
            y = 0
        return x, y

    for t in range(20):
        for row in range(pic.shape[0]):
            for col in range(pic.shape[1]):
                temp[row, col] = pic[pos(row, col-1)] + \
                    pic[pos(row-1, col)] + pic[pos(row, col)] + pic[pos(row+1, col)] + \
                    pic[pos(row, col+1)]
                temp[row, col] = double(temp[row, col] > 0) - double(temp[row, col] < 0)
        value = sum(pic[:]-temp[:])
        for row in range(pic.shape[0]):
            for col in range(pic.shape[1]):
                pic[row, col] = temp[row, col]

    pic = random.randint(-1, 2, size=(siz, siz, 3))
    neg = 255, 255, 255
    zer = 255, 134, 156
    plus = 12, 163, 255   # renamed from `pos` so the colour no longer shadows the helper above
    for row in range(pic.shape[0]):
        for col in range(pic.shape[1]):
            if temp[row, col] == -1:
                pic[row, col] = neg
            elif temp[row, col] == 0:
                pic[row, col] = zer
            else:
                pic[row, col] = plus
    im = img.fromarray(pic.astype('uint8'))
    im = im.resize((500, 500))
    # save the new image
    buff = sIO()
    im.save(buff, 'JPEG', quality=90)
    buff.seek(0)
    return send_file(buff, mimetype='image/jpeg')
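# A small, self-contained sketch (plain numpy, outside the Flask app) of the same
# update rule the nested loops above compute: each cell takes the sign of the sum
# of itself and its four wrap-around neighbours.
import numpy as np

def boxes_step(grid):
    """One iteration of the sign-of-neighbourhood-sum rule on a toroidal grid."""
    total = (grid
             + np.roll(grid, 1, axis=0) + np.roll(grid, -1, axis=0)
             + np.roll(grid, 1, axis=1) + np.roll(grid, -1, axis=1))
    return np.sign(total).astype(grid.dtype)

# grid = np.random.randint(-1, 2, size=(100, 100))
# for _ in range(20):
#     grid = boxes_step(grid)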
def hny():
    url = req.args.get('link')
    if not url:
        return render_template('standard.html')
    try:
        # download the image from the url, plus the overlay image
        res = requests.get(url)
        res2 = requests.get("https://c2.staticflickr.com/4/3007/2733380075_7c8019d4eb.jpg")
        # https://c2.staticflickr.com/6/5330/8808102199_bff96f1f80_o.jpg
        # open the images using PIL, resizing the overlay to match
        im = img.open(sIO(res.content))
        im2 = img.open(sIO(res2.content)).resize(im.size, img.ANTIALIAS)
        # convert the PIL images to numpy arrays
        a = array(im, dtype=double)
        b = array(im2, dtype=double)
        # grey out each channel, then blend 4:3 with the overlay
        # (note: channel 0 is overwritten first, so channels 1 and 2 mix the
        #  already-blended red channel rather than the original one)
        for color in range(3):
            a[:,:,color] = a[:,:,0]/3 + a[:,:,1]/3 + a[:,:,2]/3
            a[:,:,color] = 4*a[:,:,color]/7 + 3*b[:,:,color]/7
        pic = newt(a)
        #pic.mix(array(im2, dtype=double))
        # revert to PIL format
        pic = pic.pic - min(pic.pic)
        pic = 255*pic/max(pic)
        im = img.fromarray(pic.astype('uint8'))
        # save the new image
        buff = sIO()
        im.save(buff, 'JPEG', quality=90)
        buff.seek(0)
        return send_file(buff, mimetype='image/jpeg')
    except:
        return redirect(url)
def hny():
    url = req.args.get('link')
    if not url:
        return render_template('standard.html')
    try:
        # download the image from the url, plus the overlay image
        res = requests.get(url)
        res2 = requests.get("http://www.themarysue.com/wp-content/uploads/2012/08/c6bfb4fac68932e833f917cd45ad2ff9.jpeg")
        # open the images using PIL, resizing the overlay to match
        im = img.open(sIO(res.content))
        im2 = img.open(sIO(res2.content)).resize(im.size, img.ANTIALIAS)
        # convert the PIL images to numpy arrays
        a = array(im, dtype=double)
        b = array(im2, dtype=double)
        # grey out each channel, then blend 4:3 with the overlay
        # (note: channel 0 is overwritten first, so channels 1 and 2 mix the
        #  already-blended red channel rather than the original one)
        for color in range(3):
            a[:,:,color] = a[:,:,0]/3 + a[:,:,1]/3 + a[:,:,2]/3
            a[:,:,color] = 4*a[:,:,color]/7 + 3*b[:,:,color]/7
        pic = newt(a)
        #pic.mix(array(im2, dtype=double))
        # revert to PIL format
        pic = pic.pic - min(pic.pic)
        pic = 255*pic/max(pic)
        im = img.fromarray(pic.astype('uint8'))
        # save the new image
        buff = sIO()
        im.save(buff, 'JPEG', quality=90)
        buff.seek(0)
        return send_file(buff, mimetype='image/jpeg')
    except:
        return redirect(url)
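# A self-contained sketch (plain numpy, hypothetical helper name) of the blend
# the two hny() variants perform: convert the base image to grey and mix it
# 4/7 : 3/7 with an overlay of the same size. Unlike the in-place loop above,
# this computes the grey level from the original channels for all three outputs.
import numpy as np

def blend_with_overlay(base, overlay, w_base=4/7, w_overlay=3/7):
    """base, overlay: HxWx3 float arrays of identical shape, values in 0-255."""
    grey = base[..., :3].mean(axis=2, keepdims=True)     # per-pixel grey level
    out = w_base * grey + w_overlay * overlay[..., :3]   # broadcast grey over RGB
    return np.clip(out, 0, 255)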
def asf():
    url = req.args.get('link')
    if not url:
        return render_template('scientific.html')
    try:
        # download the image from the url
        res = requests.get(url)
        # open the image using PIL
        im = img.open(sIO(res.content))
        # convert the PIL image to a numpy array and turn it into a newt image
        pic = newt(array(im, dtype=double))
        # apply the dhat filter ('g 7' kernel)
        pic.dhat('g 7')
        # split into single-channel planes, normalise each, and stack them vertically
        z = zeros(pic.pic[:,:,0].shape)
        red = dstack((pic.pic[:,:,0], z, z))
        green = dstack((z, pic.pic[:,:,1], z))
        blue = dstack((z, z, pic.pic[:,:,2]))
        red = 255*red/max(red)
        green = 255*green/max(green)
        blue = 255*blue/max(blue)
        pic = vstack((red, green, blue))
        im = img.fromarray(pic.astype('uint8'))
        # save the new image
        buff = sIO()
        im.save(buff, 'JPEG', quality=90)
        buff.seek(0)
        return send_file(buff, mimetype='image/jpeg')
    except:
        return redirect(url)
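# Shape check (plain numpy, toy dimensions): dstack turns three HxW planes into
# one HxWx3 image, and vstack of three such images yields a (3*H)xWx3 image, so
# asf() returns the red, green and blue planes as a vertical triptych.
import numpy as np

h, w = 4, 6
z = np.zeros((h, w))
red = np.dstack((np.ones((h, w)), z, z))   # shape (4, 6, 3)
panel = np.vstack((red, red, red))         # shape (12, 6, 3)
assert panel.shape == (3 * h, w, 3)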
def readStocksFromFile(stockCount, markIndex, sortBy, reverse):
    if markIndex == "SP":
        f = open("S&P500.csv")
    else:
        f = open("DOW30.csv")
    vals = f.read()
    dReader = csv.DictReader(sIO(vals))
    stocks = [(row['Symbol'], row[sortBy]) for row in dReader]
    stocks = sorted(stocks, key=itemgetter(1), reverse=reverse)
    if stockCount < len(stocks):
        return stocks[:stockCount]
    else:
        return stocks
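# Hedged usage sketch: sortBy must be a column that exists in the CSV header;
# 'Price' below is only an illustrative guess at a column name. Note also that
# the values are compared as strings, so numeric columns may want a float() key.
#
#   top10 = readStocksFromFile(10, "SP", "Price", reverse=True)
#   for symbol, value in top10:
#       print(symbol, value)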
def laplacian():
    url = req.args.get('link')
    if not url:
        return render_template('scientific.html')
    try:
        # treat the query text as input for a word cloud rather than an image url
        wordcloud = wc().generate(url)
        im = wordcloud.to_image()
        # save the new image
        buff = sIO()
        im.save(buff, 'JPEG', quality=90)
        buff.seek(0)
        return send_file(buff, mimetype='image/jpeg')
    except:
        return redirect(url)
def parse_rdf(string, model=None, context="none"):
    if model is None:
        model = bound_graph()
    model.parse(sIO(string))
    return model
def getHistoricalPrices(stockSymbol):
    req = urllib2.urlopen(source + stockSymbol)
    prices = req.read()
    dReader = csv.DictReader(sIO(prices))
    histPrices = [fin.AssetPrice(dt.datetime.strptime(row['Date'], dateFormat).date(),
                                 row['Open'], row['High'], row['Low'],
                                 row['Close'], row['Volume'], row['Adj Close'])
                  for row in dReader]
    return histPrices
def url_to_pdf(url):
    '''Treat the given url as a pdf, to be fed into pdfminer functions'''
    data = urlopen(Request(url)).read()   # renamed from `open`, which shadowed the builtin
    return sIO(data)