def runScipy():
    time, load = get_data(config['month'], config['year'])
    guess = np.ones(len(time)) * config['guess_coef']
    # candidate DE strategies (not currently iterated over)
    strategies = ('best1bin', 'best1exp', 'rand1exp', 'randtobest1exp',
                  'currenttobest1exp', 'best2exp', 'rand2exp',
                  'randtobest1bin', 'currenttobest1bin', 'best2bin',
                  'rand2bin', 'rand1bin')

    def penaltyobj(gen):
        return model(gen, time, load, config)[0]

    bounds = [(1e3, 1e5) for i in range(len(time))]

    # polish=True takes the best population member and finishes with L-BFGS-B
    opt = differential_evolution(penaltyobj, bounds=bounds, polish=True,
                                 disp=True)
    print(opt)
    fstar = opt.fun
    xstar = opt.x
    nfev = opt.nfev
    print(results(xstar, config))
    gen_report([xstar, nfev], "Scipy GA Polished", "Penalized", config,
               guess=guess,
               notes="lb 1e3 ub 8e4 Polished with L-BFGS-B, " + opt.message,
               gen_plot=True)

    # polish=False stops at the best member of the final population
    opt = differential_evolution(penaltyobj, bounds=bounds, polish=False,
                                 disp=True)
    print(opt)
    fstar = opt.fun
    xstar = opt.x
    nfev = opt.nfev
    print(results(xstar, config))
    gen_report([xstar, nfev], "Scipy GA", "Penalized", config, guess=guess,
               notes="lb 1e3 ub 8e4, " + opt.message, gen_plot=True)
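# A minimal sketch of the penalty pattern penaltyobj relies on, assuming
# model() returns a tuple whose first element is the penalized objective
# (base cost plus weighted constraint violations). The helper name and the
# weight below are illustrative assumptions, not the repo's implementation.
def _penalized_cost_sketch(cost, violations, weight=1e6):
    # violations: iterable of nonnegative violation magnitudes (0 = feasible)
    return cost + weight * sum(violations)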
def get(self):
    '''Returns a list of the most recent posts, filtered by argument. Maximum 100.

    Arguments:
    &tag={TAG} (optional) Filter by this tag.
    &count={1..100} (optional) Number of items to retrieve (Default:15, Maximum:100).
    '''
    tag = self.request.get('tag', default_value=None)
    count = self.request.get('count')
    count = min(int(count), 100) if count else 15
    res = Post.get_recent(tag, count)
    context = {
        'tag': tag,
    }
    context.update(self.context)
    xml_response = results('posts', res, context)
    # TODO: cache for N minutes
    self.write_xml(xml_response)
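# A hedged client-side sketch of calling this handler with the requests
# library. The host and the '/v1/posts/recent' route are illustrative
# assumptions; the real path depends on how the handler is routed.
import requests

resp = requests.get('http://localhost:8080/v1/posts/recent',
                    params={'tag': 'python', 'count': 30})
print(resp.text)  # XML <posts> document with up to 30 recent items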
def get(self):
    '''Delete a post from Delicious.

    Arguments:
    &url={URL} (required) the url of the item.
    '''
    url = self.request.get('url', default_value=None)
    if url is None:
        self.write_xml(results('result', [], {'code': 'url or md5 required'}))
        return  # bail out: nothing to delete without a url or md5
    # encode to md5 only if not already an md5 hash
    key = hashlib.md5(url).hexdigest() if url.startswith('http') else url
    post_key = ndb.Key('Post', key)
    post_key.delete()
    self.write_xml(results('result', [], {'code': 'done'}))
def get(self):
    '''Returns all posts. Please use sparingly. Call the update function to
    see if you need to fetch this at all.

    Arguments:
    &tag={TAG} (optional) Filter by this tag.
    &start={xx} (optional) Start returning posts this many results into the set.
    &results={xx} (optional) Return this many results.
    &fromdt={CCYY-MM-DDThh:mm:ssZ} (optional) Filter for posts on this date or later.
    &todt={CCYY-MM-DDThh:mm:ssZ} (optional) Filter for posts on this date or earlier.
    &meta=yes (optional) Include change detection signatures on each item in a
        'meta' attribute. Clients wishing to maintain a synchronized local
        store of bookmarks should retain the value of this attribute - its
        value will change when any significant field of the bookmark changes.

    With &hashes, returns a change manifest of all posts instead. This is
    intended to provide information on changed bookmarks without the
    necessity of a complete download of all post data. Each post element
    returned offers a url attribute containing a URL MD5, with an associated
    meta attribute containing the current change detection signature for
    that bookmark.
    '''
    tag = self.request.get('tag', default_value=None)
    count = self.request.get('results')
    count = int(count) if count else 1000
    if 'hashes' in self.request.params:
        res = Post.get_all_hashes(tag, count)
    else:
        res = Post.get_all(tag, count)
    context = {
        'tag': tag if tag is not None else ''
    }
    context.update(self.context)
    xml_response = results('posts', res, context)
    self.write_xml(xml_response)
def get(self):
    '''Returns one or more posts on a single day matching the arguments.
    If no date or url is given, the most recent date will be used.

    Arguments:
    &tag={TAG}+{TAG}+...+{TAG} (optional) Filter by this tag.
    &dt={CCYY-MM-DDThh:mm:ssZ} (optional) Filter by this date, defaults to
        the most recent date on which bookmarks were saved.
    &url={URL} (optional) Fetch a bookmark for this URL, regardless of date.
        Note: Be sure to URL-encode the argument value.
    &hashes={MD5}+{MD5}+...+{MD5} (optional) Fetch multiple bookmarks by one
        or more URL MD5s regardless of date, separated by URL-encoded spaces
        (i.e. '+').
    &meta=yes (optional) Include change detection signatures on each item in
        a 'meta' attribute. Clients wishing to maintain a synchronized local
        store of bookmarks should retain the value of this attribute - its
        value will change when any significant field of the bookmark changes.
    '''
    tags = self.request.get('tag')
    url = self.request.get('url')
    hashes = self.request.get('hashes')
    meta = self.request.get('meta')
    # TODO: implement filtering by date: self.request.get('dt')

    # normalize
    hashes = set(hashes.split()) if hashes else set()  # set() so .add() below works
    tags = tags.split() if tags else []
    meta = meta == 'yes'
    if url:
        url = hashlib.md5(url).hexdigest()
        hashes.add(url)

    res = Post.get_by_ids(hashes)
    if tags:
        res.extend(Post.get_by_tags(tags))
    context = {
        'tag': ' '.join(tags),
    }
    context.update(self.context)
    xml_response = results('posts', res, context)
    # TODO: cache for N minutes
    self.write_xml(xml_response)
def main():
    if request.method == "POST":
        building_type = request.form["building_type"]
        print(building_type)
        zip_start = request.form["zip_start"]
        print(zip_start)
        zip_end = request.form["zip_end"]
        print(zip_end)
        unit = request.form["unit"]
        return render_template("results.html", lines=utils.results(),
                               building_type=building_type,
                               zip_start=zip_start, zip_end=zip_end,
                               unit=unit)
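# A hedged sketch of exercising the view above with Flask's test client.
# The `app` import path and the '/' route are assumptions for illustration;
# the form field names match the request.form keys read by the view.
from app import app  # hypothetical module exposing the Flask app

with app.test_client() as client:
    resp = client.post('/', data={
        'building_type': 'residential',
        'zip_start': '10001',
        'zip_end': '10099',
        'unit': 'kWh',
    })
    print(resp.status_code)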
def get(self):
    '''Returns the last update time for the user, as well as the number of
    new items in the user's inbox since it was last visited. Use this before
    calling posts/all to see if the data has changed since the last fetch.
    '''
    update = {
        'code': '200',
        'inboxnew': '0',  # not implemented
        'message': 'success',
        'time': to_delicious_date_format(datetime.now())
    }
    self.write_xml(results('update', [], update))
def get(self):
    '''Returns a list of tags and number of times used by a user.

    <tags>
        <tag count="1" tag="activedesktop" />
        <tag count="1" tag="business" />
        <tag count="3" tag="radio" />
        <tag count="5" tag="xml" />
        <tag count="1" tag="xp" />
        <tag count="1" tag="xpi" />
    </tags>
    '''
    res = Post.counts_by_tag()
    xml_response = results('tags', res, {})
    self.write_xml(xml_response)
def get(self):
    '''Returns a list of dates with the number of posts at each date.

    Arguments:
    &tag={TAG} (optional) Filter by this tag.
    '''
    tag = self.request.get('tag', default_value=None)
    dates = Post.counts_by_date(tag)
    context = {
        'tag': tag if tag is not None else ''
    }
    context.update(self.context)
    xml_response = results('dates', dates, context)
    # TODO: cache for N minutes
    self.write_xml(xml_response)
def get(self):
    '''Returns a list of popular tags, recommended tags and network tags for
    a user. This method is intended to provide suggestions for tagging a
    particular url.

    Arguments:
    &url={URL} (required) URL for which you'd like suggestions.

    Ex:
    <suggest>
        <popular>yahoo!</popular>
        <popular>yahoo</popular>
        <popular>web</popular>
        <popular>tools</popular>
        <popular>searchengines</popular>
        <recommended>yahoo!</recommended>
        <recommended>yahoo</recommended>
        <recommended>web</recommended>
        <recommended>tools</recommended>
        <recommended>search</recommended>
        <recommended>reference</recommended>
        <recommended>portal</recommended>
        <recommended>news</recommended>
        <recommended>music</recommended>
        <recommended>internet</recommended>
        <recommended>home</recommended>
        <recommended>games</recommended>
        <recommended>entertainment</recommended>
        <recommended>email</recommended>
        <network>for:Bernard</network>
        <network>for:britta</network>
        <network>for:deusx</network>
        <network>for:joshua</network>
        <network>for:stlhood</network>
        <network>for:theteam</network>
    </suggest>
    '''
    # Not implemented: returns an empty <suggest /> element.
    self.write_xml(results('suggest', [], {}))
def runScipyCon():
    global fevals
    time, load = get_data(config['month'], config['year'])
    fevals = 0

    def obj(gen):
        global fevals
        fevals += 1
        return model_obj_only(gen)

    def tempCon(gen):
        return np.array(get_T(gen, time, load, config)) / 100

    def rampCon(X):
        '''Max ramp up or down does not exceed config['max_ramp_rate'] MW/hr.'''
        dEdt = []
        for i in range(len(X) - 1):
            slope = abs(X[i + 1] - X[i])
            dEdt.append(slope)
        return np.array(dEdt) / 1000

    bounds = [(1e3, 1e5) for i in range(len(time))]
    Temp_Con = NonlinearConstraint(tempCon, lb=config['tes_min_t'] / 100,
                                   ub=config['tes_max_t'] / 100)
    Ramp_Con = NonlinearConstraint(rampCon, lb=0,
                                   ub=config['max_ramp_rate'] / 1000)

    # polish=False: skip the final L-BFGS-B polish of the best population member
    opt = differential_evolution(obj, bounds=bounds,
                                 constraints=(Temp_Con, Ramp_Con),
                                 polish=False, disp=True)
    print(opt)
    fstar = opt.fun
    xstar = opt.x
    nfev = opt.nfev
    print(results(xstar, config))
    print("fevals:", fevals)
    gen_report([xstar, nfev], "Scipy GA", "Constrained", config,
               notes="lb 1e3 ub 8e4, Scaled, " + opt.message, gen_plot=True)
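# Why the /100 and /1000 scaling above is safe: dividing a constraint
# function and its bounds by the same positive constant leaves the feasible
# set unchanged while keeping values O(1), which tends to help the solver's
# constraint handling. A minimal self-contained check with made-up numbers:
import numpy as np

T = np.array([300.0, 450.0, 600.0])
lb, ub = 250.0, 650.0
unscaled = (lb <= T) & (T <= ub)
scaled = (lb / 100 <= T / 100) & (T / 100 <= ub / 100)
assert (unscaled == scaled).all()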
def get(self):
    '''Add a post to Delicious.

    Arguments:
    &url={URL} (required) the url of the item.
    &description={...} (required) the description of the item.
    &extended={...} (optional) notes for the item.
    &tags={...} (optional) tags for the item (space delimited).
    &dt={CCYY-MM-DDThh:mm:ssZ} (optional) datestamp of the item (format
        "CCYY-MM-DDThh:mm:ssZ"). Requires a LITERAL "T" and "Z" like in
        ISO8601 at http://www.cl.cam.ac.uk/~mgk25/iso-time.html
        Example: "1984-09-01T14:21:31Z"
    &replace=no (optional) don't replace post if given url has already been posted.
    &shared=no (optional) make the item private.
    '''
    url = self.request.get('url', default_value=None)
    description = self.request.get('description', default_value=None)
    if url is None or description is None:
        # Not returning an error code for compatibility with the delicious API
        # self.error(500)
        self.write_xml(results('result', [], {'code': 'something went wrong'}))
        return

    extended = self.request.get('extended')
    tags = self.request.get('tags')
    dt = self.request.get('dt')
    replace = self.request.get('replace')
    shared = self.request.get('shared')

    # cleaning
    dt = datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ') if dt else datetime.now()
    tags = tags.split() if tags else []
    replace = replace == 'yes'  # replace is true iff replace == 'yes'
    shared = shared != 'no'  # items are shared unless explicitly &shared=no

    post = Post(
        id=hashlib.md5(url).hexdigest(),
        href=url,
        description=description,
        extended=extended,
        tags=tags,
        time=dt,
        private=not shared)  # &shared=no marks the item private
    post.put()
    self.write_xml(results('result', [], {'code': 'done'}))
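# A hedged client sketch for adding a bookmark through this handler. The
# '/v1/posts/add' route is an assumption; url and description are the two
# required arguments, and tags are space-delimited.
import requests

resp = requests.get('http://localhost:8080/v1/posts/add', params={
    'url': 'http://example.com/',
    'description': 'Example bookmark',
    'tags': 'example test',
    'shared': 'no',  # mark the item private
})
print(resp.text)  # expect <result code="done" />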
def runCustom(animate=False):
    time, load = get_data(config['month'], config['year'])
    # config['max_ramp_rate'] = 3000

    def my_con_max_temp(gen):
        inequalities = model_con_max_T(gen, time, load, config)
        for i, con in enumerate(inequalities):
            if con >= 0:
                inequalities[i] = 0
        return np.sum(inequalities)

    def my_con_min_temp(gen):
        inequalities = model_con_min_T(gen, time, load, config)
        for i, con in enumerate(inequalities):
            if con >= 0:
                inequalities[i] = 0
        return np.sum(inequalities)

    def my_con_max_ramp(gen):
        inequalities = model_con_max_ramp(gen, config)
        for i, con in enumerate(inequalities):
            if con >= 0:
                inequalities[i] = 0
        return np.sum(inequalities)

    # Alternative formulation, kept for reference:
    # def my_con_max_ramp(X):
    #     '''For custom GA.
    #     Max ramp up or down does not exceed 2000 MW/hr'''
    #     dEdt = []
    #     max_ramp = 2000
    #     for i in range(len(X) - 1):
    #         slope = abs(X[i + 1] - X[i])
    #         if slope > max_ramp:
    #             dEdt.append(slope)
    #         else:
    #             dEdt.append(0)
    #     return np.sum(dEdt)

    populations = []

    def callback(gen):
        populations.append(gen)

    guess = np.ones(len(time)) * config['guess_coef']
    bounds = [(1e3, 8e4) for i in range(len(time))]
    constraints = ({'fun': my_con_max_temp, 'type': 'ineq', 'scale': 100},
                   {'fun': my_con_min_temp, 'type': 'ineq', 'scale': 100},
                   {'fun': my_con_max_ramp, 'type': 'ineq', 'scale': 1000})
    ga = GA(model_obj_only, bounds=bounds, maxiter=100, mscale=100, tol=1e-3,
            constraints=constraints, pmutate=0.5, callback=callback)
    sol = ga.optimize(verbose=True)
    xstar = sol[0]
    nfev = ga.fevals
    print(sol)
    print(results(xstar, config))
    gen_report([xstar, nfev], "Custom GA", "Constrained", config,
               notes="lb 1e3 ub 8e4", gen_plot=True, guess=guess)
    # save_iters(populations, "GA_iters3.csv")

    if animate:
        def update(i):
            fig.clear()
            plt.xlabel('Time')
            plt.ylabel('Generation')
            plt.plot(time, load)
            plt.plot(time, populations[i])

        fig = plt.figure()
        plt.xlabel('Time')
        plt.ylabel('Generation')
        anim = animation.FuncAnimation(fig, update, frames=len(populations),
                                       interval=500)
        plt.show()
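# The clipping loops above implement the 'ineq' convention assumed by the
# custom GA: each elementwise constraint is satisfied when its value is
# >= 0, so satisfied entries are zeroed and the sum is 0 when feasible and
# negative otherwise. A tiny illustration with made-up values:
import numpy as np

vals = np.array([0.5, -0.2, 1.0, -0.1])  # two violations
clipped = np.where(vals >= 0, 0.0, vals)
print(np.sum(clipped))  # approximately -0.3; 0.0 would mean fully feasible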
import numpy as np

from utils import gen_report, results
from ScipyBaseModel import config

xstar = guess = np.ones(24) * config['capacity'] * 0.95
fevals = 1200
out = [xstar, fevals]
print(results(xstar, config))
gen_report(out, optimizer="Test", opt_type="Penalty", config=config,
           notes="testing")