def load_logged_in_user():
    """If a user id is stored in the session, load the matching user row
    from the database into ``g.user``; otherwise set ``g.user`` to None.
    """
    user_id = session.get('user_id')
    if user_id is None:
        g.user = None
    else:
        # BUG FIX: use a single cursor object — the original called
        # get_cursor() twice, executing on one cursor and fetching from
        # another, which returns nothing if get_cursor() creates fresh
        # cursors.
        cursor = get_cursor()
        cursor.execute('SELECT * FROM user_account WHERE id = %s', (user_id, ))
        g.user = cursor.fetchone()
def process(model, records):
    """Download, decode and classify the image of every record, then bulk
    update the predictions via ``util.UPDATE_MEME_QUERY``.

    :param model: unused here; kept for interface compatibility
    :param records: DB rows where record[0] is the meme id and record[5]
        the image URL  # assumed from usage — TODO confirm schema
    """
    image_ref = {}
    images = []
    i = 0
    for record in records:
        meme_id = record[0]
        try:
            image_url = record[5]
            resp = urllib.request.urlopen(image_url)
            img = np.asarray(bytearray(resp.read()), dtype="uint8")
            img = cv2.imdecode(img, cv2.IMREAD_COLOR)
            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
            img = cv2.resize(img, dsize=(300, 300))
            images.append(img)
            image_ref[meme_id] = i
            i += 1
        except Exception:
            # BUG FIX: logging.exception takes a %-style format string,
            # not extra positional message fragments.
            logging.exception("Failed to process %s", meme_id)
    images = np.array(images)
    # BUG FIX: the original computed ``images / 255`` and discarded the
    # result; the scaled array must be assigned to take effect.
    images = images / 255
    predict = classifier.model.predict(images)
    intermediate_score = intermediate_layer_model.predict(images)
    data = []
    # BUG FIX: iterating a dict yields keys only; .items() is needed to
    # unpack (meme_id, image-index) pairs.
    for ref_meme_id, v in image_ref.items():
        index = np.argmax(predict[v])
        percentage = np.max(predict[v])
        pre_softmax_score = intermediate_score[v][index]
        predicted_percentage = str(percentage)  # PREDICTED_PERCENTAGE
        predicted_class = template[template['ID'] == index + 1]['TITLE'].to_string(
            index=False)  # PREDICTED_CLASS
        class_mean_score = average_scores[index]
        # Accept the prediction only when softmax is confident AND the
        # pre-softmax score is within +/-5% of the class mean.
        # BUG FIX: compare pre_softmax_score numerically BEFORE converting
        # it to str (the original stringified first, so the comparison was
        # str-vs-float).
        if percentage > 0.98 and (
                pre_softmax_score > average_scores[index] * 0.95) and (
                pre_softmax_score < average_scores[index] * 1.05):
            prediction = template[template['ID'] == index + 1]['TITLE'].to_string(index=False)
        else:
            prediction = 'Unknown'
        pre_softmax_score = str(pre_softmax_score)
        # BUG FIX: use this iteration's meme id, not the stale ``meme_id``
        # left over from the download loop above.
        data.append(
            (predicted_percentage, predicted_class, pre_softmax_score,
             class_mean_score, prediction, ref_meme_id))
    database.get_cursor().executemany(util.UPDATE_MEME_QUERY, data)
def login():
    """Log a user in.

    Validates the form, checks the stored password hash, records the login
    time and stores the user id in a fresh session; on failure flashes an
    error and re-renders the login form.
    """
    form = LoginForm()
    if form.validate_on_submit():
        email = form.email.data
        password = form.password.data
        connection = get_connection()
        cursor = get_cursor()
        error = None
        cursor.execute('SELECT * FROM user_account WHERE email = %s', (email, ))
        user = cursor.fetchone()
        # Same message for unknown email and wrong password.
        if user is None or not check_password_hash(user['password'], password):
            error = 'Incorrect email/password combination.'
        else:
            # store the user id in a new session and return to the index
            cursor.execute("SELECT update_last_login(%s);", (user['id'], ))
            connection.commit()
            session.clear()
            session['user_id'] = user['id']
            flash("Logged in!", 'success')
            return redirect(url_for('index'))
        flash(error, 'error')
    return render_template('auth/login.html', form=form)
def get_default_address(self, type_string):
    """Return the default address.Address of the given type for this
    person, or None when no default is flagged.

    :param type_string: address type name (e.g. "shipping")
    :raises ValueError: if type_string is not a known address type
    """
    db = database.connect()
    cursor = database.get_cursor(db)
    try:
        cursor.execute("select id from address_types where address_type = :input_str",
                       input_str=str(type_string))
        type_id = cursor.fetchone()
        if type_id:
            type_id = type_id[0]
        else:
            raise ValueError("Type string given (%s) not valid type" % str(type_string))
        cursor.execute("select id from addresses "
                       "where default_flag = 1 and address_type_id = :input_type "
                       "and person_id = :input_pid",
                       input_type=type_id, input_pid=self.get_id())
        address_id = cursor.fetchone()
    finally:
        # BUG FIX: the original leaked the connection when the ValueError
        # above fired; always disconnect.
        database.disconnect(db)
    if address_id:
        address_reference = address.Address(address_id[0])
    else:
        address_reference = None
    return address_reference
def run(self):
    """Main daemon loop.

    Every 10 minutes: spawn a TweetThread for each active query URL,
    stop threads whose URL is no longer active, and exit once no threads
    remain.
    """
    self.log("Running daemon ...")
    self.store.start()
    while True:
        query = "SELECT id, url, is_archived FROM query_urls WHERE is_active = 1"
        conn = database.get_connection()
        cursor = database.get_cursor(conn)
        cursor.execute(query)
        active_ids = []
        # this loop spawns new threads after reading from the database
        while True:
            row = cursor.fetchone()
            if not row:
                cursor.close()
                break
            self.log("Found url %s at %s" % (row['id'], row['url']))
            active_ids.append(row['id'])
            if row['id'] not in self.tweet_threads:
                self.log("Spawning thread for query_url_id %s " % (row['id'],))
                self.tweet_threads[row['id']] = TweetThread(row['id'], row['url'], self.store)
                self.tweet_threads[row['id']].start()
        conn.close()
        # BUG FIX: iterate over a snapshot of the keys — the original
        # deleted entries while iterating iterkeys() (RuntimeError) and
        # logged row['id'] (always None at this point) instead of the
        # stopped thread's id.
        for thread_id in list(self.tweet_threads.keys()):
            if thread_id not in active_ids:
                self.log("Stopped thread %s" % (thread_id,))
                self.tweet_threads[thread_id].stop()
                del self.tweet_threads[thread_id]
        if len(self.tweet_threads) == 0:
            break
        time.sleep(600)
def set_default_flag(self, state):
    """Mark or unmark this address as the default of its own type.

    When setting True for an address attached to a person, every other
    default of the same type for that person is cleared first, so at most
    one default per type remains.

    :raises ValueError: if state is neither True nor False
    """
    if state != True and state != False:
        raise ValueError("Requires True or False")
    flag = 1 if state == True else 0
    db = database.connect()
    cursor = database.get_cursor(db)
    # Look up the id of this address' own type.
    cursor.execute("select id from address_types where address_type = :input_type",
                   input_type=self.get_type())
    type_id = cursor.fetchone()[0]
    if state == True and self.get_person():
        # Attached to a person: clear any existing default of this type
        # before setting ours.
        cursor.execute("update addresses set default_flag = 0 "
                       "where person_id = :input_id and default_flag = 1 "
                       "and address_type_id = :input_tid",
                       input_id=self.get_person().get_id(), input_tid=type_id)
        database.commit(db)
    cursor.execute("update addresses set default_flag = :input_flag "
                   "where id = :input_id",
                   input_flag=flag, input_id=self.get_id())
    database.commit(db)
    database.disconnect(db)
def create_campaign():
    """Render the campaign-creation form; on valid submission insert the
    campaign plus an 'owner' relation for the current user, then redirect
    to the new campaign's page. On any DB error, log and re-render the form.
    """
    form = CampaignCreationForm()
    connection = get_connection()
    cursor = get_cursor()
    if form.validate_on_submit():
        try:
            with connection:
                with cursor:
                    cursor.execute(
                        """INSERT INTO campaign(name, description, image, amount_requested)
                        VALUES (%s, %s, %s, %s) RETURNING id;""",
                        (form.name.data, form.description.data, form.image.data,
                         form.amount_requested.data))
                    # Renamed from ``id`` to avoid shadowing the builtin.
                    campaign_id = cursor.fetchone()[0]
                    cursor.execute(
                        """INSERT INTO campaign_relation(user_account_id, campaign_id, user_role)
                        VALUES (%s, %s, %s); """,
                        (session['user_id'], campaign_id, 'owner'))
                    return redirect(url_for("campaign.view_campaign", id=campaign_id))
        except Exception as e:
            # Best-effort: log the failure and fall through to re-render.
            current_app.logger.error(e)
    return render_template("campaign/create.html", form=form)
def pre_orders(options):
    """Run all pre-order processing for the current turn.

    Team stats are rebuilt and attached first, then per-team pre-orders
    and the system-wide history/operative updates run inside a single
    transaction that is rolled back (and the error re-raised) on any
    failure.
    """
    start_time = time.time()  # captured but unused below — presumably for timing
    cursor = database.get_cursor()
    the_world = spy_world.Spy_world(cursor)
    team_dict = the_world.active_teams()
    # Stats first, it's used for some favour stuff
    #------------------------
    for team_id, the_team in team_dict.items():
        stat_f.build_team_stats(the_world.cursor, the_team, the_world)
    # Now to assign the stats
    team_q.mass_get_team_stats(the_world.cursor, team_dict, common.current_turn())
    # Pre orders - Teams
    #------------------------
    cursor.execute("BEGIN")
    print(database.shell_text("Team pre-orders"), end="")
    for team_id, the_team in team_dict.items():
        try:
            team_f.pre_orders(the_world, the_team)
        except Exception as e:
            # Roll back the whole transaction and name the failing team
            # before re-raising.
            cursor.execute("ROLLBACK")
            print(database.shell_text(" - [r]Failure[/r]"))
            print("Failure running pre-orders for %s" % the_team.name)
            print(database.shell_text("[r]Re run as 'rob3 start -l True[/r]'"))
            raise
    print(database.shell_text(" - [g]Done[/g]"))
    # Pre orders - System
    #------------------------
    print(database.shell_text("System pre-orders"), end="")
    try:
        # Army history
        army_f.location_history(the_world)
        # Player history
        player_f.turn_history(the_world)
        # Power history
        power_f.turn_history(the_world)
        # Artefact history
        artefact_f.turn_history(the_world)
        # Operatives catching
        operative_f.catch_operatives(the_world)
        # Border history
        team_f.border_history(the_world)
    except Exception as e:
        cursor.execute("ROLLBACK")
        print(database.shell_text(" - [r]Failure[/r]"))
        print(database.shell_text("[r]Re run as 'rob3 start -l True[/r]'"))
        raise
    cursor.execute("COMMIT")
    print(database.shell_text("\nNow run [g]rob orders[/g]"))
def modify_address(self, new_address_reference):
    """Point this credit card at a new billing address.

    :param new_address_reference: address.Address already stored in the
        addresses table and of type "billing"
    :raises ValueError: if the address is unknown or not a billing address
    """
    db = database.connect()
    cursor = database.get_cursor(db)
    try:
        # Confirm the address row exists.
        cursor.execute("select id from addresses "
                       "where id = :input_id",
                       input_id=new_address_reference.get_id())
        address_id = cursor.fetchone()
        if not address_id:
            raise ValueError("Address not found in addresses table")
        address_id = address_id[0]
        address_reference = address.Address(address_id)
        # Confirm it really is a billing address.
        if not address_reference.get_type() == "billing":
            raise ValueError("Address must be billing type. Type is %s." % address_reference.get_type())
        cursor.execute("update credit_cards set billing_addr_id = :input_addr "
                       "where id = :input_id",
                       input_addr=address_id, input_id=self.get_id())
        database.commit(db)
    finally:
        # BUG FIX: the original leaked the connection when either
        # ValueError above fired; always disconnect.
        database.disconnect(db)
def output_json_map(options, the_world=None, skip_upload=False):
    """Build the JSON world map, write it to the 'latest' and per-turn
    map files, and (unless skip_upload) push both to the FTP server."""
    from json_lib import mapper_j
    cursor = the_world.cursor if the_world is not None else database.get_cursor()
    files = {}
    # create map
    #------------------------
    map_maker = mapper_j.JSON_map_maker()
    map_source = map_maker.map_grid(cursor)
    latest_path = '%s/map/latest.json' % common.data['woa_folder']
    turn_path = '%s/map/turn_%d_normal.json' % (common.data['woa_folder'], common.current_turn())
    # Same content goes to both destinations.
    for target in (latest_path, turn_path):
        with open(target, 'w') as f:
            f.write(map_source)
            f.write(padding)
    files['latest.json'] = latest_path
    files['turn_%d_normal.json' % common.current_turn()] = turn_path
    if not skip_upload:
        upload("ftp.woarl.com", "*****@*****.**", ftp_pass['map'], files, options.delay, options.verbose)
        print(database.shell_text('[g]Json map uploaded[/g]'))
def get_bill_text_json(bill_id):
    """
    Returns the JSON data for the full text of a single specific bill.

    Related React.js component: <BillText />

    :param bill_id: bill_id that corresponds to the single bill requested
    :return: a jsonify'd dictionary with all the desired bill information
    """
    cursor = get_cursor()
    # Single-row lookup of the English bill text by primary key.
    sql = ("SELECT text_en "
           "FROM bills_billtext "
           "WHERE id = (%s)")
    cursor.execute(sql, (bill_id, ))
    row = cursor.fetchone()
    # jsonify adds the JSON response headers for us.
    return jsonify(results=row)
def output_json_data(options, the_world=None, skip_upload=False):
    """Write oh_data.js (two copies) plus every data_*.json list, and
    upload them unless skip_upload is set."""
    from json_lib import data_j, oh_j
    cursor = the_world.cursor if the_world else database.get_cursor()
    # OH data goes out first: one copy to the cache, one to the Rob3
    # localhost folder for Rob3 CGI to see.
    output = oh_j.get_data(cursor)
    for folder_key in ('cache_path', 'rob_fpath'):
        with open('%s/oh_data.js' % (common.data[folder_key]), 'w') as f:
            f.write(output)
            f.write(padding)
    files = {'oh_data.js': '%s/oh_data.js' % (common.data['cache_path'])}
    # Then each registered data list handler writes its own JSON file.
    for d, func in data_j.handle_dict.items():
        output = func(cursor)
        target = '%s/data_%s.json' % (common.data['cache_path'], d)
        with open(target, 'w') as f:
            f.write(output)
            f.write(padding)
        files['%s.json' % (d)] = target
    if not skip_upload:
        upload("ftp.woarl.com", "*****@*****.**", ftp_pass['data'], files, options.delay, options.verbose)
        print(database.shell_text('[g]Data lists uploaded[/g]'))
def output_tmap(options, the_world=None, skip_upload=False):
    """Render each active team's map to tmap/<hash>.html and upload the
    lot unless skip_upload is set."""
    from pages.map import team_map
    if not the_world:
        cursor = database.get_cursor()
        the_world = world.World(cursor)
    else:
        cursor = the_world.cursor
    team_dict = team_q.get_real_active_teams(cursor)
    files = {}
    for team_id in progressbar(team_dict.keys(), "Creating TMaps: ", 60, True):
        the_team = the_world.teams()[team_id]
        # File name is a hash of the team name, not the name itself.
        md5_name = team_f.team_hash(the_team.name)
        html_source = team_map._draw_map(cursor, team_id, build=1)
        path = '%s/tmap/%s.html' % (common.data['woa_folder'], md5_name)
        # ``with`` guarantees the handle is closed even if a write fails
        # (the original left the file open on error).
        with open(path, 'w') as f:
            f.write(html_source)
            f.write(padding)
        files['%s.html' % (md5_name)] = path
    if not skip_upload:
        upload("ftp.woarl.com", "*****@*****.**", ftp_pass['tmap'], files, options.delay, options.verbose)
        print(database.shell_text('[g]Team maps uploaded[/g]'))
def output_wh(options, the_world=None, skip_upload=False):
    """Build the war-helper page for every active team and upload them
    unless skip_upload is set."""
    from classes import wh
    if not the_world:
        cursor = database.get_cursor()
        the_world = world.World(cursor)
    else:
        cursor = the_world.cursor
    team_dict = team_q.get_real_active_teams(cursor)
    the_wh = wh.Wh(the_world.cursor, the_world)
    the_wh.setup(true_team_list=team_dict.keys())
    files = {}
    for team_id in progressbar(team_dict.keys(), "Creating WHs: ", 60, True):
        try:
            the_team = the_world.teams()[team_id]
            md5_name = team_f.team_hash(the_team.name)
            output = the_wh.make_wh(team_id)
            path = '%s/wh/%s.html' % (common.data['woa_folder'], md5_name)
            # ``with`` guarantees the handle is closed even if a write
            # fails (the original left the file open on error).
            with open(path, 'w') as f:
                f.write(output)
                f.write(padding)
            files['%s.html' % (md5_name)] = path
        except Exception as e:
            # Name the offending team, then let the error propagate.
            print("Team name: %s" % the_team.name)
            raise
    if not skip_upload:
        upload("ftp.woarl.com", "*****@*****.**", ftp_pass['wh'], files, options.delay, options.verbose)
        print(database.shell_text('[g]War helpers uploaded[/g]'))
def modify_expiration_date(self, new_expiration_month, new_expiration_year):
    """Validate and store a new expiration date for this credit card.

    :param new_expiration_month: integer 1-12
    :param new_expiration_year: 4-digit integer year
    :raises ValueError: on any type or range violation
    """
    # Validate everything before touching the database.
    if not isinstance(new_expiration_month, int):
        raise ValueError("Expiration month must be integer value")
    if not isinstance(new_expiration_year, int):
        raise ValueError("Expiration year must be integer value")
    if new_expiration_month < 1 or new_expiration_month > 12:
        raise ValueError("Expiration month must be between 1 and 12")
    if len(str(new_expiration_year)) != 4:
        raise ValueError("Expiration year must be 4 digit integer")
    # Build the canonical date value for storage.
    expiration_date = format_date(new_expiration_month, new_expiration_year)
    db = database.connect()
    cursor = database.get_cursor(db)
    cursor.execute("update credit_cards set expiration_date = :input_date "
                   "where id = :input_id",
                   input_date=expiration_date, input_id=self.get_id())
    database.commit(db)
    database.disconnect(db)
def get_lore(options):
    """CLI entry point: print a lore page in the requested format.

    Usage: rob lore <cat> <page> [html|bbcode|plain] [level]
    Unknown formats print nothing (unchanged from original behavior).
    """
    from data_classes import lore_entry
    from lore import pages
    # BUG FIX: both <cat> (argv[2]) and <page> (argv[3]) are mandatory, so
    # anything under 4 argv entries is a usage error — the old "< 3" check
    # let argv[3] raise an IndexError instead of showing usage.
    if len(sys.argv) < 4:
        print("Usage: $rob lore <cat> <page> <type:html/bbcode/plain>")
        exit()
    cursor = database.get_cursor()
    category = sys.argv[2]
    page = sys.argv[3]
    formatting = sys.argv[4] if len(sys.argv) > 4 else "plaintext"
    level = sys.argv[5].lower() if len(sys.argv) > 5 else "public"
    if formatting == "html":
        print(pages.get_html(cursor, category, page, level))
    elif formatting == "bbcode":
        print(pages.get_bbcode(cursor, category, page, level))
    elif formatting == "plain" or formatting == "plaintext":
        print(pages.get_plaintext(cursor, category, page, level))
def remove_product(self, product):
    """Decrement this warehouse's stock of ``product`` by one unit.

    Deletes the stock row when the quantity reaches zero; does nothing
    when the product is not stocked here at all.
    """
    db = database.connect()
    cursor = database.get_cursor(db)
    product_id = product.get_id()
    cursor.execute("select quantity from warehouse_to_product "
                   "where product_id = :input_pid and warehouse_id = :input_wid",
                   input_pid=product_id, input_wid=self.get_id())
    current_quantity = cursor.fetchone()
    if current_quantity:
        # Product is stocked: compute the decremented quantity.
        remaining = int(current_quantity[0]) - 1
        if remaining > 0:
            # Still at least one left — just lower the stored quantity.
            cursor.execute("update warehouse_to_product set quantity = :input_quantity "
                           "where product_id = :input_pid and warehouse_id = :input_wid",
                           input_quantity=remaining, input_pid=product_id,
                           input_wid=self.get_id())
            database.commit(db)
        else:
            # Quantity hit zero — drop the row entirely.
            cursor.execute("delete from warehouse_to_product "
                           "where product_id = :input_pid and warehouse_id = :input_wid",
                           input_pid=product_id, input_wid=self.get_id())
            database.commit(db)
    database.disconnect(db)
def new_order(customer):
    """Create a new 'pending' order for ``customer`` and return the Order.

    Seeds the shipping address from the customer's default shipping
    address when one exists.

    :raises ValueError: if customer is not a person.Person instance
    """
    db = database.connect()
    cursor = database.get_cursor(db)
    try:
        # Oracle out-bind variable that receives the generated order id.
        returned_id = cursor.var(database.cx_Oracle.NUMBER)
        cursor.execute("select id from order_statuses where order_status = :input_status",
                       input_status="pending")
        status_id = cursor.fetchone()[0]
        if not isinstance(customer, person.Person):
            raise ValueError("Requires valid person instance")
        cursor.execute("insert into orders (person_id, status_id) "
                       "values (:input_pid, :input_sid) "
                       "returning id into :output_id",
                       input_pid=customer.get_id(), input_sid=status_id,
                       output_id=returned_id)
        database.commit(db)
        returned_id = int(returned_id.getvalue())
    finally:
        # BUG FIX: the original leaked the connection when the ValueError
        # above fired; always disconnect.
        database.disconnect(db)
    this_order = Order(returned_id)
    ship_addr = customer.get_default_address("shipping")
    if ship_addr:
        this_order.modify_shipping_address(ship_addr)
    return this_order
def set_host_profile(host, new_profile):
    """Record ``new_profile`` as the current profile of ``host``.

    Creates the pp_host row on first sight; a no-op when the most recent
    stored profile already equals ``new_profile``.
    """
    with get_cursor() as cur:
        # Plain strings: the original f-prefixes did nothing (no
        # interpolation) and invite accidental SQL injection later.
        cur.execute(
            "INSERT INTO pp_host (name) VALUES (%s) ON CONFLICT DO NOTHING;",
            (host, ))
        cur.execute(
            """
            SELECT pp_profile_link.profile FROM pp_profile_link
            JOIN pp_host ON pp_profile_link.host = pp_host.id
            WHERE pp_host.name = %s
            ORDER BY pp_profile_link.id DESC LIMIT 1;
            """, (host, ))
        result = cur.fetchone()
        if result:
            old_profile = result[0]
            if new_profile == old_profile:
                return
        cur.execute(
            """
            INSERT INTO pp_profile_link (host, profile, date)
            SELECT pp_host.id, %s, now() FROM pp_host WHERE pp_host.name = %s;
            """, (new_profile, host))
        # NOTE(review): ``conn`` is not defined in this function —
        # presumably a module-level connection object; verify, otherwise
        # this line raises NameError.
        conn.commit()
def modify_quantity(self, product, new_quantity):
    """Set this warehouse's stocked quantity of ``product``.

    A quantity of 0 removes the stock row; products not currently stocked
    here are silently ignored (unchanged from original behavior).

    :raises ValueError: if new_quantity is not a non-negative integer
    """
    # BUG FIX: validate BEFORE opening the connection — the original
    # raised after connecting and never reached the disconnect, leaking
    # the connection.
    if not (isinstance(new_quantity, int) and new_quantity >= 0):
        raise ValueError("new quantity must be positive integer value")
    db = database.connect()
    cursor = database.get_cursor(db)
    product_id = product.get_id()
    cursor.execute("select quantity from warehouse_to_product "
                   "where product_id = :input_pid and warehouse_id = :input_wid",
                   input_pid=product_id, input_wid=self.get_id())
    current_quantity = cursor.fetchone()
    if current_quantity:  # product must already be stocked here
        current_quantity = int(current_quantity[0])
        if current_quantity == new_quantity:
            pass  # nothing to change
        elif new_quantity == 0:
            # Zero quantity means the stock row is removed entirely.
            cursor.execute("delete from warehouse_to_product "
                           "where product_id = :input_pid and warehouse_id = :input_wid",
                           input_pid=product_id, input_wid=self.get_id())
            database.commit(db)
        else:
            cursor.execute("update warehouse_to_product set quantity = :input_quantity "
                           "where product_id = :input_pid and warehouse_id = :input_wid",
                           input_quantity=new_quantity, input_pid=product_id,
                           input_wid=self.get_id())
            database.commit(db)
    database.disconnect(db)
def ops_catching():
    """Dry-run operative catching: run catch_operatives inside a
    transaction that is always rolled back, then exit the process."""
    from functions import operative_f
    the_world = spy_world.Spy_world(database.get_cursor())
    cursor = the_world.cursor
    cursor.execute("BEGIN")
    operative_f.catch_operatives(the_world, verbose=True)
    # Always roll back — this is a preview, nothing is persisted.
    cursor.execute("ROLLBACK")
    exit("Exit()")
def output_to(options, the_world=None, skip_upload=False):
    """Assemble the Team Overview page (header + js + body + footer),
    write it to to/index.html and upload it unless skip_upload is set."""
    from functions import to_f
    files = {}
    if not the_world:
        cursor = database.get_cursor()
        the_world = world.World(cursor)
    else:
        cursor = the_world.cursor
    the_world.prep_for_to()
    headers = to_f.headers(the_world)
    footers = to_f.footers(the_world)
    js = to_f.javascript(the_world)
    output = to_f.make_to(the_world)
    to_source = "".join([headers, js, output, footers])
    path = '%s/to/index.html' % common.data['woa_folder']
    # ``with`` guarantees the handle is closed even if a write fails
    # (the original left the file open on error).
    with open(path, 'w') as f:
        f.write(to_source)
        f.write(padding)
    files['index.html'] = path
    if not skip_upload:
        upload("ftp.woarl.com", "*****@*****.**", ftp_pass['to'], files, options.delay, options.verbose)
        print(database.shell_text('[g]Team Overview uploaded[/g]'))
def cache_all_sites():
    """Resolve the human-readable name of every notice source site into
    the global ``siteMap`` and persist the map to ``site_file`` as JSON.

    Unknown sites are resolved by ``get_site_name`` on worker threads,
    joined in batches of five.
    """
    c = get_cursor()
    c.execute('select distinct source as site from notice')
    sites = c.fetchall()
    file = site_file
    load_site_map()
    global siteMap
    threads = []
    n = 0  # counts joined thread batches (diagnostic only)
    while sites:
        site = sites.pop()['site']
        if site not in siteMap:
            # Unknown site: resolve its display name on a worker thread.
            t = Thread(target=get_site_name, args=(site, ))
            threads.append(t)
            t.start()
        else:
            siteMap[site] = siteMap[site].strip()
        # Join in batches of five, and drain the final partial batch.
        if len(threads) == 5 or not sites:
            for t in threads:
                t.join(5)
            n += 1
            threads = []
    # BUG FIX: the original dumped the JSON file twice back to back;
    # writing it once has the same final result.
    with open(file, 'w', encoding='utf8') as f:
        json.dump(siteMap, f, ensure_ascii=False)
def new_product(name, type_string, description=None, nutrition_facts=None,
                alcohol_content=None, size=1):
    """Insert a new product row and return a Product instance for it.

    :param type_string: product type name looked up in product_types
    :param description, nutrition_facts, alcohol_content: applied via the
        modify_* helpers after creation (they generate defaults for None)
    :param size: product_size column value
    :raises ValueError: if type_string is not a known product type
    """
    db = database.connect()
    cursor = database.get_cursor(db)
    try:
        # Map type_string to its numeric id, validating it exists.
        cursor.execute(
            "select id from product_types where product_type = :input_type",
            input_type=type_string)
        # BUG FIX: fetchone() returns None for an unknown type; the
        # original subscripted it ([0]) before checking, which raised an
        # unhelpful TypeError instead of a validation error.
        type_row = cursor.fetchone()
        if type_row is None:
            raise ValueError("Type given (%s) not valid product type" % str(type_string))
        type_id = type_row[0]
        # Insert name/type/size only and capture the generated id via an
        # Oracle out-bind variable.
        returned_id = cursor.var(database.cx_Oracle.NUMBER)
        cursor.execute("insert into products (name, product_type_id, product_size) values "
                       "(:product_name, :ptype_id, :psize) returning id into :new_product_id",
                       product_name=name, ptype_id=type_id,
                       new_product_id=returned_id, psize=size)
        database.commit(db)
        returned_id = int(returned_id.getvalue())
    finally:
        # BUG FIX: the original never disconnected this connection.
        database.disconnect(db)
    new_product = Product(returned_id)
    # Description, nutrition facts and alcohol content get defaults
    # generated by the modify_* helpers when the arguments are None.
    new_product.modify_description(description)
    new_product.modify_nutrition_facts(nutrition_facts)
    new_product.modify_alcohol_content(alcohol_content)
    return new_product
def add_workout_route():
    """Add a workout row from the JSON request body.

    Aborts with 425 when the referenced lift does not exist.
    """
    cur = db.get_cursor()
    data = request.get_json()
    user_id = data['user_id']
    date = data['date']
    short_name = data['short_name']
    weight = data['weight']
    reps = data['reps']
    sets = data['sets']
    notes = data['notes']
    # BUG FIX: parenthesized print — the bare "print data" statement is a
    # syntax error under Python 3; this form works under both 2 and 3.
    print(data)
    print(user_id)
    # Make sure the lift exists in the database... really can't happen
    cur.execute("SELECT COUNT(*) AS count FROM lifts WHERE short_name = %s", [short_name])
    if cur.fetchone()['count'] == 0:
        abort(425)
    cur.execute("""
        INSERT INTO workouts (user_id, workout_date, short_name, weight, reps, sets, notes)
        VALUES (%s, %s, %s, %s, %s, %s, %s);
        """, [user_id, date, short_name, weight, reps, sets, notes])
    return "Added the workout!"
def remove(self):
    """Delete this product's row from the products table."""
    connection = database.connect()
    cur = database.get_cursor(connection)
    cur.execute("delete from products where id = :input_id", input_id=self.get_id())
    database.commit(connection)
    database.disconnect(connection)
def get_sessions_json():
    """
    Returns all sessions of parliament with their name and id in JSON.

    Related React.js component: <BillSearch />

    :return: a jsonify'd dictionary with all the desired profile information
    """
    cursor = get_cursor()
    # Newest sessions first.
    sql = ("SELECT id, name "
           "FROM core_session "
           "ORDER BY id DESC ")
    cursor.execute(sql)
    session_rows = cursor.fetchall()
    # jsonify adds the JSON response headers for us.
    return jsonify(results=session_rows)
def download(self):
    """Fetch self.url, store the page HTML in notice_detail and mark the
    notice as downloaded. 400/403/404 responses are marked ignored so
    they are never retried; timeouts and request errors just return.
    """
    status = self.get_status()
    if status in (STATUS_DOWNLOADED, STATUS_IGNORED):
        return
    print('downloading...', self.url)
    # BUG FIX (idiom): discriminate exceptions with except clauses rather
    # than ``type(e) is Timeout`` (which also missed Timeout subclasses).
    try:
        resp = requests.get(self.url, timeout=10)
    except requests.exceptions.Timeout:
        print('Timeout occurred...', self.url)
        return
    except Exception as e:
        print("Error occurred when crawling", self.url, e)
        return
    if resp.status_code != 200:
        print("Error occurred when crawling", self.url, resp.status_code)
        if resp.status_code in (404, 403, 400):
            self.update_status(STATUS_IGNORED)
            return
        # NOTE(review): other non-200 codes fall through and are recorded
        # as downloaded below — looks unintended; confirm before changing.
    url = resp.url
    text = get_content(resp)
    if not text:
        return  # TODO
    c = get_cursor()
    now = datetime.datetime.now()
    # Record the final URL and body atomically.
    with c.connection as conn:
        conn.execute(
            'update notice set url=?,status=?,updated_at=? where id=?',
            (url, STATUS_DOWNLOADED, now, self._id))
        conn.execute(
            'insert into notice_detail (notice_id, raw_html) VALUES (?,?)',
            (self._id, text))
def download_new_notices():
    """Download every notice still in STATUS_INIT, five threads at a time."""
    print('Downloading Notices...')
    c = get_cursor()
    c.execute(
        'select url,title,source from notice where status=? order by id desc',
        (STATUS_INIT, ))
    rows = c.fetchall()
    from threading import Thread

    def download(row):
        import time, random
        # Small random delay to avoid hammering the source host.
        time.sleep(random.randint(0, 3))
        NoticeDownloader(row['url'], row['title'], row['source']).download()

    threads = []
    n = 0
    while rows:
        row = rows.pop()
        t = Thread(target=download, args=(row, ))
        threads.append(t)
        t.start()
        if len(threads) == 5:
            for t in threads:
                t.join(5)
                n += 1
            threads = []
    # BUG FIX: the original never joined the final partial batch, so the
    # "finished" message could print while downloads were still running
    # (and those threads were not counted in n).
    for t in threads:
        t.join(5)
        n += 1
    print('Downloading Finished!', n, ' new notices downloaded...')
def wrapped_handler(*args): environ, start_response = args[-2:] req = webob.Request(environ) cursor = get_cursor() extra = (cursor, req) try: isAjax = re.search("application/json", req.environ["HTTP_ACCEPT"]) >= 0 except KeyError: isAjax = False try: fnArgs = args[:-2] + extra resp = fn(*fnArgs, **req.urlvars) # commit on sucess #cursor.connection.commit() if isinstance(resp, tuple): template, context = resp resp = webob.Response( content_type="text/html", expires=time.time() + 5, body=LOADER.load(template).generate( **context).render("xhtml")) elif isinstance(resp, basestring): resp = webob.Response(body=resp) except webob.exc.HTTPException, e: resp = e
def new_warehouse(capacity, street, city, state_string, zip_code, apartment_no=None):
    """Insert a warehouse row (creating its address first) and return a
    Warehouse reference for it.

    :param capacity: numeric capacity; truncated to int before insert
    """
    db = database.connect()
    cursor = database.get_cursor(db)
    # The warehouse gets its own address row of type "warehouse".
    warehouse_address = address.Address.new_address(street, city, state_string, zip_code,
                                                    "warehouse", apt_no=apartment_no)
    address_id = warehouse_address.get_id()
    if isinstance(capacity, (int, float)):
        # Oracle out-bind variable that receives the generated id.
        returned_id = cursor.var(database.cx_Oracle.NUMBER)
        cursor.execute("insert into warehouses "
                       "(capacity, address_id) values (:input_capacity, :input_address) "
                       "returning id into :new_warehouse_id",
                       input_capacity=int(capacity), input_address=address_id,
                       new_warehouse_id=returned_id)
        database.commit(db)
        new_id = int(returned_id.getvalue())
        database.disconnect(db)
        return Warehouse(new_id)
def trade_real(options):
    """Run the supply-and-demand simulation over the live world and print
    average wealth and satisfaction stats (overall and for cities with
    size > 15000)."""
    from functions import city_f, trade_f, sad_f
    from classes import world
    from rules import sad_rules
    import sys
    w = world.World(database.get_cursor())
    city_f.apply_city_matrix(w)
    res = sad_f.supply_and_demand(w)
    total_sat = 0
    big_sat = 0
    big_count = 0
    wealth = 0
    for city_id, the_city in w.live_cities().items():
        total_sat += the_city.satisfaction()
        wealth += the_city.wealth
        if the_city.size > 15000:
            # Track big cities separately for their own average.
            big_sat += the_city.satisfaction()
            big_count += 1
    city_count = len(w.live_cities())
    print("Avg wealth: %s" % (wealth / city_count))
    print("Average: %s" % (total_sat / city_count))
    print("Big average: %s" % (big_sat / big_count))
def get_recent_workouts_route():
    """ Gets all of a users workouts """
    cur = db.get_cursor()
    # Placeholder payload until real queries are wired up.
    data = {'best_lift': 200, 'recent_lift': 100}
    return jsonify(data)
def request_reset():
    """Password-reset request view.

    On a valid form, look up the account by email and, when found, email
    a reset link keyed on an md5 hash of the last login time, then render
    the confirmation page; otherwise re-render the request form.
    """
    form = RequestResetForm()
    connection = get_connection()
    cursor = get_cursor()
    if form.validate_on_submit():
        with connection:
            with cursor:
                cursor.execute("SELECT * FROM user_account WHERE email = %s",
                               (form.email.data, ))
                row = cursor.fetchone()
                # BUG FIX: fetchone() returns None for an unknown email;
                # the original subscripted row['email'] before checking,
                # crashing on any unregistered address.
                if row is not None and row['email'] is not None:
                    # NOTE(review): md5 of last_login is a weak, guessable
                    # token — consider secrets.token_urlsafe instead.
                    date_hash = hashlib.md5(str(
                        row['last_login']).encode()).hexdigest()
                    import smtplib
                    from email.mime.text import MIMEText
                    message = MIMEText("""You've requested a password change! Head over to %s""" % \
                        (request.host + url_for("auth.reset_verify", uid=row['id'], date_hash=date_hash)))
                    message['Subject'] = "Password reset request"
                    message['From'] = "*****@*****.**"
                    message['To'] = row['email']
                    smtp = smtplib.SMTP(host="127.0.0.1", port=8080)
                    smtp.sendmail("*****@*****.**", row['email'], message.as_string())
                    current_app.logger.info(message)
                    return render_template("auth/request_reset.html", hash=date_hash)
    return render_template("auth/request_reset.html", form=form)
def get_initial_json(): """ Returns the basic politician information (id, name, last name, headshot, party slug, party name, currently active) in JSON format. Related React.js component: <SearchBox /> :return: a jsonify'd dictionary of all the basic elements needed for the initial search/render """ # Get the cursor cursor = get_cursor() # Create and execute the query for the initial data needed to load the React app # We're not picking anything specific for this, so no parameters needed, just execute it directly cursor.execute( "SELECT p.id, p.name, p.name_family, p.headshot, c.slug, c.short_name, e.end_date, r.name AS riding_name " "FROM core_politician p, core_party c, core_electedmember e, core_riding r " # table_name abbreviation, ... "WHERE p.gender NOT LIKE '' " # appears to help filter for "real" MPs? "AND p.headshot NOT LIKE '' " # appears to help filter for "real" MPs? "AND p.id = e.politician_id " # joins politician info with elected member rows (multiple if re-elected) "AND e.party_id = c.id " # joins elected member rows with associated party rows "AND r.id = e.riding_id " "AND e.start_date = " # we only want their most recent party data, so let's pick that one "( " # we want (e.start_date = most recent e.start_date) " SELECT start_date " " FROM core_electedmember " # get all electedmember records associated with each politicians " WHERE politician_id = p.id " # yep, we can still use p.id in here! 
" ORDER BY start_date " # sort them by start_date " DESC LIMIT 1 " # limiting to one only selects the most recent date ") " "AND (e.end_date > '2006-01-01' OR e.end_date IS NULL) " "ORDER BY p.name_family" # order by last name ) # Fetch results to a dictionary labelled "raw" as there are some evaluations that need to happen raw_pol_results = cursor.fetchall() # Create a list to store our final result pol_results = [] # Iterate through each result for row in raw_pol_results: # Add desired data to results in dictionary form pol_results.append({ # 'JSON-KEY' : row['DATABASE-KEY'], JSON-KEY is used in React only 'id': row['id'], 'name': row['name'], 'name_family': row['name_family'], 'imgurl': row['headshot'].split('/') [-1], # extracts filename only, no directory data 'party_slug': row['slug'], 'party_name': row['short_name'], 'active': False if row['end_date'] else True, 'riding': row['riding_name'] }) # boolean: if an end_date exists, they are no longer active # Return JSON as well just in case we want to use it with AJAX calls return jsonify(results=pol_results)
def get_capacity(self):
    """Return this warehouse's capacity from the warehouses table."""
    connection = database.connect()
    cur = database.get_cursor(connection)
    cur.execute("select capacity from warehouses where id = :warehouse_id",
                warehouse_id=self.get_id())
    capacity = cur.fetchone()[0]
    database.disconnect(connection)
    return capacity
def modify_name(self, new_name):
    """Rename this product; a falsy new_name leaves the row untouched."""
    connection = database.connect()
    cur = database.get_cursor(connection)
    if new_name:
        cur.execute("update products set name = :name_string where id = :product_id",
                    name_string=new_name,
                    product_id=self.get_id())
        database.commit(connection)
    database.disconnect(connection)
def get_name(self):
    """Return this product's name from the products table."""
    connection = database.connect()
    cur = database.get_cursor(connection)
    cur.execute("select name from products where id=:product_id", product_id=self._id)
    product_name = cur.fetchone()[0]
    database.disconnect(connection)
    return product_name
def archive(item, id=None):
    """Archive all tweets for query_url ``item``.

    :param item: query_urls.id to archive
    :param id: starting twitter_id; when None, defaults to the smallest
        twitter_id already stored for that url. (Name shadows the builtin
        but is kept for caller compatibility.)
    """
    # BUG FIX: parenthesized print — the bare "print ..." statement is a
    # syntax error under Python 3; this form works under both 2 and 3.
    print("Starting to archive ...")
    conn = database.get_connection()
    cursor = database.get_cursor(conn)
    cursor.execute("SELECT url FROM query_urls WHERE id = %s", (item,))
    result = cursor.fetchone()
    cursor.close()
    url = result['url']
    if id is None:
        # NOTE(review): this cursor is obtained without ``conn``, unlike
        # the one above — presumably get_cursor() falls back to a default
        # connection; confirm.
        cursor = database.get_cursor()
        cursor.execute("SELECT min(twitter_id) AS twitter_id FROM tweets WHERE query_url_id = %s", (item, ))
        result = cursor.fetchone()
        if result is not None:
            id = result['twitter_id']
    conn.close()
    archiver = Archiver(url, item, id)
    archiver.run()
def get_host_profile(host):
    """Return the most recent profile linked to the named host.

    :param host: host name to resolve via ``pp_host``.
    :return: the ``profile`` column of the newest matching ``pp_profile_link`` row.
    :raises TypeError: if no profile link exists (``fetchone()`` returns None).
    """
    with get_cursor() as cur:
        # FIX: dropped the spurious f-string prefix. The query has no
        # interpolation fields, and an f-prefix on SQL risks accidental
        # brace interpolation; the %s placeholder is bound by the driver.
        cur.execute(
            """
            SELECT profile FROM pp_profile_link
            WHERE host = (SELECT id FROM pp_host WHERE name = %s)
            ORDER BY id DESC LIMIT 1;
            """, (host, ))
        return cur.fetchone()[0]
def build_voi_battle(options):
    """Build the VOI files for the battle named on the command line and upload them."""
    options.delay = 0
    options.verbose = True
    from functions import voi_f
    battle_name = " ".join(sys.argv[2:])
    the_world = world.World(database.get_cursor())
    voi_files = voi_f.build_battle(the_world, battle=battle_name)
    cli_f.upload_voi(options, voi_files)
def get_product_types():
    """Return the list of every product type name in the database.

    :return: list of ``product_type`` strings.
    """
    db = database.connect()
    cursor = database.get_cursor(db)
    cursor.execute("select product_type from product_types")
    # Each row is a 1-tuple; keep only the type name.
    types = [row[0] for row in cursor.fetchall()]
    # BUG FIX: the original never released the connection; every sibling
    # helper in this module pairs connect() with disconnect().
    database.disconnect(db)
    return types
def new_person(username, password, first_name, last_name, type_string, middle_initial = None, \
        salary = None, job_title = None):
    """Insert a new person row and return a Person object wrapping it.

    :param username: login name for the new person.
    :param password: plaintext password; stored hashed via hash_password().
    :param first_name: given name.
    :param last_name: family name.
    :param type_string: person-type label; must exist in person_types.
    :param middle_initial: optional; only its first character is stored.
    :param salary: optional; applied afterwards via modify_salary().
    :param job_title: optional; applied afterwards via modify_job_title().
    :return: a Person built from the freshly generated primary key.
    :raises ValueError: if type_string is not a known person type.
    """
    db = database.connect()
    cursor = database.get_cursor(db)
    # Oracle OUT bind variable: receives the id produced by RETURNING below.
    returned_id = cursor.var(database.cx_Oracle.NUMBER)
    # getting person type ID from type_string
    cursor.execute("select id from person_types where person_type = :input_type", \
        input_type = type_string)
    type_id = cursor.fetchone()
    if type_id:
        type_id = type_id[0]
    else:
        raise ValueError("Type given (%s) not valid person type" % str(type_string))
    hashed_password = hash_password(username, password)
    # Two INSERT variants: with and without the middle_initial column.
    if middle_initial:
        # Trimming middle initial
        middle_initial = middle_initial[0]
        cursor.execute("insert into persons \
            (username, password, first_name, middle_initial, last_name, \
            person_type_id, balance) \
            values (:input_username, :input_password, \
            :input_first, :input_middle, :input_last, :input_type_id, 0) \
            returning id into :output_id" , \
            input_username = username, input_password = hashed_password, \
            input_first = first_name, input_middle = middle_initial, \
            input_last = last_name, input_type_id = type_id, \
            output_id = returned_id)
        database.commit(db)
    else:
        cursor.execute("insert into persons \
            (username, password, first_name, last_name, \
            person_type_id, balance) \
            values (:input_username, :input_password, \
            :input_first, :input_last, :input_type_id, 0) \
            returning id into :output_id" , \
            input_username = username, input_password = hashed_password, \
            input_first = first_name, \
            input_last = last_name, input_type_id = type_id, \
            output_id = returned_id)
        database.commit(db)
    database.disconnect(db)
    # Resolve the OUT bind into a plain number usable as a primary key.
    returned_id = returned_id.getvalue()
    reference = Person(returned_id)
    # Optional attributes are applied through the object's own mutators.
    if salary:
        reference.modify_salary(salary)
    if job_title:
        reference.modify_job_title(job_title)
    return reference
def build_voi_campaign(options):
    """Build the VOI files for the campaign named on the command line and upload them."""
    options.delay = 0
    options.verbose = True
    from functions import voi_f
    campaign_name = " ".join(sys.argv[2:])
    the_world = world.World(database.get_cursor())
    voi_files = voi_f.build_campaign(the_world, campaign=campaign_name)
    cli_f.upload_voi(options, voi_files)
def new_cities(options):
    """Open a local map view in the browser for every newly founded city."""
    from queries import city_q
    city_dict = city_q.get_new_cities(database.get_cursor())
    for city_id, the_city in city_dict.items():
        # Escape '&' so the shell does not treat it as a background operator.
        map_args = the_city.map_link_args().replace('&', '\\&')
        cmd = "open http://localhost/rob3/web.py?mode=view_map\\&{map_link}".format(map_link=map_args)
        os.system(cmd)
def get_initial_json():
    """
    Returns the basic politician information (id, name, last name, headshot,
    party slug, party name, currently active) in JSON format.

    Related React.js component: <SearchBox />

    :return: a jsonify'd dictionary of all the basic elements needed for the
        initial search/render
    """
    cursor = get_cursor()
    # One static query loads everything the React app needs on first render;
    # there are no user-supplied values, so no bind parameters are needed.
    cursor.execute(
        "SELECT p.id, p.name, p.name_family, p.headshot, c.slug, c.short_name, e.end_date, r.name AS riding_name "
        "FROM core_politician p, core_party c, core_electedmember e, core_riding r "
        "WHERE p.gender NOT LIKE '' "   # appears to help filter for "real" MPs
        "AND p.headshot NOT LIKE '' "   # likewise
        "AND p.id = e.politician_id "   # politician -> elected member rows
        "AND e.party_id = c.id "        # elected member -> party
        "AND r.id = e.riding_id "       # elected member -> riding
        "AND e.start_date = "           # keep only the most recent membership:
        "( "
        " SELECT start_date "
        " FROM core_electedmember "
        " WHERE politician_id = p.id "  # correlated on the outer politician
        " ORDER BY start_date "
        " DESC LIMIT 1 "
        ") "
        "AND (e.end_date > '2006-01-01' OR e.end_date IS NULL) "
        "ORDER BY p.name_family"        # order by last name
    )

    # Reshape each raw row into the dictionary shape the React app expects.
    pol_results = [{
        'id': row['id'],
        'name': row['name'],
        'name_family': row['name_family'],
        # keep the headshot filename only, discarding directory components
        'imgurl': row['headshot'].split('/')[-1],
        'party_slug': row['slug'],
        'party_name': row['short_name'],
        # an end_date on the membership row means they are no longer active
        'active': not row['end_date'],
        'riding': row['riding_name'],
    } for row in cursor.fetchall()]

    # Return JSON as well just in case we want to use it with AJAX calls
    return jsonify(results=pol_results)
def get_parent(self, cursor):
    """Load the order post this order belongs to into ``self.parent``.

    :param cursor: database cursor to run the lookup with; when None a
        fresh cursor is obtained from the database module.
    :raises Exception: wrapping any database error, with the query attached.
    """
    # BUG FIX: the original unconditionally replaced the caller's cursor
    # with a new one, silently ignoring the parameter.
    if cursor is None:
        cursor = database.get_cursor()
    # self.id is an integer primary key; %d interpolation matches the
    # error-reporting pattern used throughout this codebase.
    query = """SELECT * FROM orders WHERE post_id = %d""" % self.id
    try:
        cursor.execute(query)
    except Exception as e:
        raise Exception("Database error: %s\nQuery: %s" % (str(e.args[0]).replace("\n", ""), query))
    row = cursor.fetchone()
    self.parent = order_post.Order_post(the_world=None, row=row)
def get_lift_variants(short_name):
    """Return a JSON response listing (name, variant_name) pairs for one lift."""
    cursor = db.get_cursor()
    query = """
    SELECT name, variant_name FROM lifts, lift_variants
    WHERE lifts.short_name = lift_variants.short_name
    AND lifts.short_name = %s"""
    cursor.execute(query, [short_name])
    rows = cursor.fetchall()
    return Response(json.dumps(rows), mimetype='application/json')
def get_risk_for_condition(condition, age, sex):
    """Return the 2012 rate for the given condition/age/sex, or 0 when absent."""
    cursor = get_cursor()
    query = """
    SELECT IF(MIN(rate) IS NULL, 0, rate) AS rate FROM conditions
    WHERE year = 2012 AND age = %s AND sex = %s AND `condition` = %s"""
    cursor.execute(query, (age, sex, condition))
    row = cursor.fetchone()
    return row[0]
def main(verbose=False):
    """Run all periodic reports and print their combined output.

    Prints an "all green" banner when no report produced any output.

    :param verbose: passed through to each report's run().
    """
    cursor = database.get_cursor()
    output = []
    output.append(players_reports.run(cursor, verbose))
    output.append(city_reports.run(cursor, verbose))
    # BUG FIX: the original tested ``output != []``, which is always true
    # because two items are appended above, so the "All reports green"
    # branch was unreachable. Drop empty report strings before testing.
    output = [text for text in output if text]
    if output:
        print(database.shell_text("\n".join(output)))
    else:
        print(database.shell_text("[g]All reports green[/g]\n"))
def current_turn(force_requery=False):
    """Return the current game turn number, caching the result in ``data``.

    :param force_requery: when True, bypass the cached value and hit the DB.
    :return: the turn number, or None if the query yields no rows.
    :raises Exception: wrapping any database error, with the query attached.
    """
    # Serve the cached value unless the caller forces a refresh.
    if not force_requery:
        # Idiom fix: compare against None with ``is not``, not ``!=``.
        if data['current_turn'] is not None:
            return data['current_turn']
    cur = database.get_cursor(True)
    # NOTE(review): OFFSET 1 skips the newest turns row — presumably the
    # latest row is a turn still in progress; confirm against the schema.
    query = "SELECT turn FROM turns ORDER BY turn DESC LIMIT 1 OFFSET 1;"
    try:
        cur.execute(query)
    except Exception as e:
        raise Exception("Database error: %s\nQuery: %s" % (str(e.args[0]).replace("\n",""), query))
    # Returns on the first (only) row; falls through to None on no rows.
    for row in cur:
        data['current_turn'] = row['turn']
        return row['turn']
def test_calculate_favour(self): return self.test_targets.append(deity_rules.calculate_favour) # Basically to make sure it runs under live data w = world.World(database.get_cursor()) for t, the_team in w.teams().items(): the_team.get_deities(w.cursor) for deity_id, the_deity in w.deities().items(): deity_rules.calculate_favour(w, the_team, the_deity.name) deity_rules.calculate_favour(w, the_team, deity_id)
def promer_score(clustera, clusterb):
    """Return the promer similarity score for two clusters, caching it in the DB.

    Looks the (smaller id, larger id) pair up in the ``promer`` table; on a
    miss, computes the score with _call_promer and inserts it. Identical
    clusters always score 1.

    :param clustera: first cluster; must have a persisted _id and fna().
    :param clusterb: second cluster; likewise.
    :raises ValueError: if either cluster has no database id.
    """
    from database import get_cursor
    ida = clustera._id
    idb = clusterb._id
    if ida == idb:
        return 1
    if not ida or not idb:
        # FIX: parenthesized print() so this also runs on Python 3; the
        # original Python 2 print statements are syntax errors there.
        print("raising valuerror")
        raise ValueError("scoring unreal clusters is impossible")
    # enforce ida<idb so each unordered pair is stored exactly once
    ida, idb = min(ida, idb), max(ida, idb)
    with get_cursor() as cur:
        cur.execute("select score from promer where l= %s and r =%s;", (ida, idb))
        result = cur.fetchone()
        if result:
            return result[0]
    with get_cursor() as cur:
        score = _call_promer(clustera.fna(), clusterb.fna())
        try:
            cur.execute("insert into promer (score,l,r) values(%s,%s,%s);", (score, ida, idb))
        except IntegrityError:
            # Another worker may have inserted the same pair concurrently.
            print("possible threading problem caught. Do clusters %s and %s have a score?" % (ida, idb))
        return score
def find_requests(options):
    """Find request topics with new posts and run each matching team's orders.

    Builds the list of per-team request topics, asks the remote getter
    which of them have unread posts, then runs orders for each such team.

    :param options: CLI options; uses options.delay and options.verbose.
    """
    from queries import team_q
    from functions import request_f
    cursor = database.get_cursor()
    team_dict = team_q.get_real_active_teams(cursor)
    topic_list = []
    lookup = {}
    for k, v in team_dict.items():
        if v.request_topic > 0:
            topic_list.append(str(v.request_topic))
            lookup[str(v.request_topic)] = v.id
    topic_list = ",".join(topic_list)
    # print(topic_list)
    getter_data = "p=%s&mode=find_request_topics&topic_list=%s" % (common.data['getterPass'], topic_list)
    # BUG FIX: urlopen's POST body must be bytes on Python 3; the original
    # passed a str, which raises TypeError before any request is made.
    topics_to_read = urllib.request.urlopen(
        common.data['getter_url'], getter_data.encode('utf-8')).read().strip().decode('utf-8')
    # BUG FIX: ''.split("\n") is [''], so the original ``len(...) < 1``
    # check could never fire; test the raw response for emptiness instead.
    if not topics_to_read:
        if options.verbose:
            print(database.shell_text("No requests found"))
        return
    topic_list = topics_to_read.split("\n")
    teams_to_read = []
    for t in topic_list:
        if t == '':
            continue
        teams_to_read.append(lookup[t])
    for t in teams_to_read:
        print(request_f.msn_run_orders(database.get_cursor(), t))
        time.sleep(options.delay)
    if options.verbose:
        print(database.shell_text("[g]Ran %d requests[/g]" % len(teams_to_read)))
def build_map(cursor=None):
    """Rebuild the map preprocessing tables.

    Drops map_continent_tiles (ignoring failure when it does not exist),
    refills the continent table, re-checks the tiles table, then reruns
    the path preprocessor.

    :param cursor: database cursor; a fresh one is created when omitted.
    """
    # Idiom fix: identity comparison with None (``is None``, not ``== None``).
    if cursor is None:
        cursor = database.get_cursor()
    # Drop the map_continent_tiles table; deliberately best-effort, since
    # the table may not exist yet on a fresh database.
    query = "DROP TABLE map_continent_tiles"
    try:
        cursor.execute(query)
    except Exception:
        pass
    # Map preprocessor
    sync_f.fill_table(cursor, mapper.Map_continent().table_info, continent_list)
    sync_f.check_table(cursor, mapper.Map_continent_tiles, fix=True, show_fixes=False)
    path_preprocessor_f.run(cursor)
def main(cursor=None, check_all=False, verbose=True):
    """Run every system-check suite in order, announcing progress when verbose."""
    cursor = cursor or database.get_cursor()
    if verbose:
        print(database.shell_text("''Starting system checks''"))
    # Each checker exposes the same run(cursor, check_all, verbose) entry point.
    for checker in (cities_check, military_check, players_check, ops_check, teams_check):
        checker.run(cursor, check_all, verbose)
    if verbose:
        print(database.shell_text("''Checks complete''"))
def city_wealth(cursor, verbose):
    """Recompute and persist the wealth rating of every live city.

    :param cursor: database cursor used for the UPDATE statements.
    :param verbose: when True, show a progress bar while iterating cities.
    """
    queries = []
    # FIX: build the world from the cursor we were handed instead of
    # opening a second one via database.get_cursor(); everything in this
    # function should run on the caller's cursor.
    w = world.World(cursor)
    city_f.apply_city_matrix(w, compress=False)
    if verbose:
        it = cli_f.progressbar(w.live_cities().items(), "cities_check.city_wealth: ", 40, with_eta=True)
    else:
        it = w.live_cities().items()
    for city_id, the_city in it:
        the_city.wealth = city_rules.wealth_rate(w, the_city)
        # wealth and id are integers, so %d interpolation is safe here.
        queries.append("UPDATE cities SET wealth = %d WHERE id = %d;" % (the_city.wealth, city_id))
    database.query(cursor, *queries)