def expand(self): self.fetch_data() # Get rating # cursor.execute("SELECT AVG(rating) AS average FROM " + tbl_image_rating + " WHERE image = %s GROUP BY image", [imageId]) # rating = cursor.fetchone() # if rating != None: # image['rating'] = rating # get author name if self.author: self.author_name = db.fetch("SELECT name FROM " + db.tbl_author + " WHERE id=%s", [self.author], one=True, as_list=True) # get labels labels = db.fetch( "SELECT id, name FROM " + db.tbl_label + " INNER JOIN " + db.tbl_image_label + " ON (label = id AND image = %s)", [self.id]) if labels: self.labels = labels # get meta data self.open() exif = {} iptc = {} exif_info = self.image._getexif() or {} for tag, value in exif_info.items(): decoded = ExifTags.TAGS.get(tag, str(tag)) exif[decoded] = value iptc_info = IptcImagePlugin.getiptcinfo(self.image) or {} for tag, value in iptc_info.items(): decoded = iptc_tags.get(tag, str(tag)) iptc[decoded] = value self.exif = exif self.iptc = iptc
def set_labels(self, label_names):
    """Synchronise this image's labels with *label_names*.

    Creates labels that do not exist yet, adds/removes image<->label links
    as needed, and permanently deletes labels no longer used by any image.

    :param label_names: iterable of label name strings
    :return: list of label ids now attached to the image
    """
    assert hasattr(self, 'id'), "id must be set before fetching data"
    # select ids for each label, creating missing ones on the fly
    labels = []
    for label_name in label_names:
        label = db.fetch("SELECT id FROM " + db.tbl_label + " WHERE name=%s", [label_name], one=True, as_list=True)
        if label is None:
            label = db.execute("INSERT INTO " + db.tbl_label + "(name) VALUES(%s) RETURNING id", [label_name])
            db.commit()
        labels.append(label)
    # get current labels
    current_labels = db.fetch("SELECT label FROM " + db.tbl_image_label + " WHERE image=%s", [self.id], as_list=True)
    # update database: link new labels, unlink stale ones
    to_be_added = diff(labels, current_labels)
    to_be_deleted = diff(current_labels, labels)
    for label in to_be_added:
        db.execute("INSERT INTO " + db.tbl_image_label + "(image, label) VALUES(%s,%s)", [self.id, label])
    for label in to_be_deleted:
        db.execute("DELETE FROM " + db.tbl_image_label + " WHERE image=%s AND label=%s", [self.id, label])
        # if label is not used anymore, delete it permanently
        # (fixed: previously queried hard-coded "label_image" instead of the
        # configured join table used everywhere else in this module)
        count = db.fetch("SELECT COUNT(image) FROM " + db.tbl_image_label + " WHERE label=%s", [label], one=True, as_list=True)
        if not count:
            db.execute("DELETE FROM " + db.tbl_label + " WHERE id=%s", [label])
    db.commit()
    return labels
def index():
    """Render the gallery index page with bundle counts, label statistics,
    and the currently active label filters."""
    bundles = db.fetch(
        "SELECT bundle AS path, COUNT(id) AS count FROM " + db.tbl_image +
        " GROUP BY bundle ORDER BY path")
    current_labels = to_list(request.args.get('labels', None))
    current_not_labels = to_list(request.args.get('notlabels', None))
    related_labels = get_related_labels(current_labels, current_not_labels)
    labels = db.fetch(
        "SELECT id, name, COUNT(image) AS count FROM " + db.tbl_label +
        " INNER JOIN " + db.tbl_image_label +
        " ON (label = id) GROUP BY id ORDER BY name ASC")
    return render_template(
        'index.html',
        config=config,
        bundles=bundles,
        labels=labels,
        current_labels=current_labels,
        current_not_labels=current_not_labels,
        related_labels=related_labels,
    )
def fetch_data(self):
    """Load this image's database row into instance attributes.

    No-op when the row was already fetched (detected via the ``ctime``
    attribute set by a previous call).
    """
    if hasattr(self, 'ctime'):
        return
    if hasattr(self, 'id'):
        # initialized by id
        row = db.fetch("SELECT * FROM " + db.tbl_image + " WHERE id=%s",
                       [self.id], one=True)
    else:
        # initialized by path: split into bundle (dir, relative to root) + name
        name = os.path.basename(self.path)
        bundle = os.path.dirname(self.path).replace(config.ROOT_DIR, '')
        row = db.fetch("SELECT * FROM " + db.tbl_image + " WHERE bundle=%s AND name=%s",
                       [bundle, name], one=True)
    for key, value in row.items():
        setattr(self, key, value)
def sync_bundle(path, bundle, should_update_metadata=False): current_app.logger.debug("Path: %s Bundle: %s" % (path, bundle)) # List files in directory files = [f for f in os.listdir(path) if f.lower().endswith('.jpg')] files.sort() items = {f: 2 for f in files} # Look what is already present in database images = db.fetch("SELECT id, name FROM " + db.tbl_image + " WHERE bundle=%s", [bundle]) ids = {} for image in images: if image['name'] not in items: items[image['name']] = 0 if items[image['name']] == 3: # Remove duplicates im = GalleriaImage.fromid(image['id']) im.delete(keep_file=True) continue items[image['name']] += 1 ids[image['name']] = image['id'] # Analyze what was found for name in sorted(items): # Image is in the directory but not in database if items[name] == 2: image = GalleriaImage.create(bundle, name) # lgtm [py/multiple-definition] # Image is in the database but not in the directory elif items[name] == 1: image = GalleriaImage.fromid(ids[name]) image.delete() # Image is in sync, update metadata if requested elif should_update_metadata: image = GalleriaImage.fromid(ids[name]) image.update_metadata()
def get_related_labels(labels, not_labels, unlimited=True):
    """Return labels that co-occur on images matching all of *labels* and
    none of *not_labels*, each with a usage count, ordered by count.

    NOTE(review): label ids are interpolated directly into the SQL (the
    HAVING clauses and the NOT IN list) instead of being bound as
    parameters — SQL injection risk if callers pass unvalidated request
    input. TODO: parameterize.
    """
    current_app.logger.debug(labels)
    if not labels and not not_labels:
        return []
    limit = '' if unlimited else ' LIMIT 20'
    sql_in = ','.join(labels + not_labels)
    having = []
    if labels:
        # require at least one link to each wanted label
        having.extend(map(lambda x: 'SUM(CASE WHEN label=%s THEN 1 ELSE 0 END) > 0' % x, labels))
    if not_labels:
        # require zero links to each excluded label
        having.extend(map(lambda x: 'SUM(CASE WHEN label=%s THEN 1 ELSE 0 END) = 0' % x, not_labels))
    having = ' AND '.join(having)
    # temp table of matching image ids, then count their sibling labels
    db.execute("CREATE TEMPORARY TABLE matches (image INTEGER NOT NULL PRIMARY KEY)")
    db.execute("INSERT INTO matches SELECT image FROM " + db.tbl_image_label + " GROUP BY image HAVING " + having)
    related = db.fetch("SELECT id, name, COUNT(A.image) AS count FROM matches INNER JOIN " + db.tbl_image_label + " A ON matches.image=A.image INNER JOIN " + db.tbl_label + " ON (A.label = id) WHERE A.label NOT IN (" + sql_in + ") GROUP BY id HAVING COUNT(A.image) > 0 ORDER BY count DESC, name ASC" + limit)
    db.execute("DROP TABLE matches")
    db.commit()
    return related
def main():
    """Run the full reporting pipeline over the data cache.

    Returns the cache dict (useful if the caller wants to pickle it).
    When the "autopickle" registry flag is set, the cache is persisted
    in the ``finally`` block even if a step fails.
    """
    # Fixed: cache is referenced in finally; initialise it so a failure in
    # reset_timestamp()/db.fetch() no longer raises NameError there.
    cache = None
    try:
        common.reset_timestamp()
        # p = period.g_period
        period.describe()
        cache = db.fetch()
        # print 1 /0
        timesheets.create_timesheets(cache)
        excel.encache(cache)
        # db.save_state(cache)
        excel.verify(cache)
        expenses.create_expense_report(cache)
        statements.create_statements(cache)
        all_invoices = invoices.enumerate_invoices(cache)
        invoices.create_text_invoice_summary(all_invoices)
        the_tweaks = cache["InvTweaks"]
        # tweaks.load(cache)
        accumulated_tweaks = tweaks.accum_tweaks_to_job_level(the_tweaks)
        # db.save_state(cache)
        post.post_main(cache, accumulated_tweaks)
        recoveries.create_recovery_report(cache, the_tweaks)
        wip.create_wip_report(cache, True)  # self.cbox_text_wip.IsChecked())
        health.create_health_report(cache)
        html.create_html()
    finally:
        # print "pydra.main().finally"
        if cache is not None and registry.get_defaulted_binary_reg_key("autopickle", False):
            # print "saving cache"
            db.save_state(cache)
    return cache  # useful if we want to pickle it
def get_db_ad_info():
    """Return {ad_id: (ad_id, advertiser_id, position, path, sex, price)},
    keeping the first row seen for each ad_id."""
    dict_ad_info = {}
    for ad in db.fetch("ad_info"):
        # plain membership test instead of the fragile .get(key, "no") == "no"
        # sentinel-string idiom
        if ad[0] not in dict_ad_info:
            dict_ad_info[ad[0]] = ad
    return dict_ad_info
def undo(track, callback): """Replaces the track with the stored version in the database""" callback("Looking up in database") try: orig = db.fetch(track) except KeyError: callback("Not Found") return callback("Found, saving") track.replace(orig) track.save()
def get_disabled_cities(start_string_of_city):
    """
    This is used for auto complete drop down in admin page.
    All the city names that start with start_string_of_city will be returned.
    """
    # removed unused local `search_term`
    query = """select city from disabled_cities where city like %s LIMIT 10;"""
    # prefix match, bound as a parameter (LIKE 'prefix%')
    like_pattern = '{}%'.format(start_string_of_city)
    return db.fetch(query, (like_pattern, ))
def handle_cmd(cmd, channel):
    """Handle a yes/no Slack reply and post the bot's response.

    NOTE(review): the *channel* parameter is unused — the response is
    always posted to the module-level ``botchannel``; confirm intentional.
    """
    global slack_client
    print("Handling Command")
    # fallback message shown when the reply is neither "yes" nor "no"
    default_msg = "Response is Not Valid.Please enter a valid response.Enter*{}*.".format(
        response_text)
    response = None
    if cmd.lower() == "yes":
        print("User Responded Yes")
        response = "OK.I will turn on the Guest mode for them."
        db.fetch("Guest", None)
    elif cmd.lower() == "no":
        print("User Responded No")
        response = "OK.I will inform the Neighbors."
        Notify.notify_neighbor("Bhuvaneshwaran")
    slack_client.api_call("chat.postMessage",
                          channel=botchannel,
                          text=response or default_msg)
    print("Handling Complete")
def load(uid: int):
    """Fetch the twelfth-grade record for *uid*; None when absent."""
    execute('SELECT * FROM twelfth_table WHERE student_id = %s', (uid, ))
    rows = fetch()
    if not rows:
        return None
    row = rows[0]
    return Twelfth(
        row['student_id'],
        row['twelfth_school_name'],
        row['twelfth_cgpa'],
        row['twelfth_board'],
        row['twelfth_year'],
    )
def check_bundle(bundle): """ Checks if bundle is mentioned in database and deletes curresponding images :param bundle: bundle path """ images = db.fetch("SELECT id FROM " + db.tbl_image + " WHERE bundle=%s", [bundle]) if not images: return for image in images: im = GalleriaImage.fromid(image['id']) im.delete()
def get_all_cities(start_string_of_city):
    """
    This is used for auto complete drop down in home page.
    All the city names that start with start_string_of_city will be returned.
    """
    # removed unused local `search_term`
    query = """select DISTINCT(city) from airport_codes where city like %s LIMIT 10;"""
    # prefix match, bound as a parameter (LIKE 'prefix%')
    like_pattern = '{}%'.format(start_string_of_city)
    return db.fetch(query, (like_pattern, ))
def stats_db(filename): """Returns database information as [(label, value), ].""" result = [("Database", filename), ("Created", datetime.datetime.fromtimestamp(os.path.getctime(filename))), ("Last modified", datetime.datetime.fromtimestamp(os.path.getmtime(filename))), ("Size", format_bytes(os.path.getsize(filename))), ] counts = db.fetch("counts", "type, SUM(count) AS count", group="type") cmap = dict((x["type"], x["count"]) for x in counts) for name, tables in conf.InputTables: countstr = "{:,}".format(sum(cmap.get(t) or 0 for t in tables)) result += [("%s events" % name.capitalize(), countstr)] return result
def load(uid: int):
    """Fetch all SGPA rows for *uid*; yields an empty entry list when none."""
    execute('SELECT * FROM sgpa_table WHERE student_id = %s', (uid, ))
    entries = [
        {'semester': row['sgpa_semester'], 'value': row['sgpa_value']}
        for row in fetch()
    ]
    return Sgpa(uid, entries)
def load(uid: int):
    """Fetch all projects for *uid*, each with its set of professor emails."""
    execute('SELECT * FROM project_table WHERE student_id = %s', (uid,))
    rows = fetch()
    entries = []
    for row in rows:
        # professors are linked per (student, project title) in a join table
        execute('SELECT * FROM project_professor_table WHERE student_id = %s AND project_title = %s',
                (uid, row['project_title']))
        professors = {prof['professor_email'] for prof in fetch()}
        entries.append({
            'start_date': row['project_start_date'],
            'end_date': row['project_end_date'],
            'title': row['project_title'],
            'description': row['project_description'],
            'professor_list': professors,
        })
    return Project(uid, entries)
def keyboard(table, period=None): """Handler for showing the keyboard statistics page.""" days, input = db.fetch("counts", order="day", type=table), "keyboard" if period and not any(v["day"][:len(period)] == period for v in days): return bottle.redirect(request.app.get_url("/<input>", input=input)) count = sum(v["count"] for v in days if not period or v["day"][:len(period)] == period) tabledays = set(x["type"] for x in db.fetch("counts", day=("LIKE", period + "%"))) if period else {} where = (("day", period), ) if period else () if period and len(period) < 8: # Month period, query by known month days mydays = [v["day"] for v in days if v["day"][:7] == period] where = (("day", ("IN", mydays)), ) cols, group = "realkey AS key, COUNT(*) AS count", "realkey" counts_display = counts = db.fetch(table, cols, where, group, "count DESC") if "combos" == table: counts_display = db.fetch(table, "key, COUNT(*) AS count", where, "key", "count DESC") events = db.select(table, where=where, order="stamp", limit=conf.MaxEventsForStats) stats, events = stats_keyboard(events, table, count) dbinfo = stats_db(conf.DbPath) return bottle.template("heatmap_keyboard.tpl", locals(), conf=conf)
def sync_users_for_sms():
    """Push users fetched from the DB to the SMS sender as contacts and
    return the save result as JSON."""
    s = sender.Sender()
    rows = db.fetch(SQL['users_for_sms'])
    contacts = [
        sender.Contact(
            s,
            email=row.get('email'),
            phone='+' + row.get('phone'),
            # "LastName FirstName" display form
            name=' '.join([row.get('last_name', ''), row.get('io_name', '')]),
        )
        for row in rows
    ]
    result = s.saveContacts(contacts)
    return jsonify(result)
def lookup(self, tag):
    """Resolve *tag* to a URL: local inverted cache, shared cache, then DB.

    Cache hits from slower tiers are written back into the faster ones.
    """
    tag = str(tag)
    # 1) process-local inverted cache
    hit = LOCAL_CACHE.inverted.get(tag)
    if hit:
        return hit
    # 2) shared cache; remember the hit locally
    hit = self.cache.get(tag)
    if hit:
        LOCAL_CACHE[hit] = tag
        return hit
    # 3) database; populate both caches on a hit
    hit = fetch(self.table, tag)
    if hit:
        LOCAL_CACHE[hit] = tag
        self.cache.set(tag, hit)
    return hit
def user_sending_photo(message):
    """Telegram handler: store the user's contact handle and photo, send the
    finished announcement preview, then advance the chat to the start state."""
    contacts = "@{}".format(message.from_user.username)
    db.update_data(message.from_user.id, 'contacts', contacts)
    # keep only the largest photo size (last element of the sizes list)
    db.update_data(message.from_user.id, 'photo', message.photo[-1].file_id)
    result = db.fetch(message.from_user.id)
    announc = Announcement(result)
    bot.send_photo(
        message.chat.id,
        photo=announc.photo,
        caption="Поздравляю! Можете отправить объявление на модерацию.\n\n{}".
        format(announc.showInfo()),
        reply_markup=keyboard.endKeyboard(),
        parse_mode="Markdown")
    db.set_state(message.chat.id, config.States.S_START.value)
def load(uid: int):
    """Fetch the student record for *uid*; None when absent."""
    execute('SELECT * FROM student_table WHERE student_id = %s', (uid,))
    rows = fetch()
    if not rows:
        return None
    row = rows[0]
    return Student(
        row['student_id'],
        row['student_name'],
        row['student_phone'],
        row['student_email'],
        row['student_dob'],
        row['student_branch'],
        row['student_minor'],
        row['student_year'],
    )
def load(uid: int):
    """Fetch all language proficiency rows for *uid*."""
    execute('SELECT * FROM language_table WHERE student_id = %s', (uid, ))
    entries = [
        {
            'language_name': row['language_name'],
            'speaking': row['speaking'],
            'reading': row['reading'],
            'writing': row['writing'],
        }
        for row in fetch()
    ]
    return Language(uid, entries)
def show_song(song_id):
    """Return one song as JSON or CSV depending on the request content type.

    Raises InvalidUsage (409) when *song_id* is not in the database.

    NOTE(review): *song_id* is interpolated straight into the SQL f-string —
    SQL injection risk if it can come from an untrusted route parameter.
    TODO: switch to a parameterized query if this db.fetch supports bind
    parameters.
    """
    if not db.is_in_database(song_id):
        raise InvalidUsage(
            f"Song with song_id {song_id} does not exists in database. Try \"POST\"",
            status_code=409)
    query = f"SELECT * FROM music WHERE song_id = '{song_id}'"
    rows = db.fetch(query)
    songs = list(map(db.LinkedSong.from_db_row, rows))
    # None content type defaults to JSON
    if request.content_type in {"application/json", None}:
        return jsonify(songs)
    elif request.content_type == "text/csv":
        return create_csv_response(db.LinkedSong, songs)
def load(uid: int):
    """Fetch all extra-curricular activity rows for *uid*."""
    execute('SELECT * FROM extra_curricular_table WHERE student_id = %s',
            (uid, ))
    entries = [
        {
            'title': row['extra_curricular_title'],
            'start': row['extra_curricular_start_date'],
            'end': row['extra_curricular_end_date'],
            'text_description': row['extra_curricular_description'],
        }
        for row in fetch()
    ]
    return ExtraCurricular(uid, entries)
def get_db_user_click():
    """Return {user_id: [ad_id, ...]} — the ads each user clicked,
    de-duplicated while preserving first-seen order.

    (The original comment said "set"; the values are in fact lists.)
    """
    dict_user_click = {}
    for row in db.fetch("user_click"):
        # setdefault + membership test instead of the fragile
        # .get(key, "no") == "no" sentinel-string idiom
        ads = dict_user_click.setdefault(row[0], [])
        if row[1] not in ads:
            ads.append(row[1])
    return dict_user_click
def update_accrual_balance(db, land):
    """Recompute running start/end balances over a land's accrual periods
    and persist corrections.

    Annotates *land* with before/after snapshots under land['periods'] and
    sets land['status'] = 'updated' when any period needed correction.
    Returns the (possibly annotated) *land* dict.
    """
    def right_balance(period, balance, end_balance, amount):
        # True when the stored period already matches the recomputed figures
        return (balance == period['start_balance']) \
            and (end_balance == period['end_balance']) \
            and (amount == period['amount'])

    def update_accruals(period, balance, end_balance, amount):
        # persist corrected figures as 2-decimal strings plus a timestamp
        db.execute(
            SQL_accruals_update,
            ("%0.2f" % (balance, ), "%0.2f" % (end_balance, ),
             "%0.2f" % (amount, ),
             datetime.datetime.now().isoformat(), period['id']))

    periods = db.fetch(SQL_land_periods_cursor, (land.get('id')))
    balance = None
    old_balance = 0
    for period in periods:
        # balance = balance or decimal.Decimal(period.get('start_balance', 0) or 0)
        # each period starts where the previous one ended (0 for the first)
        balance = old_balance
        amount = decimal.Decimal(period.get('calc_accrued', 0) or 0)
        end_balance = balance - amount + \
            decimal.Decimal(period.get('calc_payed', 0) or 0)
        if not right_balance(period, balance, end_balance, amount):
            land['status'] = 'updated'
            if not land.get('periods'):
                land['periods'] = []
            # record the before/after snapshot for reporting
            land['periods'].append({
                'period': period['period'],
                'old': {
                    'start': "%0.2f" % (period['start_balance'], ),
                    'end': "%0.2f" % (period['end_balance'], ),
                    'accrued': "%0.2f" % (period['amount'], )
                },
                'new': {
                    'start': "%0.2f" % (balance or 0, ),
                    'end': "%0.2f" % (end_balance or 0, ),
                    'accrued': "%0.2f" % (amount or 0, ),
                    'payed': "%0.2f" % (period.get('calc_payed', 0) or 0, )
                }
            })
            update_accruals(
                period, balance, end_balance,
                decimal.Decimal(period.get('calc_accrued', 0) or 0))
        balance = end_balance
        old_balance = end_balance
    # print(land)
    return land
def inputindex(input): """Handler for showing keyboard or mouse page with day and total links.""" stats = {} countminmax = "SUM(count) AS count, MIN(day) AS first, MAX(day) AS last" tables = ("moves", "clicks", "scrolls") if "mouse" == input else ("keys", "combos") for table in tables: stats[table] = db.fetchone("counts", countminmax, type=table) periods, month = [], None for data in db.fetch("counts", "day AS period, count, 'day' AS class", order="day DESC", type=table): if not month or month["period"][:7] != data["period"][:7]: month = {"class": "month", "period": data["period"][:7], "count": 0} periods.append(month) month["count"] += data["count"] periods.append(data) stats[table]["periods"] = periods dbinfo = stats_db(conf.DbPath) return bottle.template("input.tpl", locals(), conf=conf)
def get(self):
    """Search endpoint: rank posts against the query words by TF-IDF.

    Splits the query into unique lowercase words, drops stopwords (unless
    that would leave nothing), computes per-post tf-idf in SQL via two
    temporary word tables, and returns posts ordered by best score, each
    augmented with its topics.

    NOTE(review): words are quoted with quote_string() and spliced into the
    SQL VALUES list rather than bound as parameters — verify quote_string
    is robust against injection.
    """
    args = parser.parse_args()
    query = args['query']
    words = [x.strip().lower() for x in query.split()]
    words = list(set(words))
    if len(words) == 0:
        return []
    # drop stopwords, but keep the originals if everything was a stopword
    tentative = [word for word in words if word not in stopwords]
    if len(tentative) > 0:
        words = tentative
    word_values = ",".join(["({})".format(quote_string(_)) for _ in words])
    non_pk_cols = ','.join(['title', 'description', 'content'])
    # two identical temp tables because MySQL cannot reopen a temporary
    # table twice in one statement
    r = fetch("""
        DROP TEMPORARY TABLE IF EXISTS words;
        DROP TEMPORARY TABLE IF EXISTS words2;
        CREATE TEMPORARY TABLE words(word VARCHAR(40));
        CREATE TEMPORARY TABLE words2(word VARCHAR(40));
        INSERT INTO words VALUES {1};
        INSERT INTO words2 VALUES {1};
        SELECT post_id, {0}, MAX(tf_idf)
        FROM (
            SELECT post_id, {0}, word,
                   LOG(((SELECT COUNT(*) FROM posts) + 1) / (doc_freq)) * tf as tf_idf
            FROM (
                SELECT p.post_id, {0}, w.word, IFNULL(doc_freq,0) as doc_freq,
                       ROUND((LENGTH(p.content) - LENGTH(REPLACE(LOWER(p.content), w.word, ""))) / LENGTH(w.word)) AS tf
                FROM posts p
                JOIN words w
                LEFT JOIN (
                    SELECT word, COUNT(*) as doc_freq
                    FROM posts p, words2 w
                    WHERE LOCATE(w.word, LOWER(p.content)) > 0
                    GROUP BY word
                ) AS tf ON w.word = tf.word
            ) as d
        ) AS a
        GROUP BY post_id
        ORDER BY MAX(tf_idf) DESC;""".format(non_pk_cols, word_values), multi=True)
    posts = fit(r, ('post_id', 'title', 'description', 'content', 'tf_idf'))
    for post in posts:
        post['topics'] = Post.fetch_topics(post['post_id'])
    return posts
def load(uid: int):
    """Fetch all internship rows for *uid*."""
    execute('SELECT * FROM internship_table WHERE student_id = %s', (uid, ))
    entries = [
        {
            'start_date': row['internship_start_date'],
            'end_date': row['internship_end_date'],
            'organization': row['internship_organization'],
            'designation': row['internship_designation'],
            'description': row['internship_description'],
        }
        for row in fetch()
    ]
    return Internship(uid, entries)
def get_covid_status(city): query = """select cs.state_code , cs.deaths , cs.hospitalized , cs.inICU , cs.onVentilator , cs.positive , cs.recovered from covid_status as cs , airport_codes as ac where ac.city = %s and ac.state_code = cs.state_code;""" queryReult = db.fetch(query, (city, )) if (len(queryReult) == 0): return None status = queryReult[0] # statusDict = {"stateCode": status[0], "deaths": status[1], "hospitalized": status[2], "inICU": status[3], "onVentilator": status[4], "positive": status[5], "recovered": status[6]} statusDict = { "Deaths": status[1], "Hospitalized": status[2], "Positive Cases": status[5], "Recovered": status[6] } return statusDict
def get_distance(city1, city2): query = """ select distance from city_distance where (city_distance.city1 = %s and city_distance.city2 = %s) or (city_distance.city2 = %s and city_distance.city1 = %s);""" dist = db.fetch(query, ( city1, city2, city2, city1, )) if len(dist) == 0: return 100000000 return dist[0][0]
def get_best_hotel(city): query = """ select hotels.hotel_id from hotels , hotels_rating where hotels.hotel_id = hotels_rating.hotel_id and hotels.city = %s and hotels.enabled group by hotels.hotel_id , hotels_rating.rating order by rating desc limit 10; """ hotels = db.fetch(query, (city, )) temp_hotels = [] for h in hotels: temp_hotels.append(h[0]) return temp_hotels
def get_direct_connection(city1, city2, dep_date):
    """Return ids of up to 3 direct flights from *city1* to *city2*
    departing on *dep_date*."""
    query = """select fl.flight_id from airport_codes as ac1 , airport_codes as ac2 , flights as fl where fl.origin = ac1.airport_code and fl.dest = ac2.airport_code and ac1.city = %s and ac2.city = %s and fl.fl_date = %s limit 3; """
    # rows are single-column; unwrap into a plain id list
    return [row[0] for row in db.fetch(query, (city1, city2, dep_date))]
def get_connecting_flights(city1, city2, dep_date): query = """select rc.flight_ids from (with recursive reach_carr (f,t,all_ids,flight_ids,last_arr_time,cost) as ( (select origin,dest,ARRAY[origin] , ARRAY[fl.flight_id] ,fl.crs_arr_time, fl.distance from flights as fl , airport_codes as ac1 , airport_codes as ac2 where fl.origin = ac1.airport_code and ac1.city = %s and ac1.enabled and fl.dest = ac2.airport_code and ac2.enabled and fl.fl_date = %s) union (select rc.f,fl.dest,all_ids || fl.origin , flight_ids || fl.flight_id , fl.crs_arr_time , rc.cost + fl.distance from reach_carr as rc,flights as fl , airport_codes as ac1 , airport_codes as ac2 where (rc.t = fl.origin) and (rc.f = ac1.airport_code and ac1.city = %s and ac1.enabled) and (rc.t <> ac2.airport_code and ac2.city = %s and ac2.enabled) and fl.crs_dep_time > rc.last_arr_time and fl.fl_date = %s and fl.dest <> ANY(all_ids) and array_length(all_ids,1) < 3 )) select * from reach_carr) as rc , airport_codes as ac1 , airport_codes as ac2 where (rc.f = ac1.airport_code and ac1.city = %s) and (rc.t = ac2.airport_code and ac2.city = %s) group by rc.flight_ids , rc.cost order by cost asc limit 5;""" conn = db.fetch(query, ( city1, dep_date, city1, city2, dep_date, city1, city2, )) temp_conn = [] for c in conn: temp_conn.append(c[0]) return temp_conn
def is_cached(cls, sym, t0, t1): """\ Check if db already has the data we need Args: sym (str) : ticker symbol t0 (str) : start date t1 (str) : end date Returns: True if data required is cached in db; False otherwise """ # convert datestr to yyyy-mm-dd format t0 = datestr_to_datetime(t0).strftime('%Y-%m-%d') t1 = datestr_to_datetime(t1).strftime('%Y-%m-%d') # dates we need (to download, potentially) need = set(trading_dates(t0, t1)) # dates we have (from db cache) have = set(flatten(fetch(sym, t0, t1, cols='date'))) # compute set diff and determine if download is needed return len(need - have) == 0
# Ad-hoc Python 2 debugging script: prints per-employee time aggregates
# (grouped by job code and task) from the fetched data cache.
import pdb
import pickle
import pprint
import common
import db


def pic():
    # round-trip the cache through save_state (persistence smoke test)
    data = db.fetch()
    db.save_state(data)

#pic()
#data = db.load_state()
#pic()
data = db.fetch()
pp = pprint.PrettyPrinter(indent=4)
for e in sorted(data['employees']):
    print e
    # this employee's time entries, aggregated by (job code, task)
    times = filter(lambda x: x['Person'] == e, data['timeItems'])
    aggregates = common.aggregate(times, lambda x: (x['JobCode'], x['Task']))
    for key, vals in aggregates:
        total_times = common.summate(vals, common.mkKeyFunc('TimeVal'))
        print key, " ", total_times
    #pdb.set_trace()
    #pp.pprint(aggregates)
    print
#print data['timeItems']
print "Finished"
def pic():
    """Round-trip the cache: fetch it from the DB and immediately persist it."""
    snapshot = db.fetch()
    db.save_state(snapshot)
def update_metadata(self):
    """Extract IPTC/EXIF metadata from the image file and persist it to the
    image's DB row: description, shooting time (stime), author (created on
    demand), labels, and pixel dimensions."""
    self.open()
    iptc_info = IptcImagePlugin.getiptcinfo(self.image) or {}
    exif_info = self.image._getexif() or {}
    fields = []   # column names to update
    values = []   # matching values, same order as fields
    # IPTC object name -> description column
    if IPTC_OBJECT_NAME in iptc_info:
        title = iptc_info[IPTC_OBJECT_NAME].decode('utf-8')
        fields.append("description")
        values.append(title)
    # shooting timestamp: prefer IPTC date+time, then EXIF original,
    # then EXIF digitized (the latter without timezone handling)
    timestamp = None
    if IPTC_DATE_CREATED in iptc_info and IPTC_TIME_CREATED in iptc_info:
        timestamp_str = iptc_info[IPTC_DATE_CREATED].decode('utf-8') + iptc_info[IPTC_TIME_CREATED].decode('utf-8')
        if len(timestamp_str) == 14:
            # no timezone present — assume UTC
            timestamp_str = timestamp_str + '+0000'
        timestamp = datetime.strptime(timestamp_str, '%Y%m%d%H%M%S%z')
    elif EXIF_DATE_TIME_ORIGINAL in exif_info:
        timestamp_str = exif_info[EXIF_DATE_TIME_ORIGINAL]
        if len(timestamp_str) == 19:
            # no timezone present — assume UTC
            timestamp_str = timestamp_str + '+0000'
        elif timestamp_str[-3] == ':':
            # "+HH:MM" offsets: drop the colon so %z can parse it
            timestamp_str = kill_char(timestamp_str, len(timestamp_str)-3)
        timestamp = datetime.strptime(timestamp_str, '%Y:%m:%d %H:%M:%S%z')
    elif EXIF_DATE_TIME_DIGITIZED in exif_info:
        timestamp_str = exif_info[EXIF_DATE_TIME_DIGITIZED]
        timestamp = datetime.strptime(timestamp_str, '%Y:%m:%d %H:%M:%S')
    if timestamp:
        fields.append("stime")
        values.append(timestamp)
    # author by-line: look up the author row, creating it if absent
    if IPTC_BYLINE in iptc_info:
        author_name = iptc_info[IPTC_BYLINE].decode('utf-8')
        author_id = db.fetch("SELECT id FROM " + db.tbl_author + " WHERE name=%s", [author_name], one=True, as_list=True)
        if not author_id:
            author_id = db.execute("INSERT INTO " + db.tbl_author + "(name) VALUES (%s) RETURNING id", [author_name])
        fields.append("author")
        values.append(author_id)
    # keywords -> labels (value may be a single bytes value or a list)
    if IPTC_KEYWORDS in iptc_info:
        labels = []
        if isinstance(iptc_info[IPTC_KEYWORDS], list):
            for label in iptc_info[IPTC_KEYWORDS]:
                labels.append(label.decode('utf-8'))
        else:
            labels.append(iptc_info[IPTC_KEYWORDS].decode('utf-8'))
        self.set_labels(labels)
    fields.append("width")
    values.append(self.image.width)
    fields.append("height")
    values.append(self.image.height)
    if fields:
        values.append(self.id)
        # build the "col=%s, col=%s, ..." SET clause
        fields_str = ", ".join(map(lambda x: "%s=%%s" % x, fields))
        db.execute("UPDATE " + db.tbl_image + " SET " + fields_str + " WHERE id=%s", values)
        db.commit()
def history(user_id, day):
    """Admin view of image moderation history, per user and optionally per day.

    Without *user_id*: lists (user, day) pairs having log entries.
    With *user_id*: lists that user's images, grouped per day (or restricted
    to one day when *day* is given). Ids containing '-' are external
    (OneAll) identities and get their display names resolved.

    NOTE(review): label/status/day/user_id filter values are interpolated
    into the SQL string rather than bound as parameters — SQL injection
    risk for admin-supplied args. TODO: parameterize.
    """
    # admins only
    authorized = session.get('auth', None)
    if not authorized or authorized not in config.ADMINS:
        return redirect(url_for('login', next=request.url))
    images = None
    users = None
    user = None
    where = []
    join = None
    label = request.args.get('-filt.label', None)
    if label:
        where.append(db.tbl_image_label + '.label = %s' % label)
        join = 'INNER JOIN ' + db.tbl_image_label + ' ON (' + db.tbl_image_log + '.image = ' + db.tbl_image_label + '.image)'
    status = request.args.get('-filt.status', None)
    if status:
        where.append('status = %s' % status)
    oa = OneAll(site_name=config.ONEALL_SITE_NAME,
                public_key=config.ONEALL_PUBLIC_KEY,
                private_key=config.ONEALL_PRIVATE_KEY)
    if user_id:
        user = {'id': user_id}
        if '-' in user_id:
            # external identity: resolve provider names and display name
            oa_user = oa.user(user_id)
            app.logger.debug("User: %s" % oa_user['identities'])
            for identity in oa_user['identities']:
                if identity['provider'] == 'vkontakte':
                    identity['provider'] = 'vk'
                if identity['displayName']:
                    user['displayName'] = identity['displayName']
            user['identities'] = oa_user['identities']
        else:
            user['displayName'] = user_id
        grp = ''
        if day:
            # single-day view: the day filter fixes the group
            sel = ' date_trunc(\'day\', MAX(' + db.tbl_image_log + '.ctime)) AS day, '
            where.append('date_trunc(\'day\', ' + db.tbl_image_log + '.ctime) = \'%s\'' % day)
        else:
            # all days: also group by the computed day column (position 6)
            sel = ' date_trunc(\'day\', ' + db.tbl_image_log + '.ctime) AS day, '
            grp = ', 6'
        query = 'SELECT id, bundle, name, width, height,' + sel + ' MIN(status) AS status, MAX(' + db.tbl_image_log + \
            '.ctime) AS ctime FROM ' + db.tbl_image_log + ' INNER JOIN ' + db.tbl_image + ' ON (' + \
            db.tbl_image_log + '.image = id)'
        if join:
            query += ' '
            query += join
        query += ' WHERE "user" = \'%s\'' % user_id
        if where:
            query += ' AND '
            query += ' AND '.join(where)
        query += ' GROUP BY id' + grp + ' ORDER BY ctime DESC'
        app.logger.debug("Query: %s" % query)
        images = db.fetch(query)
    else:
        # overview: distinct (user, day) pairs present in the log
        query = 'SELECT DISTINCT "user" AS id, date_trunc(\'day\', ctime) AS day FROM ' + db.tbl_image_log
        if join:
            query += ' '
            query += join
        if where:
            query += ' WHERE '
            query += ' AND '.join(where)
        query += ' ORDER BY 2 DESC'
        app.logger.debug("Query: %s" % query)
        users = db.fetch(query)
        for u in users:
            if '-' in u['id']:
                # resolve display names for external identities
                oa_user = oa.user(u['id'])
                app.logger.debug("User: %s" % oa_user['identities'])
                for identity in oa_user['identities']:
                    if identity['displayName']:
                        u['displayName'] = identity['displayName']
                        break
    return render_template('history.html', config=config, images=images, users=users, user=user)
def user_has_reviewed(hotel_id, user_id): query=""" select * from users, reviews where users.uname=reviews.review_username and userid=%s and hotel_id=%s """ entries = db.fetch(query, (user_id, hotel_id)) return len(entries)>0
def select(bundle):
    """Image listing endpoint: builds a SELECT dynamically from the
    label/censored/date filters in the request args (optionally re-scanning
    the bundle directory first) and returns matching images as JSON.

    NOTE(review): filter values from request args (label ids, censored,
    bundle, dates, order) are interpolated into the SQL string rather than
    bound as parameters — SQL injection risk. TODO: parameterize.
    """
    # only re-scan the directory for a plain, unfiltered bundle view
    should_sync_bundle = bundle is not None
    where = []
    having = []
    join = None
    group = None
    order = request.args.get('-nav.order', 'stime')
    labels = request.args.get('-filt.labels', None)
    notlabels = request.args.get('-filt.notlabels', None)
    if labels:
        should_sync_bundle = False
        where.append(db.tbl_image_label + '.label IS NOT NULL')
        # mysql syntax: SUM(IF(label=%s,1,0)) > 0
        having.extend(map(lambda x: 'SUM(CASE WHEN label=%s THEN 1 ELSE 0 END) > 0' % x, labels.split(',')))
        join = 'INNER JOIN ' + db.tbl_image_label + ' ON (id = image)'
        group = 'id'
    if notlabels:
        should_sync_bundle = False
        # todo: where clause should not be added twice
        where.append(db.tbl_image_label + '.label IS NOT NULL')
        # mysql syntax: SUM(IF(label=%s,1,0)) = 0
        having.extend(map(lambda x: 'SUM(CASE WHEN label=%s THEN 1 ELSE 0 END) = 0' % x, notlabels.split(',')))
        join = 'INNER JOIN ' + db.tbl_image_label + ' ON (id = image)'
        group = 'id'
    censored = request.args.get('-filt.censored', None)
    if censored is not None:
        should_sync_bundle = False
        where.append('censored = %s' % censored)
    elif not request.args.get('any', None) and (not labels or '424' not in labels.split(',')):
        # default view hides censored images unless label 424 is requested
        where.append('censored = 0')
    if bundle:
        where.append('bundle = \'%s\'' % bundle)
    sfrom = request.args.get('-filt.from', None)
    if sfrom:
        should_sync_bundle = False
        where.append('stime >= \'%s\'' % sfrom)
    still = request.args.get('-filt.till', None)
    if still:
        should_sync_bundle = False
        where.append('stime <= \'%s\'' % still)
    if should_sync_bundle:
        path = ''.join([config.ROOT_DIR, bundle])
        should_update_metadata = bool(request.args.get('updatemetadata', None))
        sync_bundle(path, bundle, should_update_metadata)
    # noinspection SqlResolve
    query = 'SELECT id, name, bundle, description, width, height FROM ' + db.tbl_image
    qlen = len(query)
    if join:
        query += ' '
        query += join
    if where:
        query += ' WHERE '
        query += ' AND '.join(where)
    if group:
        query += ' GROUP BY '
        query += group
    if having:
        query += ' HAVING '
        query += ' AND '.join(having)
    if len(query) == qlen:
        # refuse a completely unfiltered query over the whole table
        abort(413)
    query += ' ORDER BY '
    query += order
    app.logger.debug("Query: %s" % query)
    images = db.fetch(query)
    for image in images:
        # absolute web path for each image
        image['path'] = ''.join([request.script_root, image['bundle'], '/', image['name']])
    return jsonify(images=images)
def get_airport_data(airport_code): airport = db.fetch("select * from airport_codes where airport_code=%s", (airport_code, )) return airport
def recognition():
    """Run live face recognition on the default webcam until ESC/'q'.

    Loads a pre-trained LBPH model from trainer/trainer.yml, detects faces
    with a Haar cascade, and overlays the recognized name (fetched via
    db.fetch) and a confidence percentage on each frame.
    """
    # UTF-8 string printing enabled
    def print_utf8_text(image, xy, text, color):
        # utf-8 characters: draw via Pillow because cv2.putText cannot
        # render non-ASCII glyphs
        fontName = 'FreeSerif.ttf'
        font = ImageFont.truetype(fontName, 24)  # select font
        img_pil = Image.fromarray(image)  # convert image to pillow mode
        draw = ImageDraw.Draw(img_pil)  # prepare image
        draw.text((xy[0], xy[1]), text, font=font, fill=(color[0], color[1], color[2], 0))  # b,g,r,a
        image = np.array(img_pil)  # convert image back to cv2 mode (numpy.array())
        return image

    recognizer = cv2.face.LBPHFaceRecognizer_create()
    recognizer.read('trainer/trainer.yml')
    cascadePath = "Cascade/haarcascade_frontalface_default.xml"
    faceCascade = cv2.CascadeClassifier(cascadePath)
    font = cv2.FONT_HERSHEY_SIMPLEX
    # id counter (NOTE: shadows the `id` builtin)
    id = 0
    #names = ['None']  # array order corresponds to the id numbers
    cam = cv2.VideoCapture(0)  # start capturing video
    cam.set(3, 1000)  # set video widht
    cam.set(4, 800)  # set video height
    # minimum detection window: 10% of the frame in each dimension
    minW = 0.1 * cam.get(3)
    minH = 0.1 * cam.get(4)
    while True:
        ret, img = cam.read()
        # img = cv2.flip(img, 2)# Flip vertically
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        faces = faceCascade.detectMultiScale(
            gray,
            scaleFactor=1.2,
            minNeighbors=5,
            minSize=(int(minW), int(minH)),
        )
        for (x, y, w, h) in faces:
            cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2)
            id, confidence = recognizer.predict(gray[y:y + h, x:x + w])
            # compare the face match score; LBPH confidence is a distance,
            # so LOWER means a better match
            if (confidence < 75):
                # id = names[id]
                # presumably resolves the numeric id to a user name via the
                # db module -- TODO confirm db.fetch(str(id), "uName") signature
                id = db.fetch(str(id), "uName")
                confidence = " {0}%".format(round(100 - confidence))
            else:
                # "bilinmiyor" is Turkish for "unknown" (runtime string, kept)
                id = "bilinmiyor"
                confidence = " {0}%".format(round(100 - confidence))
            color = (255, 255, 255)
            img = print_utf8_text(img, (x + 5, y - 25), str(id), color)
            # cv2.putText(img, str(id), (x + 5, y - 5), font, 1, (255, 255, 255), 2)
            cv2.putText(img, str(confidence), (x + 5, y + h - 5), font, 1, (255, 255, 0), 1)
        cv2.imshow('camera', img)
        k = cv2.waitKey(10) & 0xff  # Press 'ESC' for exiting video
        if k == 27 or k == ord('q'):
            break
    # Do a bit of cleanup ("exiting the program and tidying up" in Turkish)
    print("\n [INFO] Programdan çıkıyor ve ortalığı temizliyorum")
    cam.release()
    cv2.destroyAllWindows()
if group: sql += " GROUP BY " + group if order: get_direction = lambda c: (c if isinstance(c, basestring) else "DESC" if c else "ASC") sql += " ORDER BY " for i, col in enumerate(order): name = col[0] if isinstance(col, (list, tuple)) else col direction = "" if name == col else " " + get_direction(col[1]) sql += (", " if i else "") + name + direction if limit: sql += " LIMIT %s" % (", ".join(map(str, limit))) return sql, args if "__main__" == __name__: import db db.init(":memory:", "CREATE TABLE test (id INTEGER PRIMARY KEY, val TEXT)") print("Inserted ID %s." % db.insert("test", val=None)) for i in range(5): print("Inserted ID %s." % db.insert("test", {"val": i})) print("Fetch ID 1: %s." % db.fetch("test", id=1)) print("Fetch all up to 3, order by val: %s." % db.fetchall("test", order="val", limit=3)) print("Updated %s row where val is NULL." % db.update("test", {"val": "new"}, val=None)) print("Select where val IN [0, 1, 2]: %s." % db.fetchall("test", val=("IN", range(3)))) print("Delete %s row where val=0." % db.delete("test", val=0)) print("Fetch all, order by val: %s." % db.fetchall("test", order="val")) db.execute("DROP TABLE test") db.close()
def recognise():
    """Recognize a face from the Raspberry Pi camera and return the username.

    Runs two recognizers (LBPH and Eigenfaces) over live frames until the
    same name is predicted 10 times in a row (or 'q' is pressed), then saves
    the last frame to tmp/face.jpg and hands it to db.fetch.
    (Python 2 module: uses the `print` statement.)
    """
    facecascade = cv2.CascadeClassifier('Haar/haarcascade_frontalcatface.xml')
    # eye / eyeglass cascades are loaded but never used below
    eye = cv2.CascadeClassifier('Haar/haarcascade_eye.xml')
    spec = cv2.CascadeClassifier('Haar/haarcascade_eye_tree_eyeglasses.xml')
    count = 0
    recognizer1 = cv2.face.createLBPHFaceRecognizer()
    recognizer2 = cv2.face.createEigenFaceRecognizer()
    recognizer1.load('trainer/trainedData1.xml')
    recognizer2.load('trainer/trainedData2.xml')
    username = "******"
    # Initialize and start the video frame capture
    cam = PiCamera()
    cam.resolution = (160, 120)
    cam.framerate = 32
    rawCapture = PiRGBArray(cam, size=(160, 120))
    # allow the camera to warmup
    time.sleep(0.1)
    lastTime = time.time() * 1000.0
    # Loop
    for frame in cam.capture_continuous(rawCapture, format="bgr", use_video_port=True):
        # Read the video frame
        image = frame.array
        # Convert the captured frame into grayscale
        gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        # Get all face from the video frame
        faces = facecascade.detectMultiScale(gray,
                                             scaleFactor=1.1,
                                             minNeighbors=5,
                                             minSize=(30, 30),
                                             flags=cv2.CASCADE_SCALE_IMAGE)
        # per-frame timing in milliseconds
        print time.time() * 1000.0 - lastTime, " Found {0} faces!".format(
            len(faces))
        lastTime = time.time() * 1000.0
        # For each face in faces
        for (x, y, w, h) in faces:
            # Create rectangle around the face
            #cv2.rectangle(img, (x-20,y-20), (x+w+20,y+h+20), (0,255,0), 4)
            cv2.circle(image, (x + w / 2, y + h / 2), int((w + h) / 3),
                       (255, 255, 255), 1)
            facecrp = cv2.resize((gray[y:y + h, x:x + w]), (110, 110))
            # Recognize the face belongs to which ID
            Id, confidence = recognizer1.predict(facecrp)
            Id1, confidence1 = recognizer2.predict(facecrp)
            # Check the ID if exist
            Name = findid.ID2Name(Id, confidence)
            Name2 = findid.ID2Name(Id1, confidence1 / 100)
            # NOTE(review): labels look swapped -- Name comes from the LBPH
            # recognizer but is printed as "Eigen:", and vice versa; confirm
            print("Eigen:", Name)
            print("LBPH", Name2)
            # print(Id1,confidence1,Name,Name2,username,count)
            # lock onto the first predicted name; count consecutive agreement
            if (count == 0):
                username = Name2
                count += 1
            if (count > 0 and username == Name2):
                count += 1
            if count == 10:
                break
            findid.DispID(x, y, w, h, Name, gray)
            if Name2 is not None:
                cv2.putText(image,
                            Name2, ((x + w / 2 - (len(Name2) * 7 / 2)), y - 20),
                            cv2.FONT_HERSHEY_DUPLEX, .4, [255, 255, 255])
            else:
                findid.DispID(x, y, w, h, "Face Not Recognized", gray)
        cv2.imshow('Face', image)
        # clear the stream buffer for the next frame
        rawCapture.truncate(0)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
        if count == 10:
            break
    print(username)
    cv2.imwrite("tmp/face.jpg", image)
    # presumably records the recognized user with the saved snapshot --
    # TODO confirm db.fetch(username, path) signature
    db.fetch(username, "tmp/face.jpg")
    cam.close()
    cv2.destroyAllWindows()
    return username
GPIO.setmode(GPIO.BCM) HALL_SENSOR1 = 7 HALL_SENSOR2 = 8 H_LIGHT = 2 H_FAN = 3 H_MS = 4 H_TV = 17 port = [{"Fan": 3, "Light": 2, "Tv": 17, "Music_System": 4}] GPIO.setup(H_FAN, GPIO.OUT) GPIO.setup(H_LIGHT, GPIO.OUT) GPIO.setup(H_MS, GPIO.OUT) GPIO.setup(H_TV, GPIO.OUT) u = 0 GPIO.setup(HALL_SENSOR1, GPIO.IN) GPIO.setup(HALL_SENSOR2, GPIO.IN) count = 0 try: while True: if GPIO.input(HALL_SENSOR1) and GPIO.input(HALL_SENSOR2): u += 1 name = fr.recognise() key_list = {'Fan', 'Light', 'Modem', 'TV', 'Music_System'} needlist = db.fetch(name, key_list, 1) for r in needlist: for key in port: if key[r] is not None: print(key[r], type(key[r])) GPIO.output(item, GPIO.HIGH) time.sleep(1) except KeyboardInterrupt: GPIO.cleanup()