def query(self):
    query = self.config.get('query')
    if not query:
        table_name = self.config.get('table')
        table = Table(table_name, self.meta, autoload=True)
        query = table.select()
    else:
        query = sql_text(query)
    log.info("Query: %s", query)
    return query

def create_hash_token(self, msisdn, profile_id, remember=False):
    import hashlib
    dk = hashlib.sha224(
        ("%s-%s-%s" % (msisdn, 'I got to let the avalanche REST!',
                       profile_id)).encode('utf-8'))
    token = dk.hexdigest()
    # "remember" keeps the token valid for 30 days instead of 1
    expiry = ' now() + interval 30 day ' if remember else ' now() + interval 1 day '
    sql = "insert into android_auth set id=null, profile_id=:pfid, "\
          "token=:token, expiry=%s" % expiry
    result = self.connection.execute(sql_text(sql), {
        'pfid': profile_id,
        'token': token
    })
    insert_id = result.lastrowid
    return token if insert_id else None

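# A minimal standalone sketch of the token derivation used by
# create_hash_token() above: sha224 over "msisdn-salt-profile_id",
# hex-encoded. The salt string is copied from the original; the function
# name derive_token and the sample arguments are illustrative only.
import hashlib

def derive_token(msisdn, profile_id, salt='I got to let the avalanche REST!'):
    raw = "%s-%s-%s" % (msisdn, salt, profile_id)
    return hashlib.sha224(raw.encode('utf-8')).hexdigest()

print(derive_token('254700000001', 42))
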
def get(self):
    args = resolvepermalink_parser.parse_args()
    key = args['key']
    data = {}
    configconn = configdb.connect()
    sql = sql_text("""
        SELECT data FROM {table}
        WHERE key = :key
    """.format(table=PERMALINKS_TABLE))
    try:
        data = json.loads(configconn.execute(sql, key=key).first().data)
    except Exception:
        # key not found or stored data is not valid JSON
        pass
    return jsonify(data)

def get_flurnamen(self, egrid, conn):
    """Get Flurnamen for plot with EGRID.

    :param str egrid: EGRID
    :param Connection conn: DB connection
    """
    flurnamen = []

    sql = sql_text(self.flurnamen_sql)
    result = conn.execute(sql, egrid=egrid)
    for row in result:
        flurnamen.append(row['flurname'])

    return flurnamen

def get_mybets(self, token, page=1, limit=10):
    profile_id = self.validate_token(token)
    if not profile_id:
        return []
    offset = (page - 1) * limit
    sql = "select date_format(b.created, '%s'), b.bet_id, (select count(*) from bet_slip "\
          " where bet_id=b.bet_id) as total_matches, jackpot_bet_id, total_odd,"\
          " bet_message, bet_amount, possible_win, b.status from bet b left join jackpot_bet j "\
          " on j.bet_id = b.bet_id where profile_id=:pfid order by b.created "\
          " desc limit %s, %s" % ('%Y-%m-%d %H:%i', offset, limit)
    self.logger.info("%s, %s" % (sql, profile_id))
    result = self.connection.execute(
        sql_text(sql), {'pfid': profile_id}).fetchall()
    return result

def check_bonus_daily_award_limit(self, profile_id, re_award_amount):
    hours = int(self.scorepesa_bonus_cfgs['limit_check_no_of_hours'])
    t_q = "select sum(bonus_amount) from profile_bonus"\
          " where updated > DATE_SUB(NOW(), INTERVAL {0} HOUR)"\
          " and status in ('CLAIMED','USED') and profile_id = :pf"\
          " and created_by in ('referral_message', 'referral_message_re_award')"\
          .format(hours)
    awarded_bonus = self.connection.execute(
        sql_text(t_q), {'pf': profile_id}).fetchone()
    self.logger.info(
        "checking daily bonus awarded so far ..../::: {0} :sql: {1} :profile: {2}"
        .format(awarded_bonus, t_q, profile_id))
    if awarded_bonus and awarded_bonus[0]:
        # bonus already awarded within the window: check the combined total
        # against the daily re-award limit
        amount = float(awarded_bonus[0]) + float(re_award_amount)
        self.daily_bonus_claimed = amount
        return amount <= float(
            self.scorepesa_bonus_cfgs['scorepesa_bonus_re_award_daily_limit'])
    return True

def get(self):
    args = resolvepermalink_parser.parse_args()
    key = args['key']
    data = {}
    configconn, permalinks_table, user_permalink_table = db_conn()
    sql = sql_text("""
        SELECT data FROM {table}
        WHERE key = :key
    """.format(table=permalinks_table))
    try:
        data = json.loads(configconn.execute(sql, key=key).first().data)
    except Exception:
        # key not found or stored data is not valid JSON
        pass
    return jsonify(data)

def get_match_details(self, game_id, pick, sub_type=1, parent_match_id=None):
    sql = None
    try:
        self.logger.info(
            "[+] get_match_details data....[][] {0} [+] {1} [+] {2} [+] {3} [][]"
            .format(game_id, pick, sub_type, parent_match_id))
        if parent_match_id:
            sql = "select sub_type_id, odd_key as pick_key, odd_value,"\
                  " m.parent_match_id, special_bet_value from event_odd e inner join `match` "\
                  "m on e.parent_match_id=m.parent_match_id where "\
                  "m.parent_match_id=:pmid and e.sub_type_id=:sub_type and odd_key=:pick"
            dpars = {'sub_type': sub_type, 'pick': pick, 'pmid': parent_match_id}
        else:
            sql = "select sub_type_id, odd_key as pick_key, odd_value, "\
                  "m.parent_match_id, special_bet_value from event_odd e inner join `match` m on "\
                  "e.parent_match_id=m.parent_match_id where m.game_id=:gmid and "\
                  "e.sub_type_id=:sub_type and odd_key=:pick"
            dpars = {'sub_type': sub_type, 'gmid': game_id, 'pick': pick}
        result = self.db.engine.execute(sql_text(sql), dpars).fetchone()
        if result:
            sub_type_id, pick_key, odd_value, parent_match_id, special_bet_value = result
        else:
            # fall back to the request values when no odd was found
            sub_type_id, pick_key, odd_value, parent_match_id, special_bet_value = \
                sub_type, pick, None, parent_match_id, ''
        data = {
            "sub_type_id": sub_type_id,
            "pick_key": pick_key,
            "special_bet_value": special_bet_value,
            "odd_value": odd_value,
            "parent_match_id": parent_match_id
        }
        self.logger.info(
            "[+] bet match detail fetch [][] %s [+] %r [+] %r [+] %r [][]"
            % (sql, result, dpars, data))
        return data
    except Exception as e:
        self.logger.error(
            "[x] Exception match detail fetch [][] %r [] %s [][]" % (e, sql))
        return {}

def load_metadata(self):
    """Load metadata for all layers from MetaDB."""
    if not self.db_url:
        # skip if no MetaDB in config
        self.logger.debug(
            "No MetaDB connection configured, skipping loading of metadata")
        return

    try:
        # connect to MetaDB
        db_engine = DatabaseEngine().db_engine(self.db_url)
        conn = db_engine.connect()

        # build query SQL
        sql = sql_text(self.layers_sql)

        # execute query
        result = conn.execute(sql)
        for row in result:
            # check required columns
            if 'layer_name' not in row:
                self.logger.error(
                    "Missing column 'layer_name' in layers query")
                break
            if 'layer_ref' not in row:
                self.logger.error(
                    "Missing column 'layer_ref' in layers query")
                break

            # collect metadata
            layer_metadata = dict(row)
            layer_metadata['_products'] = self.load_products_metadata(
                row['layer_ref'], conn)

            # render templates
            html_metadata = self.render_metadata_templates(layer_metadata)

            # add to metadata lookup
            self.metadata[row['layer_name']] = {
                'metadata': layer_metadata,
                'html_contents': html_metadata
            }

        # close database connection
        conn.close()
    except Exception as e:
        self.logger.error("Could not load metadata:\n%s" % e)

def check_profile_has_bet(self, msisdn, profile_id=None):
    try:
        profile_id = self.get_profile_id_for_msisdn(msisdn) \
            if profile_id is None else profile_id
        self.logger.info(
            "check if profile has bet [][] {0}".format(profile_id))
        sql = "select bet_id from bet where profile_id=:pfid "\
              "order by bet_id asc limit 1"
        results = self.connection.execute(
            sql_text(sql), {'pfid': profile_id}).fetchone()
        self.logger.info(
            "check if profile has bet results [][] {0} [][][]...".format(results))
        if results:
            self.logger.info(
                "extract check if bet bet_id [][] {0}".format(results[0]))
            if results[0]:
                self.logger.info(
                    "confirmed have bet, proceed and award bonus....")
                return True
        return False
    except Exception as ex:
        self.logger.error(
            "check if profile has bet exception:: {0} ::".format(ex))
        return False

def _count_excluded_events(self, session):
    excluded_uuids = session.query(File.uuid) \
        .filter(File.is_folder) \
        .filter(File.excluded).all()
    excluded_uuids = [u.uuid for u in excluded_uuids]
    if not excluded_uuids:
        return 0

    excluded_events = session.query(Event).from_statement(sql_text(
        """
        select final_e.* from events final_e
        where final_e.id in (
            select max(last_event.id) from events last_event
            where last_event.file_id in (
                select moved_file.id
                from events move_event, files moved_file
                where moved_file.id = move_event.file_id
                and move_event.id in (
                    select max(event.id)
                    from events event, files file
                    where file.id = event.file_id
                    and file.excluded
                    and event.type == 'move'
                    group by file.id
                )
                and (
                    move_event.folder_uuid is null
                    or move_event.folder_uuid not in ({})
                )
            )
            group by last_event.file_id
        )
        order by final_e.is_folder desc, final_e.id
        """.format(
            ','.join(["'{}'".format(uuid) for uuid in excluded_uuids]),
        ))) \
        .all()
    if not excluded_events:
        return 0

    excluded_count = len([
        e for e in excluded_events
        if not is_contained_in_dirs(
            self._db.get_path_from_event(e, session),
            self._excluded_dirs)
    ])
    return excluded_count

def award_bonus_on_request(self, profile_id, msisdn, amount,
                           bonus_type='customer_engage_bonus'):
    try:
        self.logger.info("award bonus {0}::{1}::{2}::{3}".format(
            profile_id, msisdn, amount, bonus_type))
        profile_bonus_dict = {
            "profile_id": profile_id,
            "referred_msisdn": msisdn,
            "bonus_amount": float(amount),
            "status": 'CLAIMED',
            "expiry_date": datetime.now() + timedelta(days=1),
            "created_by": bonus_type,
            "bet_on_status": 1,
            "date_created": datetime.now(),
            "updated": datetime.now()
        }
        result_proxy = self.connection.execute(
            ProfileBonu.__table__.insert(), profile_bonus_dict)
        # inserted_primary_key is a list; take the scalar id
        profile_bonus_id = result_proxy.inserted_primary_key[0]
        self.logger.info(
            "Profile bonus insert proxy.... {0}".format(profile_bonus_id))
        bonus_trx_dict = {
            "profile_id": profile_id,
            "profile_bonus_id": profile_bonus_id,
            "account": "%s_%s" % (profile_id, 'VIRTUAL'),
            "iscredit": 1,
            "reference": profile_bonus_id,
            "amount": float(amount),
            "created_by": bonus_type,
            "created": datetime.now(),
            "modified": datetime.now()
        }
        # update bonus balance to the re-adjusted bonus balance based on
        # bet stake amount of referred friend
        self.connection.execute(BonusTrx.__table__.insert(), bonus_trx_dict)
        self.logger.info('Profile bonus created for ..%s ::id:: %s '
                         % (msisdn, profile_bonus_id))
        # update profile balance for this user to receive the bonus
        profileUpdate = """INSERT INTO profile_balance(profile_id, balance, bonus_balance, transaction_id, created)
            VALUES (:pf, 0, :amount, :trx_id, NOW())
            ON DUPLICATE KEY UPDATE bonus_balance = (bonus_balance+%0.2f)""" % (float(amount), )
        self.connection.execute(
            sql_text(profileUpdate),
            {'pf': profile_id, 'amount': float(amount), 'trx_id': -1})
        self.logger.info('Bonus amount kshs.%s awarded for.... %s '
                         % (amount, msisdn))
        #msisdn = self.get_msisdn_for_profile(claimer_profile_id)
        message = 'CONGRATULATIONS! You have been awarded a bonus of '\
                  'Kshs. %0.2f. www.scorepesasports.com.' % float(amount)
        message_type = 'BULK'
        short_code = 101010
        correlator = ''
        link_id = ''
        payload = urllib.urlencode({
            "message": message,
            "msisdn": msisdn,
            "message_type": message_type,
            "short_code": short_code,
            "correlator": correlator,
            "link_id": link_id
        })
        self.send_notification(payload)
    except Exception as ex:
        self.logger.info('Failed to award bonus %s::%r ' % (msisdn, ex))

def create_speed_dial_profile(self, data):
    try:
        data['msisdn'] = self.get_msisdn_for_profile(data.get("profile_id"))
        self.logger.info("creating speeddial profile.... {0}".format(data))
        sql = "INSERT IGNORE INTO speed_dial_profile "\
              "(profile_id, msisdn, date_created) "\
              "VALUES(:profile_id, :msisdn, :created)"
        params = {
            "profile_id": data.get("profile_id"),
            "msisdn": data.get("msisdn"),
            "created": datetime.now()
        }
        self.connection.execute(sql_text(sql), params)
        return True
    except Exception as exk:
        self.logger.error(
            "Exception on create speed dial profile ::: {0}".format(exk))
        return False

def profile_bonus_flag_created_by(self, profile_id, profile_bonus_id, status=None):
    trxx = self.connection.begin()
    try:
        created_by = "referral_message_re_award"
        pbQ = """update profile_bonus set created_by=:created_by
            where profile_id=:profile_id and profile_bonus_id=:id"""
        params = {
            "profile_id": profile_id,
            "id": profile_bonus_id,
            "created_by": created_by
        }
        if status is not None:
            pbQ = """update profile_bonus set created_by=:created_by, status=:status
                where profile_id=:profile_id and profile_bonus_id=:id"""
            created_by = "referral_message"
            params = {
                "profile_id": profile_id,
                "id": profile_bonus_id,
                "created_by": created_by,
                "status": status
            }
        self.logger.info(
            "update referral bonus re-award flag created_by query {0} :: params :: {1}"
            .format(pbQ, params))
        self.connection.execute(sql_text(pbQ), params)
        trxx.commit()
    except Exception as e:
        trxx.rollback()
        self.logger.error("Re-award bonus flag exception :: {0} ::".format(e))

def get(self):
    username = get_jwt_identity()
    if not username:
        return jsonify({})
    configconn = configdb.connect()
    sql = sql_text("""
        SELECT data FROM {table}
        WHERE username = :user
    """.format(table=USER_PERMALINK_TABLE))
    try:
        data = json.loads(
            configconn.execute(sql, user=username).first().data)
    except Exception:
        # no stored permalink for this user
        data = {}
    return jsonify(data)

def scorepesa_app_update_version(self, data):
    try:
        self.logger.info(
            "update scorepesa app version data.... {0}".format(data))
        vsql = "INSERT INTO mobile_app_version (current_version, created_at)"\
               " VALUES(:current_version, :created_at) ON DUPLICATE KEY UPDATE"\
               " current_version=:current_version"
        params = {
            "current_version": data.get("vnum"),
            "created_at": datetime.now()
        }
        self.connection.execute(sql_text(vsql), params)
        return {"updated": True}
    except Exception as exk:
        self.logger.error(
            "Exception on create mobile app version ::: {0}".format(exk))
        return {"updated": False}

def get_account_balance(self, profile_id):
    bal = False
    if profile_id:
        bal = self.db.engine.execute(
            sql_text(
                "select balance, bonus_balance from profile_balance "
                "where profile_id = :value"
            ), {'value': profile_id}).fetchone()
    if bal:
        available_bonus = float(bal[1])
        self.balance, self.bonus = float(bal[0]), available_bonus
    else:
        self.balance, self.bonus = 0, 0
    self.logger.info("returned balance and bonus ::{0}::{1}".format(
        self.balance, self.bonus))
    return self.balance, self.bonus

def delete(self, key):
    username = get_jwt_identity()
    if not username:
        return jsonify({"success": False})
    # Delete from database
    conn, permalinks_table, user_permalink_table, user_bookmark_table = db_conn()
    sql = sql_text("""
        DELETE FROM {table}
        WHERE key = :key and username = :username
    """.format(table=user_bookmark_table))
    conn.execute(sql, key=key, username=username)
    conn.close()
    return jsonify({"success": True})

def get(self):
    username = get_jwt_identity()
    if not username:
        return jsonify({})
    configconn, permalinks_table, user_permalink_table = db_conn()
    sql = sql_text("""
        SELECT data FROM {table}
        WHERE username = :user
    """.format(table=user_permalink_table))
    try:
        data = json.loads(
            configconn.execute(sql, user=username).first().data)
    except Exception:
        # no stored permalink for this user
        data = {}
    return jsonify(data)

def get(self):
    """Submit query

    Returns additional information at clicked map position.
    """
    args = mapinfo_parser.parse_args()

    tenant = tenant_handler.tenant()
    config_handler = RuntimeConfig("mapinfo", app.logger)
    config = config_handler.tenant_config(tenant)

    db = db_engine.db_engine(config.get('db_url'))
    table = config.get('info_table')
    info_geom_col = config.get('info_geom_col')
    info_display_col = config.get('info_display_col')
    info_title = config.get('info_title')

    try:
        pos = args['pos'].split(',')
        pos = [float(pos[0]), float(pos[1])]
    except Exception:
        return jsonify({"error": "Invalid position specified"})

    try:
        srid = int(re.match(r'epsg:(\d+)', args['crs'], re.IGNORECASE).group(1))
    except Exception:
        return jsonify({"error": "Invalid projection specified"})

    conn = db.connect()
    sql = sql_text("""
        SELECT {display}
        FROM {table}
        WHERE ST_contains({table}.{geom}, ST_SetSRID(ST_Point(:x, :y), :srid))
        LIMIT 1;
    """.format(display=info_display_col, geom=info_geom_col, table=table))
    result = conn.execute(sql, x=pos[0], y=pos[1], srid=srid)

    info_result = []
    for row in result:
        info_result = [[info_title, row[info_display_col]]]
    conn.close()

    return jsonify({"results": info_result})

def get(self, key):
    username = get_jwt_identity()
    if not username:
        return jsonify({"success": False})
    conn, permalinks_table, user_permalink_table, user_bookmark_table = db_conn()
    sql = sql_text("""
        SELECT data FROM {table}
        WHERE username = :user and key = :key
    """.format(table=user_bookmark_table))
    try:
        data = json.loads(
            conn.execute(sql, user=username, key=key).first().data)
    except Exception:
        # no bookmark with this key for this user
        data = {}
    return jsonify(data)

def ussd_match_sport_ids(self, data):
    try:
        #profile_id, new = self.create_profile(data, 1)
        prematch_markets = self.scorepesa_ussd_cfgs['scorepesa_ussd_sport_markets']
        ex_sport_id = self.scorepesa_ussd_cfgs['exclude_sport_ids']
        self.logger.info("[+] get_match_details sport id data "
                         "[][] {0} [+] {1} [][]".format(data, prematch_markets))
        sql = "select s.sport_id, sport_name, e.sub_type_id as market"\
              " from sport s inner join competition c on (c.sport_id=s.sport_id)"\
              " inner join `match` m on m.competition_id=c.competition_id "\
              " inner join event_odd e on e.parent_match_id=m.parent_match_id"\
              " where e.sub_type_id in({0}) and s.sport_id not in({1}) "\
              " group by s.sport_id, e.sub_type_id"\
              .format(prematch_markets, ex_sport_id)
        result = self.db.engine.execute(sql_text(sql)).fetchall()
        data = []
        if result:
            for res in result:
                sport_id, sport_name, market = res
                data.append({
                    "sport_id": sport_id,
                    "sport_name": sport_name,
                    "sub_type_id": market
                })
        else:
            # fall back to a default sport/market when nothing is found
            sport_id, sport_name, market = 14, "soccer", 10
            data = {
                "sport_id": sport_id,
                "sport_name": sport_name,
                "sub_type_id": market
            }
        self.logger.info("[+] bet match detail ussd match sport "
                         "details [][] %s [+] %r [+] %r [][]" % (sql, result, data))
        return data
    except Exception as e:
        self.logger.error("[x] Exception match sport details [][] %r [][]" % (e))
        return None

def ussd_match_top_leagues(self, data):
    try:
        top_league_sport_id = self.scorepesa_ussd_cfgs['top_league_sport_id']
        self.logger.info("[+] get_league_details top leagues data [+] {0}"
                         " [+] for sportId [+] {1} [][]".format(data, top_league_sport_id))
        sql = "select s.sport_id, s.sport_name, c.competition_id as league_id,"\
              " c.competition_name as league_name, c.priority "\
              " from sport s inner join competition c on (c.sport_id=s.sport_id) "\
              " where s.sport_id={0} group by c.competition_id order by c.priority"\
              " desc limit 10"\
              .format(top_league_sport_id)
        result = self.db.engine.execute(sql_text(sql)).fetchall()
        data = []
        if result:
            for res in result:
                sport_id, sport_name, league_id, league_name, priority = res
                data.append({
                    "sport_id": sport_id,
                    "sport_name": sport_name,
                    "league_id": league_id,
                    "league_name": league_name,
                    "priority": priority
                })
        else:
            # fall back to a default entry when no leagues are found
            sport_id, sport_name, league_id, league_name, priority = \
                79, "soccer", None, None, 0
            data = {
                "sport_id": sport_id,
                "sport_name": sport_name,
                "league_id": league_id,
                "league_name": league_name,
                "priority": priority
            }
        self.logger.info("[+] soccer ussd top leagues detail [][]"
                         " %s [+] %r [+] %r [][]" % (sql, result, data))
        return data
    except Exception as e:
        self.logger.error(
            "[x] Exception soccer top league details [][] %r [][]" % (e))
        return []

def post(self):
    args = createpermalink_parser.parse_args()
    url = args['url']
    parts = urlparse(url)
    query = parse_qs(parts.query, keep_blank_values=True)
    for key in query:
        query[key] = query[key][0]
    data = {"query": query, "state": request.json}

    # Insert into database
    configconn, permalinks_table, user_permalink_table, user_bookmark_table = db_conn()
    datastr = json.dumps(data)
    hexdigest = hashlib.sha224(datastr.encode('utf-8')).hexdigest()[0:9]
    date = datetime.date.today().strftime(r"%Y-%m-%d")
    sql = sql_text("""
        INSERT INTO {table} (key, data, date)
        VALUES (:key, :data, :date)
    """.format(table=permalinks_table))
    attempts = 0
    while attempts < 100:
        try:
            configconn.execute(sql, key=hexdigest, data=datastr, date=date)
            break
        except Exception:
            # key collision: re-hash with a random suffix and retry
            pass
        hexdigest = hashlib.sha224(
            (datastr + str(random.random())).encode('utf-8')).hexdigest()[0:9]
        attempts += 1
    configconn.close()

    # Return
    if attempts < 100:
        result = {
            "permalink": parts.scheme + "://" + parts.netloc + parts.path
            + "?k=" + hexdigest
        }
    else:
        result = {"message": "Failed to generate compact permalink"}
    return jsonify(**result)

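# Standalone sketch of the collision-retry scheme used in post() above:
# derive a 9-character sha224 digest from the payload and, on collision,
# re-hash with a random suffix until a free key is found. The function name
# short_key and the `taken` set stand in for the database uniqueness check
# and are illustrative only.
import hashlib
import random

def short_key(payload, taken):
    key = hashlib.sha224(payload.encode('utf-8')).hexdigest()[0:9]
    while key in taken:
        key = hashlib.sha224(
            (payload + str(random.random())).encode('utf-8')).hexdigest()[0:9]
    return key

print(short_key('{"query": {}}', taken=set()))
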
def exists(self, id):
    """Check if a feature exists.

    :param int id: Dataset feature ID
    """
    sql = sql_text(("""
        SELECT EXISTS(SELECT 1 FROM {table} WHERE {pkey}=:id)
    """).format(table=self.table_name, pkey=self.primary_key))

    # connect to database
    conn = self.db.connect()

    # execute query
    result = conn.execute(sql, id=id)
    exists = result.fetchone()[0]

    # close database connection
    conn.close()

    return exists

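# Hypothetical standalone usage of the same EXISTS pattern with plain
# SQLAlchemy, assuming a `features` table with an `id` column and a local
# database URL (all names here are assumptions, not part of the service):
from sqlalchemy import create_engine, text

engine = create_engine('postgresql:///example')
with engine.connect() as conn:
    found = conn.execute(
        text("SELECT EXISTS(SELECT 1 FROM features WHERE id = :id)"),
        {'id': 42}
    ).scalar()
    print(found)
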
def show(self, id, client_srid):
    """Get a feature.

    :param int id: Dataset feature ID
    :param int client_srid: Client SRID or None for dataset SRID
    """
    srid = client_srid or self.srid

    # build query SQL
    # select id and permitted attributes
    columns = (', ').join(
        self.escape_column_names([self.primary_key] + self.attributes)
    )

    geom_sql = self.geom_column_sql(srid)
    sql = sql_text(("""
        SELECT {columns}%s
        FROM {table}
        WHERE {pkey} = :id
        LIMIT 1;
    """ % geom_sql).format(
        columns=columns, geom=self.geometry_column,
        table=self.table_name, pkey=self.primary_key
    ))

    # connect to database and start transaction (for read-only access)
    conn = self.db_read.connect()
    trans = conn.begin()

    # execute query
    feature = None
    result = conn.execute(sql, id=id)
    for row in result:
        # NOTE: result is empty if not found
        feature = self.feature_from_query(row, srid)

    # roll back transaction and close database connection
    trans.rollback()
    conn.close()

    return feature

def get_building_addresses(self, egrid, conn):
    """Get building addresses inside plot with EGRID.

    :param str egrid: EGRID
    :param Connection conn: DB connection
    """
    addresses = []

    sql = sql_text(self.building_addresses_sql)
    result = conn.execute(sql, egrid=egrid)
    for row in result:
        addresses.append({
            'street': row['strassenname'],
            'number': row['hausnummer'],
            'zip': row['plz'],
            'city': row['ortschaft']
        })

    return addresses

def ussd_check_user_exists(self, message):
    try:
        userQ = "select profile_id from profile where msisdn=:value"
        msisdn = message.get("msisdn")
        userExist = self.db.engine.execute(
            sql_text(userQ), {'value': msisdn}).fetchone()
        self.logger.info(
            "[+] user exists result [][][] {0} [][]".format(userExist))
        if userExist:
            self.logger.info("Found user exists already... return response...")
            return {"userExist": True}
        else:
            profile_id, new = self.create_profile(message, 1)
            return {"userExist": False}
    except Exception as e:
        self.logger.error(
            "[-] Exception on ussd_check_user_exists {0}".format(e))
        return {"userExist": "error"}

def update(self, id, feature):
    """Update a feature.

    :param int id: Dataset feature ID
    :param object feature: GeoJSON Feature
    """
    # build query SQL
    sql_params = self.sql_params_for_feature(feature)
    srid = sql_params['client_srid']
    geom_sql = self.geom_column_sql(srid)
    sql = sql_text(("""
        UPDATE {table} SET ({columns}) = ({values_sql})
        WHERE {pkey} = :{pkey}
        RETURNING {return_columns}%s;
    """ % geom_sql).format(
        table=self.table_name, columns=sql_params['columns'],
        values_sql=sql_params['values_sql'], pkey=self.primary_key,
        return_columns=sql_params['return_columns'],
        geom=self.geometry_column
    ))

    update_values = sql_params['bound_values']
    update_values[self.primary_key] = id

    # connect to database
    conn = self.db.connect()

    # execute query
    # NOTE: use bound values
    feature = None
    result = conn.execute(sql, **update_values)
    for row in result:
        # NOTE: result is empty if not found
        feature = self.feature_from_query(row, srid)

    # close database connection
    conn.close()

    return feature

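# The UPDATE above relies on PostgreSQL's multi-column SET and RETURNING
# clauses; a minimal standalone sketch of that pattern (the table and
# column names and the database URL are assumptions):
from sqlalchemy import create_engine, text

engine = create_engine('postgresql:///example')
with engine.begin() as conn:
    row = conn.execute(
        text("UPDATE features SET (name, value) = (:name, :value) "
             "WHERE id = :id RETURNING id, name, value"),
        {'name': 'plot', 'value': 7, 'id': 42}
    ).fetchone()
    print(row)
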
def basic_info_egrid(self, egrid):
    """Return basic plot information given the plot EGRID.

    :param string egrid: The plot EGRID
    """
    try:
        sql = sql_text(self.basic_info_by_egrid_sql)
        conn = self.db.connect()
        result = conn.execute(
            sql, egrid=egrid, srid=self.QUERY_SRID, buffer=self.QUERY_BUFFER)
        plots = self.format_basic_info(result, conn)
        conn.close()
        return {'plots': plots, 'success': True}
    except Exception as e:
        self.logger.error(e)
        return {'error': str(e), 'success': False}

def create_speed_dial_history(self, data):
    try:
        data['msisdn'] = self.get_msisdn_for_profile(data.get("profile_id"))
        self.logger.info("creating speeddial history.... {0}".format(data))
        sql = "INSERT INTO speed_dial_history "\
              "(profile_id, msisdn, source, header_info, created) "\
              "VALUES(:profile_id, :msisdn, :source, :header_info, :created)"
        params = {
            "profile_id": data.get("profile_id"),
            "msisdn": data.get("msisdn"),
            "source": data.get("source"),
            "header_info": data.get("header_data"),
            "created": datetime.now()
        }
        self.connection.execute(sql_text(sql), params)
        # create speed dial profile
        self.create_speed_dial_profile(data)
        return True
    except Exception as exk:
        self.logger.error(
            "Exception on create speed dial history ::: {0}".format(exk))
        return False