def generate_train_dataset():
    """Build and persist the training dataset arrays.

    Shuffles the training images, extracts their filenames and one-hot
    encoded font labels, and saves both arrays under data/train/.
    """
    client = DBClient()
    fonts = client.all_fonts()
    images = client.train_images()
    random.shuffle(images)

    filenames = [image['filename'] for image in images]
    # Font ids are 1-based in the DB; shift to 0-based class indices.
    label_ids = [image['font'] - 1 for image in images]
    labels = to_categorical(label_ids, num_classes=len(fonts))

    np.save('data/train/x_filenames.npy', filenames)
    np.save('data/train/y_labels.npy', labels)
def main():
    """Prompt for column names and remove them from the seismic collection."""
    columns = input("Remove columns: ").split()
    db = DBClient()
    db.connect_db(MONGO_DB_HOST, MONGO_DB_USER, MONGO_DB_PASSWORD)
    db.set_db(DATABASE_NAME)
    db.set_collection(BASE_SEISMIC_COLLECTION)
    db.unset(*columns)
def get_client_and_plot():
    """Create a connected DB client and a seismic plot, returning both."""
    db = DBClient()
    plot = SeismicPlot()
    connect(db)
    return db, plot
def check_all_time_highs(data, log_to_slack=True):
    """Compare current coin prices against stored all-time highs.

    For every tracked coin whose current price exceeds its stored maximum,
    update the stored value and announce the new all-time high.

    :param data: iterable of coin dicts with 'symbol' and 'price_usd' keys
    :param log_to_slack: post announcements to Slack when True, else print
    """
    db_client = DBClient()
    all_max_vals = db_client.get_all_max_vals()
    selected_coins = {max_val[1] for max_val in all_max_vals}
    current_val_map = {
        coin['symbol'].lower(): coin['price_usd']
        for coin in data
        if coin['symbol'].lower() in selected_coins
    }
    for coin_id, symbol, old_ath in all_max_vals:
        # BUG FIX: a tracked coin may be absent from this batch of market
        # data; direct indexing previously raised KeyError.
        price = current_val_map.get(symbol)
        if price is None:
            continue
        current_value = float(price)
        if current_value > float(old_ath):
            db_client.update_max_value(coin_id, current_value)
            message = "<!channel> {} is at its all time high at {}!".format(
                symbol, str(current_value))
            if log_to_slack:
                slack.chat.post_message(channel_id, message)
            else:
                print(message)
def main():
    """Connect to the seismic collection, refresh it, and rebuild the model."""
    db = DBClient()
    db.connect_db(MONGO_DB_HOST, MONGO_DB_USER, MONGO_DB_PASSWORD)
    db.set_db(DATABASE_NAME)
    db.set_collection(BASE_SEISMIC_COLLECTION)
    update_info(Parser(), db)
    generate_learn_model(db)
def check_percent_change(data):
    """Announce coins whose price moved more than 10% in the past 24 hours.

    :param data: iterable of coin dicts with 'symbol' and
        'percent_change_24h' keys
    """
    db_client = DBClient()
    all_max_vals = db_client.get_all_max_vals()
    selected_coins = {max_val[1] for max_val in all_max_vals}
    current_val_map = {
        coin['symbol'].lower(): coin['percent_change_24h']
        for coin in data
        if coin['symbol'].lower() in selected_coins
    }
    # Iterate key/value pairs directly instead of re-indexing by key,
    # and convert to float once instead of per comparison.
    for coin, percent in current_val_map.items():
        change = float(percent)
        if change > 10:
            slack.chat.post_message(
                channel_id,
                '<!channel> {} increased by {}% in the past 24 hours.'.format(
                    coin, percent))
        elif change < -10:
            slack.chat.post_message(
                channel_id,
                '<!channel> {} decreased by {}% in the past 24 hours.'.format(
                    coin, percent))
def main():
    """Prompt for a date range and update seismic info for that span."""
    client = DBClient()
    client.connect_db(MONGO_DB_HOST, MONGO_DB_USER, MONGO_DB_PASSWORD)
    client.set_db(DATABASE_NAME)
    client.set_collection(BASE_SEISMIC_COLLECTION)
    parser = Parser()
    # BUG FIX: start prompt previously said "yyyy-mm-nn"; both prompts now
    # use the yyyy-mm-dd format the parser expects.
    start = input("Start (yyyy-mm-dd): ")
    end = input("End (yyyy-mm-dd): ")
    update_info(parser, client, start, end)
def generate_training_data():
    """Render 1200 random-text samples per font (600 lowercase-only, 600
    mixed-case), upload each one, and register it in the database.

    Progress is printed per font with a wall-clock timestamp.
    """
    client = DBClient()
    fonts = client.all_fonts()
    for i, font in enumerate(fonts):
        # Same pipeline for both casings; the two loops were previously
        # duplicated verbatim except for the lowercase flag.
        for lowercase in (True, False):
            for _ in range(600):
                random_text(font=font['filename'], lowercase=lowercase)
                uid = upload_train_data('data/pil/random_text.png')
                client.add_image(f'train/{uid}.jpg', font['id'])
        current_time = datetime.now().strftime("%H:%M:%S")
        print(f"#{i}: {font['name']} done at {current_time}.")
# Executable for Discord Interface import re import discord from discord.ext import commands from db import DBClient from core.command import run_command from util.config import TOKEN MAX_MSG_LENGTH_CHARACTERS = 2000 bot = commands.Bot(command_prefix='!') client = DBClient() def _get_message(ctx): """ Gets the command message content and strips off the command itself. :param ctx: Discord Context of command message :return: Message string only """ m = ctx.message.content regex = '\\' + str(bot.command_prefix) + '\\w+' return re.sub(re.compile(regex), '', m, 1).strip() def _get_attachment(ctx): """ Gets the FIRST attachment of a message and returns
def test_db_connect(self):
    """A DB client can be constructed and the max-val table is seeded with 5 rows."""
    client = DBClient()
    self.assertTrue(client)
    self.assertEqual(len(DBClient().get_all_max_vals()), 5)
# One-off setup script: create the max-value tables and seed them with
# initial data. Intended to be run once before starting the watcher.
from db import DBClient

db_client = DBClient()
db_client.create_max_val_tables()
db_client.seed_max_values()
app = Flask(__name__)


@app.route('/product')
def list_products():
    """Return the product list, annotating each product with the caller's
    discount where the discount service can provide one.

    Discount lookup failures are logged and ignored so a flaky discount
    service never blocks the product listing.
    """
    product_list = app.db.fetch_products()
    if product_list is None:
        return {"data": product_list}
    for product in product_list:
        try:
            product['discount'] = app.discount.verify(
                request.headers['X-USER-ID'], product['id'])
        except Exception as err:
            # BUG FIX: previously logged the undefined name `inst`, which
            # raised NameError inside the handler; also fixed "ocurred" typo.
            app.logger.exception(f'An exception occurred: {err}')
    return {
        "products": product_list,
    }


if __name__ == '__main__':
    args = service_cmd().parse_args()
    app.db = DBClient(args.graphql_server, app.logger)
    app.discount = DiscountClient(args.discount_server, app.logger)
    app.run(host=args.host, port=args.port, debug=args.debug)
def step_impl(context, url):
    # Behave step: attach a DB client for the given URL to the scenario
    # context for later steps to use. The second DBClient argument is
    # intentionally None here — presumably optional credentials/logger;
    # TODO(review): confirm against DBClient's signature.
    context.client = DBClient(url, None)
def load_fonts_into_schema():
    """Rebuild the font schema and bulk-load every font found on disk."""
    db = DBClient()
    db.drop_schema()
    db.create_schema()
    db.bulk_add_fonts(fonts=read_all_fonts(root_dir='data/fonts'))
# CLI wiring for the indexer: three sub-commands (run / query / drop).
# NOTE(review): `parser`, `subparsers`, and `run` are defined elsewhere
# in the file — this chunk only adds sub-parsers and dispatches.
run_parser = subparsers.add_parser(
    'run', help='run indexer'
)
run_parser.add_argument(
    '-t', '--threads',
    dest='threads',
    action='store',
    type=int,
    help='number of threads to start',
    default=5
)
query_parser = subparsers.add_parser(
    'query', help='make a query to indexer'
)
query_parser.add_argument(
    '-l', '--limit',
    dest='limit',
    action='store',
    type=int,
    help='how many words to print',
    default=10
)
drop_parser = subparsers.add_parser(
    'drop', help='drop indexer database'
)
# parse_known_args tolerates extra arguments; `other_params` is unused here.
params, other_params = parser.parse_known_args()
# Dispatch on the selected sub-command; no sub-command prints help.
if params.action == "query":
    indexer = WordIndexer()
    result = indexer.query(limit=params.limit)
    print(json.dumps(result, indent=4))
elif params.action == "run":
    run(threads=params.threads)
elif params.action == "drop":
    client = DBClient()
    client.drop_database(client.db_name)
else:
    parser.print_help()
def __init__(self, modelId):
    """Store the model identifier and obtain a DB client via the factory."""
    self.modelId = modelId  # identifier of the model this instance serves
    # DBClient.create() is a factory — presumably returns a shared/configured
    # client; TODO(review): confirm whether instances share a connection.
    self.client = DBClient.create()
import time sys.path.append(env.infra_path) from mq import MQClient from db import DBClient import keepalive import mlog import logging import threading mlog.configLoggers(['main', 'mq'], env.logs_folder, env.debug_mode) logger = logging.getLogger('main') try: mqc = MQClient(env) dbc = DBClient(env) def handleKeepAlive(kadata): comp = kadata['component'] upd = {'name': comp, 'lts': time.time()} dbc.db.monitor.update_one({'name': comp}, {'$set': upd}, upsert=True) def pingDB(): while True: if dbc.ping(): keepalive.beat(mqc, 'DB') time.sleep(10) mqc.on_topic('keepalive', handleKeepAlive) dbThread = threading.Thread(target=pingDB)
def generate_font_samples():
    """Render one mixed-case sample image per font into data/samples/."""
    client = DBClient()
    for font in client.all_fonts():
        random_text(font=font['filename'], lowercase=False)
        sample_name = stringcase.snakecase(font["name"])
        shutil.copyfile('data/pil/random_text.png',
                        f'data/samples/{sample_name}.jpg')