def delete_package(pkg_id):
    """Remove a VNF Package from the local catalog and its repository files.

    :param pkg_id: VNF Package identifier
    :return: a JSON with status and error reason if it occurs
    """
    database = DatabaseConnection()

    # Refuse removal while any VNF Instance still references this package.
    _, data = database.list_vnf_instances(vnf_pkg_id=pkg_id)
    if len(data) > 0:
        return jsonify({'status': ERROR,
                        'reason': "A VNF Instance depends on this VNF Package!"})

    resp, catalog = database.list_catalog(pkg_id)
    if resp == OK:
        # BUG FIX: only read the catalog entry after confirming the lookup
        # succeeded. The original indexed catalog[0] unconditionally, which
        # raised IndexError for unknown packages instead of returning the
        # 404-style error below.
        pkg_dir_id = catalog[0]['dir_id']
        result = database.remove_vnf_package(pkg_id)
        if result == OK:
            try:
                rmtree('repository/' + pkg_dir_id)
            except OSError as e:
                logger.error("%s '%s'", e.strerror, e.filename)
                return jsonify({'status': ERROR, 'reason': e.strerror})
            return jsonify({'status': OK})

    return jsonify({'status': ERROR, 'reason': status[404]})
def choosing_channel(self, bot, update, user_data):
    """Conversation step: validate the IRC channel name the user typed.

    Stores the channel in *user_data* and routes to either the secondary
    bridge offer (channel already bridged) or the connect confirmation.
    """
    if not update.message.text.startswith('#'):
        update.message.reply_text(
            'IRC channels have to begin with a #. Try again.')
        return self.CHOOSING_CHANNEL

    user_data['channel'] = update.message.text

    if self.bridge_exists(update.message.chat.id):
        # BUG FIX: the original referenced the global name `telegramBot`
        # here although the handler receives the bot as the `bot` argument
        # (cf. the other handlers) — NOTE(review): confirm no module-level
        # `telegramBot` was intended.
        bot.sendMessage(
            chat_id=update.message.chat_id,
            text="This Group is already part of a bridge. Abort!")
        return ConversationHandler.END

    # If the channel is already part of a bridge, offer a secondary,
    # one-way (tg -> irc) bridge so media can be posted without
    # disturbing the whole primary group.
    dbc = DatabaseConnection()
    if dbc.irc_channel_alredy_bridged(user_data['channel'],
                                      user_data['irc_server_description']):
        # Typo fix in the user-visible message: "party" -> "part".
        update.message.reply_text(
            'This channel is already part of a bridge. Would you like to create a secondary bridge?'
            ' This way you can post stuff in the IRC without disturbing the primary TG Group.',
            reply_markup=self.yes_no_keyboard())
        return self.CREATE_SECONDARY_BRIDGE

    update.message.reply_text(
        'Thanks. Should i try to join %s on %s?' %
        (user_data['channel'], user_data['irc_server_description']),
        reply_markup=self.yes_no_keyboard())
    return self.CONNECT_CHANNEL
def list_vnf_pkg_cps(vnf_pkg_id, internal=False):
    """Retrieve all connection points of a VNF Package stored in repository

    If using an internal call directly to this function use the :param internal
    param as True. In internal call it returns just a dict. However in external
    calls it returns an HTTP Response json dict.

    :param vnf_pkg_id: VNF Package ID
    :param internal: distinguishes internal (function) calls from external
                     (HTTP) calls to the server; defaults to False
    :return: a dict with all connection points
    """
    database = DatabaseConnection()
    _, data = database.list_catalog(vnf_pkg_id)
    dir_id = data[0]['dir_id']

    # The VNFD of each package lives in its own repository directory.
    with open('repository/%s/vnfd.json' % dir_id) as vnfd_file:
        vnfd_data = json.loads(vnfd_file.read())

    node_templates = vnfd_data['vnfd']['attributes']['vnfd']['topology_template']['node_templates']

    cps = {}
    for name, template in node_templates.items():
        if 'tosca.nodes.nfv.CP' not in template['type']:
            continue
        virtual_link = template['requirements'][0]['virtualLink']['node']
        network_name = node_templates[virtual_link]['properties']['network_name']
        cps[name] = {'virtual_link': virtual_link,
                     'network_name': network_name}

    return cps if internal else jsonify(cps)
def irc_token_received(self, from_user, token):
    """Handle a validation token arriving from the IRC side of the bridge."""
    if self.validated:
        # The primary bridge is already validated, so the token can only
        # belong to one of the pending secondary bridges.
        for secondary in self.secondary_bridges.values():
            if secondary.validated or secondary.token != token:
                continue
            secondary.validated = True
            dbc = DatabaseConnection()
            dbc.set_secondary_bridge_validation(secondary)
            self.send_to_irc('system', 'Secondary bridge is now validated!')
            return
        self.send_to_irc('system', 'Why? The Bridge is already validated')
        return

    # Primary bridge validation: only a channel operator may confirm it.
    if self.irc_channel.is_user_op(from_user) and self.token == token:
        self.set_validation()
        self.send_to_irc(
            'system', 'Successfully registered the new bridge. Have fun!')
        self.send_to_tg(
            'system', 'Successfully registered the new bridge. Have fun!')
    else:
        self.send_to_irc(
            'system', 'The token is invalid and/or the sender is no operator.')
class UserServices():
    """Account-related lookups backed by the shared database connection."""

    def __init__(self):
        self.db = DatabaseConnection()
        self.collection = "users"  # collection holding user documents

    def authenticate(self, username, password):
        """Return True when *username* exists and *password* matches.

        NOTE(review): passwords are compared in plain text here — the stored
        value appears to be unhashed; worth confirming and fixing upstream.
        """
        user = self.db.findOne(self.collection, {"username": username})
        return user is not None and user["password"] == password

    def authorize(self, sid):
        """Return the username bound to session *sid*, or False if unknown."""
        session = self.db.findOne("sessions", {"sid": sid})
        if session:
            return session["username"]
        return False

    def getFirstName(self, username):
        """Return the first name of *username*.

        Raises if the user does not exist (unchanged original behaviour).
        """
        user = self.db.findOne(self.collection, {"username": username})
        return user["firstName"]
def __init__(self, conffile=None):
    """Open the backing MySQL connection and sanity-check it."""
    self._db = DatabaseConnection(conffile)
    # Self check: a trivial query proves the link works and logs the
    # server version for diagnostics.
    version = self._db.query('SELECT VERSION()')[0][0]
    satt_log('Connected to database: MySQL version {0}'.format(version))
    self._rating_methods = RatingMethod(self._db.query)
def liveGames():
    '''Render an overview page of the currently live game rounds.'''
    db = DatabaseConnection(app.config["DB_PATH"])
    live_rounds = db.getLiveGames()
    return flask.render_template("livegames.html",
                                 liveGameRounds=live_rounds,
                                 noRounds=not bool(live_rounds))
def run(self):
    # Boot the game server (Python 2 code): announce startup, register the
    # accepted WZ data-path spellings, force-reset stale login flags left by
    # a previous run, then bring the world online.
    print "starting server...."
    # Accepted capitalisations of the wz resource directory.
    self.properties['wzpath'] = ['wz', 'WZ', 'Wz', 'wZ']
    connection = DatabaseConnection.getConnection()
    cursor = connection.cursor()
    # Nobody can be logged in while the server is down; clear leftovers so
    # players are not locked out after a crash.
    DatabaseConnection.execute(cursor, "UPDATE accounts SET loggedin = 0")
    print "Loading " + ServerConstants.SERVERNAME
    World.init()
def instantiate_vnf(vnf_pkg_id):
    """Instantiates a given VNF in NFVO.

    :param vnf_pkg_id: VNF Package identifier in the local catalog
    :return: flask.Response: OK, vnfd_id, vnf_id and vnf_ip if success,
             or ERROR and its reason if not
    """
    database = DatabaseConnection()
    resp, catalog = database.list_catalog(vnf_pkg_id=vnf_pkg_id)
    if resp != OK:
        # BUG FIX: the original built this response but never returned it,
        # falling through to an IndexError on the empty catalog below.
        return jsonify({'status': ERROR, 'reason': 'VNF Package not found!'})

    dir_id = catalog[0]['dir_id']
    vnfd_name = catalog[0]['vnfd_name']
    vnf_type = catalog[0]['vnf_type']

    vnfd_path = 'repository/%s/vnfd.json' % dir_id
    with open(vnfd_path) as vnfd_file:
        vnfd_data = vnfd_file.read()

    vnfd_data = json.loads(vnfd_data)
    vnfd_data['vnfd']['name'] = vnfd_name

    if vnf_type == CLICK_VNF:
        # Click VNFs also carry a function file that must be sent to NFVO.
        function_path = 'repository/%s/vnf.click' % dir_id
        with open(function_path) as function_file:
            function_data = function_file.read()
        response = tacker_agent.vnf_create(vnfd_data, unique_id(), function_data)
    else:
        response = tacker_agent.vnf_create(vnfd_data, unique_id())

    if response['status'] != OK:
        return jsonify(response)

    db_res, data = database.insert_vnf_instance(vnf_pkg_id,
                                                response['vnfd_id'],
                                                response['vnf_id'])

    # Rollback actions if database inserting fails
    if db_res != OK:
        error_message = 'Database error: %s' % data
        logger.error("Executing rollback actions...\n%s", error_message)
        resp_delete = tacker_agent.vnf_delete(response['vnf_id'])
        if resp_delete == OK:
            logger.info("Rollback done!")
        else:
            # BUG FIX: str.join returns a new string; the original discarded
            # the result, losing the delete-failure reason from the message.
            error_message = ' '.join([error_message, resp_delete['reason']])
            logger.error(error_message)
        return jsonify({'status': ERROR, 'reason': error_message})

    # return instantiated vnf data
    return jsonify(response)
def destroy_vnffg(vnffg_id):
    """Destroy the VNFFG and its VNFs.

    This function destroys the VNFFG and its VNFFGDs, and also all the VNFs
    and its VNFDs that are specified in the VNFFG.

    :param vnffg_id: the NFVO unique identifier of the VNFFG
    :return: OK if succeed, or ERROR and its reason if not
    """
    database = DatabaseConnection()

    state, data = tacker_agent.vnffg_show(vnffg_id)
    if state != OK:
        return jsonify({'status': state, 'reason': data})

    vnffgd_id = data['vnffgd_id']
    # Every VNF referenced by the forwarding graph must be destroyed too.
    vnffg_vnfs = list(data['vnf_mapping'].values())

    # destroying VNFFG
    resp, data = tacker_agent.vnffg_delete(vnffg_id)
    if resp != OK:
        # BUG FIX: the original tested the stale `state` variable here (and
        # after vnffgd_delete below), so delete failures were silently
        # ignored.
        return jsonify({'status': resp, 'reason': data})

    # remove SFC_Instance from database
    resp, sfc_instance = database.list_sfc_instances(vnffg_id=vnffg_id)
    if resp == OK and len(sfc_instance) > 0:
        database.remove_sfc_instance(sfc_instance[0]['_id'])

    # TODO: How many time we should wait before remove the VNFFGD?
    time.sleep(2)

    # destroying VNFFGD
    resp, data = tacker_agent.vnffgd_delete(vnffgd_id)
    if resp != OK:
        return jsonify({'status': resp, 'reason': data})

    # destroying VNFs, collecting per-VNF failures into one message
    message = ''
    for vnf_id in vnffg_vnfs:
        vnf_data = destroy_vnf(vnf_id)  # returns a flask.Response object
        vnf_data = json.loads(vnf_data.response[0])  # parsing from string to json
        if vnf_data['status'] != OK:
            message = ' '.join([message, '\nVNF id %s: ' % vnf_id,
                                vnf_data['reason']])

    if message:
        return jsonify({'status': ERROR, 'reason': message})

    return jsonify({'status': OK})
def create_bridge(self, tg_group_id, irc_server_description, channel, token,
                  validated=False, save_to_db=False, only_add_channel=False):
    """Create a Bridge between a TG group and an IRC channel and register it."""
    connection = self.irc_connections.get_or_create_irc_server_by_description(
        irc_server_description)
    joined_channel = connection.join_channel(channel, only_add=only_add_channel)
    new_bridge = Bridge(joined_channel, tg_group_id, validated, self, token)
    self.bridges[tg_group_id] = new_bridge
    if save_to_db:
        DatabaseConnection().add_bridge(new_bridge)
    return True
def __init__(self, irc_channel, tg_group_id, validated, tg_bot, token):
    """Wire up a bridge between *irc_channel* and the TG group *tg_group_id*."""
    self.irc_channel = irc_channel
    self.irc_channel.bridge = self  # back-reference so IRC events reach us
    self.tg_group_id = tg_group_id
    self.validated = validated
    self.tg_bot = tg_bot
    self.token = token
    self.secondary_bridges = {}
    # Restore any secondary bridges persisted for this bridge.
    DatabaseConnection().get_all_secondary_bridges(self)
def create_app(config_name):
    """Build the FlaskAPI app for *config_name* and ensure DB tables exist.

    FlaskAPI provides an implementation of browsable APIs.
    """
    app = FlaskAPI(__name__)
    app_root = os.path.dirname(app.instance_path)
    app.config.from_object(application_config[config_name])
    app.config.from_pyfile(app_root + '/config.py')
    DatabaseConnection().create_tables()
    return app
def get_or_create_irc_server_by_description(self, description):
    """Return the cached IRC connection for *description*, creating it lazily."""
    try:
        return self.connections[description]
    except KeyError:
        pass
    # Unknown server: load its record and spin up a fresh connection thread.
    # NOTE(review): get_irc_server_by_descripton is the (misspelled) project API.
    irc_server = DatabaseConnection().get_irc_server_by_descripton(description)
    connection = IRCConnections.IRCConnection(irc_server, self._vectronconfig)
    self.connections[irc_server.description] = connection
    connection.start()
    logging.debug('New IRC Connection to %s', irc_server.description)
    return connection
def include_package():
    """Includes a VNF Package in the local catalog and in the file repository.

    :return: an 'OK' JSON status, or ERROR and its reason on failure
    """
    # TODO: consider also upload the VNF Image to the Openstack Tacker
    database = DatabaseConnection()

    vnfd = request.json['vnfd']
    descriptor = json.loads(request.json['descriptor'])

    category = descriptor['category']
    vnf_type = descriptor['type']
    vnf_description = descriptor['description']

    vnfd_status, data = vnfd_json_yaml_parser(vnfd)
    if vnfd_status == OK:
        vnfd = data
    else:
        return jsonify({'status': vnfd_status, 'reason': data})

    vnfd_name = vnfd['vnfd']['name']

    if vnf_type not in [CLICK_VNF, GENERAL_VNF]:
        return jsonify({'status': ERROR,
                        'reason': 'VNF Package unknown type: %s' % vnf_type})

    dir_id = unique_id()
    dir_name = 'repository/' + dir_id
    os.makedirs(dir_name)

    vnfd = json.dumps(vnfd, indent=2, sort_keys=True)
    # IMPROVEMENT: use context managers so the files are closed even when a
    # write raises (the original used bare open/close pairs).
    with open(dir_name + '/vnfd.json', 'w') as vnfd_file:
        vnfd_file.write(vnfd)

    if vnf_type == CLICK_VNF:
        # Click VNFs additionally carry the function file itself.
        vnf = request.json['vnf']
        with open(dir_name + '/vnf.click', 'w') as vnf_file:
            vnf_file.write(vnf)

    # TODO: needs to add a function name
    result = database.insert_vnf_package(category, vnfd_name, dir_id,
                                         vnf_type, vnf_description)
    if result == OK:
        return jsonify({'status': OK})
    # On failure insert_vnf_package returns a (status, reason) pair.
    return jsonify({'status': result[0], 'reason': result[1]})
def delete_bridge(self, bot, update, user_data):
    """Conversation step: delete this chat's bridge after a yes/no answer.

    Handles both primary and secondary bridges; ends the conversation in
    every branch except the fall-through on 'yes' with no known bridge
    (unchanged original behaviour).
    """
    # CONSISTENCY FIX: the original mixed `update.message.chat.id` and
    # `update.message.chat_id` (same value); use a single local name.
    chat_id = update.message.chat.id
    if update.message.text == 'yes':
        if self.bridge_exists(chat_id):
            dbc = DatabaseConnection()
            bridge = self.bridges[chat_id]
            dbc.delete_bridge(bridge)
            bridge.irc_channel.irc_server.irc_connection_thread.disconnect_channel(
                bridge.irc_channel.channel)
            del self.bridges[chat_id]
            update.message.reply_text('Done! The bridge is deleted.')
            return ConversationHandler.END
        elif self.secondary_bridge_exists(chat_id):
            dbc = DatabaseConnection()
            bridge = self.get_bridge_by_id(chat_id)
            secondary_bridge = bridge.secondary_bridges[chat_id]
            bridge.remove_secondary_bridge(secondary_bridge)
            dbc.delete_secondary_bridge(secondary_bridge, commit=True)
            update.message.reply_text('Done! The bridge is deleted.')
            return ConversationHandler.END
    elif update.message.text == 'no':
        update.message.reply_text('No? Ok...')
        return ConversationHandler.END
    else:
        update.message.reply_text('Error. This code should be unreachable')
        return ConversationHandler.END
def start_tg_bot():
    """Parse CLI arguments, restore persisted bridges and launch the TG bot."""
    parser = argparse.ArgumentParser(description='TG / IRC bridge bot')
    parser.add_argument('--config', metavar='path', type=str, required=True,
                        help='path to config file')
    parser.add_argument('--debug', dest='debug', action='store_true',
                        required=False, default=False,
                        help='Activate debugging output')
    cliargs = parser.parse_args()

    if cliargs.debug:
        logger.setLevel(logging.DEBUG)
        logger.info('Loglevel set to DEBUG')

    vectronconfig = load_from_file(cliargs.config)
    tg_bot = TGBot(vectronconfig)

    dbc = DatabaseConnection()
    dbc.create_tables()

    # Recreate every persisted bridge; only_add_channel avoids re-joining
    # channels, which the IRC connections handle themselves.
    for row in dbc.get_all_bridges():
        tg_bot.create_bridge(row['tg_id'],
                             row['irc_server_description'],
                             row['irc_channel'],
                             row['token'],
                             row['validated'],
                             only_add_channel=True)

    # delete old files if the data retention is > 0
    if vectronconfig['webserver_data_retention'] > 0:
        cron_delete = CronDelete(vectronconfig)
        cron_delete.start()

    tg_bot.start_webhook()
class Tasks:
    """Thin facade over the task table of the database."""

    def __init__(self):
        self.db = DatabaseConnection()

    def get_tasks(self):
        """Return every stored task."""
        return self.db.get_db_tasks()

    def add_task(self, task):
        """Persist a new task."""
        self.db.create_task(task)

    def clear_all(self):
        """Drop all task data."""
        self.db.drop_tables()
def start(self, telegramBot, update):
    """Entry point of the bridge-setup conversation (group creator only)."""
    chat = update.message.chat
    # Guard clauses: group chats only, creator only, no existing bridge.
    if chat.type != 'group':
        telegramBot.sendMessage(chat_id=update.message.chat_id,
                                text='You can only use me in a group!')
        return ConversationHandler.END
    if chat.get_member(update.message.from_user.id).status != ChatMember.CREATOR:
        telegramBot.sendMessage(
            chat_id=update.message.chat_id,
            text="Only the creator is allowed to configure me!")
        return ConversationHandler.END
    if self.bridge_exists(chat.id) or self.secondary_bridge_exists(chat.id):
        telegramBot.sendMessage(
            chat_id=update.message.chat_id,
            text="This Group is already part of a bridge. Abort!")
        return ConversationHandler.END

    servers = DatabaseConnection().get_all_irc_server_descriptions()
    update.message.reply_text(
        'Hi! I\'m BridgeBot. If you can, you should host your own BridgeBot. This way you can '
        'change the config for e.g. preventing files from being deleted. You can get the code on github: https://github.com/PostalBlab/VectronBot. '
        'If you want to use this Bot, i need some informations. You can cancel this dialogue with /cancel '
        'Here is a list of all public supported irc servers. Please tell me which one you want to use. '
        'If your prefered server is not in the list you can contact the maintainer @PostalDude. He can also add hidden servers where only you know '
        'the needed description:\n'
        + '\n'.join('- {}'.format(k) for k in servers))
    return self.CHOOSING_SERVER
def main():
    """Print average subreddit growth over the last 12 hours."""
    coin_name_array = util.read_subs_from_file(general["poloniex_file"])
    auth = util.get_postgres_auth()
    db = DatabaseConnection(**auth)

    # The subreddit name is the last element of each coin record.
    all_subreddits = [coin[-1] for coin in coin_name_array]

    start_time = datetime.datetime.utcnow() - datetime.timedelta(hours=12)
    end_time = datetime.datetime.utcnow()

    growths = average_growth(db, all_subreddits, start_time, end_time)
    print(growths)

    db.close()
def main():
    """CLI: print growth figures for subreddits with data in the last N hours."""
    parser = argparse.ArgumentParser(description="Simple Query")
    parser.add_argument('hours', metavar='h', type=float)
    args = parser.parse_args()

    db = DatabaseConnection(**util.get_postgres_auth())

    # Only consider subreddits that have data inside the requested window.
    cutoff = datetime.datetime.utcnow() - datetime.timedelta(hours=args.hours)
    subreddits = db.get_subreddits_with_data(cutoff)

    growths = get_growths(db, subreddits, datetime.datetime.utcnow(), args.hours)
    for growth in growths:
        print(growth)

    db.close()
def __init__(self, conffile=None):
    """Connect to the MySQL database and self-check the connection."""
    self._db = DatabaseConnection(conffile)
    # A trivial query both verifies the connection and records the server
    # version in the log.
    mysql_version = self._db.query('SELECT VERSION()')[0][0]
    satt_log('Connected to database: MySQL version {0}'.format(mysql_version))
    self._rating_methods = RatingMethod(self._db.query)
def rounds():
    '''Show rounds played on the server.

    Accepts optional `start`/`end` Unix timestamps as query parameters;
    defaults to the last seven days.
    '''
    start = flask.request.args.get("start")
    end = flask.request.args.get("end")
    if not start or not end:
        # Default window: the last seven days.
        start = datetime.datetime.now() - datetime.timedelta(days=7)
        end = datetime.datetime.now()
    else:
        # BUG FIX: query-string values are strings; fromtimestamp() needs a
        # number, so convert explicitly before use.
        start = datetime.datetime.fromtimestamp(float(start))
        end = datetime.datetime.fromtimestamp(float(end))
    db = DatabaseConnection(app.config["DB_PATH"])
    rounds = db.roundsBetweenDates(start, end)
    return flask.render_template("rounds.html", rounds=rounds)
class TaskhiveCategories(QObject):
    """QML-facing accessor that exposes task categories from the database."""

    def __init__(self):
        QObject.__init__(self)
        self.db = DatabaseConnection()

    @pyqtSlot(result=QVariant)
    def getCategories(self):
        """Return all categories stored in the database (invokable from QML)."""
        return self.db.getCategories()
def __init__(self):
    """Set up the MySQL-backed reporter and verify the DB connection."""
    BenchmarkReport.__init__(self)

    # use this to print out what is happening
    self._stdout = StdoutReporter()

    # Identifier shared by all results stored during this run.
    self.run_id = int(time())

    # Escape apostrophes so the params can be embedded in SQL literals.
    self.tool_params = '{0}'.format(configs.configs['params'])
    self.tool_params = self.tool_params.replace('\'', '\\\'')

    from database import DatabaseConnection
    self._db = DatabaseConnection()

    version = self._db.query('SELECT VERSION()')[0][0]
    satt_log('Connected to database: MySQL version {0}'.format(version))

    self._rating_methods = RatingMethod(self._db.query)
def get_vnf_package(vnf_id):
    """Retrieves a VNF Package stored in Catalog from a given NFVO VNF ID.

    :param vnf_id: NFVO VNF identifier
    :return: a JSON with the package on success, or ERROR and its reason
    """
    database = DatabaseConnection()
    res, data = database.list_vnf_instances(vnf_id=vnf_id)
    # if happens a database error
    if res != OK:
        return jsonify({'status': ERROR, 'reason': data})
    if not data:
        return jsonify({'status': ERROR,
                        'reason': 'VNF Package not found in Catalog!'})
    return jsonify({'status': OK, 'package': data[0]})
def __init__(self):
    # Create the QApplication first so that QThread, QTimer, etc. can be used
    self.qt_app = QApplication()
    # Tear down cleanly when the Qt event loop is about to exit.
    self.qt_app.aboutToQuit.connect(self.quit)
    # Order matters: the UI presumably uses the database connection, so the
    # connection is created first. TODO(review): confirm UserInterface reads it.
    self.database_connection = DatabaseConnection(self)
    self.user_interface = UserInterface(self)
    # Blocks here running the Qt event loop until the application quits.
    self.qt_app.exec_()
def maps():
    '''Show an overview of maps'''
    db = DatabaseConnection(app.config["DB_PATH"])
    # Very wide date window so effectively every stored round is included.
    start = datetime.datetime.now() - datetime.timedelta(days=4000)
    end = datetime.datetime.now()
    rounds = db.roundsBetweenDates(start, end)

    # One summary per distinct map, built from that map's rounds only.
    summaries = []
    for row in db.distinctMaps():
        map_name = row[0]
        rounds_with_map = [r for r in rounds if r.mapName == map_name]
        summaries.append(MapSummary.MapSummary(rounds_with_map))

    # Plus an aggregate summary over every round.
    overall = MapSummary.MapSummary(rounds)
    overall.mapName = "All Maps*"
    summaries.append(overall)

    summaries_filtered = filter(lambda x: x.mapName, summaries)
    return flask.render_template("maps.html", maps=summaries_filtered)
def choosing_server(self, bot, update, user_data):
    """Conversation step: validate the IRC server description the user picked."""
    user_data['tg_group_id'] = update.message.chat.id
    servers = DatabaseConnection().get_all_irc_server_descriptions(False)
    logger.debug(servers)
    if update.message.text not in servers:
        update.message.reply_text('Invalid Server. Try again!')
        return self.CHOOSING_SERVER
    user_data['irc_server_description'] = update.message.text
    update.message.reply_text(
        '%s it is! To which channel should i connect? e.g. #telegram'
        % update.message.text)
    return self.CHOOSING_CHANNEL
class PropertyService():
    """Property-related user queries backed by the shared database."""

    def __init__(self):
        self.db = DatabaseConnection()
        self.collection = "users"  # user documents live here

    def isUserVendor(self, username):
        """Return True when *username* has a vendor account.

        Raises if the user does not exist (unchanged original behaviour,
        which also assumed the lookup succeeds).
        """
        user = self.db.findOne(self.collection, {"username": username})
        return user['accountType'] == 'vendor'
def main():
    """Connect to the database and run area extraction for every configured
    area type, processed group by group for readable progress output."""
    global database

    # Read area type definition from JSON file
    area_type_groups = read_area_types()

    # Connect to database
    print(
        f"Connecting to database \"{DATABASE_NAME}\" at \"{DATABASE_HOST}\" as user \"{DATABASE_USER}\"..."
    )
    database = DatabaseConnection(host=DATABASE_HOST,
                                  database=DATABASE_NAME,
                                  user=DATABASE_USER,
                                  password=DATABASE_PASSWORD)
    print("Successfully connected")

    start_time = time.perf_counter()

    for area_type_group in area_type_groups:
        group_name = str(area_type_group[JSON_KEY_GROUP_NAME])
        print(f"Next group: \"{group_name}\"")
        # Run the extraction for every area type of this group.
        for area_type in area_type_group[JSON_KEY_GROUP_TYPES]:
            extract_area_type(area_type)
        print(f"Finished group \"{group_name}\"")

    elapsed_time = time.perf_counter() - start_time
    print(
        f"Simplification finished. Everything done. Took {elapsed_time:0.4} seconds"
    )

    database.disconnect()
def __init__(self):
    """Initialise the reporter: stdout mirror, run id, escaped tool params
    and a verified MySQL connection."""
    BenchmarkReport.__init__(self)

    # use this to print out what is happening
    self._stdout = StdoutReporter()

    # All results of this invocation share one run identifier.
    self.run_id = int(time())

    # replace apostrophes in tool_params so they survive SQL embedding
    self.tool_params = '{0}'.format(configs.configs['params'])
    self.tool_params = self.tool_params.replace('\'', '\\\'')

    from database import DatabaseConnection
    self._db = DatabaseConnection()

    db_version = self._db.query('SELECT VERSION()')[0][0]
    satt_log('Connected to database: MySQL version {0}'.format(db_version))

    self._rating_methods = RatingMethod(self._db.query)
class MysqlReporter(BenchmarkReport):
    # Benchmark reporter that stores results into a MySQL database while
    # mirroring progress to stdout.
    # NOTE(review): every query below is built with str.format on raw values
    # (escaped only for apostrophes) — SQL injection is possible if any input
    # is untrusted; confirm all inputs come from trusted config/benchmarks.
    # NOTE(review): the original source of this block was damaged in
    # extraction (breaks mid-string); string literals were reconstructed
    # best-effort and should be verified against the upstream file.

    def __init__(self):
        BenchmarkReport.__init__(self)

        # use this to print out what is happening
        self._stdout = StdoutReporter()

        # All results stored by this invocation share one run id.
        self.run_id = int(time())

        self.tool_params = '{0}'.format(configs.configs['params'])
        # replace apostrophes in tool_params
        self.tool_params = self.tool_params.replace('\'', '\\\'')

        from database import DatabaseConnection
        self._db = DatabaseConnection()

        # Self check: verify the connection by querying the server version.
        ver = self._db.query('SELECT VERSION()')[0][0]
        satt_log('Connected to database: MySQL version {0}'.format(ver))

        self._rating_methods = RatingMethod(self._db.query)

    def progress(self, progress):
        # we must redirect progress to stdout
        self._stdout.progress(progress)

    def _commit(self):
        self._db.commit()

    def _updateDb(self, rb):
        # Resolve (and create if needed) the tool row for this run;
        # returns (tool_id, year_id).
        def choose_tag():
            # Explicit tool-tag wins; fall back to the tool name.
            if configs.configs.has_key('tool-tag'):
                return configs.configs['tool-tag']
            else:
                return configs.configs['tool']

        ver = rb.versions.strip()

        q = """
        SELECT id FROM years WHERE year = '{0}';
        """.format(configs.configs['year']);
        res = self._db.query(q)
        if not res:
            err('Do not have year {0}. If this is not typo, '
                'update the database and benchmarks'.format(configs.configs['year']))

        year_id = res[0][0]

        # If tool that runs in this run is not known to database, add it
        q = """
        SELECT id FROM tools
        WHERE name = '{0}' and version = '{1}' and params = '{2}' and year_id = '{3}';
        """.format(configs.configs['tool'], ver, self.tool_params, year_id)
        res = self._db.query(q)
        if not res:
            q2 = """
            INSERT INTO tools
            (name, year_id, version, params, tag, note)
            VALUES('{0}', '{1}', '{2}', '{3}', '{4}', {5});
            """.format(configs.configs['tool'], year_id, ver,
                       self.tool_params, choose_tag(),
                       Empty2Null(configs.configs['note']))
            self._db.query(q2)

            # get new tool_id
            res = self._db.query(q)
            assert len(res) == 1

        tool_id = res[0][0]

        return tool_id, year_id

    def save_task(self, rb, cat_id):
        """ Save unknown task into the database """
        name = get_name(rb.name)
        cr = self._get_correct_result(name, rb)
        if cr is None:
            # Without a known correct result, record the result as unknown.
            msg = 'Couldn\'t infer if the result is correct or not, setting unkown'
            satt_log(msg)
            rb.output += msg
            rb.result = 'unknown ({0})'.format(rb.result)

        # create new task
        q = """
        INSERT INTO tasks (name, category_id, correct_result, property)
        VALUES('{0}', '{1}', '{2}', '{3}');
        """.format(name, cat_id, cr, None)
        self._db.query(q)

        # Re-select so the caller gets the freshly assigned id.
        q = """
        SELECT id, correct_result FROM tasks
        WHERE name = '{0}' and category_id = '{1}';
        """.format(name, cat_id)
        return self._db.query(q)

    def update_category(self, year_id, name):
        """ Create new category in the database """
        # create the a category in the database
        q = """
        INSERT INTO categories (year_id, name) VALUES ('{0}', '{1}');
        """.format(year_id, name)
        self._db.query(q)

        # return the new result
        q = """
        SELECT id, name FROM categories
        WHERE year_id = '{0}' and name = '{1}';
        """.format(year_id, name)
        return self._db.query(q)

    def done(self, rb):
        # print it after saving
        if not self._stdout.done(rb):
            # if there is a problem, the benchmark will run again, so do not
            # proceed further
            return False

        tool_id, year_id = self._updateDb(rb)

        q = """
        SELECT id, name FROM categories
        WHERE year_id = '{0}' and name = '{1}';
        """.format(year_id, rb.category)
        res = self._db.query(q)
        if not res:
            if configs.configs['save-new-tasks'] == 'yes':
                res = self.update_category(year_id, rb.category)
            else:
                rb.dumpToFile('Do not have given category')
                satt_log('^^ dumped to file (unknown category)')
                return True

        assert len(res) == 1
        if not len(res[0]) == 2:
            print(res[0])
        assert len(res[0]) == 2

        cat_id = res[0][0]
        cat_name = res[0][1]

        q = """
        SELECT id, correct_result FROM tasks
        WHERE name = '{0}' and category_id = '{1}';
        """.format(get_name(rb.name), cat_id)
        res = self._db.query(q)
        # we do not have such a task??
        if not res:
            if configs.configs['save-new-tasks'] == 'yes':
                res = self.save_task(rb, cat_id)
            else:
                rb.dumpToFile('Do not have given task')
                satt_log('^^ dumped to file (unknown task)')
                return True

        assert len(res) == 1
        task_id = res[0][0]
        correct_result = res[0][1]

        # replace ' by \' in output
        rb.output = rb.output.replace('\'', '\\\'')

        ic = is_correct(correct_result, rb.result)
        result = rb.result.lower()

        if rb.witness != '':
            wtns = rb.witness.strip()
            # replace ' even in witness, because it can contain
            # arbitrary text
            wtns = wtns.replace('\'', '\\\'')
        else:
            wtns = None

        if rb.witness_output != '':
            # FIXME we should limit the wintess_output size, otherwise we
            # get get some performance issues
            rb.witness_output = rb.witness_output.strip()
            rb.witness_output = rb.witness_output.replace('\'', '\\\'')

        q = """
        INSERT INTO task_results
        (tool_id, task_id, result, witness, is_correct, points,
         cpu_time, memory_usage, output, witness_output, run_id)
        VALUES('{0}', '{1}', '{2}', '{3}', '{4}', '{5}', '{6}', '{7}', {8}, '{9}', '{10}')
        """.format(tool_id, task_id, result, wtns, ic,
                   self._rating_methods.points(ic, rb.result, wtns, cat_name),
                   None2Zero(rb.time), None2Zero(rb.memory),
                   Empty2Null(rb.output), rb.witness_output, self.run_id)

        def _exception_handler(args, data):
            # A duplicate-key error means a result for this (tool, task)
            # pair was already stored; anything else is fatal.
            q, tool_id, task_id = data
            if (args[1].startswith('Duplicate entry')):
                if configs.configs['ignore-duplicates'] == 'yes':
                    satt_log('Already has this result for this tool, ignoring.')
                else:
                    err('Already has result of this benchmark for this tool.\n'
                        'It is only supported to have one result for each '
                        'benchmark and particular tool\n'
                        'If want ignore this behaviour use --ignore-duplicates.\n'
                        '(tool + version + params). You can delete the old result:\n'
                        ' $ ./db-cli \'DELETE from task_results WHERE tool_id={0}'
                        ' and task_id={1}\'\n'
                        'or you can delete all results for this tool:\n'
                        ' $ ./db-cli \'DELETE from tools WHERE id={0}\'\n'
                        .format(tool_id, task_id, tool_id))
            else:
                err('Failed querying db: {0}\n\n{1}'.format(args[1], q))

        self._db.query_with_exception_handler(q, _exception_handler,
                                              (q, tool_id, task_id))

        self._commit()

        return True

    def sendEmail(self, server, from_addr, to_addrs):
        # Summarise this run's stored results and mail them out.
        import smtplib
        from email.mime.text import MIMEText

        time_format = '%Y-%m-%d-%H-%S'
        raw_started_at = strptime(configs.configs['started_at'], time_format)
        started_at = strftime('%a %b %d %H:%M:%S %Y', raw_started_at)
        finished_at = strftime('%a %b %d %H:%M:%S %Y')

        text = """
This is automatically generated message. Do not answer.
=======================================================

Satt on tool {0} started at {1}, finished {2}
with parameters: {3}
on benchmarks from year {4}

Note: {5}

Results:
""".format(configs.configs['tool'], started_at, finished_at,
           configs.configs['params'], configs.configs['year'],
           configs.configs['note'])

        q = """
        SELECT result, is_correct, witness, count(*) FROM task_results
        WHERE run_id = {0}
        GROUP BY result, is_correct, witness""".format(self.run_id)
        res = self._db.query(q)
        if not res:
            err('No results stored to db after this run?')

        total = 0
        for row in res:
            result = row[0]
            # Annotate true/false verdicts with their correctness.
            if result == 'true' or result == 'false':
                if row[1] == 0:
                    result += ' incorrect'
                else:
                    result += ' correct'

            if not row[2] is None:
                text += '{0:<15} (witness {1}): {2}\n'.format(result, row[2], row[3])
            else:
                text += '{0:<15}: {1}\n'.format(result, row[3])

            total += row[3]

        text += '\nTotal number of benchmarks: {0}'.format(total)

        q = """SELECT tool_id FROM task_results WHERE run_id = {0}""".format(self.run_id)
        res = self._db.query(q)
        if not res:
            err('Failed querying db for tool\'s id')

        tool_id = res[0][0]
        text += '\n\nYou can check the results here:\n'
        text += 'http://macdui.fi.muni.cz:3000/tools/{0}'.format(tool_id)
        text += '\n\nHave a nice day!\n'

        msg = MIMEText(text)
        msg['Subject'] = 'Satt results from {0}'.format(started_at)
        msg['From'] = from_addr
        msg['To'] = '*****@*****.**'

        s = smtplib.SMTP(server)
        ret = s.sendmail(from_addr, to_addrs, msg.as_string())
        s.quit()

        # sendmail returns a dict of recipients that were refused.
        for r in ret:
            dbg('Failed sending e-mail to {0},'
                'err: {1}'.format(r[0], r[1]))
def main(seed_url=None, max_pages=settings.DEFAULT_MAX_PAGES):
    """
    Instantiate the provided seed and start scraping starting at its url.
    Stop scraping when max pages has been hit.

    :param seed_url: optional starting URL appended to the scrape queue
    :param max_pages: upper bound on pages to scrape
    """
    if seed_url is not None:
        urls_to_be_scraped.append(seed_url)

    for pending_url in urls_to_be_scraped:
        # BUG FIX: the limit parameter is `max_pages`; the original compared
        # against the undefined name `maxpages` (NameError at runtime).
        if Webpage.instanceID > max_pages:
            break
        # BUG FIX: scrape the URL taken from the queue; the original
        # re-instantiated the seed URL on every iteration.
        page = Webpage(url=pending_url)
        page.page_robot_scannable()
        if page.need_to_be_scraped is True:
            page.get_page()
            # NOTE(review): `parse_parge` looks misspelled, but it is the
            # Webpage API's method name — left untouched.
            page.parse_parge()
            page.inverted_index_page()
            page.set_page_scraped()
        else:
            page.set_page_scraped()
        # Be polite: throttle requests.
        sleep(settings.REQUEST_TIME_INCREMENT)

    # Eventually look into a matrix math library for pagerank calculations
    dictionary_of_outgoing_links = {}
    for item in scraped_urls:
        if item.pagehtml:
            dictionary_of_outgoing_links[item.url] = item.pagelinks
    page_rank_dictionary = page_rank(
        outgoing_links_to_pagerank(dictionary_of_outgoing_links),
        settings.PAGERANK_ITERATIONS)

    dbconnection = DatabaseConnection()
    dbconnection.connect()
    for item in scraped_urls:
        # REFACTOR look into doing a mass insert instead of looping per url.
        if item.pagehtml:
            to_insert = {'title': item.title,
                         'url': item.url,
                         'pagetext': item.pagetext,
                         'pagerank': page_rank_dictionary[item.url],
                         'date': datetime.datetime.now()}
            dbconnection.load_document(to_insert)
            # BUG FIX: insert_document takes the collection name as a string
            # (cf. the 'inverted_index' loop below); `scrape_data` was an
            # undefined name in the original.
            dbconnection.insert_document('scrape_data')
    for word in inverted_index:
        to_insert = {'word': word,
                     'date': datetime.date.today(),
                     'offsets': 'BLAH'}
        dbconnection.load_document(to_insert)
        dbconnection.insert_document('inverted_index')
class DatabaseProxy(object):
    """Thin query helper over DatabaseConnection for the satt schema.

    NOTE(review): queries are built with str.format, so callers must pass
    trusted values only — the SQL injection exposure is inherited from the
    original design.
    """

    def __init__(self, conffile=None):
        self._db = DatabaseConnection(conffile)
        # self check: verify the connection by querying the server version
        ver = self._db.query('SELECT VERSION()')[0][0]
        satt_log('Connected to database: MySQL version {0}'.format(ver))
        self._rating_methods = RatingMethod(self._db.query)

    def connection(self):
        """Expose the underlying database connection."""
        return self._db

    def commit(self):
        self._db.commit()

    def getYearID(self, year):
        """Return the id of *year*, or None when unknown."""
        q = """
        SELECT id FROM years WHERE year = '{0}';
        """.format(year)
        res = self._db.query(q)
        if not res:
            return None
        return res[0][0]

    def getToolID(self, tool, version, tool_params, year_id):
        """Return the id of the exact (tool, version, params, year), or None."""
        q = """
        SELECT id FROM tools
        WHERE name = '{0}' and version = '{1}'
              and params = '{2}' and year_id = '{3}';
        """.format(tool, version, tool_params, year_id)
        res = self._db.query(q)
        if not res:
            return None
        assert len(res) == 1
        return res[0][0]

    def getCategoryID(self, year_id, category_name):
        """Return the id of *category_name* within *year_id*, or None."""
        q = """
        SELECT id FROM categories
        WHERE year_id = '{0}' and name = '{1}';
        """.format(year_id, category_name)
        res = self._db.query(q)
        if not res:
            return None
        return res[0][0]

    def getTaskID(self, category_id, name):
        """Return the id of task *name* within *category_id*, or None."""
        q = """
        SELECT id FROM tasks
        WHERE name = '{0}' and category_id = '{1}';
        """.format(get_name(name), category_id)
        res = self._db.query(q)
        if not res:
            return None
        return res[0][0]

    def getTaskWithCorrectResult(self, category_id, name):
        """Return (task_id, correct_result) for *name* in *category_id*, or None.

        BUG FIX: the original referenced the undefined names `rb.name` and
        `cat_id`; it now uses its own parameters, mirroring getTaskID.
        """
        q = """
        SELECT id, correct_result FROM tasks
        WHERE name = '{0}' and category_id = '{1}';
        """.format(get_name(name), category_id)
        res = self._db.query(q)
        if not res:
            return None
        return (res[0][0], res[0][1])

    def hasTaskResult(self, task_id, tool_id):
        """Return True when a result for (task_id, tool_id) is stored."""
        q = """
        SELECT count(*) FROM task_results
        WHERE task_id = '{0}' and tool_id = '{1}';
        """.format(task_id, tool_id)
        res = self._db.query(q)
        if not res:
            return False
        return res[0][0] != 0