def do_push(self):
    logging.info('Starting push process')
    cursor = self._conn.cursor()
    since = (utils.read_settings(self._conn, 'last_update')
             .get('last_update', 0))
    cursor.execute('SELECT * FROM files WHERE server_id IS NULL')
    add_files = cursor.fetchall()
    cursor.execute('SELECT * FROM deleted WHERE del_time>?', [since])
    rem_files = cursor.fetchall()
    logging.info('Notifying server of %d new files and %d deleted ones'
                 % (len(add_files), len(rem_files)))
    for record in add_files:
        response = self.communicate({'ACTION': 'PUSH', 'TYPE': 'NEW'})
        cursor.execute('UPDATE files SET server_id=? WHERE id=?',
                       [int(response['ID']), record['id']])
        utils.push_file(record['path'], self._socket,
                        hash_code=record['hash'].decode('hex'))
        # Read off the server's reply; since this didn't go through the
        # communicate method we have to do it ourselves.
        next(self._responses)
    for record in rem_files:
        response = self.communicate({'ACTION': 'PUSH', 'TYPE': 'DELETE',
                                     'ID': record['server_id']})
    logging.info('...finished push process')

def do_pull(self):
    logging.info('Starting pull process')
    since = (utils.read_settings(self._conn, 'last_update')
             .get('last_update', 0))
    cursor = self._conn.cursor()
    response = self.communicate({'ACTION': 'PULL', 'SINCE': since})
    to_recv = parse.listify(parse.loads(response['CHANGES']))
    logging.info('Adding %d new files from server.' % len(to_recv))
    for x in to_recv:
        from_serv = parse.loads(x)
        sid = int(from_serv['ID'])
        logging.debug('Processing file update. SID: %d, type: %s'
                      % (sid, from_serv['type']))
        if from_serv['type'] == 'NEW':
            cursor.execute('SELECT 1 FROM files WHERE server_id=?', [sid])
            if cursor.fetchone():
                logging.warning('Server returned a file I already have, '
                                'ignoring and continuing pull process.')
                continue
            file_path, file_hash = self.pull_remote(sid)
            with open(file_path, 'rb') as fd:
                our_hash = utils.hash_file(fd)
            if our_hash.digest() != file_hash:
                raise Exception('MD5 digests did not match! Transmission '
                                'error suspected.')
            it_path = self.add_to_itunes(file_path)
            os.remove(file_path)
            record = utils.generate_file_info(it_path)
            record['server_id'] = sid
            utils.insert_file_record(record, self._conn)
            logging.debug('Successfully added file: %s'
                          % (os.path.split(it_path)[-1],))
        elif from_serv['type'] == 'DELETE':
            cursor.execute('SELECT * FROM files WHERE server_id=?', [sid])
            record = cursor.fetchone()
            if not record:
                logging.warning('Server sent a delete directive for a file '
                                'I don\'t have. Ignoring.')
                continue
            self.remove_from_itunes(sid)
            cursor.execute('DELETE FROM files WHERE server_id=?', [sid])
    self._conn.commit()
    logging.info('...finished pull process')

def push_command(self, command, session):
    cursor = self._conn.cursor()
    if command['TYPE'] == 'NEW':
        cursor.execute('INSERT INTO files (received) VALUES (?)',
                       [time.time()])
        sid = cursor.lastrowid
        resp = parse.dumps({'ACTION': 'HSUP', 'ID': sid, 'DONE': 0})
        logging.debug('CONT -> %s' % resp)
        self._send(resp + '\n')
        file_path = os.path.join(
            utils.read_settings(self._conn, 'storage_dir')['storage_dir'],
            '%d.mp3' % sid)
        digest = utils.pull_file(file_path, self._socket)
        our_digest = utils.hash_file(open(file_path, 'rb')).digest()
        if our_digest != digest:
            # Hashes disagree: drop the half-made record and ask the
            # client to retransmit.
            cursor.execute('DELETE FROM files WHERE id=?', [sid])
            resp = {'ACTION': 'ERROR',
                    'REASON': 'Hash mismatch, record revoked, retransmit'}
            self._conn.commit()
            return resp, session
        cursor.execute('UPDATE files SET path=?, hash=? WHERE id=?',
                       [file_path, digest.encode('hex'), sid])
        self._conn.commit()
        resp = {'ACTION': 'HSUP', 'DONE': 1}
        return resp, session
    elif command['TYPE'] == 'DELETE':
        sid = int(command['ID'])
        cursor.execute(
            'INSERT INTO deleted (file_id, del_time) VALUES (?, ?)',
            [sid, time.time()])
        cursor.execute('DELETE FROM files WHERE id=?', [sid])
        resp = {'ACTION': 'HSUP', 'DONE': 1}
        return resp, session
    else:
        resp = {'ACTION': 'ERROR',
                'REASON': 'Unknown PUSH type: %s' % command['TYPE']}
        return resp, session

def walk_parses(function, data_type=DataType.TRAIN):
    settings = utils.read_settings()
    path = settings.get('paths', 'dataParsed')
    leave_out_dirs = []
    if data_type == DataType.TRAIN:
        # Directories 22-24 are held out of the training walk.
        leave_out_dirs = [
            os.path.join(path, dir_nb) for dir_nb in ('22', '23', '24')
        ]
    for r, ds, fs in os.walk(path):
        if r in leave_out_dirs:
            continue
        print r
        ds.sort()
        fs.sort()
        for name in fs:
            f_path = os.path.join(r, name)
            with open(f_path, 'r') as f:
                for l in f:
                    t = Tree.from_string(l)
                    function(t)

""" Simple script to simulate statistics sending of Minecraft mod """ import requests import json from random import randint from utils import read_settings settings = read_settings() url = "http://{}:{}/upload_json/".format(settings["server"]["address"], settings["server"]["port"]) headers = {"Content-type": "application/json", "Accept": "text/plain"} f = open("stats.json", 'r') stats = json.loads(f.read()) data = { "session_id": 1234, "world": "Volcano_TEST", "round": randint(1,10), "player": randint(1,20), "checkpoints": "test", "position_over_time": "test", "solution": "solution 1", "score": 100, "stats": json.dumps(stats) } r = requests.post(url, data=json.dumps(data), headers=headers)
import numpy as np
import matplotlib.pyplot as plt

import utils
import contraction
from FPI_2P import FPI_2P_Stgy

plt.figure(figsize=(utils.xy_x, utils.xy_x))
settings = utils.read_settings()

# run the FPI
fpi = FPI_2P_Stgy([settings.str_row_init_l[0], settings.str_col_init_l[0]],
                  [settings.payoff_matrix_row, settings.payoff_matrix_col])
if settings.display_game:
    fpi.print_game()
fpi.converge(settings.rate, settings.iters)
FPI_2P_Stgy.display_eqpt(fpi.eqpt())
if settings.use_LH:
    fpi.lemke_howson()

stats = fpi.get_stats()
str_row_l = stats['str_row_l']
str_col_l = stats['str_col_l']

# Distance between each pair of successive strategy profiles...
d_l = []
for i in range(1, settings.iters):
    tuple_i = (str_row_l[i], str_col_l[i])
    tuple_i_minus_1 = (str_row_l[i - 1], str_col_l[i - 1])
    d_l.append(contraction.distance_function_1(tuple_i_minus_1, tuple_i))

# ...and the ratio of consecutive distances.
q_l = []
for i in range(1, len(d_l)):
    q_l.append(d_l[i] / d_l[i - 1])

from flask import Flask
from sqlalchemy import func, desc
import re
import time
from random import randint
from threading import Thread, Lock

# For py2exe to correctly import jinja2.
import jinja2.ext

from database import db_session
from models import Session, Test, Stat, Item, Connection, Player
import utils

# Load the general settings.
settings = utils.read_settings()

app = Flask(__name__)
app.secret_key = settings["flask"]["secret_key"]
app.debug = settings["flask"]["debug"]
app.ext = settings["flask"]["ext"]

# Lock used in the identification phase.
app.lock_players = Lock()
# Lock used in the connection phase.
app.lock_connections = Lock()

#########################################
# Players identification and connection #

def admin_toggle_enable_emails():
    settings = read_settings()
    # The flag is persisted as the string "True"/"False"; comparing the
    # stored value against "True" flips it.
    settings["emails_enabled"] = (settings.get("emails_enabled", "False")
                                  != "True")
    write_settings(settings)
    return redirect(url_for("admin_index"), code=302)

def admin_index():
    emails_enabled = (read_settings().get("emails_enabled", "False")
                      == "True")
    return render_template("admin.html",
                           emails_enabled=emails_enabled,
                           user_groups=get_current_user_groups())

def reconcile(path, conn):
    """
    Given a path and a connection to a DB, alter the DB to reflect the
    present structure of the directory.
    """
    cursor = conn.cursor()
    added, removed = scan(path, conn)
    for new_path in added:
        logging.info("Adding file: %s" % new_path)
        utils.arrow(new_path,
                    (utils.generate_file_info),
                    (utils.insert_file_record, conn))
    for record in removed:
        logging.info("Recording as gone: %s" % record['path'])
        cursor.execute(('INSERT INTO deleted (del_time, server_id, path) '
                        'VALUES (?, ?, ?)'),
                       [time.time(), record['server_id'], record['path']])
        cursor.execute('DELETE FROM files WHERE id=?', [record['id']])
    conn.commit()
    cursor.close()


if __name__ == '__main__':
    conn = utils.get_client_connection('yasaclient.db')
    lib_dir = utils.read_settings(conn, 'lib_dir').get('lib_dir')
    reconcile(lib_dir, conn)

""" Creates the necessary variables for database interaction Database settings are defined in settings.yml """ from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker from sqlalchemy.ext.declarative import declarative_base from utils import read_settings db = read_settings()["database"] engine = create_engine(db, convert_unicode=True) Base = declarative_base(engine) db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine)) Base.query = db_session.query_property()