def get_good_etalons(weights, tr_map, sim_map):
    """Read tab-separated etalon weight pairs from the *weights* file and
    keep those whose both nouns are translatable and present in *sim_map*.

    Each kept pair is returned as a tuple:
    (n1, translation1, n2, translation2, weight, approx, deviation)
    where deviation uses a decimal comma (spreadsheet-friendly).

    Fixes vs. previous version: the weights file is closed via a context
    manager, the regex is a raw string, and the "availale" typo in the
    summary message is corrected.
    """
    with open(weights, 'r') as fh:
        wg = fh.read()
    good_wgs = []
    for line in wg.split("\n"):
        if line == "":
            break  # stop at the first blank line (original behavior)
        n1, n2, w = line.split("\t")
        # keep only the leading token of the weight field
        w = re.sub(r'\s.*', '', w)
        if n1 in tr_map and n2 in tr_map:
            tr1 = tr_map[n1].decode('utf8')
            tr2 = tr_map[n2].decode('utf8')
            n1_d = util.digest(tr1)
            n2_d = util.digest(tr2)
            if n1_d == n2_d:
                continue  # a noun paired with itself carries no signal
            if n1_d not in sim_map:
                print(u"not found '%s' at sim_map" % tr1)
                continue
            if n2_d not in sim_map[n1_d]:
                print(u"not found '%s' at sim_map" % tr2)
                continue
            dev = str(float(w) - float(sim_map[n1_d][n2_d]))
            approx = str(sim_map[n1_d][n2_d])
            good_wgs.append(
                (n1, tr1, n2, tr2, w, approx, dev.replace('.', ',')))
    print("Etalon pairs available: %s" % len(good_wgs))
    return good_wgs
def process_game(self, game):
    """Package *game* into a torrentzipped archive, record its files and
    revision in the database, and move the archive into DIST_DIR.

    Fix: the scratch directory from tempfile.mkdtemp() previously leaked
    on the "no change" early return and on IntegrityError — cleanup now
    runs in a ``finally`` block on every exit path.
    """
    tmp = tempfile.mkdtemp()
    try:
        build_dir = os.path.join(tmp, 'build')
        dist = os.path.join(tmp, 'dist.zip')
        c = self.db.cursor()
        c.execute(
            'SELECT revision, sha256, title FROM game WHERE id = ? ORDER BY revision DESC LIMIT 1',
            (game.uid, ))
        # Fall back to revision 1 / no prior hash for a brand-new game.
        revision, prev_sha256, prev_title = c.fetchone() or (1, None, None)
        os.mkdir(build_dir)
        shutil.move(game.content_path, os.path.join(build_dir, 'content'))
        sha256 = create_torrentzip(game.uid, game.platform, build_dir, dist)
        outfile = os.path.join(DIST_DIR, f'{game.uid}.zip')
        shutil.move(dist, outfile)
        if prev_sha256:
            if prev_sha256 == sha256:
                pcolor('green', 'no change')
                return  # identical content — nothing to record
            revision += 1
        # Record every file of the build with its hashes and size.
        for root, _, filenames in os.walk(build_dir):
            for fname in filenames:
                path = os.path.join(root, fname)
                rel = os.path.relpath(path, build_dir).replace(os.path.sep, '/')
                crc = util.crc32(path)
                md5 = util.digest(path, hashlib.md5())
                sha = util.digest(path, hashlib.sha1())
                size = os.stat(path).st_size
                self.db.execute('INSERT INTO file VALUES (?,?,?,?,?,?)',
                                (sha256, rel, size, crc, md5, sha))
        short_sha = sha256.hex()[:6].upper()
        pcolor('green', f'[rev {revision}: {short_sha}]')
        if prev_title and prev_title != game.title:
            pcolor(
                'yellow',
                f'Warning: {game.uid} has been renamed ({prev_title} -> {game.title})'
            )
        try:
            self.db.execute('INSERT INTO game VALUES (?,?,?,?,?,?)',
                            (game.uid, revision, sha256, game.title,
                             game.platform, self.session))
        except sqlite3.IntegrityError as e:
            pcolor('red', f'Error: {e} when storing {game.title}. Skipped.')
            return
        self.db.commit()
        if revision == 1:
            self.cleanup_obsolete(game, sha256)
    finally:
        # Always remove the scratch dir, even on early return or error.
        shutil.rmtree(tmp)
def get_words_in_scope(translate):
    """Return the digests of all translated words for *translate*,
    or ``None`` when no translation scope is given.
    """
    if translate is None:
        return None
    translations = torus.get_translations(translate)
    return [util.digest(translations[key].decode('utf8'))
            for key in translations]
def new_vid2scene_from_config_str(config: str):
    """Build a Vid2Scene from a JSON *config* string carrying a "url" key."""
    parsed = json.loads(config)
    url = parsed["url"]
    scene_config = {
        "provider": "",
        "kind": "vid",
        "url": url,
        "id": digest(url),
        "display": False,
    }
    return Vid2Scene(config=scene_config)
def create_torrentzip(uid, platform, build_dir, dist_file):
    """Write a content.json manifest into *build_dir*, zip the whole tree
    into *dist_file*, normalize it with trrntzip, and return its sha256
    digest.
    """
    meta_path = os.path.join(build_dir, 'content.json')
    with open(meta_path, 'w', encoding='utf-8') as meta_file:
        json.dump({'version': 1, 'uniqueId': uid, 'platform': platform},
                  meta_file, indent=4)
    with zipfile.ZipFile(dist_file, 'w') as archive:
        for root, _, filenames in os.walk(build_dir):
            for name in filenames:
                full_path = os.path.join(root, name)
                arc_name = os.path.relpath(
                    full_path, build_dir).replace(os.path.sep, '/')
                archive.write(full_path, arcname=arc_name)
    # Rewrite the archive into canonical torrentzip form.
    subprocess.check_call(['bin/trrntzip', dist_file],
                          stdout=subprocess.DEVNULL)
    return util.digest(dist_file, hashlib.sha256())
def optim_checksum(self):
    """Digest of the optimizer's state dict, usable for change detection."""
    state = self.optim.state_dict()
    return util.digest(state)
import torus logging.config.fileConfig("logging.conf") settings = {} try: settings = json.load(open('global-settings.json', 'r')) except Exception as e: logging.warn(e) DB_DIR = settings["db_dir"] if "db_dir" in settings else os.environ["MOLVA_DIR"] BLOCKED_NOUNS_LIST = u"\n".join(list(u"абвгдеёжзиклмнопрстуфхцчшщыьъэюя")) BLOCKED_NOUNS = ",".join( map(lambda x: str(util.digest(x)), settings["blocked_nouns"])) BLOCKED_NOUNS_SHORT = ",".join( map(lambda x: str(util.digest(x)), BLOCKED_NOUNS_LIST.split("\n"))) POST_MIN_FREQ = 3 @util.time_logger def build_chains_nouns_replys(cur): logging.info("fill replys") cur.execute("select count(*) from tmp.chains_nouns_all") logging.info("tmp.chains_nouns_all cnt = %s " % cur.fetchone()[0]) cur.execute(""" insert or ignore into tmp.chains_nouns_all select tc.post_id, n1.noun_md5, tc.reply_id, n2.noun_md5, '' from tweet_chains tc
import util from profile import NounProfile logging.config.fileConfig("logging.conf") settings = {} try: settings = json.load(open('global-settings.json', 'r')) except Exception as e: logging.warn(e) DB_DIR = settings["db_dir"] if "db_dir" in settings else os.environ["MOLVA_DIR"] BLOCKED_NOUNS_LIST = u"\n".join(list(u"абвгдеёжзиклмнопрстуфхцчшщыьъэюя")) BLOCKED_NOUNS = ",".join(map( lambda x: str(util.digest(x)), BLOCKED_NOUNS_LIST.split("\n"))) codecs.getwriter('utf8')(sys.stdout) def get_profiles(ind, date): cur = ind.get_db_for_date(date) nouns = stats.get_nouns(cur) logging.info("%s: nouns len %s" % (date, len(nouns))) profiles_dict = stats.setup_noun_profiles(cur, {}, nouns, post_min_freq = 10, blocked_nouns=BLOCKED_NOUNS, nouns_limit = 500) logging.info("%s: profiles len %s" % (date, len(profiles_dict))) return [profiles_dict, nouns] def save_sims(cur, sims):
from flask import Flask, request, jsonify, Response, render_template import json from functools import wraps from trackleaders import get_breaks, get_racer_id, get_racers, get_race_name import util import config import redis import os app = Flask(__name__) BAD_REQUEST = Response(status='405') MAX_NUM_LEN = 20 MAIN_DIGEST = util.digest('static/main.js') # 30 minutes CACHE_DUR = 1800 R = redis.from_url(os.environ.get("REDIS_URL", "redis://localhost:6379/")) def cache_json(view): ''' cache a json response with redis ''' @wraps(view) def decorated_view(*args, **kwargs): # most of the traffic is generated by client side's API call # and url of which are fixed cached = R.get(request.url) if cached is not None: resp = Response(cached, content_type='application/json') else:
from flask import Flask, request, jsonify, Response, render_template import json from functools import wraps from trackleaders import get_breaks, get_racer_id, get_racers, get_race_name import util import config import redis import os app = Flask(__name__) BAD_REQUEST = Response(status='405') MAX_NUM_LEN = 20 MAIN_DIGEST = util.digest('static/main.js') # 30 minutes CACHE_DUR = 1800 R = redis.from_url(os.environ.get("REDIS_URL", "redis://localhost:6379/")) def cache_json(view): ''' cache a json response with redis ''' @wraps(view) def decorated_view(*args, **kwargs): # most of the traffic is generated by client side's API call # and url of which are fixed cached = R.get(request.url) if cached is not None: resp = Response(cached, content_type='application/json') else: resp = view(*args, **kwargs)
def __repr__(self):
    """One-line summary of the RNG state digests and generator positions."""
    return (
        'rand_state: {}, sg_rand_state: {}, '
        'init_wavgen_pos: {}, perm_gen_pos: {}\n'.format(
            util.digest(self.rand_state),
            util.digest(self.sg_rand_state),
            self.init_wavgen_pos,
            self.perm_gen_pos))