Example #1
0
def start_deezer_keepalive():
    """Check the Deezer login now and re-arm the periodic keepalive timer.

    Stores the timer in the module-level ``_keepalive_timer`` so it can be
    cancelled later; the timer re-invokes this function, so the check
    repeats every ``keepalive`` minutes (from the ``deezer`` config section).
    """
    global _keepalive_timer

    # Probe the session immediately; the timer below repeats the probe.
    test_deezer_login()

    interval_seconds = 60.0 * config.getint('deezer', 'keepalive')
    _keepalive_timer = threading.Timer(interval_seconds,
                                       start_deezer_keepalive)
    _keepalive_timer.start()
Example #2
0
        add_to_playlist: True|False (add to mpd playlist)
        create_zip: True|False (create a zip for the playlist)
    """
    user_input = request.get_json(force=True)
    desc = "Downloading Spotify playlist"
    task = sched.enqueue_task(desc,
                              "download_spotify_playlist_and_queue_and_zip",
                              playlist_name=user_input['playlist_name'],
                              playlist_id=user_input['playlist_url'],
                              add_to_playlist=user_input['add_to_playlist'],
                              create_zip=user_input['create_zip'])
    return jsonify({
        "task_id": id(task),
    })


@atexit.register
def stop_workers():
    """Shut down background machinery at interpreter exit.

    Registered via atexit so the scheduler's worker threads and the Deezer
    keepalive timer are stopped cleanly when the process terminates.
    """
    sched.stop_workers()
    stop_deezer_keepalive()


# Only arm the keepalive timer when a positive interval (minutes) is
# configured: start_deezer_keepalive() reads the option without a
# fallback, so starting it unconditionally raises when the option is
# absent.  fallback=0 keeps startup working without that config entry.
if config.getint('deezer', 'keepalive', fallback=0) > 0:
    start_deezer_keepalive()

sched.run_workers(config.getint('threadpool', 'workers'))

if __name__ == '__main__':
    # use_reloader=False: the Werkzeug reloader re-executes the module in
    # a child process, which would duplicate the module-level side effects
    # above (worker pool, keepalive timer).
    app.run(host=config['http']['host'],
            port=config['http'].getint('port'),
            debug=config['http'].getboolean('debug'),
            use_reloader=False)
Example #3
0
        add_to_playlist: True|False (add to mpd playlist)
        create_zip: True|False (create a zip for the playlist)
    """
    user_input = request.get_json(force=True)
    desc = "I'm working on your Spotify playlist"
    task = sched.enqueue_task(desc,
                              "download_spotify_playlist_and_queue_and_zip",
                              playlist_name=user_input['playlist_name'],
                              playlist_id=user_input['playlist_url'],
                              add_to_playlist=user_input['add_to_playlist'],
                              create_zip=user_input['create_zip'])
    return jsonify({
        "task_id": id(task),
    })


# Arm the Deezer keepalive timer only when a positive interval (minutes)
# is configured; fallback=0 lets startup proceed when the option is absent.
if config.getint('deezer', 'keepalive', fallback=0) > 0:
    start_deezer_keepalive()

sched.run_workers(config.getint('threadpool', 'workers'))


@atexit.register
def stop_workers():
    """Stop the scheduler's workers and the Deezer keepalive timer.

    Registered with atexit so background threads are torn down cleanly
    when the interpreter exits.
    """
    sched.stop_workers()
    stop_deezer_keepalive()


if __name__ == '__main__':
    # Hard-coded development settings (port 5000, debug on).
    # use_reloader=False: the reloader would re-run the module-level
    # worker/timer startup in a child process.
    app.run(port=5000, debug=True, use_reloader=False)
Example #4
0
from sqlalchemy import select, and_, not_
import logging

from Test import Test
import db
from configuration import config


# Upper bound on the stored key/hash representation, read once at import
# time from the [db] config section; oversized inputs are rejected before
# they hit the database columns sized with this value.
maxKeyLength = config.getint('db', 'MaxKeyLength')

class UsedTest(Test):
			
	def run(self, key, testRun, keyReference, client, makePerm=True):
		
		comp = self.comparator(key)
		lenComp = len(str(comp))
		if  lenComp > maxKeyLength:
			outputText='The ' + self.compare + ' which should be tested is ' + lenComp + ' bit long, this is too long for UsedModulo. Increase MaxKeyLength.'
			logging.warning(outputText)
			status = 0
			return status, outputText, []
			
		connection=self.engine.connect()
		s = select([db.usedTable.c.hash]).where(and_(db.usedTable.c.hash == comp, db.usedTable.c.test == self.test))

		if connection.execute(s).first() is None:
			status =1
			outputText = 'The ' + self.compare + ' has not been used before.'
			revokeKeyReference=[]		
		else:
			s = select([db.usedTable.c.test_run]).where(
Example #5
0
	shared = getShared()
	
	#start executing the user commands
	if options.reset:
		resetDB()
		initAllTests(engine, shared)
		exit()

	if options.result:
		result = Result(engine, options.result, client)
		print result.getAll()
		exit()
	
	if options.file:
		cores = min(config.getint('keycheck', 'NumberCores'), len(filenames))	
		pool = ExitPool(cores, initWorker, [testSet, client, options.makePerm, options.verbose, shared], exitWorker)
		result=pool.map(test, filenames, chunksize=1)	
		pool.close()
		pool.join()
		
	
	if options.globalTest:
		tester = Tester(testSet, engine, shared)
		result=[doTest(tester, None, None, None, None, client, options.makePerm, options.verbose, engine)]
		tester.release()
		
	for s in result:
		print s
			
Example #6
0
                            "use_gpu") and torch.cuda.is_available()


# paths
def mkdir(path):
    """Create *path* (including missing parents) if it does not exist.

    Uses ``exist_ok=True`` so an already-existing directory is accepted,
    while genuine failures (e.g. permission errors) now propagate instead
    of being silently swallowed by a blanket ``except OSError: pass``.
    """
    os.makedirs(path, exist_ok=True)


# Ensure the output directory from the [testing] config section exists.
output_path = config.get("testing", "output_path")
mkdir(output_path)

# models
# NOTE(review): assumes NeuralNet(in_dim, hidden, out_dim) — the input is
# one flattened RGB patch (patch_size * patch_size * 3); confirm against
# the NeuralNet definition.
encoder = NeuralNet(config.getint("dataset", "patch_size") *
                    config.getint("dataset", "patch_size") * 3,
                    100,
                    10,
                    activation=nn.Tanh)

# load the state
# Restore previously trained weights from the configured checkpoint file.
state_path = config.get("testing", "state_path")
encoder.load_state_dict(torch.load(state_path))

# move to gpu if needed
if use_gpu:
    encoder = encoder.to(torch.device("cuda:0"))

# Locations of the evaluation data and scratch space for this run.
data_path = config.get("testing", "data_path")
temp_path = config.get("testing", "temp_path")
Example #7
0
# KMeans constants
# Use the GPU only when both requested in the config and actually present.
use_gpu = config.getboolean("training",
                            "use_gpu") and torch.cuda.is_available()

# paths
# Create the output directory and its "states" subdirectory; OSError
# (typically "directory already exists") is deliberately ignored.
output_path = config.get("training", "output_path")
state_path = os.path.join(output_path, "states")
for path in [output_path, state_path]:
    try:
        os.makedirs(path)
    except OSError:
        pass

# models
# NOTE(review): assumes NeuralNet(in_dim, hidden, out_dim) — encoder maps
# one flattened RGB patch (patch_size * patch_size * 3) to a 10-value
# code; confirm against the NeuralNet definition.
encoder = NeuralNet(config.getint("dataset", "patch_size") *
                    config.getint("dataset", "patch_size") * 3,
                    100,
                    10,
                    activation=nn.Tanh)

# Decoder mirrors the encoder back to patch size; activate_last=False
# presumably leaves the output layer linear — verify in NeuralNet.
decoder = NeuralNet(10,
                    100,
                    config.getint("dataset", "patch_size") *
                    config.getint("dataset", "patch_size") * 3,
                    activation=nn.Tanh,
                    activate_last=False)

# move to gpu if needed
if use_gpu:
    encoder = encoder.to(torch.device("cuda:0"))
Example #8
0
def initDB():
	"""Declare the full key-testing schema and create any missing tables.

	Builds a fresh MetaData object, declares every table used by the
	framework, and publishes each Table as a module-level global so other
	modules can build queries against them.  Finally
	``metadata.create_all(engine)`` issues CREATE TABLE only for tables
	that do not exist yet; existing tables are left untouched.
	"""
	global metadata
	metadata = MetaData()
	
	engine = getEngine()
	# Width of key/hash string columns, from the [db] config section.
	maxKeyLength = config.getint('db', 'MaxKeyLength')
	
	global clientTable
	global testTable
	global testSetTable
	global testRunTable
	global testResultTable
	global revokeKeysTable
	global usedTable
	global keyStorageTable
	global smallFactorTable
	global commonGCDTable
	
	# Registered clients; each client is assigned one test set.
	clientTable = Table('Client', metadata,
		Column('name', String(40), primary_key=True),
		Column('test_set', None, ForeignKey('Test_Set.id')),
		Column('admin', Boolean, nullable=False, default=False),
	)		
	
	# Individual tests; keyed by (name, test_set) so the same test name can
	# appear in several test sets with different parameters.
	testTable = Table('Test', metadata,
		Column('name', String(20), primary_key=True),
		Column('test_set', None, ForeignKey('Test_Set.id'), primary_key=True),
		Column('type', Integer, default = 0, nullable=False),
		Column('parameter', Integer, default=0),
	)

	# A named collection of tests.
	testSetTable = Table('Test_Set', metadata,
		Column('id', Integer, Sequence('Test_Set_id_seq'), primary_key=True),
		Column('description', String(400))
	)


	# One execution of a test set against a submitted key.
	testRunTable = Table('Test_Run', metadata,
		Column('id', Integer, Sequence('Test_Run_id_seq'), primary_key=True),
		Column('key_reference', Integer),
		Column('client', None, ForeignKey('Client.name'), nullable = False),
		Column('key_type', String(20)),
		Column('key_format', String(20)),
		Column('test_set', None, ForeignKey('Test_Set.id')),
		Column('started', DateTime),
		Column('completed', DateTime),
	)
		
	# Per-test outcome of a test run: numeric status plus textual output.
	testResultTable = Table('Test_Result', metadata, 
		Column('test_run', None, ForeignKey('Test_Run.id'), primary_key=True),
		Column('test', None, ForeignKey('Test.name'), primary_key=True),
		Column('status', Integer, nullable=False, default=0),
		Column('output', String(400)),
	)

	# Keys flagged for revocation by a test.  NOTE(review): 'revoke_key'
	# is declared as a ForeignKey to Test_Run.id — verify this indirection
	# (revoking by run id) is intended.
	revokeKeysTable = Table('Revoke_Keys', metadata,
		Column('test_run', None, ForeignKey('Test_Run.id'), autoincrement=False, primary_key=True),
		Column('test', None, ForeignKey('Test.name'), autoincrement=False, primary_key=True),
		Column('revoke_key',  None, ForeignKey('Test_Run.id'), autoincrement=False, primary_key=True),
		)
		
	# Hashes already seen by a test, used to detect key reuse; note the
	# primary key is (test, test_run) — the hash column itself is not part
	# of it.
	usedTable = Table('Used', metadata , 
		Column('hash', String(maxKeyLength)),
		Column('test', String(20), primary_key=True, autoincrement=False),
		Column('test_run', None, ForeignKey('Test_Run.id'), primary_key=True, autoincrement=False),
		Column('key_reference', Integer),
		Column('client', None, ForeignKey('Client.name')),
		)
		
	# Raw key material (modulus/exponent) stored per test run.
	keyStorageTable = Table('Key_Storage', metadata , 
		Column('test_run', None, ForeignKey('Test_Run.id'), primary_key=True, autoincrement=False),
		Column('client', None, ForeignKey('Client.name')),
		Column('key_reference', Integer),
		Column('modulus', String(maxKeyLength)),
		Column('exponent', Integer))
	
	# NOTE(review): presumably a precomputed product for small-factor
	# testing up to 'up_to' — confirm against the test that fills it.
	# Text(2**32-1) requests the largest TEXT variant the backend offers.
	smallFactorTable = Table('Small_Factor', metadata , 
		Column('up_to', BigInteger, primary_key=True, autoincrement=False),
		Column('product', Text(2**32-1)),
	)
	
	# NOTE(review): presumably running modulus products for common-GCD
	# (shared prime factor) checks — confirm against the consumer.
	commonGCDTable = Table('Common_GCD', metadata , 
		Column('product', Text(2**32-1)),
		Column('id', Integer, Sequence('Common_GCD_id_seq'), primary_key=True)
	)
		
	metadata.create_all(engine)