Example no. 1
 def createWorker(self, clientSocket, address):
     # workerId = hashlib.md5(str(address).encode('utf8')).hexdigest()
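     # create and start a Worker for the accepted client connection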
     worker = Worker(clientSocket,
                     address,
                     self.db.Session,
                     sslContext=self.sslContext)
     worker.start()
Example no. 2
    def run(self):
        ae_res = []
        stability_res = []

        # Cross Validation loop
        for i in range(len(self.folds)):
            # get info for fold #i
            fold = self.folds[i]

            # set train, validation and test sets for this fold iteration
            self.args.trainfiles = fold.train
            self.args.valfile = fold.validation
            self.args.testfile = fold.test

            # all training logic is inside Worker (core/worker.py)
            worker = Worker(i, self.conf, self.data_conf, self.cache, self.args,
                            verbose=self.verbose, inference=self.inference)
            res, _ = worker.run()

            # print results for each fold
            if self.verbose:
                partial_res = summary_angular_errors([r.error for r in res])
                self._print_results(partial_res, 'fold '+str(i))

            # accumulate results into lists
            ae_res += res

        # summary_angular_errors: computes mean, median, best 25%, etc...
        results = summary_angular_errors([r.error for r in ae_res])
        if self.verbose:
            self._print_results(results, 'total')

        return results
Example no. 3
def main(args):
    logging.info(args)
    device = 'gpu' if args.gpu else 'cpu'

    devices = device_lib.list_local_devices()
    num_gpus = len([d for d in devices if '/gpu' in d.name])
 
    env = gym.make(args.game)
    env = Env(env, resized_width=84, resized_height=84, agent_history_length=4)
    num_actions = len(env.gym_actions)

    global_net = Network(num_actions, -1, 'cpu')
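    # one actor network per thread, each tied to the shared global network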
    actor_networks = []
    for t in range(args.threads):
        device_index = 0 if device == 'cpu' else (t if args.threads <= num_gpus else 0)
        n = Network(num_actions, t, device, device_index)
        n.tie_global_net(global_net)
        actor_networks.append(n)

    # one shared TensorFlow session; op parallelism capped at the number of actor threads
    sess = tf.Session(config=tf.ConfigProto(
        intra_op_parallelism_threads=args.threads,
        inter_op_parallelism_threads=args.threads))
    sess.run(tf.global_variables_initializer())
    saver = tf.train.Saver()
    if not os.path.exists(args.checkpoint_dir):
        os.makedirs(args.checkpoint_dir)

    threads = []
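    # start one Worker per actor network, each with its own environment instance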
    for t, net in enumerate(actor_networks):
        e = Env(gym.make(args.game), net.width, net.height, net.depth)
        w = Worker(t, e, net, sess, saver, args.checkpoint_dir)
        w.start()
        threads.append(w)

    for t in threads:
        t.join()
Example no. 4
def main():
    args = parser.parse_args()

    if args.seed is not None:
        random.seed(args.seed)
        torch.manual_seed(args.seed)
        cudnn.deterministic = True
        warnings.warn('You have chosen to seed training. '
                      'This will turn on the CUDNN deterministic setting, '
                      'which can slow down your training considerably! '
                      'You may see unexpected behavior when restarting '
                      'from checkpoints.')

    # load configuration file: epochs, loss function, etc... for this experiment
    with open(args.configurationfile, 'r') as f:
        conf = json.load(f)

    # load datapath file: paths specific to the current machine
    with open(args.datapath, 'r') as f:
        data_conf = json.load(f)

    # remove previous results
    output_dir = os.path.join(args.outputfolder, args.dataset, args.subdataset,
                              conf['name'])
    if not args.evaluate and not args.resume:
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)

    # create output folder
    os.makedirs(output_dir, exist_ok=True)
    args.outputfolder = output_dir

    # copy configuration file to output folder
    shutil.copy(
        args.configurationfile,
        os.path.join(output_dir, os.path.basename(args.configurationfile)))

    # redirect stdout and stderr (standard output and error) to files
    # in the output directory
    sys.stdout = PrintLogger(os.path.join(output_dir, 'stdout.txt'),
                             sys.stdout)
    sys.stderr = PrintLogger(os.path.join(output_dir, 'stderr.txt'),
                             sys.stderr)

    fold = 0  # no folds, but we always use fold #0 for these experiments
    cache = CacheManager(conf)
    worker = Worker(fold, conf, data_conf, cache, args)
    res, _ = worker.run()

    # some datasets have no validation GT
    if len(res) > 0:
        # print angular errors statistics (mean, median, etc...)
        generate_results(res, 'test')
Example no. 5
 def __init__(self):
     super(Manager, self).__init__()
     self.basePath = "db/db.json"
     self.baseImagePath = "db/images"
     self.hgigerbot = hgigerbot
     self.imageManager = ImageManager(basePath=self.baseImagePath)
     self.worker = Worker(self.baseImagePath, self.imageManager)
     self.requestType = RequestType()
     self.currentCell = {}
     self.hints = []
     self.hintSections = []
     self.currentHint = {}
Example no. 6
    def test_24_hours_shifts(self):
        # 1 hour to load, 1 hour to produce
        # produce 2 every 2 hours
        # i.e. (24 - 1 hour to start the machine) / 2 = 10
        machine, spec, stock = create_machine(stocking_zone_size=None)
        factory = Factory()
        factory.add_worker(Worker(working_hour = 8 * 60))
        factory.add_worker(Worker(working_hour = 8 * 60))
        factory.add_worker(Worker(working_hour = 8 * 60))
        factory.add_production_unit(machine)
        factory.run(24 * 60)

        self.assertEquals(stock.count(), 720 - 1)
Example no. 7
def runWorker(mapURL, threads):
	queue = Queue.Queue()

	# create a thread pool and give them a queue
	for i in range(threads):
		t = Worker(queue, str(i))
		t.setDaemon(True)
		t.start()

	# give the queue some data
	for url in mapURL.values():
		queue.put(url)

	# wait for the queue to finish
	queue.join()
Example no. 8
    def setUp(self):
        self.unaffected_production_unit, spec, zone = create_machine(
            material_type_input="yarn")

        self.worker = Worker()
        self.affected_production_unit = ProductionUnit(spec)

        self.inputs = Material("yarn")
        self.started_production_unit = ProductionUnit(spec)
        self.started_production_unit.perform_next_operation(self.worker)

        self.loaded_production_unit = ProductionUnit(spec)
        self.loaded_production_unit.perform_next_operation(self.worker)
        self.loaded_production_unit.perform_next_operation(self.worker)

        config = {'rate_by_minute': 0.2}
        spec_four = Specification()
        spec_four.add(
            MaterialInputConstraint(Material(type="flour", quantity=2)))
        spec_four.add(
            MaterialInputConstraint(Material(type="water", quantity=1)))
        spec_four.add_output_material(Material("bread", 1))

        self.four_a_pain = ProductionUnit(spec_four, config)

        self.four_a_pain.perform_next_operation(self.worker)
        self.four_a_pain.perform_next_operation(self.worker)
Example no. 9
 def setUp(self):
     spec = Specification()
     spec.add(MaterialInputConstraint(Material(type="wood", quantity=1)))
     spec.add_output_material(Material("Furniture", 1))
     self.machine = ProductionUnit(spec, config={"rate_by_minute":0.25})
     self.worker = Worker()
     self.input = Material("wood", quantity=3)
Example no. 10
def main():
    args = parser.parse_args()

    # load configuration file for this experiment
    with open(args.configurationfile, 'r') as f:
        conf = json.load(f)

    # load datapath file: paths specific to the current machine
    with open(args.datapath, 'r') as f:
        data_conf = json.load(f)

    # create output folder
    os.makedirs(args.outputfolder, exist_ok=True)

    # copy configuration file to output folder
    shutil.copy(
        args.configurationfile,
        os.path.join(args.outputfolder,
                     os.path.basename(args.configurationfile)))

    # redirect stdout and stderr (standard output and error) to files
    # in the output directory
    sys.stdout = PrintLogger(os.path.join(args.outputfolder, 'stdout.txt'),
                             sys.stdout)
    sys.stderr = PrintLogger(os.path.join(args.outputfolder, 'stderr.txt'),
                             sys.stderr)

    # used in core/worker.py to determine what to do
    args.evaluate = True
    args.resume = False
    args.valfile = None
    args.trainfiles = None

    # init the cache manager: this caches images from the dataset
    # to avoid reading them more than once
    cache = CacheManager(conf, no_cache=True)

    fold = 0  # no folds, but we always use fold #0 for these experiments
    worker = Worker(fold, conf, data_conf, cache, args, inference=True)
    res, _ = worker.run()

    # print angular errors statistics (mean, median, etc...)
    generate_results(res)
Example no. 11
    def test_add_skill_constraint_to_operation(self):
        tech_production_unit, spec, zone = create_machine(
            material_type_input="iron")

        start_op = StartOperation(tech_production_unit, worker=self.worker)
        start_op.add_constraint(SkillConstraint(skill_name="blacksmith"))

        self.assertRaises(CannotPerformOperation, start_op.run)

        blacksmith = Worker()
        blacksmith.skills.append("blacksmith")
        StartOperation(tech_production_unit, worker=blacksmith).run()
        self.assertEquals(tech_production_unit.get_state(),
                          ProductionUnit.STARTED)
Example no. 12
def run(args_obj):
    # create worker thread
    cmd_obj = Worker(args_obj=args_obj)
    # create msg manager thread
    msg_obj = MSGManager(cmd_obj)
    msg_obj.start()
    cmd_obj.start()
    cmd_obj.join()
    msg_obj.join()
Example no. 13
def get_factory(yaml_conf):
    yaml = load(yaml_conf)
    factory = Factory(name=yaml["name"])
    materials = create_materials(yaml)
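    # one ProductionUnit per YAML entry, each with its own spec and production rate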
    for production_unit in yaml["production_units"]:
        spec = create_spec(materials, production_unit)
        config = {}
        config["rate_by_minute"] = production_unit.get("rate", 1)
        factory.add_production_unit(
            ProductionUnit(spec=spec,
                           config=config,
                           name=production_unit["name"]))

    for worker in yaml.get("workers", []):
        # working_hour is given in hours in the YAML and stored here in minutes
        working_hour = worker.get("working_hour", 8) * 60
        factory.add_worker(Worker(working_hour=working_hour))
    return factory
Example no. 14
def main():
    mapURL = {}

    logo()
    parser = argparse.ArgumentParser(description='Ragpicker Malware Crawler')
    parser.add_argument("-a",
                        "--artwork",
                        help="Show artwork",
                        action="store_true",
                        required=False)
    parser.add_argument("-p",
                        "--processes",
                        type=int,
                        default=3,
                        help="Number of processes (default=3, max=6)")
    parser.add_argument("-u",
                        "--url",
                        help="Download and analysis from a single URL")
    parser.add_argument("-d",
                        "--directory",
                        help="Load files from local directory")
    parser.add_argument("-i",
                        "--info",
                        help="Print Ragpicker config infos",
                        action="store_true",
                        required=False)
    parser.add_argument("-da",
                        "--delete",
                        help="Delete all stored data",
                        action="store_true")
    parser.add_argument('--log-level',
                        default=logging.INFO,
                        help='logging level, default=logging.INFO')
    parser.add_argument('--log-filename', help='logging filename')
    parser.add_argument('--version',
                        action='version',
                        version='Ragpicker version ' + RAGPICKER_VERSION)

    global args
    args = parser.parse_args()

    if args.artwork:
        try:
            while True:
                time.sleep(1)
                logo()
        except KeyboardInterrupt:
            return

    if args.log_level:
        log_conf = dict(
            level=args.log_level,
            format='%(levelname)s %(name)s %(module)s:%(lineno)d %(message)s')

        if args.log_filename:
            log_conf['filename'] = args.log_filename
            log.info("log-filename: " + args.log_filename)

        logging.basicConfig(**log_conf)

    if args.delete:
        worker = Worker()
        worker.runDelete()
        return

    if args.info:
        printRagpickerInfos(True)
        return

    if args.url:
        log.info(color("Download and analysis from %s" % args.url, RED))
        runWorker(args.url)
    elif args.directory:
        printRagpickerInfos()
        log.info(
            color("Load files from local directory %s" % args.directory, RED))
        mapURL = getLocalFiles(args.directory)
    else:
        printRagpickerInfos()
        # crawl malware URLs
        mapURL = runCrawler()

    # cap at a maximum of 6 processes
    if args.processes > 6:
        args.processes = 6

    log.info(color("Processes: " + str(args.processes), RED))
    log.info(color("Process " + str(len(mapURL)) + " URLs", RED))

    # Create Process Pool
    pool = Pool(processes=args.processes)

    # Malware Download, process and reporting
    for url in mapURL.values():
        pool.apply_async(runWorker, args=(url, ))

    pool.close()
    pool.join()
Example no. 15
def runWorker(url):
    log.info("Worker URL: " + url)
    worker = Worker()
    worker.run(url)
Example no. 16
 def test_factory_add_worker(self):
     factory = Factory()
     factory.add_worker(Worker())
     self.assertEquals(len(factory.workers), 1)
Example no. 17
class Manager(object):
    def __init__(self):
        super(Manager, self).__init__()
        self.basePath = "db/db.json"
        self.baseImagePath = "db/images"
        self.hgigerbot = hgigerbot
        self.imageManager = ImageManager(basePath=self.baseImagePath)
        self.worker = Worker(self.baseImagePath, self.imageManager)
        self.requestType = RequestType()
        self.currentCell = {}
        self.hints = []
        self.hintSections = []
        self.currentHint = {}

    def inThread(self, function):
        Thread(target=function).start()

    def hintsCount(self):
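        # each cell's body is [hint_sections, hints]; count the hints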
        return sum([len(cell["body"][1]) for cell in self.cells])

    def setBuilder(self, builder):
        self.builder = builder

    def openCell(self, cellId):
        self.currentCell = self.worker.findElementById(elements=self.cells,
                                                       id=cellId)
        self.hintSections = self.currentCell["body"][0]
        self.hints = self.currentCell["body"][1]

    def openHint(self, hintId):
        self.currentHint = [
            hint for hint in self.hints if hint["id"] == hintId
        ][0]

    def save(self):
        with open(self.basePath, "w") as f:
            json.dump([self.cellSections, self.cells],
                      f,
                      sort_keys=True,
                      indent=4)

    def load(self):
        with open(self.basePath) as f:
            self.cellSections, self.cells = json.load(f)

    def reception(self, type, **request):
        # dispatch the request to the matching Worker operation, then save
        # in a background thread and rebuild the UI
        self.worker.update(self)
        # (↓) [-CELL SECTIONS-]
        if type == self.requestType.ADD_CELL_SECTION:
            self.cellSections = self.worker.addCellSection(**request)
        elif type == self.requestType.DELETE_CELL_SECTION:
            self.cellSections, self.cells = self.worker.deleteCellSection(
                **request)
        elif type == self.requestType.RENAME_CELL_SECTION:
            self.cellSections, self.cells = self.worker.renameCellSection(
                **request)
        elif type == self.requestType.REICON_CELL_SECTION:
            self.cellSections = self.worker.reiconCellSection(**request)
        elif type == self.requestType.SWAP_CELL_SECTIONS:
            self.cellSections = self.worker.swapCellSections(**request)
        # (↓) [-CELL-]
        elif type == self.requestType.ADD_CELL:
            self.cells = self.worker.addCell(**request)
        elif type == self.requestType.DELETE_CELL:
            self.cells = self.worker.deleteCell(**request)
        elif type == self.requestType.RENAME_CELL:
            self.cells = self.worker.renameCell(**request)
        elif type == self.requestType.MOVE_CELL:
            self.cells = self.worker.moveCell(**request)
        # (↓) [-HINT SECTIONS-]
        elif type == self.requestType.ADD_HINT_SECTION:
            self.cells = self.worker.addHintSection(**request)
        elif type == self.requestType.DELETE_HINT_SECTION:
            self.cells = self.worker.deleteHintSection(**request)
        elif type == self.requestType.RENAME_HINT_SECTION:
            self.cells = self.worker.renameHintSection(**request)
        elif type == self.requestType.REICON_HINT_SECTION:
            self.cells = self.worker.reiconHintSection(**request)
        elif type == self.requestType.SWAP_HINT_SECTIONS:
            self.cells = self.worker.swapHintSections(**request)
        # (↓) [-HINTS-]
        elif type == self.requestType.ADD_HINT:
            self.cells = self.worker.addHint(**request)
        elif type == self.requestType.DELETE_HINT:
            self.cells = self.worker.deleteHint(**request)
        elif type == self.requestType.RENAME_HINT:
            self.cells = self.worker.renameHint(**request)
        elif type == self.requestType.MOVE_HINT:
            self.cells = self.worker.moveHint(**request)
        # (↓) [-HINT-]
        elif type == self.requestType.EDIT_TAGS:
            self.cells = self.worker.editTags(**request)
        elif type == self.requestType.ADD_PAGE:
            self.cells = self.worker.addPage(**request)
        elif type == self.requestType.DELETE_PAGE:
            self.cells = self.worker.deletePage(**request)
        elif type == self.requestType.RENAME_PAGE:
            self.cells = self.worker.renamePage(**request)
        elif type == self.requestType.SWAP_PAGES:
            self.cells = self.worker.swapPages(**request)
        elif type == self.requestType.EDIT_PAGE_TEXT:
            self.cells = self.worker.editPageText(**request)
        elif type == self.requestType.ADD_IMAGE:
            self.cells = self.worker.addImage(**request)
        elif type == self.requestType.DELETE_IMAGE:
            self.cells = self.worker.deleteImage(**request)

        self.inThread(function=self.save)
        self.builder.rebuild()
Example no. 18
def main():
	mapURL = {}
	
	logo()
	parser = argparse.ArgumentParser(description='Ragpicker Malware Crawler')
	parser.add_argument("-a", "--artwork", help="Show artwork", action="store_true", required=False)
	parser.add_argument("-p", "--processes", type=int, default=3, help="Number of processes (default=3, max=6)")
	parser.add_argument("-u", "--url", help="Download and analysis from a single URL")
	parser.add_argument("-d", "--directory", help="Load files from local directory")
	parser.add_argument("-i", "--info", help="Print Ragpicker config infos", action="store_true", required=False)
	parser.add_argument("-da", "--delete", help="Delete all stored data", action="store_true")
	parser.add_argument('--log-level', default=logging.INFO, help='logging level, default=logging.INFO')
	parser.add_argument('--log-filename', help='logging filename')
	parser.add_argument('--version', action='version', version='Ragpicker version ' + RAGPICKER_VERSION)

	global args 
	args = parser.parse_args()
	
	if args.artwork:
		try:
			while True:
				time.sleep(1)
				logo()
		except KeyboardInterrupt:
			return
		
	if args.log_level:
		log_conf = dict(level=args.log_level,
			format='%(levelname)s %(name)s %(module)s:%(lineno)d %(message)s')

		if args.log_filename:
			log_conf['filename'] = args.log_filename
			log.info("log-filename: " + args.log_filename)

		logging.basicConfig(**log_conf)
	
	if args.delete:
		worker = Worker()
		worker.runDelete()
		return
	
	if args.info:
		printRagpickerInfos(True)
		return
	
	if args.url:
		log.info(color("Download and analysis from %s" % args.url, RED))
		runWorker(args.url)
	elif args.directory:
		printRagpickerInfos()
		log.info(color("Load files from local directory %s" % args.directory, RED))
		mapURL = getLocalFiles(args.directory)
	else:			
		printRagpickerInfos()
		# crawl malware URLs
		mapURL = runCrawler()
		
	# cap at a maximum of 6 processes
	if args.processes > 6:
		args.processes = 6
		
	log.info(color("Processes: " + str(args.processes), RED))	
	log.info(color("Process " + str(len(mapURL)) + " URLs", RED))
	
	# Create Process Pool
	pool = Pool(processes=args.processes)
	
	# Malware Download, process and reporting
	for url in mapURL.values():
		pool.apply_async(runWorker, args=(url,))
		
	pool.close()
	pool.join()
Example no. 19
 def test_working_hour(self):
     worker = Worker(working_hour=1)
     worker.add_unit_of_work()
     self.assertRaises(Event, worker.add_unit_of_work)
Example no. 20
 def test_working_hour(self):
     eight_hour_worker = Worker(working_hour=8 * 60)
     self.assertRaises(
         Event,
         LoadOperation(Material(type="wood", quantity=1),
                       production_unit=self.machine,
                       worker=eight_hour_worker).run,
         during=8 * 60 + 1)
Example no. 21
def main():
    # Initialize worker
    worker = Worker()
    # Run worker
    worker.run()
Example no. 22
 def setUp(self):
     self.machine, spec, self.stock_zone = create_machine(
         material_type_input="wood", material_type_output="plank")
     self.worker = Worker()
     StartOperation(production_unit=self.machine, time_to_perform=1,
                    worker=self.worker).run(during=1)
Example no. 23
from multiprocessing import Process
from core.beat import Beat
from core.worker import Worker

if __name__ == '__main__':
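    # run Beat and Worker side by side in separate processes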
    beat = Beat()
    beat_process = Process(target=beat.run)
    beat_process.start()

    worker = Worker()
    worker_process = Process(target=worker.run)
    worker_process.start()

    beat_process.join()
    worker_process.join()
Example no. 24
def runWorker(url):
	log.info("Worker URL: " + url)
	worker = Worker()
	worker.run(url)