Ejemplo n.º 1
0
 def save(self, filename):
     """Serialize every task in ``self.tasks`` to *filename* as JSON.

     Each task is expected to expose a ``toDict()`` method returning a
     JSON-serializable mapping; the resulting list of dicts is written
     to the file.
     """
     logger.info('saving')
     # Build the payload in one pass; the context manager guarantees the
     # file handle is closed even if json.dump raises.
     with open(filename, 'w') as f:
         json.dump([task.toDict() for task in self.tasks], f)
Ejemplo n.º 2
0
 def get_model(self, model_id):
     """Package the trained model folder for *model_id* into a zip under
     the static file area and return a JSON payload with its download URL.

     Raises:
         cherrypy.HTTPError: 404 when no training folder exists for the
             requested model id.
     """
     # NOTE(review): always rebuilding the archive for now; flip to False
     # to reuse a previously generated zip.
     always_refresh = True
     # The positional argument is overridden by the query-string value,
     # so HTTP callers control the id via the request parameters.
     model_id = cherrypy.request.params.get('model_id')
     logger.info('get_model requested: %s', model_id)  # was a bare print()
     train_path = os.path.join(self.BASE_FILE_PATH, model_id)
     if not os.path.exists(train_path):
         raise cherrypy.HTTPError(404)
     out_dir = os.path.join(STATIC_FILE_PATH, model_id)
     if not os.path.exists(out_dir):
         os.makedirs(out_dir)
     compressed_file = os.path.join(out_dir, model_id + '.zip')
     if os.path.exists(compressed_file) and not always_refresh:
         logger.info('model exists,skipping')
     else:
         # Only build the archiver when we actually need to (re)compress.
         ToArchiveFolder(train_path).zip(compressed_file)
     # Both branches previously returned an identical payload; build it once.
     result = {
         'code':
         '200',
         'url':
         'http://134.175.1.246:80/static/' + model_id + '/' + model_id +
         '.zip'
     }
     return json.dumps(result)
Ejemplo n.º 3
0
 def __init__(self, filepath):
     """Open *filepath* as a zip archive for later extraction.

     On failure the instance is still constructed: ``filepath`` is kept
     for diagnostics and ``_af`` is set to None, so later accesses fail
     with a detectable None instead of an AttributeError for an
     attribute that was never assigned.
     """
     self.filepath = filepath
     if zipfile.is_zipfile(filepath):
         self._af = zipfile.ZipFile(filepath, 'r')
         logger.info("Successfully load file " + filepath)
     else:
         self._af = None
         logger.error("Cannot load file " + filepath)
Ejemplo n.º 4
0
 def _init_model_(self, model_path):
     """Load a frozen TF object-detection graph from *model_path* and
     store it on ``self.graph``.
     """
     logger.info('initiating model')
     graph = tf.Graph()
     with graph.as_default():
         graph_def = tf.GraphDef()
         # Read the serialized GraphDef and import it into the new graph.
         with tf.gfile.GFile(model_path, 'rb') as fid:
             graph_def.ParseFromString(fid.read())
             tf.import_graph_def(graph_def, name='')
     self.graph = graph
     logger.info('model initialized')
Ejemplo n.º 5
0
 def zip(self, target):
     """Recursively compress ``self.folderPath`` into the zip file *target*.

     Archive member names are stored relative to the folder root.

     Returns:
         False when no folder path has been configured; otherwise None.
     """
     if self.folderPath is None:
         logger.error("Folder Path Not Specified")
         return False
     abs_src = os.path.abspath(self.folderPath)
     self._af = zipfile.ZipFile(target, 'w', zipfile.ZIP_DEFLATED)
     # ``with`` guarantees the archive is closed (and the target file
     # handle released) even if one of the writes raises.
     with self._af:
         for root, _dirs, files in os.walk(self.folderPath):
             for name in files:
                 abs_name = os.path.abspath(os.path.join(root, name))
                 # Drop the source prefix plus the path separator so the
                 # arcname is relative to the folder root.
                 arcname = abs_name[len(abs_src) + 1:]
                 logger.info('adding file' + abs_name)
                 self._af.write(abs_name, arcname)
Ejemplo n.º 6
0
 def unzip(self, extractTo, deleteOrigin=False):
     """Extract the loaded archive into *extractTo*, showing a progress bar.

     Args:
         extractTo: destination directory for the extracted files.
         deleteOrigin: when True, remove the source zip after extraction.
     """
     uncompress_size = sum(info.file_size for info in self._af.infolist())
     pbar = tqdm(total=uncompress_size,
                 initial=0,
                 unit='B',
                 unit_scale=True,
                 desc="uncompressing " + self.filepath)
     for info in self._af.infolist():
         self._af.extract(info, extractTo)
         # tqdm.update() takes the *increment*, not the running total;
         # passing the cumulative size made the bar overshoot badly.
         pbar.update(info.file_size)
     pbar.close()
     # The original also called extractall() here, extracting every file
     # a second time — the per-file loop above already did the work.
     logger.info("Successfully unzip file")
     if deleteOrigin:
         os.remove(self.filepath)
         logger.info("Successfully delete original file")
Ejemplo n.º 7
0
 def start(self, notify_func=None, args=None):
     """Launch object-detection training from the configured pipeline file.

     Builds the model and input functions from ``self.config`` and hands
     them to ``trainer_m.train``. Logs and returns early when no config
     has been loaded; training errors are logged with a traceback rather
     than propagated.

     Args:
         notify_func: optional callback forwarded to the trainer.
         args: optional arguments forwarded alongside *notify_func*.
     """
     if self.config is None:
         logger.error('No Config Found')
         return
     train_pipeline_file = self.config['pipeline_config_file']
     configs = self._get_configs_from_pipeline_file(train_pipeline_file)
     model_config = configs['model']
     train_config = configs['train_config']
     input_config = configs['train_input_config']
     logger.info('Building Model')
     model_fn = functools.partial(model_builder.build,
                                  model_config=model_config,
                                  is_training=True)
     logger.info('creating input dict')
     create_input_dict_fn = functools.partial(self.get_next, input_config)
     # Single-machine defaults: one chief worker, no parameter servers.
     ps_tasks = 0
     worker_replicas = 1
     worker_job_name = 'obj_detection_trainer'
     task = 0
     is_chief = True
     master = ''
     num_clones = 1
     clone_on_cpu = False
     try:
         logger.info('Training Started')
         trainer_m.train(create_input_dict_fn, model_fn, train_config,
                         master, task, num_clones, worker_replicas,
                         clone_on_cpu, ps_tasks, worker_job_name, is_chief,
                         self.config, notify_func, args)
     except Exception:
         # Narrowed from a bare ``except`` so KeyboardInterrupt and
         # SystemExit still propagate.
         logger.error('Cannot Start Training')
         traceback.print_exc(file=sys.stdout)
Ejemplo n.º 8
0
 def init(self, model_path):
     """Initialize the model exactly once; repeated calls are no-ops."""
     if self.hasInitialized:
         logger.info('model has initialized, skipping')
         return
     self._init_model_(model_path)
Ejemplo n.º 9
0
 def init(self, model_name, model_path):
     """Initialize the named model exactly once; repeated calls are no-ops."""
     if self.hasInitialized:
         logger.info('model has been initialized, skip')
         return
     self._init_model_(model_name, model_path)