def create_task_fallback(self, files, options=None, name=None, progress_callback=None):
    """Pre-chunked /task/new API implementation, used as a fallback
    for nodes that lack the init/upload/commit flow.

    All images are read into memory and sent in a single multipart
    request; a MultipartEncoderMonitor reports upload progress.

    Args:
        files (list): list of image paths + optional GCP file path.
        options (dict): processing options, e.g. {'dsm': True}. Defaults to {}.
        name (str): optional name for the task.
        progress_callback (function): called with the upload percentage (0-100).

    Returns:
        :func:`~Task`

    Raises:
        NodeResponseError: if no images were passed or the node rejected the task.
    """
    # Avoid a mutable default argument; preserve the old `options={}` behavior.
    if options is None:
        options = {}

    if not files:
        raise NodeResponseError("Not enough images")

    # Equivalent as passing the open file descriptor, since requests
    # eventually calls read(), but this way we make sure to close
    # the file prior to reading the next, so we don't run into open file OS limits
    def read_file(file_path):
        with open(file_path, 'rb') as f:
            return f.read()

    fields = {
        'name': name,
        'options': options_to_json(options),
        'images': [(os.path.basename(f), read_file(f), (mimetypes.guess_type(f)[0] or "image/jpg")) for f in files]
    }

    def create_callback(mpe):
        # Capture the total multipart payload size once; the monitor
        # callback reports percentage progress against it.
        total_bytes = mpe.len

        def callback(monitor):
            if progress_callback is not None and total_bytes > 0:
                progress_callback(100.0 * monitor.bytes_read / total_bytes)
        return callback

    e = MultipartEncoder(fields=fields)
    m = encoder.MultipartEncoderMonitor(e, create_callback(e))

    result = self.post('/task/new', data=m, headers={'Content-Type': m.content_type})
    return self.handle_task_new_response(result)
def worker():
    """Queue consumer: uploads one image per task dict pulled from `q`.

    Each queued task is {'file': path, 'wait_until': datetime, 'retries': int}.
    On a retryable node error the task is re-queued with linear backoff;
    on a fatal error nonloc.error is set so other workers drain and stop.
    A None task is the shutdown sentinel.

    NOTE(review): this closure reads `q`, `nonloc`, `uuid`, `read_file`,
    `progress_event`, `max_retries` and `retry_timeout` from an enclosing
    scope that is not visible at this occurrence — confirm it is defined
    inside the upload method it belongs to.
    """
    while True:
        task = q.get()
        # Sentinel, or another worker already failed: stop consuming.
        if task is None or nonloc.error is not None:
            q.task_done()
            break

        # Honor the retry backoff deadline before re-attempting the upload.
        # total_seconds() instead of .seconds: .seconds truncates to whole
        # seconds (and ignores the days component).
        if task['wait_until'] > datetime.datetime.now():
            time.sleep((task['wait_until'] - datetime.datetime.now()).total_seconds())

        # Bind result before the try block: the except handler below reads
        # it, and if post()/read_file() raised before the assignment it was
        # previously unbound, turning the real error into UnboundLocalError.
        result = None
        try:
            file = task['file']
            fields = {
                'images': [(os.path.basename(file), read_file(file), (mimetypes.guess_type(file)[0] or "image/jpg"))]
            }
            e = MultipartEncoder(fields=fields)
            result = self.post('/task/new/upload/{}'.format(uuid), data=e, headers={'Content-Type': e.content_type})

            if isinstance(result, dict) and 'success' in result and result['success']:
                nonloc.uploaded_files.increment()
                if progress_event is not None:
                    progress_event.set()
            else:
                if isinstance(result, dict) and 'error' in result:
                    raise NodeResponseError(result['error'])
                else:
                    raise NodeServerError("Failed upload with unexpected result: %s" % str(result))
        except OdmError as e:
            if task['retries'] < max_retries and not (isinstance(result, dict) and 'noRetry' in result and result['noRetry']):
                # Put task back in queue with linear backoff
                task['retries'] += 1
                task['wait_until'] = datetime.datetime.now() + datetime.timedelta(seconds=task['retries'] * retry_timeout)
                q.put(task)
            else:
                nonloc.error = e
        except Exception as e:
            nonloc.error = e
        finally:
            q.task_done()
def create_task(self, files, options=None, name=None, progress_callback=None,
                skip_post_processing=False, webhook=None, outputs=None,
                parallel_uploads=10, max_retries=5, retry_timeout=5):
    """Start processing a new task. At a minimum you need to pass a list of image paths. All other parameters are optional.

    >>> n = Node('localhost', 3000)
    >>> t = n.create_task(['examples/images/image_1.jpg', 'examples/images/image_2.jpg'], \
            {'orthophoto-resolution': 2, 'dsm': True})
    >>> info = t.info()
    >>> info.status
    <TaskStatus.RUNNING: 20>
    >>> info.last_error
    ''
    >>> t.info().images_count
    2
    >>> t.output()[0:2]
    ['DJI_0131.JPG - DJI_0313.JPG has 1 candidate matches', 'DJI_0131.JPG - DJI_0177.JPG has 3 candidate matches']

    Args:
        files (list): list of image paths + optional GCP file path.
        options (dict): options to use, for example {'orthophoto-resolution': 3, ...}. Defaults to {}.
        name (str): name for the task
        progress_callback (function): callback reporting upload progress percentage
        skip_post_processing (bool): When true, skips generation of map tiles, derivate assets, point cloud tiles.
        webhook (str): Optional URL to call when processing has ended (either successfully or unsuccessfully).
        outputs (list): Optional paths relative to the project directory that should be included in the all.zip result file, overriding the default behavior. Defaults to [].
        parallel_uploads (int): Number of parallel uploads.
        max_retries (int): Number of attempts to make before giving up on a file upload.
        retry_timeout (int): Wait at least these many seconds before attempting to upload a file a second time, multiplied by the retry number.

    Returns:
        :func:`~Task`
    """
    # Avoid mutable default arguments ({} / []) while preserving the
    # original defaults for callers that omit them.
    if options is None:
        options = {}
    if outputs is None:
        outputs = []

    # Older nodes only support the single-request /task/new endpoint.
    if not self.version_greater_or_equal_than("1.4.0"):
        return self.create_task_fallback(files, options, name, progress_callback)

    if len(files) == 0:
        raise NodeResponseError("Not enough images")

    fields = {
        'name': name,
        'options': options_to_json(options),
    }

    if skip_post_processing:
        fields['skipPostProcessing'] = 'true'

    if webhook is not None:
        fields['webhook'] = webhook

    if outputs:
        fields['outputs'] = json.dumps(outputs)

    e = MultipartEncoder(fields=fields)

    result = self.post('/task/new/init', data=e, headers={'Content-Type': e.content_type})
    if isinstance(result, dict) and 'error' in result:
        raise NodeResponseError(result['error'])

    if isinstance(result, dict) and 'uuid' in result:
        uuid = result['uuid']
        progress_event = None

        class nonloc:
            # Mutable state shared between this scope and the worker threads.
            uploaded_files = AtomicCounter(0)
            error = None

        # Equivalent as passing the open file descriptor, since requests
        # eventually calls read(), but this way we make sure to close
        # the file prior to reading the next, so we don't run into open file OS limits
        def read_file(file_path):
            if Node.prefixHttp.match(file_path) or Node.prefixHttps.match(file_path):
                return requests.get(file_path).content
            else:
                with open(file_path, 'rb') as f:
                    return f.read()

        # Upload
        def worker():
            while True:
                task = q.get()
                # Sentinel, or another worker already failed: stop consuming.
                if task is None or nonloc.error is not None:
                    q.task_done()
                    break

                # Upload file, honoring the retry backoff deadline.
                # total_seconds() instead of .seconds: .seconds truncates
                # to whole seconds (and ignores the days component).
                if task['wait_until'] > datetime.datetime.now():
                    time.sleep((task['wait_until'] - datetime.datetime.now()).total_seconds())

                # Bind result before the try block: the except handler below
                # reads it, and if post()/read_file() raised before the
                # assignment it was previously unbound, turning the real
                # error into an UnboundLocalError.
                result = None
                try:
                    file = task['file']
                    fields = {
                        'images': [(os.path.basename(file), read_file(file), (mimetypes.guess_type(file)[0] or "image/jpg"))]
                    }
                    e = MultipartEncoder(fields=fields)
                    result = self.post('/task/new/upload/{}'.format(uuid), data=e, headers={'Content-Type': e.content_type})

                    if isinstance(result, dict) and 'success' in result and result['success']:
                        nonloc.uploaded_files.increment()
                        if progress_event is not None:
                            progress_event.set()
                    else:
                        if isinstance(result, dict) and 'error' in result:
                            raise NodeResponseError(result['error'])
                        else:
                            raise NodeServerError("Failed upload with unexpected result: %s" % str(result))
                except OdmError as e:
                    if task['retries'] < max_retries and not (isinstance(result, dict) and 'noRetry' in result and result['noRetry']):
                        # Put task back in queue with linear backoff
                        task['retries'] += 1
                        task['wait_until'] = datetime.datetime.now() + datetime.timedelta(seconds=task['retries'] * retry_timeout)
                        q.put(task)
                    else:
                        nonloc.error = e
                except Exception as e:
                    nonloc.error = e
                finally:
                    q.task_done()

        q = queue.Queue()

        threads = []
        for i in range(parallel_uploads):
            t = threading.Thread(target=worker)
            t.start()
            threads.append(t)

        if progress_callback is not None:
            progress_event = threading.Event()

        now = datetime.datetime.now()
        for file in files:
            q.put({'file': file, 'wait_until': now, 'retries': 0})

        # Wait for progress updates
        if progress_event is not None:
            current_progress = 0
            while not q.empty():
                if progress_event.wait(0.1):
                    progress_event.clear()
                    current_progress = 100.0 * nonloc.uploaded_files.value / len(files)
                    try:
                        progress_callback(current_progress)
                    except Exception as e:
                        nonloc.error = e
                if nonloc.error is not None:
                    break

            # Make sure to report 100% complete
            if current_progress != 100 and nonloc.error is None:
                try:
                    progress_callback(100.0)
                except Exception as e:
                    nonloc.error = e

        # block until all tasks are done
        if nonloc.error is None:
            q.join()

        # stop workers
        for i in range(parallel_uploads):
            q.put(None)
        for t in threads:
            t.join()

        if nonloc.error is not None:
            raise nonloc.error

        result = self.post('/task/new/commit/{}'.format(uuid))
        return self.handle_task_new_response(result)
    else:
        raise NodeServerError("Invalid response from /task/new/init: %s" % result)
# NOTE(review): everything up to the first `def` below appears to be a
# duplicated fragment of the tail of create_task_fallback — its opening
# `def`/`fields = {` lines are not present at this occurrence. TODO:
# confirm against the full file and deduplicate.
    'name': name,
    'options': options_to_json(options),
    'images': [(os.path.basename(f), read_file(f), (mimetypes.guess_type(f)[0] or "image/jpg")) for f in files]
}

def create_callback(mpe):
    # Capture the total multipart payload size once; the monitor callback
    # reports percentage progress against it.
    total_bytes = mpe.len

    def callback(monitor):
        if progress_callback is not None and total_bytes > 0:
            progress_callback(100.0 * monitor.bytes_read / total_bytes)
    return callback

e = MultipartEncoder(fields=fields)
m = encoder.MultipartEncoderMonitor(e, create_callback(e))

result = self.post('/task/new', data=m, headers={'Content-Type': m.content_type})
return self.handle_task_new_response(result)

def handle_task_new_response(self, result):
    """Turn a /task/new (or /task/new/commit) response into a Task.

    Args:
        result: parsed JSON response from the node.

    Returns:
        :func:`~Task` bound to the uuid reported by the node.

    Raises:
        NodeResponseError: the node reported an error message.
        NodeServerError: the response contained neither 'uuid' nor 'error'.
    """
    if isinstance(result, dict) and 'uuid' in result:
        return Task(self, result['uuid'])
    elif isinstance(result, dict) and 'error' in result:
        raise NodeResponseError(result['error'])
    else:
        raise NodeServerError('Invalid response: ' + str(result))

def get_task(self, uuid):
    # (definition continues beyond this chunk — body not visible here)