Example #1
    def post(self, url, data=None, headers={}):
        try:
            res = requests.post(self.url(url),
                                data=data,
                                headers=headers,
                                timeout=self.timeout)

            if res.status_code == 401:
                raise NodeResponseError(
                    "Unauthorized. Do you need to set a token?")
            elif res.status_code != 200 and res.status_code != 403:
                raise NodeServerError(res.status_code)

            if "Content-Type" in res.headers and "application/json" in res.headers[
                    'Content-Type']:
                result = res.json()
                if 'error' in result:
                    raise NodeResponseError(result['error'])
                return result
            else:
                return res
        except json.decoder.JSONDecodeError as e:
            raise NodeServerError(str(e))
        except (requests.exceptions.Timeout,
                requests.exceptions.ConnectionError) as e:
            raise NodeConnectionError(str(e))
Example #2
    def get(self, url, query={}, **kwargs):
        try:
            res = requests.get(self.url(url, query),
                               timeout=self.timeout,
                               **kwargs)
            if res.status_code == 401:
                raise NodeResponseError(
                    "Unauthorized. Do you need to set a token?")
            elif res.status_code not in [200, 403, 206]:
                raise NodeServerError("Unexpected status code: %s" %
                                      res.status_code)

            if "Content-Type" in res.headers and "application/json" in res.headers[
                    'Content-Type']:
                result = res.json()
                if 'error' in result:
                    raise NodeResponseError(result['error'])
                return result
            else:
                return res
        except json.decoder.JSONDecodeError as e:
            raise NodeServerError(str(e))
        except (requests.exceptions.Timeout,
                requests.exceptions.ConnectionError) as e:
            raise NodeConnectionError(str(e))
Example #3
 def handle_task_new_response(self, result):
     if isinstance(result, dict) and 'uuid' in result:
         return Task(self, result['uuid'])
     elif isinstance(result, dict) and 'error' in result:
         raise NodeResponseError(result['error'])
     else:
         raise NodeServerError('Invalid response: ' + str(result))
Example #4
File: api.py Project: testerarm/clusterodm
    def create_task_fallback(self, files, options={}, name=None, progress_callback=None):
        # Pre chunked API create task implementation, used as fallback
        if len(files) == 0:
            raise NodeResponseError("Not enough images")

        # Equivalent to passing the open file descriptor, since requests
        # eventually calls read(), but this way we make sure to close
        # each file before reading the next, so we don't run into OS open-file limits
        def read_file(file_path):
            with open(file_path, 'rb') as f:
                return f.read()

        fields = {
            'name': name,
            'options': options_to_json(options),
            'images': [(os.path.basename(f), read_file(f), (mimetypes.guess_type(f)[0] or "image/jpg")) for
                       f in files]
        }

        def create_callback(mpe):
            total_bytes = mpe.len

            def callback(monitor):
                if progress_callback is not None and total_bytes > 0:
                    progress_callback(100.0 * monitor.bytes_read / total_bytes)

            return callback

        e = MultipartEncoder(fields=fields)
        m = encoder.MultipartEncoderMonitor(e, create_callback(e))

        result = self.post('/task/new', data=m, headers={'Content-Type': m.content_type})

        return self.handle_task_new_response(result)
Example #5
            def worker():
                while True:
                    task = q.get()
                    if task is None or nonloc.error is not None:
                        q.task_done()
                        break

                    # Upload file
                    if task['wait_until'] > datetime.datetime.now():
                        time.sleep((task['wait_until'] -
                                    datetime.datetime.now()).seconds)

                    try:
                        file = task['file']
                        fields = {
                            'images':
                            [(os.path.basename(file), read_file(file),
                              (mimetypes.guess_type(file)[0] or "image/jpg"))]
                        }

                        e = MultipartEncoder(fields=fields)
                        result = self.post(
                            '/task/new/upload/{}'.format(uuid),
                            data=e,
                            headers={'Content-Type': e.content_type})

                        if isinstance(
                                result, dict
                        ) and 'success' in result and result['success']:
                            uf = nonloc.uploaded_files.increment()
                            if progress_event is not None:
                                progress_event.set()
                        else:
                            if isinstance(result, dict) and 'error' in result:
                                raise NodeResponseError(result['error'])
                            else:
                                raise NodeServerError(
                                    "Failed upload with unexpected result: %s"
                                    % str(result))
                    except OdmError as e:
                        if task['retries'] < max_retries and not (
                                isinstance(result, dict)
                                and 'noRetry' in result and result['noRetry']):
                            # Put task back in queue
                            task['retries'] += 1
                            task['wait_until'] = datetime.datetime.now(
                            ) + datetime.timedelta(seconds=task['retries'] *
                                                   retry_timeout)
                            q.put(task)
                        else:
                            nonloc.error = e
                    except Exception as e:
                        nonloc.error = e
                    finally:
                        q.task_done()
Example #6
File: api.py Project: testerarm/clusterodm
    def get(self, url, query={}, **kwargs):
        try:
            print("***** PYODM get method " +  str(datetime.datetime.now()) + " url " + str(url))

            res = requests.get(self.url(url, query), timeout=self.timeout, **kwargs)
            #print("get Len(res): "+ str(len(res)))
            if res.status_code == 401:
                raise NodeResponseError("Unauthorized. Do you need to set a token?")
            elif res.status_code not in [200, 403, 206]:
                raise NodeServerError("Unexpected status code: %s" % res.status_code)

            if "Content-Type" in res.headers and "application/json" in res.headers['Content-Type']:
		print("get size " + str(res.headers.get('content-length')) + ' url ' + str(url))
                result = res.json()
                if 'error' in result:
                    raise NodeResponseError(result['error'])
                return result
            else:
                return res
        except json.decoder.JSONDecodeError as e:
            raise NodeServerError(str(e))
        except (requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e:
            raise NodeConnectionError(str(e))
Example #7
File: api.py Project: testerarm/clusterodm
    def post(self, url, data=None, headers={}):
        try:
	    print("***** PYODM post method " +  str(datetime.datetime.now()) + " url " + str(url))
            res = requests.post(self.url(url), data=data, headers=headers, timeout=self.timeout)
            #print("post Len(res): "+ str(len(res)))

            if res.status_code == 401:
                raise NodeResponseError("Unauthorized. Do you need to set a token?")
            elif res.status_code != 200 and res.status_code != 403:
                raise NodeServerError(res.status_code)

            if "Content-Type" in res.headers and "application/json" in res.headers['Content-Type']:
                print("response size " + str(res.headers.get('content-length')) + ' url ' + str(url))

                result = res.json()
                if 'error' in result:
                    raise NodeResponseError(result['error'])
                return result
            else:
                return res
        except json.decoder.JSONDecodeError as e:
            raise NodeServerError(str(e))
        except (requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e:
            raise NodeConnectionError(str(e))
Example #8
File: api.py Project: zachaller/PyODM
    def create_task_fallback(self, files, options={}, name=None, progress_callback=None):
        # Pre chunked API create task implementation, used as fallback
        if len(files) == 0:
            raise NodeResponseError("Not enough images")

        # Equivalent to passing the open file descriptor, since requests
        # eventually calls read(), but this way we make sure to close
        # each file before reading the next, so we don't run into OS open-file limits
        def read_file(file_path):
            with open(file_path, 'rb') as f:
                return f.read()

        fields = {
            'name': name,
            'options': options_to_json(options),
            'images': [(os.path.basename(f), read_file(f), (mimetypes.guess_type(f)[0] or "image/jpg")) for
                       f in files]
Example #9
    def download_zip(self,
                     destination,
                     progress_callback=None,
                     parallel_downloads=16,
                     parallel_chunks_size=10):
        """Download this task's assets archive to a directory.

        Args:
            destination (str): directory where to download the assets archive. If the directory does not exist, it will be created.
            progress_callback (function): an optional callback with one parameter, the download progress percentage.
            parallel_downloads (int): maximum number of parallel downloads if the node supports http range.
            parallel_chunks_size (int): size in MB of chunks for parallel downloads
        Returns:
            str: path to archive file (.zip)
        """
        info = self.info()
        if info.status != TaskStatus.COMPLETED:
            raise NodeResponseError("Cannot download task, task status is " +
                                    str(info.status))

        if not os.path.exists(destination):
            os.makedirs(destination, exist_ok=True)

        try:
            download_stream = self.get('/task/{}/download/all.zip'.format(
                self.uuid),
                                       stream=True)
            headers = download_stream.headers

            zip_path = os.path.join(
                destination, "{}_{}_all.zip".format(self.uuid,
                                                    int(time.time())))

            # Keep track of download progress (if possible)
            content_length = download_stream.headers.get('content-length')
            total_length = int(
                content_length) if content_length is not None else None
            downloaded = 0
            chunk_size = int(parallel_chunks_size * 1024 * 1024)
            use_fallback = False
            accept_ranges = headers.get('accept-ranges')

            # Can we do parallel downloads?
            if accept_ranges is not None and accept_ranges.lower(
            ) == 'bytes' and total_length is not None and total_length > chunk_size and parallel_downloads > 1:
                num_chunks = int(math.ceil(total_length / float(chunk_size)))
                num_workers = parallel_downloads

                class nonloc:
                    completed_chunks = AtomicCounter(0)
                    merge_chunks = [False] * num_chunks
                    error = None

                def merge():
                    current_chunk = 0

                    with open(zip_path, "wb") as out_file:
                        while current_chunk < num_chunks and nonloc.error is None:
                            if nonloc.merge_chunks[current_chunk]:
                                chunk_file = "%s.part%s" % (zip_path,
                                                            current_chunk)
                                with open(chunk_file, "rb") as fd:
                                    out_file.write(fd.read())

                                os.unlink(chunk_file)

                                current_chunk += 1
                            else:
                                time.sleep(0.1)

                def worker():
                    while True:
                        task = q.get()
                        part_num, bytes_range = task
                        if bytes_range is None or nonloc.error is not None:
                            q.task_done()
                            break

                        try:
                            # Download chunk
                            res = self.get(
                                '/task/{}/download/all.zip'.format(self.uuid),
                                stream=True,
                                headers={'Range': 'bytes=%s-%s' % bytes_range})
                            if res.status_code == 206:
                                with open("%s.part%s" % (zip_path, part_num),
                                          'wb') as fd:
                                    bytes_written = 0
                                    try:
                                        for chunk in res.iter_content(4096):
                                            bytes_written += fd.write(chunk)
                                    except (requests.exceptions.Timeout,
                                            requests.exceptions.ConnectionError
                                            ) as e:
                                        raise NodeConnectionError(str(e))

                                    if bytes_written != (bytes_range[1] -
                                                         bytes_range[0] + 1):
                                        # Process again
                                        q.put((part_num, bytes_range))
                                        return

                                with nonloc.completed_chunks.lock:
                                    nonloc.completed_chunks.value += 1

                                    if progress_callback is not None:
                                        progress_callback(
                                            100.0 *
                                            nonloc.completed_chunks.value /
                                            num_chunks)

                                nonloc.merge_chunks[part_num] = True
                            else:
                                nonloc.error = RangeNotAvailableError()
                        except OdmError as e:
                            time.sleep(5)
                            q.put((part_num, bytes_range))
                        except Exception as e:
                            nonloc.error = e
                        finally:
                            q.task_done()

                q = queue.PriorityQueue()
                threads = []
                for i in range(num_workers):
                    t = threading.Thread(target=worker)
                    t.start()
                    threads.append(t)

                merge_thread = threading.Thread(target=merge)
                merge_thread.start()

                range_start = 0

                for i in range(num_chunks):
                    range_end = min(range_start + chunk_size - 1,
                                    total_length - 1)
                    q.put((i, (range_start, range_end)))
                    range_start = range_end + 1

                # block until all tasks are done
                while not all(nonloc.merge_chunks) and nonloc.error is None:
                    time.sleep(0.1)

                # stop workers
                for i in range(len(threads)):
                    q.put((-1, None))
                for t in threads:
                    t.join()

                merge_thread.join()

                if nonloc.error is not None:
                    if isinstance(nonloc.error, RangeNotAvailableError):
                        use_fallback = True
                    else:
                        raise nonloc.error
            else:
                use_fallback = True

            if use_fallback:
                # Single connection, boring download
                with open(zip_path, 'wb') as fd:
                    for chunk in download_stream.iter_content(4096):
                        downloaded += len(chunk)

                        if progress_callback is not None and total_length is not None:
                            progress_callback(
                                (100.0 * float(downloaded) / total_length))

                        fd.write(chunk)

        except (requests.exceptions.Timeout,
                requests.exceptions.ConnectionError, ReadTimeoutError) as e:
            raise NodeConnectionError(e)

        return zip_path
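
The docstring above spells out download_zip's parameters; a minimal usage sketch follows, assuming a locally reachable NodeODM instance (the host, port, and image paths are placeholders, not part of the original example):

from pyodm import Node
from pyodm.exceptions import NodeConnectionError, NodeResponseError

# Hypothetical node address and image paths; substitute your own.
node = Node('localhost', 3000)
task = node.create_task(['examples/images/image_1.jpg', 'examples/images/image_2.jpg'])
task.wait_for_completion()

try:
    # Download all.zip into ./results, printing the download progress percentage.
    archive_path = task.download_zip('./results',
                                     progress_callback=lambda p: print('%.1f%%' % p),
                                     parallel_downloads=8)
    print('Assets archive saved to', archive_path)
except (NodeConnectionError, NodeResponseError) as e:
    print('Download failed:', e)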
Example #10
 def post(self, url, data):
     result = self.node.post(url, data)
     if isinstance(result, dict) and 'error' in result:
         raise NodeResponseError(result['error'])
     return result
Example #11
 def get(self, url, query={}, **kwargs):
     result = self.node.get(url, query, **kwargs)
     if isinstance(result, dict) and 'error' in result:
         raise NodeResponseError(result['error'])
     return result
Example #12
    def create_task(self,
                    files,
                    options={},
                    name=None,
                    progress_callback=None,
                    skip_post_processing=False,
                    webhook=None,
                    outputs=[],
                    parallel_uploads=10,
                    max_retries=5,
                    retry_timeout=5):
        """Start processing a new task.
        At a minimum you need to pass a list of image paths. All other parameters are optional.

        >>> n = Node('localhost', 3000)
        >>> t = n.create_task(['examples/images/image_1.jpg', 'examples/images/image_2.jpg'], \
                          {'orthophoto-resolution': 2, 'dsm': True})
        >>> info = t.info()
        >>> info.status
        <TaskStatus.RUNNING: 20>
        >>> info.last_error
        ''
        >>> t.info().images_count
        2
        >>> t.output()[0:2]
        ['DJI_0131.JPG - DJI_0313.JPG has 1 candidate matches', 'DJI_0131.JPG - DJI_0177.JPG has 3 candidate matches']

        Args:
            files (list): list of image paths + optional GCP file path.
            options (dict): options to use, for example {'orthophoto-resolution': 3, ...}
            name (str): name for the task
            progress_callback (function): callback reporting upload progress percentage
            skip_post_processing (bool): When true, skips generation of map tiles, derivative assets, and point cloud tiles.
            webhook (str): Optional URL to call when processing has ended (either successfully or unsuccessfully).
            outputs (list): Optional paths relative to the project directory that should be included in the all.zip result file, overriding the default behavior.
            parallel_uploads (int): Number of parallel uploads.
            max_retries (int): Number of attempts to make before giving up on a file upload.
            retry_timeout (int): Wait at least this many seconds before attempting to upload a file a second time, multiplied by the retry number.
        Returns:
            :func:`~Task`
        """
        if not self.version_greater_or_equal_than("1.4.0"):
            return self.create_task_fallback(files, options, name,
                                             progress_callback)

        if len(files) == 0:
            raise NodeResponseError("Not enough images")

        fields = {
            'name': name,
            'options': options_to_json(options),
        }

        if skip_post_processing:
            fields['skipPostProcessing'] = 'true'

        if webhook is not None:
            fields['webhook'] = webhook

        if outputs:
            fields['outputs'] = json.dumps(outputs)

        e = MultipartEncoder(fields=fields)

        result = self.post('/task/new/init',
                           data=e,
                           headers={'Content-Type': e.content_type})
        if isinstance(result, dict) and 'error' in result:
            raise NodeResponseError(result['error'])

        if isinstance(result, dict) and 'uuid' in result:
            uuid = result['uuid']
            progress_event = None

            class nonloc:
                uploaded_files = AtomicCounter(0)
                error = None

            # Equivalent to passing the open file descriptor, since requests
            # eventually calls read(), but this way we make sure to close
            # each file before reading the next, so we don't run into OS open-file limits
            def read_file(file_path):
                if Node.prefixHttp.match(file_path) or Node.prefixHttps.match(
                        file_path):
                    return requests.get(file_path).content
                else:
                    with open(file_path, 'rb') as f:
                        return f.read()

            # Upload
            def worker():
                while True:
                    task = q.get()
                    if task is None or nonloc.error is not None:
                        q.task_done()
                        break

                    # Upload file
                    if task['wait_until'] > datetime.datetime.now():
                        time.sleep((task['wait_until'] -
                                    datetime.datetime.now()).seconds)

                    try:
                        file = task['file']
                        fields = {
                            'images':
                            [(os.path.basename(file), read_file(file),
                              (mimetypes.guess_type(file)[0] or "image/jpg"))]
                        }

                        e = MultipartEncoder(fields=fields)
                        result = self.post(
                            '/task/new/upload/{}'.format(uuid),
                            data=e,
                            headers={'Content-Type': e.content_type})

                        if isinstance(
                                result, dict
                        ) and 'success' in result and result['success']:
                            uf = nonloc.uploaded_files.increment()
                            if progress_event is not None:
                                progress_event.set()
                        else:
                            if isinstance(result, dict) and 'error' in result:
                                raise NodeResponseError(result['error'])
                            else:
                                raise NodeServerError(
                                    "Failed upload with unexpected result: %s"
                                    % str(result))
                    except OdmError as e:
                        if task['retries'] < max_retries and not (
                                isinstance(result, dict)
                                and 'noRetry' in result and result['noRetry']):
                            # Put task back in queue
                            task['retries'] += 1
                            task['wait_until'] = datetime.datetime.now(
                            ) + datetime.timedelta(seconds=task['retries'] *
                                                   retry_timeout)
                            q.put(task)
                        else:
                            nonloc.error = e
                    except Exception as e:
                        nonloc.error = e
                    finally:
                        q.task_done()

            q = queue.Queue()
            threads = []
            for i in range(parallel_uploads):
                t = threading.Thread(target=worker)
                t.start()
                threads.append(t)

            if progress_callback is not None:
                progress_event = threading.Event()

            now = datetime.datetime.now()
            for file in files:
                q.put({'file': file, 'wait_until': now, 'retries': 0})

            # Wait for progress updates
            if progress_event is not None:
                current_progress = 0
                while not q.empty():
                    if progress_event.wait(0.1):
                        progress_event.clear()
                        current_progress = 100.0 * nonloc.uploaded_files.value / len(
                            files)
                        try:
                            progress_callback(current_progress)
                        except Exception as e:
                            nonloc.error = e
                    if nonloc.error is not None:
                        break

                # Make sure to report 100% complete
                if current_progress != 100 and nonloc.error is None:
                    try:
                        progress_callback(100.0)
                    except Exception as e:
                        nonloc.error = e

            # block until all tasks are done
            if nonloc.error is None:
                q.join()

            # stop workers
            for i in range(parallel_uploads):
                q.put(None)
            for t in threads:
                t.join()

            if nonloc.error is not None:
                raise nonloc.error

            result = self.post('/task/new/commit/{}'.format(uuid))
            return self.handle_task_new_response(result)
        else:
            raise NodeServerError("Invalid response from /task/new/init: %s" %
                                  result)
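
Besides the doctest in the docstring, the optional parameters of create_task can be combined as needed; a short sketch, assuming a reachable node (the webhook URL, output path, and progress handler below are illustrative, not part of the original example):

from pyodm import Node

node = Node('localhost', 3000)

def on_upload_progress(pct):
    # Receives the upload progress percentage (0-100) as files finish uploading.
    print('Uploaded %.1f%%' % pct)

# Hypothetical image list, webhook URL, and output path; substitute your own values.
task = node.create_task(['examples/images/image_1.jpg', 'examples/images/image_2.jpg'],
                        options={'dsm': True, 'orthophoto-resolution': 4},
                        name='survey-1',
                        progress_callback=on_upload_progress,
                        skip_post_processing=True,
                        webhook='http://example.com/odm-webhook',
                        outputs=['odm_orthophoto/odm_orthophoto.tif'],
                        parallel_uploads=4,
                        max_retries=3,
                        retry_timeout=10)
print('Created task', task.uuid)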