def test_file_action_with_values():
    """Check FileAction export and progress when built from explicit values."""
    action = FileAction("Mocking", Path("fake/test.odt"), 42)
    assert action.type == "Mocking"

    # repr() while .get_percent() is still 0
    assert repr(action) == "Mocking('test.odt'[42])"

    # .get_percent() is exercised through .export()
    exported = action.export()
    assert exported["size"] == 42
    assert exported["name"] == "test.odt"
    assert exported["filepath"] == f"fake{os.path.sep}test.odt"

    # Progress setter when .progress < .size
    action.progress = 24.5
    exported = action.export()
    assert exported["progress"] == 24.5 * 100 / 42.0

    # Progress setter when .progress >= .size
    action.progress = 222.0
    exported = action.export()
    assert exported["progress"] == 100.0
    assert exported["uploaded"]

    Action.finish_action()
def test_download_action():
    """A DownloadAction exposes the "Download" type and can be finished."""
    action = DownloadAction(Path("fake/test.odt"), 0)
    assert action.type == "Download"

    Action.finish_action()
    assert action.finished
def test_file_action(tmp):
    """A FileAction built from a real on-disk file exports correct details."""
    folder = tmp()
    folder.mkdir()
    testfile = folder / "test.txt"
    size = testfile.write_bytes(b"This is Sparta!")

    action = FileAction("Mocking", testfile, size)
    assert action.type == "Mocking"
    assert not action.empty

    # Will test .get_percent()
    exported = action.export()
    assert exported["action_type"] == "Mocking"
    assert exported["progress"] == 0.0
    assert isinstance(exported["uid"], str)
    assert exported["size"] == size
    assert exported["name"] == testfile.name
    assert exported["filepath"] == str(testfile)
    assert Action.get_current_action() is action

    # repr() once .get_percent() > 0
    action.size = 42
    action.progress = 4.2
    assert repr(action) == "Mocking('test.txt'[42]-10.0)"

    Action.finish_action()
    assert action.finished
def test_idle_action():
    """IdleAction has a fixed repr and type, and finishes cleanly."""
    action = IdleAction()
    assert repr(action) == "Idle"
    assert action.type == "Idle"

    Action.finish_action()
    assert action.finished
def change_systray_icon(self):
    """Recompute the systray icon state from the aggregated engine statuses.

    States (first match wins): 'stopping' when all engines are offline or
    all have invalid credentials, 'disabled' when there is no engine or all
    are paused, 'transferring' when at least one engine is syncing,
    'asleep' otherwise.
    """
    engines = self.manager.get_engines()
    syncing = False
    # BUG FIX: these aggregates were unconditionally initialized to True.
    # With zero engines the &= loop below never runs, so `offline` stayed
    # True and the icon wrongly showed 'stopping'/OFFLINE instead of
    # 'disabled'.  Seed them with "there is at least one engine" so an
    # empty engine list falls through to the 'disabled' branch.
    invalid_credentials = bool(engines)
    paused = bool(engines)
    offline = bool(engines)
    for engine in engines.itervalues():
        syncing |= engine.is_syncing()
        invalid_credentials &= engine.has_invalid_credentials()
        paused &= engine.is_paused()
        offline &= engine.is_offline()
    if offline:
        new_state = 'stopping'
        Action(Translator.get('OFFLINE'))
    elif invalid_credentials:
        new_state = 'stopping'
        Action(Translator.get('INVALID_CREDENTIALS'))
    elif not engines or paused:
        new_state = 'disabled'
        Action.finish_action()
    elif syncing:
        new_state = 'transferring'
    else:
        new_state = 'asleep'
        Action.finish_action()
    self.set_icon_state(new_state)
def _handle_changes(self, first_pass=False):
    """Poll and process remote changes, doing a full scan when needed.

    :param first_pass: True on the first invocation; emits ``initiate``
        instead of ``updated`` on success.
    :return: True on success, False when offline or on error.
        NOTE(review): the pending-full-scan branch does a bare ``return``
        (None) — callers treating the result as a strict bool should be
        checked.
    """
    log.debug("Handle remote changes, first_pass=%r", first_pass)
    self._client = self._check_offline()
    if self._client is None:
        # Offline: nothing to do until connectivity is back
        return False
    try:
        if self._last_remote_full_scan is None:
            log.debug("Remote full scan")
            self._action = Action("Remote scanning")
            self._scan_remote()
            self._end_action()
            # Might need to handle the changes now
            if first_pass:
                self.initiate.emit()
            return True
        full_scan = self._dao.get_config('remote_need_full_scan', None)
        if full_scan is not None:
            # A partial full scan is pending from a previous run
            self._partial_full_scan(full_scan)
            return
        else:
            paths = self._dao.get_paths_to_scan()
            while len(paths) > 0:
                # Recovery loop: persist the marker before scanning so an
                # interrupted scan resumes on next pass
                remote_ref = paths[0].path
                self._dao.update_config('remote_need_full_scan', remote_ref)
                self._partial_full_scan(remote_ref)
                paths = self._dao.get_paths_to_scan()
        self._action = Action("Handle remote changes")
        self._update_remote_states()
        self._save_changes_state()
        if first_pass:
            self.initiate.emit()
        else:
            self.updated.emit()
        return True
    except HTTPError as e:
        if e.code == 401 or e.code == 403:
            # Authentication problem: ask the user to re-authenticate
            self._engine.set_invalid_credentials(
                reason='got HTTPError %d while trying to handle remote changes' % e.code,
                exception=e)
        else:
            log.exception(e)
            self._engine.set_offline()
    except (BadStatusLine, URLError, socket.error) as e:
        # Pause the rest of the engine
        log.exception(e)
        self._engine.set_offline()
    except ThreadInterrupt as e:
        raise e
    except Exception as e:
        log.exception(e)
    finally:
        self._end_action()
    return False
def test_upload_action(tmp):
    """UploadAction exposes the "Upload" type and finishes cleanly."""
    folder = tmp()
    folder.mkdir()
    testfile = folder / "test-upload.txt"
    testfile.write_bytes(b"This is Sparta!")

    action = UploadAction(testfile, testfile.stat().st_size)
    assert action.type == "Upload"

    Action.finish_action()
    assert action.finished
def test_finalization_action(tmp):
    """LinkingAction exposes the "Linking" type and finishes cleanly."""
    folder = tmp()
    folder.mkdir()
    testfile = folder / "test.txt"
    testfile.write_bytes(b"This is Sparta!")

    action = LinkingAction(testfile, testfile.stat().st_size)
    assert action.type == "Linking"

    Action.finish_action()
    assert action.finished
def test_verification_action(tmp):
    """VerificationAction exposes the "Verification" type and finishes cleanly."""
    folder = tmp()
    folder.mkdir()
    testfile = folder / "test.txt"
    testfile.write_bytes(b"This is Sparta!")

    action = VerificationAction(testfile, testfile.stat().st_size)
    assert action.type == "Verification"

    Action.finish_action()
    assert action.finished
def _handle_changes(self, first_pass=False):
    """Poll and process remote changes, doing a full scan when needed.

    :param first_pass: True on the first invocation; emits ``initiate``
        instead of ``updated`` on success.
    :return: True on success, None when a pending partial full scan was
        processed, False when offline or on error.
    """
    log.trace('Handle remote changes, first_pass=%r', first_pass)
    self._client = self._check_offline()
    if self._client is None:
        # Offline: nothing to do until connectivity is back
        return False
    try:
        if self._last_remote_full_scan is None:
            log.trace('Remote full scan')
            self._action = Action('Remote scanning')
            self._scan_remote()
            self._end_action()
            # Might need to handle the changes now
            if first_pass:
                self.initiate.emit()
            return True
        full_scan = self._dao.get_config('remote_need_full_scan', None)
        if full_scan is not None:
            # A partial full scan is pending from a previous run
            self._partial_full_scan(full_scan)
            return None
        paths = self._dao.get_paths_to_scan()
        while paths:
            # Recovery loop: persist the marker before scanning so an
            # interrupted scan resumes on next pass
            remote_ref = paths[0].path
            self._dao.update_config('remote_need_full_scan', remote_ref)
            self._partial_full_scan(remote_ref)
            paths = self._dao.get_paths_to_scan()
        self._action = Action('Handle remote changes')
        self._update_remote_states()
        if first_pass:
            self.initiate.emit()
        else:
            self.updated.emit()
    except HTTPError as e:
        err = 'HTTP error %d while trying to handle remote changes' % e.code
        if e.code in (401, 403):
            # Authentication problem: ask the user to re-authenticate
            self._engine.set_invalid_credentials(reason=err, exception=e)
        else:
            log.exception(err)
        self._engine.set_offline()
    except (BadStatusLine, URLError, socket.error):
        # Pause the rest of the engine
        log.exception('Network error')
        self._engine.set_offline()
    except ThreadInterrupt:
        raise
    except:
        log.exception('Unexpected error')
    else:
        return True
    finally:
        self._end_action()
    return False
def _execute(self):
    """Main loop of the local watcher thread.

    Sets up the filesystem watchdog, performs the initial full local scan,
    then loops forever: drains the watchdog event queue every ~10 ms and,
    every 100 iterations (~1 s), re-scans paths whose last event is older
    than the scan delay.
    """
    try:
        self._init()
        if not self.client.exists('/'):
            # Sync root vanished: notify and stop this thread's work
            self.rootDeleted.emit()
            return
        self._action = Action("Setup watchdog")
        self._watchdog_queue = Queue()
        self._setup_watchdog()
        log.debug("Watchdog setup finished")
        self._action = Action("Full local scan")
        self._scan()
        self._end_action()
        # Check windows dequeue and folder scan only every 100 loops ( every 1s )
        current_time_millis = int(round(time() * 1000))
        self._win_delete_interval = current_time_millis
        self._win_folder_scan_interval = current_time_millis
        i = 0
        while (1):
            self._interact()
            sleep(0.01)
            while (not self._watchdog_queue.empty()):
                # Dont retest if already local scan
                evt = self._watchdog_queue.get()
                self.handle_watchdog_event(evt)
            # Check to scan
            i += 1
            if i % 100 != 0:
                continue
            i = 0
            threshold_time = current_milli_time() - 1000 * self._scan_delay
            # Need to create a list of to scan as the dictionary cannot grow while iterating
            local_scan = []
            for path, last_event_time in self._to_scan.iteritems():
                if last_event_time < threshold_time:
                    local_scan.append(path)
            for path in local_scan:
                self._scan_path(path)
                # Dont delete if the time has changed since last scan
                if self._to_scan[path] < threshold_time:
                    del self._to_scan[path]
            if (len(self._delete_files)):
                # Enforce scan of all others folders to not loose track of moved file
                self._scan_handle_deleted_files()
    except ThreadInterrupt:
        raise
    finally:
        # Always tear down the watchdog observers, even on interrupt
        self._stop_watchdog()
def get_action(self):
    """Return the most relevant action for this worker.

    Resolution order: the thread's registered current action, then the
    worker's own action, then a fresh IdleAction as a fallback.
    """
    current = Action.get_current_action(self._thread_id)
    if current is not None:
        return current
    if self._action is not None:
        return self._action
    return IdleAction()
def get_tooltip(self):
    """Build the systray tooltip from the first registered action."""
    actions = Action.get_actions()
    if not actions:
        return self.get_default_tooltip()

    # Display only the first action for now
    # TODO Get all actions ? or just file action
    action = next(actions.itervalues())
    if action is None:
        return self.get_default_tooltip()

    if isinstance(action, FileAction):
        if action.get_percent() is not None:
            return "%s - %s - %s - %d%%" % (
                self.get_default_tooltip(), action.type,
                action.filename, action.get_percent())
        return "%s - %s - %s" % (
            self.get_default_tooltip(), action.type, action.filename)
    if action.get_percent() is not None:
        return "%s - %s - %d%%" % (
            self.get_default_tooltip(), action.type, action.get_percent())
    return "%s - %s" % (self.get_default_tooltip(), action.type)
def function(a, b=1):
    # Body of a @tooltip-decorated helper (see test_tooltip): the decorator
    # is expected to have registered exactly one current Action by the time
    # this body runs.
    # There should be 1 action, automatically created by the decorator
    action = Action.get_current_action()
    assert action
    assert action.type == "Testing tooltip!"
    return a * b
def get_tooltip(self):
    """Build the systray tooltip from the first relevant action.

    Actions whose type starts with '_' are considered internal and skipped.
    """
    actions = Action.get_actions()
    if not actions:
        return self.default_tooltip

    # Display only the first action for now
    action = next(
        (candidate for candidate in actions.itervalues()
         if candidate and not candidate.type.startswith('_')),
        None)
    if action is None:
        return self.default_tooltip

    if isinstance(action, FileAction):
        if action.get_percent() is not None:
            return '%s - %s - %s - %d%%' % (
                self.default_tooltip,
                action.type,
                action.filename,
                action.get_percent(),
            )
        return '%s - %s - %s' % (
            self.default_tooltip,
            action.type,
            action.filename,
        )
    if action.get_percent() is not None:
        return '%s - %s - %d%%' % (
            self.default_tooltip,
            action.type,
            action.get_percent(),
        )
    return '%s - %s' % (
        self.default_tooltip,
        action.type,
    )
def test_tooltip():
    """The @tooltip decorator wraps the call in its own Action."""

    @tooltip("Testing tooltip!")
    def function(a, b=1):
        # There should be 1 action, automatically created by the decorator
        current = Action.get_current_action()
        assert current
        assert current.type == "Testing tooltip!"
        return a * b

    # There is no Action right now
    assert Action.get_current_action() is None

    function(4.2, b=10)

    # There should be no action now that the function has been called
    assert Action.get_current_action() is None
def test_file_action_signals():
    """Try to mimic QThread signals to test ._connect_reporter()."""

    class FakeReporter:
        def action_started(self):
            pass

        def action_progressing(self):
            pass

        def action_done(self):
            pass

    action = FileAction(
        "Mocking", Path("fake/test.odt"), size=42, reporter=FakeReporter())

    Action.finish_action()
    assert action.finished
def test_file_action_inexistant_file(tmp):
    """A FileAction on a missing file is empty and not uploaded."""
    folder = tmp()
    folder.mkdir()
    missing = folder / "test.txt"

    action = FileAction("Mocking", missing, 0)
    assert action.empty
    assert not action.uploaded

    exported = action.export()
    assert exported["action_type"] == "Mocking"
    assert exported["progress"] == 0.0
    assert isinstance(exported["uid"], str)
    assert exported["size"] == 0
    assert exported["name"] == missing.name
    assert exported["filepath"] == str(missing)

    Action.finish_action()
def _update_speed_metrics(self):
    """Record the transfer speed of the last finished file action.

    Skips the update when no file action exists or when the measured
    duration is non-positive (clock resolution too coarse).
    """
    last = Action.get_last_file_action()
    if not last:
        return
    duration = last.end_time - last.start_time
    # Too fast for clock resolution
    if duration <= 0:
        return
    speed = last.size / duration * 1000
    log.trace("Transfer speed %d ko/s", speed / 1024)
    self._current_metrics["speed"] = speed
def _do_update(self, version):
    """Fetch and install *version* through Esky, reporting progress.

    :param version: the target application version string.
    """
    log.info("Starting application update process")
    log.info("Fetching version %s from update site %s",
             version, self.update_site)
    # Expose a progress-bearing Action so the UI can display the download
    self.action = Action("Downloading %s version" % version)
    self.action.progress = 0
    self._update_action(self.action)
    self.esky_app.fetch_version(version, self._update_callback)
    log.info("Installing version %s", version)
    self._update_action(Action("Installing %s version" % version))
    self.esky_app.install_version(version)
    log.debug("Reinitializing Esky internal state")
    self.action.type = "Reinitializing"
    self.esky_app.reinitialize()
    log.info("Ended application update process")
    self._end_action()
def handle_watchdog_event(self, evt):
    """React to a filesystem event inside a DirectEdit folder.

    Queues lock/unlock requests for editor lock files and upload requests
    for modified/created/moved edited documents.
    """
    self._action = Action("Handle watchdog event")
    log.debug("Handling watchdog event [%s] on %r",
              evt.event_type, evt.src_path)
    try:
        src_path = normalize_event_filename(evt.src_path)
        # Event on the folder by itself
        if os.path.isdir(src_path):
            return
        ref = self._local_client.get_path(src_path)
        file_name = os.path.basename(src_path)
        # Disable as we use the global open files instead of editor lock file
        if self.is_lock_file(
                file_name) and self._manager.get_direct_edit_auto_lock():
            if evt.event_type == 'created':
                self._lock_queue.put((ref, 'lock'))
            elif evt.event_type == 'deleted':
                self._lock_queue.put((ref, 'unlock'))
            return
        queue = False
        if evt.event_type == 'modified' or evt.event_type == 'created':
            queue = True
        if evt.event_type == 'moved':
            # Track the destination of the move instead of the source
            ref = self._local_client.get_path(evt.dest_path)
            file_name = os.path.basename(evt.dest_path)
            src_path = evt.dest_path
            queue = True
        elif self._local_client.is_temp_file(file_name):
            return
        dir_path = self._local_client.get_path(os.path.dirname(src_path))
        name = self._local_client.get_remote_id(dir_path, "nxdirecteditname")
        if name is None:
            return
        if name != file_name:
            return
        if self._manager.get_direct_edit_auto_lock(
        ) and self._local_client.get_remote_id(dir_path,
                                               "nxdirecteditlock") != "1":
            self._manager.get_autolock_service().set_autolock(src_path, self)
        if queue:
            # ADD TO UPLOAD QUEUE
            self._upload_queue.put(ref)
            return
    except ThreadInterrupt:
        raise
    except StandardError:
        log.exception('Watchdog error')
    finally:
        self._end_action()
def test_file_action(tmp):
    """A FileAction created without an explicit size exports the file's size."""
    folder = tmp()
    folder.mkdir()
    testfile = folder / "test.txt"
    size = testfile.write_bytes(b"This is Sparta!")

    action = FileAction("Mocking", testfile)
    assert action.type == "Mocking"

    # Will test .get_percent()
    exported = action.export()
    assert exported["last_transfer"] == "Mocking"
    assert exported["progress"] == 0.0
    assert isinstance(exported["uid"], str)
    assert exported["size"] == size
    assert exported["name"] == testfile.name
    assert exported["filepath"] == str(testfile)
    assert Action.get_current_action() == action

    Action.finish_action()
    assert action.finished
def _execute(self):
    """Main loop of the DirectEdit worker.

    Cleans up the edit folder, starts the watchdog, then loops: interact,
    drain the lock/upload queues, sleep 10 ms.  Errors from cleanup and
    queue handling are logged and swallowed so the loop keeps running;
    ThreadInterrupt always propagates.
    """
    try:
        self._watchdog_queue = Queue()
        self._action = Action("Clean up folder")
        try:
            self._cleanup()
        except ThreadInterrupt:
            raise
        except Exception as ex:
            # Best-effort cleanup: log and continue
            log.debug(ex)
        self._action = Action("Setup watchdog")
        self._setup_watchdog()
        self._end_action()
        # Load the target url if Drive was not launched before
        self.handle_url()
        if self._test:
            log.trace(
                "DirectEdit Entering main loop: continue:%r pause:%r running:%r",
                self._continue, self._pause, self._running)
        while True:
            self._interact()
            if self._test:
                log.trace(
                    "DirectEdit post interact: continue:%r pause:%r running:%r",
                    self._continue, self._pause, self._running)
            try:
                self._handle_queues()
            except NotFound:
                pass
            except ThreadInterrupt:
                raise
            except Exception as ex:
                log.debug(ex)
            sleep(0.01)
    except ThreadInterrupt:
        raise
    finally:
        # Always tear down the watchdog observers, even on interrupt
        self._stop_watchdog()
def _read_data(self, file_object, buffer_size):
    """Yield chunks of *file_object*, tracking progress on the current action.

    Stops when the current action asks for suspension or the file is
    exhausted.  Also gives the synchronization thread a chance to suspend
    before each read.
    """
    while True:
        action = Action.get_current_action()
        if action is not None and action.suspend:
            # The action was asked to suspend: stop feeding data
            break
        # Check if synchronization thread was suspended
        if self.check_suspended is not None:
            self.check_suspended('File upload: %s' % file_object.name)
        chunk = file_object.read(buffer_size)
        if not chunk:
            break
        if action is not None:
            action.progress += buffer_size
        yield chunk
def test_file_action_empty_file(tmp):
    """An empty file stays "empty" until a progress update marks it uploaded."""
    folder = tmp()
    folder.mkdir()
    testfile = folder / "test.txt"
    testfile.touch()

    action = FileAction("Mocking", testfile, testfile.stat().st_size)
    assert action.empty
    assert not action.uploaded

    exported = action.export()
    assert exported["action_type"] == "Mocking"
    assert exported["progress"] == 0.0
    assert isinstance(exported["uid"], str)
    assert exported["size"] == 0
    assert exported["name"] == testfile.name
    assert exported["filepath"] == str(testfile)

    # Trigger a progression update telling that the file has been uploaded
    action.progress += 0
    assert action.export()["progress"] == 100.0
    assert action.uploaded

    Action.finish_action()
def _execute(self):
    """Main loop of the DirectEdit worker (older variant).

    Cleans up the edit folder, starts the watchdog, then loops: interact,
    drain the queues, replay watchdog events, sleep 10 ms.
    """
    try:
        self._watchdog_queue = Queue()
        self._action = Action("Clean up folder")
        self._cleanup()
        self._action = Action("Setup watchdog")
        self._setup_watchdog()
        self._end_action()
        # Load the target url if Drive was not launched before
        self.handle_url()
        while (1):
            self._interact()
            try:
                self._handle_queues()
            except NotFound:
                # Remote document is gone: nothing to process this round
                pass
            while (not self._watchdog_queue.empty()):
                evt = self._watchdog_queue.get()
                self.handle_watchdog_event(evt)
            sleep(0.01)
    except ThreadInterrupt:
        raise
    finally:
        # Always tear down the watchdog observers, even on interrupt
        self._stop_watchdog()
def _init_scan_remote(self, doc_pair, remote_info):
    """Prepare a remote scan for *doc_pair*.

    Returns the remote parent path to scan, or None when the document is
    not folderish or was already scanned.  Raises ValueError when no
    remote info is provided.
    """
    if remote_info is None:
        raise ValueError("Cannot bind %r to missing remote info" % doc_pair)

    if not remote_info.folderish:
        # No children to align, early stop.
        log.trace("Skip remote scan as it is not a folderish document: %r",
                  remote_info)
        return None

    scan_path = "%s/%s" % (doc_pair.remote_parent_path, remote_info.uid)
    if self._dao.is_path_scanned(scan_path):
        log.trace("Skip already remote scanned: %s", doc_pair.local_path)
        return None

    if doc_pair.local_path is not None:
        self._action = Action("Remote scanning : " + doc_pair.local_path)
        log.debug("Remote scanning: %s", doc_pair.local_path)
    return scan_path
def handle_watchdog_event(self, evt):
    """React to a filesystem event inside a DirectEdit folder.

    Queues lock/unlock requests for editor lock files and upload requests
    for modified/created/moved edited documents.
    """
    self._action = Action("Handle watchdog event")
    log.debug("Handling watchdog event [%s] on %r",
              evt.event_type, evt.src_path)
    try:
        src_path = normalize_event_filename(evt.src_path)
        # Event on the folder by itself
        if os.path.isdir(src_path):
            return
        ref = self._local_client.get_path(src_path)
        file_name = os.path.basename(src_path)
        if self.is_lock_file(
                file_name) and self._manager.get_direct_edit_auto_lock():
            if evt.event_type == 'created':
                self._lock_queue.put((ref, 'lock'))
            elif evt.event_type == 'deleted':
                self._lock_queue.put((ref, 'unlock'))
            return
        if self._local_client.is_temp_file(file_name):
            return
        queue = False
        if evt.event_type == 'modified' or evt.event_type == 'created':
            queue = True
        if evt.event_type == 'moved':
            # Track the destination of the move instead of the source.
            # NOTE(review): unlike the sibling variant, src_path is NOT
            # updated to evt.dest_path here, so the dir_path/autolock
            # checks below still use the move source — confirm intended.
            ref = self._local_client.get_path(evt.dest_path)
            file_name = os.path.basename(evt.dest_path)
            queue = True
        dir_path = self._local_client.get_path(os.path.dirname(src_path))
        name = self._local_client.get_remote_id(dir_path, "nxdirecteditname")
        if name is None:
            return
        if name != file_name:
            return
        if self._manager.get_direct_edit_auto_lock(
        ) and self._local_client.get_remote_id(dir_path,
                                               "nxdirecteditlock") != "1":
            self._manager.get_autolock_service().set_autolock(src_path, self)
        if queue:
            # ADD TO UPLOAD QUEUE
            self._upload_queue.put(ref)
            return
    except Exception as e:
        log.warn("Watchdog exception : %r", e, exc_info=True)
    finally:
        self._end_action()
def suspend_client(self, reason):
    """Interrupt the calling worker when the engine cannot proceed.

    Raises ThreadInterrupt when the engine is paused/stopped or the
    calling Processor was told to stop, and PairInterrupt when the file
    currently processed lives under the locked folder.

    :param reason: textual reason (kept for interface compatibility).
    """
    if self.is_paused() or self._stopped:
        raise ThreadInterrupt
    # Verify thread status: interrupt a Processor that was told to stop.
    # Fix: replaced the `== False` comparison (PEP 8 anti-idiom) with
    # `not`, and flattened the nested condition.
    thread_id = current_thread().ident
    for thread in self._threads:
        if (hasattr(thread, "worker")
                and isinstance(thread.worker, Processor)
                and thread.worker._thread_id == thread_id
                and not thread.worker._continue):
            raise ThreadInterrupt
    # Get action
    current_file = None
    action = Action.get_current_action()
    if isinstance(action, FileAction):
        client = self.get_local_client()
        current_file = client.get_path(action.filepath)
    if (current_file is not None and self._folder_lock is not None
            and current_file.startswith(self._folder_lock)):
        log.debug("PairInterrupt '%s' because lock on '%s'",
                  current_file, self._folder_lock)
        raise PairInterrupt
def test_action():
    """Basic Action lifecycle: creation, registry, export, finish."""
    action = Action("Testing")
    assert action.type == "Testing"
    assert repr(action)
    assert "%" not in repr(action)

    registry = Action.get_actions()
    assert len(registry) == 1
    assert list(registry.values())[0] == action
    assert Action.get_current_action() == action

    # Will test .get_percent()
    exported = action.export()
    assert exported["last_transfer"] == "Testing"
    assert exported["progress"] == 0.0
    assert isinstance(exported["uid"], str)

    Action.finish_action()
    registry = Action.get_actions()
    assert len(registry) == 0
    assert Action.get_current_action() is None
def suspend_client(self, reason):
    """Interrupt the calling worker when the engine cannot proceed.

    Raises ThreadInterrupt when the engine is paused/stopped or the
    calling Processor was told to stop, and PairInterrupt when the file
    currently processed lives under the locked folder.

    :param reason: textual reason (kept for interface compatibility).
    """
    if self.is_paused() or self._stopped:
        raise ThreadInterrupt
    # Verify thread status: interrupt a Processor that was told to stop.
    # Fix: replaced the `== False` comparison (PEP 8 anti-idiom) with
    # `not`, and flattened the nested condition.
    thread_id = current_thread().ident
    for thread in self._threads:
        if (hasattr(thread, "worker")
                and isinstance(thread.worker, Processor)
                and thread.worker._thread_id == thread_id
                and not thread.worker._continue):
            raise ThreadInterrupt
    # Get action
    current_file = None
    action = Action.get_current_action()
    if isinstance(action, FileAction):
        client = self.get_local_client()
        current_file = client.get_path(action.filepath)
    if (current_file is not None and self._folder_lock is not None
            and current_file.startswith(self._folder_lock)):
        log.debug("PairInterrupt '%s' because lock on '%s'",
                  current_file, self._folder_lock)
        raise PairInterrupt
def get_tooltip(self):
    """Build the systray tooltip from the first registered action.

    Appends the action type, file name (for FileActions) and percentage
    (when available) to the default tooltip.
    """
    actions = Action.get_actions()
    if actions is None or len(actions) == 0:
        return self.get_default_tooltip()
    # Display only the first action for now
    # TODO Get all actions ? or just file action
    action = actions.itervalues().next()
    if action is None:
        return self.get_default_tooltip()
    if isinstance(action, FileAction):
        if action.get_percent() is not None:
            return ("%s - %s - %s - %d%%" %
                    (self.get_default_tooltip(), action.type,
                     action.filename, action.get_percent()))
        else:
            return ("%s - %s - %s" %
                    (self.get_default_tooltip(), action.type,
                     action.filename))
    elif action.get_percent() is not None:
        return ("%s - %s - %d%%" %
                (self.get_default_tooltip(), action.type,
                 action.get_percent()))
    else:
        return ("%s - %s" % (self.get_default_tooltip(), action.type))
def end_action():
    """Finish the current thread's action (module-level helper)."""
    Action.finish_action()
def test_action_with_values():
    """An Action created with a progress value shows a percentage in repr()."""
    action = Action(action_type="Trying", progress=42.222)
    assert "%" in repr(action)
    assert action.export()["progress"] == 42.222
    Action.finish_action()
def execute(self, command, url=None, op_input=None, timeout=-1,
            check_params=False, void_op=False, extra_headers=None,
            enrichers=None, file_out=None, **params):
    """Execute an Automation operation.

    :param command: Automation operation id, appended to the automation
        URL when *url* is not given.
    :param op_input: operation input placed in the JSON payload.
    :param timeout: request timeout; -1 means use the client default.
    :param file_out: when given, stream the response body to this path
        and return ``(None, file_out)``; otherwise the parsed response.
    :raises InvalidBatchException: when the server reports a missing batch.
    """
    if check_params:
        self._check_params(command, params)
    if url is None:
        url = self.automation_url + command
    headers = {
        "Content-Type": "application/json+nxrequest",
        "Accept": "application/json+nxentity, */*",
        "X-NXproperties": "*",
        # Keep compatibility with old header name
        "X-NXDocumentProperties": "*",
    }
    if void_op:
        headers.update({"X-NXVoidOperation": "true"})
    if self.repository != 'default':
        headers.update({"X-NXRepository": self.repository})
    if extra_headers is not None:
        headers.update(extra_headers)
    if enrichers is not None:
        headers.update({
            'X-NXenrichers.document': ', '.join(enrichers),
        })
    headers.update(self._get_common_headers())
    json_struct = {'params': {}}
    for k, v in params.items():
        if v is None:
            continue
        if k == 'properties':
            # Properties are serialized as "name=value" lines
            s = ""
            for propname, propvalue in v.items():
                s += "%s=%s\n" % (propname, propvalue)
            json_struct['params'][k] = s.strip()
        else:
            json_struct['params'][k] = v
    if op_input:
        json_struct['input'] = op_input
    data = json.dumps(json_struct)
    cookies = self._get_cookies()
    log.trace("Calling %s with headers %r, cookies %r"
              " and JSON payload %r", url, headers, cookies, data)
    req = urllib2.Request(url, data, headers)
    timeout = self.timeout if timeout == -1 else timeout
    try:
        resp = self.opener.open(req, timeout=timeout)
    except Exception as e:
        # Translate "Unable to find batch" server errors into a typed
        # exception so upload code can restart the batch
        log_details = self._log_details(e)
        if isinstance(log_details, tuple):
            _, _, _, error = log_details
            if error and error.startswith("Unable to find batch"):
                raise InvalidBatchException()
        raise e
    current_action = Action.get_current_action()
    if current_action and current_action.progress is None:
        current_action.progress = 0
    if file_out is not None:
        locker = self.unlock_path(file_out)
        try:
            with open(file_out, "wb") as f:
                while True:
                    # Check if synchronization thread was suspended
                    if self.check_suspended is not None:
                        self.check_suspended('File download: %s' % file_out)
                    buffer_ = resp.read(FILE_BUFFER_SIZE)
                    if buffer_ == '':
                        break
                    if current_action:
                        current_action.progress += FILE_BUFFER_SIZE
                    f.write(buffer_)
            return None, file_out
        finally:
            self.lock_path(file_out, locker)
    else:
        return self._read_response(resp, url)
def do_get(self, url, file_out=None, digest=None, digest_algorithm=None):
    """Download *url*, optionally streaming to *file_out* and checking a digest.

    :param digest: expected content digest; algorithm guessed when not given.
    :return: ``(None, file_out)`` when streamed to disk, else ``(content, None)``.
    :raises CorruptedFile: when the computed digest does not match.
    :raises Unauthorized: on HTTP 401/403.
    """
    log.trace('Downloading file from %r to %r with digest=%s, digest_algorithm=%s',
              url, file_out, digest, digest_algorithm)
    h = None
    if digest is not None:
        if digest_algorithm is None:
            digest_algorithm = guess_digest_algorithm(digest)
            log.trace('Guessed digest algorithm from digest: %s',
                      digest_algorithm)
        digester = getattr(hashlib, digest_algorithm, None)
        if digester is None:
            raise ValueError('Unknow digest method: ' + digest_algorithm)
        h = digester()
    headers = self._get_common_headers()
    base_error_message = (
        "Failed to connect to Nuxeo server %r with user %r"
    ) % (self.server_url, self.user_id)
    try:
        log.trace("Calling '%s' with headers: %r", url, headers)
        req = urllib2.Request(url, headers=headers)
        response = self.opener.open(req, timeout=self.blob_timeout)
        current_action = Action.get_current_action()
        # Get the size file
        if (current_action and response is not None
                and response.info() is not None):
            current_action.size = int(response.info().getheader(
                'Content-Length', 0))
        if file_out is not None:
            locker = self.unlock_path(file_out)
            try:
                with open(file_out, "wb") as f:
                    while True:
                        # Check if synchronization thread was suspended
                        if self.check_suspended is not None:
                            self.check_suspended(
                                'File download: %s' % file_out)
                        buffer_ = response.read(FILE_BUFFER_SIZE)
                        if buffer_ == '':
                            break
                        if current_action:
                            current_action.progress += FILE_BUFFER_SIZE
                        f.write(buffer_)
                        if h is not None:
                            h.update(buffer_)
                if digest is not None:
                    # Remove the corrupted download before raising
                    actual_digest = h.hexdigest()
                    if digest != actual_digest:
                        if os.path.exists(file_out):
                            os.remove(file_out)
                        raise CorruptedFile(
                            "Corrupted file %r: expected digest = %s, actual digest = %s"
                            % (file_out, digest, actual_digest))
                return None, file_out
            finally:
                self.lock_path(file_out, locker)
        else:
            result = response.read()
            if h is not None:
                h.update(result)
            if digest is not None:
                actual_digest = h.hexdigest()
                if digest != actual_digest:
                    raise CorruptedFile(
                        "Corrupted file: expected digest = %s, actual digest = %s"
                        % (digest, actual_digest))
            return result, None
    except urllib2.HTTPError as e:
        if e.code == 401 or e.code == 403:
            raise Unauthorized(self.server_url, self.user_id, e.code)
        else:
            e.msg = base_error_message + ": HTTP error %d" % e.code
            raise e
    except Exception as e:
        if hasattr(e, 'msg'):
            e.msg = base_error_message + ": " + e.msg
        raise
def do_get(self, url, file_out=None, digest=None, digest_algorithm=None):
    """Download *url*, optionally streaming to *file_out* and checking a digest.

    :param digest: expected content digest; algorithm guessed when not given.
    :return: ``(None, file_out)`` when streamed to disk, else ``(content, None)``.
    :raises CorruptedFile: when the computed digest does not match.
    :raises Unauthorized: on HTTP 401/403.
    """
    log.trace(
        'Downloading file from %r to %r with digest=%s, digest_algorithm=%s',
        url, file_out, digest, digest_algorithm)
    h = None
    if digest is not None:
        if digest_algorithm is None:
            digest_algorithm = guess_digest_algorithm(digest)
            log.trace('Guessed digest algorithm from digest: %s',
                      digest_algorithm)
        digester = getattr(hashlib, digest_algorithm, None)
        if digester is None:
            raise ValueError('Unknow digest method: ' + digest_algorithm)
        h = digester()
    headers = self._get_common_headers()
    base_error_message = (
        "Failed to connect to Nuxeo server %r with user %r") % (
        self.server_url, self.user_id)
    try:
        log.trace("Calling '%s' with headers: %r", url, headers)
        req = urllib2.Request(url, headers=headers)
        response = self.opener.open(req, timeout=self.blob_timeout)
        current_action = Action.get_current_action()
        # Get the size file
        if (current_action and response is not None
                and response.info() is not None):
            current_action.size = int(response.info().getheader(
                'Content-Length', 0))
        if file_out is not None:
            locker = self.unlock_path(file_out)
            try:
                with open(file_out, "wb") as f:
                    while True:
                        # Check if synchronization thread was suspended
                        if self.check_suspended is not None:
                            self.check_suspended(
                                'File download: %s' % file_out)
                        buffer_ = response.read(self.get_download_buffer())
                        if buffer_ == '':
                            break
                        if current_action:
                            current_action.progress += (
                                self.get_download_buffer())
                        f.write(buffer_)
                        if h is not None:
                            h.update(buffer_)
                if digest is not None:
                    # Remove the corrupted download before raising
                    actual_digest = h.hexdigest()
                    if digest != actual_digest:
                        if os.path.exists(file_out):
                            os.remove(file_out)
                        raise CorruptedFile(
                            "Corrupted file %r: expected digest = %s, actual digest = %s"
                            % (file_out, digest, actual_digest))
                return None, file_out
            finally:
                self.lock_path(file_out, locker)
        else:
            result = response.read()
            if h is not None:
                h.update(result)
            if digest is not None:
                actual_digest = h.hexdigest()
                if digest != actual_digest:
                    raise CorruptedFile(
                        "Corrupted file: expected digest = %s, actual digest = %s"
                        % (digest, actual_digest))
            return result, None
    except urllib2.HTTPError as e:
        if e.code == 401 or e.code == 403:
            raise Unauthorized(self.server_url, self.user_id, e.code)
        else:
            e.msg = base_error_message + ": HTTP error %d" % e.code
            raise e
    except Exception as e:
        if hasattr(e, 'msg'):
            e.msg = base_error_message + ": " + e.msg
        raise
def _end_action(self):
    """Finish the current thread's action and drop the local reference."""
    Action.finish_action()
    self._action = None
def end_action(self):
    """Finish the current thread's action."""
    Action.finish_action()
def execute(self, command, url=None, op_input=None, timeout=-1,
            check_params=True, void_op=False, extra_headers=None,
            file_out=None, **params):
    """Execute an Automation operation.

    :param command: Automation operation id, appended to the automation
        URL when *url* is not given.
    :param op_input: operation input placed in the JSON payload.
    :param timeout: request timeout; -1 means use the client default.
    :param file_out: when given, stream the response body to this path
        and return ``(None, file_out)``; otherwise the parsed response.
    """
    if check_params:
        self._check_params(command, params)
    if url is None:
        url = self.automation_url + command
    headers = {
        "Content-Type": "application/json+nxrequest",
        "Accept": "application/json+nxentity, */*",
        "X-NXproperties": "*",
        # Keep compatibility with old header name
        "X-NXDocumentProperties": "*",
    }
    if void_op:
        headers.update({"X-NXVoidOperation": "true"})
    if self.repository != DEFAULT_REPOSITORY_NAME:
        headers.update({"X-NXRepository": self.repository})
    if extra_headers is not None:
        headers.update(extra_headers)
    headers.update(self._get_common_headers())
    json_struct = {'params': {}}
    for k, v in params.items():
        if v is None:
            continue
        if k == 'properties':
            # Properties are serialized as "name=value" lines
            s = ""
            for propname, propvalue in v.items():
                s += "%s=%s\n" % (propname, propvalue)
            json_struct['params'][k] = s.strip()
        else:
            json_struct['params'][k] = v
    if op_input:
        json_struct['input'] = op_input
    data = json.dumps(json_struct)
    cookies = self._get_cookies()
    log.trace(
        "Calling %s with headers %r, cookies %r"
        " and JSON payload %r", url, headers, cookies, data)
    req = urllib2.Request(url, data, headers)
    timeout = self.timeout if timeout == -1 else timeout
    try:
        resp = self.opener.open(req, timeout=timeout)
    except Exception as e:
        self._log_details(e)
        raise
    current_action = Action.get_current_action()
    if current_action and current_action.progress is None:
        current_action.progress = 0
    if file_out is not None:
        locker = self.unlock_path(file_out)
        try:
            with open(file_out, "wb") as f:
                while True:
                    # Check if synchronization thread was suspended
                    if self.check_suspended is not None:
                        self.check_suspended('File download: %s' % file_out)
                    buffer_ = resp.read(self.get_download_buffer())
                    if buffer_ == '':
                        break
                    if current_action:
                        current_action.progress += (
                            self.get_download_buffer())
                    f.write(buffer_)
            return None, file_out
        finally:
            self.lock_path(file_out, locker)
    else:
        return self._read_response(resp, url)
def do_get(self, url, file_out=None, digest=None, digest_algorithm=None):
    """Download *url* (test variant with configurable error injection).

    ``self._remote_error`` / ``self._local_error`` are raised after the
    transfer to simulate network/server and local failures in tests.
    :raises CorruptedFile: when the computed digest does not match.
    :raises Unauthorized: on HTTP 401/403.
    """
    h = None
    if digest is not None:
        if digest_algorithm is None:
            digest_algorithm = guess_digest_algorithm(digest)
        digester = getattr(hashlib, digest_algorithm, None)
        if digester is None:
            raise ValueError('Unknow digest method: ' + digest_algorithm)
        h = digester()
    headers = self._get_common_headers()
    base_error_message = (
        "Failed to connect to Nuxeo server %r with user %r"
    ) % (self.server_url, self.user_id)
    try:
        log.trace("Calling '%s' with headers: %r", url, headers)
        req = urllib2.Request(url, headers=headers)
        response = self.opener.open(req, timeout=self.blob_timeout)
        current_action = Action.get_current_action()
        # Get the size file
        if (current_action and response is not None
                and response.info() is not None):
            current_action.size = int(response.info().getheader(
                'Content-Length', 0))
        if file_out is not None:
            locker = self.unlock_path(file_out)
            try:
                with open(file_out, "wb") as f:
                    while True:
                        # Check if synchronization thread was suspended
                        if self.check_suspended is not None:
                            self.check_suspended(
                                'File download: %s' % file_out)
                        buffer_ = response.read(self.get_download_buffer())
                        if buffer_ == '':
                            break
                        if current_action:
                            current_action.progress += (
                                self.get_download_buffer())
                        f.write(buffer_)
                        if h is not None:
                            h.update(buffer_)
                if self._remote_error is not None:
                    # Simulate a configurable remote (e.g. network or
                    # server) error for the tests
                    raise self._remote_error
                if self._local_error is not None:
                    # Simulate a configurable local error (e.g. "No
                    # space left on device") for the tests
                    raise self._local_error
                if digest is not None and digest != h.hexdigest():
                    # Remove the corrupted download before raising
                    if os.path.exists(file_out):
                        os.remove(file_out)
                    raise CorruptedFile("Corrupted file")
                return None, file_out
            finally:
                self.lock_path(file_out, locker)
        else:
            result = response.read()
            if h is not None:
                h.update(result)
            if digest is not None and digest != h.hexdigest():
                raise CorruptedFile("Corrupted file")
            return result, None
    except urllib2.HTTPError as e:
        if e.code == 401 or e.code == 403:
            raise Unauthorized(self.server_url, self.user_id, e.code)
        else:
            e.msg = base_error_message + ": HTTP error %d" % e.code
            raise e
    except Exception as e:
        if hasattr(e, 'msg'):
            e.msg = base_error_message + ": " + e.msg
        raise
def execute(self, command, op_input=None, timeout=-1, check_params=True,
            void_op=False, extra_headers=None, file_out=None, **params):
    """Execute an Automation operation (test variant with error injection).

    ``self._remote_error`` / ``self._local_error`` are raised to simulate
    network/server and local failures in tests.
    """
    if self._remote_error is not None:
        # Simulate a configurable (e.g. network or server) error for the
        # tests
        raise self._remote_error
    if check_params:
        self._check_params(command, params)
    url = self.automation_url + command
    headers = {
        "Content-Type": "application/json+nxrequest",
        "Accept": "application/json+nxentity, */*",
        "X-NXproperties": "*",
        # Keep compatibility with old header name
        "X-NXDocumentProperties": "*",
    }
    if void_op:
        headers.update({"X-NXVoidOperation": "true"})
    if self.repository != DEFAULT_REPOSITORY_NAME:
        headers.update({"X-NXRepository": self.repository})
    if extra_headers is not None:
        headers.update(extra_headers)
    headers.update(self._get_common_headers())
    json_struct = {'params': {}}
    for k, v in params.items():
        if v is None:
            continue
        if k == 'properties':
            # Properties are serialized as "name=value" lines
            s = ""
            for propname, propvalue in v.items():
                s += "%s=%s\n" % (propname, propvalue)
            json_struct['params'][k] = s.strip()
        else:
            json_struct['params'][k] = v
    if op_input:
        json_struct['input'] = op_input
    log.trace("Dumping JSON structure: %s", json_struct)
    data = json.dumps(json_struct)
    cookies = self._get_cookies()
    log.trace("Calling %s with headers %r, cookies %r"
              " and JSON payload %r", url, headers, cookies, data)
    req = urllib2.Request(url, data, headers)
    timeout = self.timeout if timeout == -1 else timeout
    try:
        resp = self.opener.open(req, timeout=timeout)
    except Exception as e:
        self._log_details(e)
        raise
    current_action = Action.get_current_action()
    if current_action and current_action.progress is None:
        current_action.progress = 0
    if file_out is not None:
        locker = self.unlock_path(file_out)
        try:
            with open(file_out, "wb") as f:
                while True:
                    # Check if synchronization thread was suspended
                    if self.check_suspended is not None:
                        self.check_suspended('File download: %s' % file_out)
                    buffer_ = resp.read(self.get_download_buffer())
                    if buffer_ == '':
                        break
                    if current_action:
                        current_action.progress += (
                            self.get_download_buffer())
                    f.write(buffer_)
            if self._remote_error is not None:
                # Simulate a configurable remote (e.g. network or
                # server) error for the tests
                raise self._remote_error
            if self._local_error is not None:
                # Simulate a configurable local error (e.g. "No
                # space left on device") for the tests
                raise self._local_error
            return None, file_out
        finally:
            self.lock_path(file_out, locker)
    else:
        return self._read_response(resp, url)