def Notify(message, subtitle=None, title="MyData"):
    """
    Post a desktop notification.

    On Windows, the wx task bar icon's balloon is used.  Elsewhere
    (Mac OS X), an external "MyData Notifications" helper app is run;
    its location and the bundle to activate depend on whether we are
    running frozen (packaged) or from source.

    :param message: Body text of the notification.
    :param subtitle: Optional subtitle (helper app only).
    :param title: Notification title (defaults to "MyData").
    """
    if sys.platform.startswith("win"):
        wx.GetApp().taskBarIcon.ShowBalloon(title, message)
        return
    executable = "MyData Notifications"
    args = ["-message", message, "-title", title, "-sound", "Purr"]
    if subtitle:
        args = args + ["-subtitle", subtitle]
    # The original assigned "path" unconditionally and then tested
    # hasattr(sys, "frozen") twice; the dead assignment and the
    # duplicated condition are merged into one branch here.
    if hasattr(sys, "frozen"):
        args = args + ["-activate", "org.mytardis.MyData"]
        path = "../MacOS"
    else:
        args = args + ["-activate", "org.python.python"]
        path = "resources/macosx/MyData Notifications.app/Contents/MacOS"
    proc = subprocess.Popen([os.path.join(path, executable)] + args,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    stdout, _ = proc.communicate()
    if proc.returncode != 0:
        logger.error(stdout)
def GetUserByEmail(settingsModel, email):
    """
    Look up a MyTardis user record by (case-insensitive) email address.

    :param settingsModel: Supplies the MyTardis URL and API credentials.
    :param email: Email address to search for.
    :return: A UserModel built from the first matching user record.
    :raises DoesNotExist: if no user with that email is found.
    :raises Exception: for connection failures or non-200 responses.
    """
    myTardisUrl = settingsModel.GetMyTardisUrl()
    myTardisUsername = settingsModel.GetUsername()
    myTardisApiKey = settingsModel.GetApiKey()
    url = myTardisUrl + "/api/v1/user/?format=json&email__iexact=" + \
        urllib2.quote(email)
    headers = {
        "Authorization": "ApiKey %s:%s" % (myTardisUsername,
                                           myTardisApiKey)}
    try:
        response = requests.get(url=url, headers=headers)
    except requests.exceptions.RequestException:
        # Narrowed from a bare "except:", which would also have
        # swallowed KeyboardInterrupt/SystemExit.
        raise Exception(traceback.format_exc())
    if response.status_code != 200:
        logger.debug(url)
        message = response.text
        raise Exception(message)
    try:
        userRecordsJson = response.json()
    except ValueError:
        # Response body wasn't valid JSON.
        logger.error(traceback.format_exc())
        raise
    numUserRecordsFound = userRecordsJson['meta']['total_count']
    if numUserRecordsFound == 0:
        raise DoesNotExist(
            message="User with email \"%s\" was not found in MyTardis"
            % email,
            url=url, response=response)
    else:
        logger.debug("Found user record for email '" + email + "'.")
        return UserModel(settingsModel=settingsModel,
                         userRecordJson=userRecordsJson['objects'][0])
def VerificationWorker(self):
    # pylint: disable=fixme
    # FIXME: Should this be in verifications (not folders) controller?
    """
    One worker per thread. By default, up to 5 threads can run
    simultaneously for verifying whether local data files exist on
    the MyTardis server.
    """
    # Loop pulling verification tasks from the shared queue until a
    # None sentinel arrives or the controller starts shutting down.
    while True:
        if self.IsShuttingDown():
            return
        task = self.verificationsQueue.get()
        if task is None:
            # None is the sentinel used to stop this worker thread.
            break
        # pylint: disable=bare-except
        try:
            task.Run()
        except ValueError, err:
            if str(err) == "I/O operation on closed file":
                # Expected while uploads are being canceled; not an error.
                logger.info(
                    "Ignoring closed file exception - it is normal "
                    "to encounter these exceptions while canceling "
                    "uploads.")
                self.verificationsQueue.task_done()
                return
            else:
                logger.error(traceback.format_exc())
                self.verificationsQueue.task_done()
                return
        except:
        # NOTE(review): this block appears truncated in this view -- the
        # body of the final bare "except:" handler is not visible here.
def Verify(settingsModel, datafileId):
    """
    Ask the MyTardis API to (re)verify a datafile.

    Returning True doesn't mean that the file has been verified.
    It just means that the MyTardis API has accepted our verification
    request without raising an error.  The verification is asynchronous
    so it might not happen immediately if there is congestion in the
    Celery queue.

    :return: True if the request was accepted (2xx), False otherwise.
    """
    endpoint = "%s/api/v1/dataset_file/%s/verify/" \
        % (settingsModel.GetMyTardisUrl(), datafileId)
    authHeaders = {
        "Authorization": "ApiKey %s:%s" % (settingsModel.GetUsername(),
                                           settingsModel.GetApiKey()),
        "Content-Type": "application/json",
        "Accept": "application/json"}
    response = requests.get(url=endpoint, headers=authHeaders)
    if not 200 <= response.status_code < 300:
        logger.error("Failed to verify datafile id \"%s\" " % datafileId)
        logger.error(response.text)
        return False
    return True
def CheckConnectivityWorker():
    """ Checks network connectivity in separate thread. """
    # Cursor changes must happen on the main (GUI) thread.
    wx.CallAfter(BeginBusyCursorIfRequired)
    # pylint: disable=broad-except
    try:
        # NOTE(review): activeNetworkInterfaces is not used in the
        # visible span; the function may be truncated here.
        activeNetworkInterfaces = \
            UploaderModel.GetActiveNetworkInterfaces()
    except Exception, err:
        logger.error(traceback.format_exc())
        # WindowsError with "The handle is invalid" is symptomatic of
        # launching MyData.exe from a Command Prompt window
        # (see https://bugs.python.org/issue3905).
        if type(err).__name__ == "WindowsError" and \
                "The handle is invalid" in str(err):
            message = "An error occurred, suggesting " \
                      "that you have launched MyData.exe from a " \
                      "Command Prompt window. Please launch it " \
                      "from a shortcut or from a Windows Explorer " \
                      "window instead.\n" \
                      "\n" \
                      "See: https://bugs.python.org/issue3905"

            def ShowErrorDialog(message):
                """ Show error dialog in main thread. """
                dlg = wx.MessageDialog(None, message, "MyData",
                                       wx.OK | wx.ICON_ERROR)
                dlg.ShowModal()
            wx.CallAfter(ShowErrorDialog, message)
def GetUserByUsername(settingsModel, username):
    """
    Look up a MyTardis user record by username.

    :param settingsModel: Supplies the MyTardis URL and API credentials.
    :param username: Username to search for.
    :return: A UserModel built from the first matching user record.
    :raises DoesNotExist: if no user with that username is found.
    :raises Exception: for connection failures or non-200 responses.
    """
    myTardisUrl = settingsModel.GetMyTardisUrl()
    myTardisUsername = settingsModel.GetUsername()
    myTardisApiKey = settingsModel.GetApiKey()
    # NOTE(review): the original source here was corrupted by a
    # redaction artifact ('"&username="******"Authorization"...');
    # the URL/headers construction below is reconstructed from the
    # parallel GetUserByEmail implementation -- confirm against VCS.
    url = myTardisUrl + "/api/v1/user/?format=json&username=" + \
        urllib2.quote(username)
    headers = {
        "Authorization": "ApiKey %s:%s" % (myTardisUsername,
                                           myTardisApiKey)}
    try:
        response = requests.get(url=url, headers=headers)
    except requests.exceptions.RequestException:
        # Narrowed from a bare "except:".
        raise Exception(traceback.format_exc())
    if response.status_code != 200:
        message = response.text
        raise Exception(message)
    try:
        userRecordsJson = response.json()
    except ValueError:
        # Response body wasn't valid JSON.
        logger.error(traceback.format_exc())
        raise
    numUserRecordsFound = userRecordsJson['meta']['total_count']
    if numUserRecordsFound == 0:
        raise DoesNotExist(
            message="User \"%s\" was not found in MyTardis" % username,
            url=url, response=response)
    else:
        logger.debug("Found user record for username '" +
                     username + "'.")
        return UserModel(settingsModel=settingsModel, username=username,
                         userRecordJson=userRecordsJson['objects'][0])
def endBusyCursorIfRequired():
    """
    Stop the busy cursor, tolerating the case where it was never
    started (wx asserts "no matching wxBeginBusyCursor()" for that).
    """
    try:
        wx.EndBusyCursor()
    except wx._core.PyAssertionError as err:
        # Only the "not busy" assertion is harmless; log and re-raise
        # anything else.
        if "no matching wxBeginBusyCursor()" not in str(err):
            logger.error(str(err))
            raise
def Cancel(self):
    """
    Cancel this upload: close its buffered reader and terminate any
    running SCP upload process, force-killing it if plain termination
    doesn't take effect.
    """
    try:
        self.canceled = True
        # logger.debug("Canceling upload \"" +
        #              self.GetRelativePathToUpload() + "\".")
        if self.bufferedReader is not None:
            self.bufferedReader.close()
            logger.debug("Closed buffered reader for \"" +
                         self.GetRelativePathToUpload() + "\".")
        scpUploadProcess = self.GetScpUploadProcess()
        if scpUploadProcess and PidIsRunning(scpUploadProcess.pid):
            # The original mixed the local "scpUploadProcess" with
            # "self.scpUploadProcess"; use the value returned by the
            # getter consistently.
            scpUploadProcess.terminate()
            # Check if the process has really
            # terminated and force kill if not.
            try:
                pid = scpUploadProcess.pid
                # See if this throws psutil.NoSuchProcess:
                _ = psutil.Process(int(pid))
                if sys.platform.startswith("win"):
                    # pylint: disable=no-member
                    os.kill(pid, signal.CTRL_C_EVENT)
                else:
                    os.kill(pid, signal.SIGKILL)  # pylint: disable=no-member
                logger.debug("Force killed SCP upload process for %s"
                             % self.GetRelativePathToUpload())
            except psutil.NoSuchProcess:
                # terminate() already worked; nothing left to kill.
                logger.debug("SCP upload process for %s was terminated "
                             "gracefully."
                             % self.GetRelativePathToUpload())
    except:  # pylint: disable=bare-except
        logger.error(traceback.format_exc())
def ValidateSettings():
    """ Validate settings. """
    # NOTE(review): this function takes no arguments yet references
    # "self" below -- presumably it is (or was) a method or nested
    # inside one; confirm against the full file.
    logger.debug("Starting run() method for thread %s"
                 % threading.current_thread().name)
    # pylint: disable=bare-except
    try:
        wx.CallAfter(wx.BeginBusyCursor)
        # pylint: disable=broad-except
        try:
            activeNetworkInterfaces = \
                UploaderModel.GetActiveNetworkInterfaces()
        except Exception, err:
            logger.error(traceback.format_exc())
            # Launching MyData.exe from a Command Prompt window can
            # produce this WindowsError (https://bugs.python.org/issue3905).
            if type(err).__name__ == "WindowsError" and \
                    "The handle is invalid" in str(err):
                message = (
                    "An error occurred, suggesting "
                    "that you have launched MyData.exe from a "
                    "Command Prompt window. Please launch it "
                    "from a shortcut or from a Windows Explorer "
                    "window instead.\n"
                    "\n"
                    "See: https://bugs.python.org/issue3905"
                )

                def ShowErrorDialog(message):
                    """ Needs to run in the main thread. """
                    dlg = wx.MessageDialog(None, message, "MyData",
                                           wx.OK | wx.ICON_ERROR)
                    dlg.ShowModal()
                wx.CallAfter(ShowErrorDialog, message)
        # NOTE(review): if the exception above fired,
        # activeNetworkInterfaces is unbound here.
        if len(activeNetworkInterfaces) == 0:
            message = (
                "No active network interfaces."
                "\n\n"
                "Please ensure that you have an active "
                "network interface (e.g. Ethernet or WiFi)."
            )

            def ShowDialog():
                """ Needs to run in the main thread. """
                dlg = wx.MessageDialog(None, message, "MyData",
                                       wx.OK | wx.ICON_ERROR)
                dlg.ShowModal()
            wx.CallAfter(EndBusyCursorIfRequired)
            self.frame.SetStatusMessage("")
            self.frame.SetConnected(self.settingsModel.GetMyTardisUrl(),
                                    False)
            wx.CallAfter(ShowDialog)
            return
        # Run the (potentially slow) settings validation, then notify
        # the main frame that validation has completed.
        self.settingsValidation = self.settingsModel.Validate()
        event = mde.MyDataEvent(
            mde.EVT_SETTINGS_VALIDATION_FOR_REFRESH_COMPLETE,
            needToValidateSettings=False
        )
        wx.PostEvent(self.frame, event)
        wx.CallAfter(EndBusyCursorIfRequired)
    # NOTE(review): the handler for the outer "try" is not visible in
    # this view -- the block appears truncated here.
def CreateOnceTask(self, event):
    """
    Create and schedule task(s) according to the settings configured
    in the Schedule tab of the Settings dialog.
    """
    scheduleType = "Once"
    logger.debug("Schedule type is Once.")

    def RunTaskOnce(event, jobId):
        """
        Run a task once, on the date and time configured in the
        Schedule tab of the Settings dialog.
        """
        app = wx.GetApp()
        wx.CallAfter(app.DisableTestAndUploadToolbarButtons)
        # Busy-wait (10ms polls) until the app reports it is processing.
        while not app.Processing():
            time.sleep(0.01)
        needToValidateSettings = False
        wx.CallAfter(app.OnRefresh, event, needToValidateSettings, jobId)
        # Sleep this thread until the job is really
        # finished, so we can determine the job's
        # finish time.
        while app.Processing():
            time.sleep(0.01)

    jobDesc = "Scan folders and upload datafiles"
    startTime = \
        datetime.combine(self.settingsModel.GetScheduledDate(),
                         self.settingsModel.GetScheduledTime())
    if startTime < datetime.now():
        delta = datetime.now() - startTime
        # A schedule less than 10 seconds in the past is treated as
        # "run now"; anything older is rejected.
        if delta.total_seconds() < 10:
            startTime = datetime.now()
        else:
            message = "Scheduled time is in the past."
            logger.error(message)
            # Don't pop up a dialog when settings were merely re-read
            # from disk (e.g. at startup).
            if self.settingsModel.GetLastSettingsUpdateTrigger() != \
                    LastSettingsUpdateTrigger.READ_FROM_DISK:
                wx.MessageBox(message, "MyData", wx.ICON_ERROR)
            return
    timeString = startTime.strftime("%I:%M %p")
    dateString = \
        "{d:%A} {d.day}/{d.month}/{d.year}".format(d=startTime)
    wx.GetApp().GetMainFrame().SetStatusMessage(
        "The \"%s\" task is scheduled "
        "to run at %s on %s" % (jobDesc, timeString, dateString))
    taskDataViewId = self.tasksModel.GetMaxDataViewId() + 1
    jobArgs = [event, taskDataViewId]
    task = TaskModel(taskDataViewId, RunTaskOnce, jobArgs, jobDesc,
                     startTime, scheduleType=scheduleType)
    try:
        self.tasksModel.AddRow(task)
    except ValueError, err:
        # AddRow rejects invalid/duplicate tasks with ValueError.
        wx.MessageBox(str(err), "MyData", wx.ICON_ERROR)
        return
def GetDataFileCreatedTime(self, dataFileIndex):
    """
    Return the "created" time of the data file at *dataFileIndex* as an
    ISO 8601 string, or None if the timestamp cannot be read.

    NOTE: os.stat(...).st_ctime is creation time on Windows but inode
    *change* time on Unix -- TODO confirm which is intended here.
    """
    absoluteFilePath = self.GetDataFilePath(dataFileIndex)
    try:
        createdTimeIsoString = datetime.fromtimestamp(
            os.stat(absoluteFilePath).st_ctime).isoformat()
        return createdTimeIsoString
    except (OSError, ValueError, OverflowError):
        # Narrowed from a bare "except:": os.stat raises OSError for
        # missing/unreadable paths; fromtimestamp can raise
        # ValueError/OverflowError for out-of-range timestamps.
        logger.error(traceback.format_exc())
        return None
def GetDataFileModifiedTime(self, dataFileIndex):
    """
    Return the last-modified time of the data file at *dataFileIndex*
    as an ISO 8601 string, or None if the timestamp cannot be read.
    """
    absoluteFilePath = self.GetDataFilePath(dataFileIndex)
    try:
        modifiedTimeIsoString = datetime.fromtimestamp(
            os.stat(absoluteFilePath).st_mtime).isoformat()
        return modifiedTimeIsoString
    except (OSError, ValueError, OverflowError):
        # Narrowed from a bare "except:" (see GetDataFileCreatedTime):
        # os.stat raises OSError; fromtimestamp can raise
        # ValueError/OverflowError for out-of-range timestamps.
        logger.error(traceback.format_exc())
        return None
def StartUploadsForFolder(self, folderModel):
    """
    Kick off verifications (and subsequently uploads) for one folder:
    ensure its experiment and dataset exist on MyTardis, then queue
    datafile verifications.
    """
    # pylint: disable=too-many-return-statements
    fc = self  # pylint: disable=invalid-name
    try:
        fc.finishedCountingVerifications[folderModel] = \
            threading.Event()
        if self.IsShuttingDown():
            return
        # Bump the shared verification counter under its lock.
        fc.numVerificationsToBePerformedLock.acquire()
        fc.numVerificationsToBePerformed += folderModel.GetNumFiles()
        fc.numVerificationsToBePerformedLock.release()
        logger.debug("StartUploadsForFolder: Starting verifications "
                     "and uploads for folder: " +
                     folderModel.GetFolder())
        if self.IsShuttingDown():
            return
        try:
            # Save MyTardis URL, so if it's changing in the
            # Settings Dialog while this thread is
            # attempting to connect, we ensure that any
            # exception thrown by this thread refers to the
            # old version of the URL.
            myTardisUrl = self.settingsModel.GetMyTardisUrl()
            # pylint: disable=broad-except
            try:
                experimentModel = ExperimentModel\
                    .GetOrCreateExperimentForFolder(folderModel,
                                                    fc.testRun)
            except Exception, err:
                # Surface the failure to the user via the GUI thread.
                logger.error(traceback.format_exc())
                wx.PostEvent(
                    self.notifyWindow,
                    self.showMessageDialogEvent(title="MyData",
                                                message=str(err),
                                                icon=wx.ICON_ERROR))
                return
            folderModel.SetExperiment(experimentModel)
            connected = ConnectionStatus.CONNECTED
            wx.PostEvent(
                self.notifyWindow,
                self.connectionStatusEvent(myTardisUrl=myTardisUrl,
                                           connectionStatus=connected))
            # pylint: disable=broad-except
            try:
                datasetModel = DatasetModel\
                    .CreateDatasetIfNecessary(folderModel, fc.testRun)
            except Exception, err:
                logger.error(traceback.format_exc())
                wx.PostEvent(
                    self.notifyWindow,
                    self.showMessageDialogEvent(title="MyData",
                                                message=str(err),
                                                icon=wx.ICON_ERROR))
                return
            folderModel.SetDatasetModel(datasetModel)
            self.VerifyDatafiles(folderModel)
    # NOTE(review): the handlers for the two enclosing "try" blocks are
    # not visible in this view -- the block appears truncated here.
def Delete(self):
    """
    Remove this key pair's private (and, if present, public) key file
    from disk.

    :return: True on success; False (after logging the traceback) if
             either file could not be unlinked.
    """
    # pylint: disable=bare-except
    try:
        os.unlink(self.privateKeyFilePath)
        publicKeyPath = self.publicKeyFilePath
        if publicKeyPath is not None:
            os.unlink(publicKeyPath)
    except:
        logger.error(traceback.format_exc())
        return False
    return True
def EndBusyCursorIfRequired():
    """
    End the busy cursor if one is active; swallow wx's assertion that
    there was no matching wxBeginBusyCursor().
    """
    # pylint: disable=no-member
    # Otherwise pylint complains about PyAssertionError.
    # pylint: disable=protected-access
    try:
        wx.EndBusyCursor()
    except wx._core.PyAssertionError as err:
        harmless = "no matching wxBeginBusyCursor()" in str(err)
        if not harmless:
            logger.error(str(err))
            raise
def EndBusyCursorIfRequired():
    """
    Wrapper around wx.EndBusyCursor which tolerates being called when
    the busy cursor has already been stopped (wx raises an assertion
    error in that case).
    """
    # pylint: disable=no-member
    # Otherwise pylint complains about PyAssertionError.
    # pylint: disable=protected-access
    try:
        wx.EndBusyCursor()
    except wx._core.PyAssertionError as err:
        if "no matching wxBeginBusyCursor()" in str(err):
            return  # Cursor was never busy -- nothing to do.
        logger.error(str(err))
        raise
def InitForUploads(self): fc = self # pylint: disable=invalid-name app = wx.GetApp() if hasattr(app, "TestRunRunning"): fc.testRun = app.TestRunRunning() else: fc.testRun = False fc.SetStarted() settingsModel = fc.settingsModel fc.SetCanceled(False) fc.SetFailed(False) fc.SetCompleted(False) fc.verificationsModel.DeleteAllRows() fc.uploadsModel.DeleteAllRows() fc.uploadsModel.SetStartTime(datetime.datetime.now()) fc.verifyDatafileRunnable = {} fc.verificationsQueue = Queue.Queue() fc.numVerificationWorkerThreads = \ settingsModel.GetMaxVerificationThreads() fc.verificationWorkerThreads = [] for i in range(fc.numVerificationWorkerThreads): thread = threading.Thread(name="VerificationWorkerThread-%d" % (i + 1), target=fc.VerificationWorker) fc.verificationWorkerThreads.append(thread) thread.start() fc.uploadDatafileRunnable = {} fc.uploadsQueue = Queue.Queue() fc.numUploadWorkerThreads = settingsModel.GetMaxUploadThreads() fc.uploadMethod = UploadMethod.HTTP_POST fc.getOrCreateExpThreadingLock = threading.Lock() if sys.platform.startswith("linux"): RestartErrandBoy() # pylint: disable=broad-except try: settingsModel.GetUploaderModel().RequestStagingAccess() uploadToStagingRequest = settingsModel\ .GetUploadToStagingRequest() except Exception, err: # MyData app could be missing from MyTardis server. logger.error(traceback.format_exc()) wx.PostEvent( self.notifyWindow, self.showMessageDialogEvent( title="MyData", message=str(err), icon=wx.ICON_ERROR)) return
def EndBusyCursorIfRequired():
    """
    Safely end the busy cursor.  wx.EndBusyCursor asserts if no busy
    cursor is active; that specific assertion is ignored here.
    """
    # pylint: disable=no-member
    # Otherwise pylint complains about PyAssertionError.
    # pylint: disable=protected-access
    try:
        wx.EndBusyCursor()
    except wx._core.PyAssertionError as err:
        errorMessage = str(err)
        if "no matching wxBeginBusyCursor()" not in errorMessage:
            logger.error(errorMessage)
            raise
def __init__(self, settingsModel=None, dataViewId=None,
             username=None, name=None,
             email=None, userRecordJson=None,
             userNotFoundInMyTardis=False):
    """
    Build a user model, optionally populating username/name/email and
    group memberships from a MyTardis user record (JSON dict).
    Explicitly-passed values take precedence over the JSON record.
    """
    # pylint: disable=too-many-arguments
    self.settingsModel = settingsModel
    self.userId = None
    self.dataViewId = dataViewId
    self.username = username
    self.name = name
    self.email = email
    self.groups = []
    self.userRecordJson = userRecordJson
    # True when this model is a placeholder for a user MyTardis
    # couldn't find.
    self.userNotFoundInMyTardis = userNotFoundInMyTardis
    if userRecordJson is not None:
        self.userId = userRecordJson['id']
        # Only fill in fields the caller didn't supply.
        if username is None:
            self.username = userRecordJson['username']
        if name is None:
            self.name = userRecordJson['first_name'] + " " + \
                userRecordJson['last_name']
        if email is None:
            self.email = userRecordJson['email']
        try:
            for group in userRecordJson['groups']:
                self.groups.append(
                    GroupModel(settingsModel=settingsModel,
                               groupJson=group))
        except KeyError:
            # 'groups' should be available in the user record's JSON
            # if using https://github.com/monash-merc/mytardis/tree/mydata
            message = "Incompatible MyTardis version" \
                "\n\n" \
                "You appear to be connecting to a MyTardis server whose " \
                "MyTardis version doesn't provide some of the " \
                "functionality required by MyData." \
                "\n\n" \
                "Please check that you are using the correct MyTardis " \
                "URL and ask your MyTardis administrator to check " \
                "that an appropriate version of MyTardis is installed."
            logger.error(traceback.format_exc())
            logger.error(message)
            raise IncompatibleMyTardisVersion(message)
def Run(self):
    """
    This method provides the functionality of
    the verification workers. Data files found locally are
    looked up on the MyTardis server, and are classified according
    to whether they are found on the server, whether they are
    verified, and if not, whether they have been completely or
    partially uploaded.
    """
    dataFilePath = self.folderModel.GetDataFilePath(self.dataFileIndex)
    dataFileDirectory = \
        self.folderModel.GetDataFileDirectory(self.dataFileIndex)
    dataFileName = os.path.basename(dataFilePath)
    fc = self.foldersController  # pylint: disable=invalid-name
    # NOTE(review): lazily creating the lock via hasattr isn't atomic;
    # two workers racing here could create two locks -- confirm the
    # first Run() call is effectively single-threaded.
    if not hasattr(fc, "verificationsThreadingLock"):
        fc.verificationsThreadingLock = threading.Lock()
    # Serialize dataViewId allocation + row insertion.
    fc.verificationsThreadingLock.acquire()
    verificationDataViewId = self.verificationsModel.GetMaxDataViewId() + 1
    self.verificationModel = \
        VerificationModel(dataViewId=verificationDataViewId,
                          folderModel=self.folderModel,
                          dataFileIndex=self.dataFileIndex)
    self.verificationsModel.AddRow(self.verificationModel)
    fc.verificationsThreadingLock.release()
    self.verificationModel.SetMessage("Looking for matching file on "
                                      "MyTardis server...")
    self.verificationModel.SetStatus(VerificationStatus.IN_PROGRESS)
    self.verificationsModel.MessageUpdated(self.verificationModel)
    try:
        dataset = self.folderModel.GetDatasetModel()
        if not dataset:
            # test runs don't create required datasets
            raise DoesNotExist("Dataset doesn't exist.")
        existingDatafile = DataFileModel.GetDataFile(
            settingsModel=self.settingsModel, dataset=dataset,
            filename=dataFileName, directory=dataFileDirectory)
        self.verificationModel.SetMessage("Found datafile on "
                                          "MyTardis server.")
        self.verificationModel.SetStatus(VerificationStatus.FOUND_VERIFIED)
        self.verificationsModel.MessageUpdated(self.verificationModel)
        self.HandleExistingDatafile(existingDatafile)
    except DoesNotExist:
        # Not on the server (or no dataset): hand off to upload logic.
        self.HandleNonExistentDataFile()
    except:  # pylint: disable=bare-except
        logger.error(traceback.format_exc())
def OnMyTardis(self, event):
    """
    Open MyTardis in the default web browser.  With exactly one folder
    selected whose dataset exists, open that dataset's view page;
    otherwise open the MyTardis front page.
    """
    try:
        import webbrowser
        selections = self.foldersView.GetDataViewControl().GetSelections()
        selectedRows = [self.foldersModel.GetRow(item)
                        for item in selections]
        targetUrl = self.settingsModel.GetMyTardisUrl()
        if len(selectedRows) == 1:
            folderRecord = self.foldersModel.GetFolderRecord(
                selectedRows[0])
            datasetModel = folderRecord.GetDatasetModel()
            if datasetModel is not None:
                targetUrl = targetUrl + "/" + datasetModel.GetViewUri()
        webbrowser.open(targetUrl)
    except:
        logger.error(traceback.format_exc())
def InitForUploads(self): fc = self # pylint: disable=invalid-name app = wx.GetApp() if hasattr(app, "TestRunRunning"): fc.testRun = app.TestRunRunning() else: fc.testRun = False fc.SetStarted() settingsModel = fc.settingsModel fc.SetCanceled(False) fc.SetFailed(False) fc.SetCompleted(False) fc.verificationsModel.DeleteAllRows() fc.uploadsModel.DeleteAllRows() fc.verifyDatafileRunnable = {} fc.verificationsQueue = Queue.Queue() # For now, the max number of verification threads is hard-coded # to 16: fc.numVerificationWorkerThreads = 16 fc.verificationWorkerThreads = [] for i in range(fc.numVerificationWorkerThreads): thread = threading.Thread(name="VerificationWorkerThread-%d" % (i + 1), target=fc.VerificationWorker) fc.verificationWorkerThreads.append(thread) thread.start() fc.uploadDatafileRunnable = {} fc.uploadsQueue = Queue.Queue() fc.numUploadWorkerThreads = settingsModel.GetMaxUploadThreads() fc.uploadMethod = UploadMethod.HTTP_POST # pylint: disable=broad-except try: settingsModel.GetUploaderModel().RequestStagingAccess() uploadToStagingRequest = settingsModel\ .GetUploadToStagingRequest() except Exception, err: # MyData app could be missing from MyTardis server. logger.error(traceback.format_exc()) wx.PostEvent( self.notifyWindow, self.showMessageDialogEvent(title="MyData", message=str(err), icon=wx.ICON_ERROR)) return
def RequestUploadToStagingApproval(self):
    """
    Used to request the ability to upload via SCP
    to a staging area, and then register in MyTardis.

    Ensures an SSH key pair exists (creating one if the private key
    was deleted), then POSTs an uploader registration request which
    includes the public key and its fingerprint.
    """
    try:
        keyPair = self.settingsModel.GetSshKeyPair()
        if not keyPair:
            keyPair = OpenSSH.FindKeyPair("MyData")
    except PrivateKeyDoesNotExist:
        # No usable key pair: generate a new one.
        keyPair = OpenSSH.NewKeyPair("MyData")
    self.settingsModel.SetSshKeyPair(keyPair)
    myTardisUrl = self.settingsModel.GetMyTardisUrl()
    myTardisUsername = self.settingsModel.GetUsername()
    myTardisApiKey = self.settingsModel.GetApiKey()
    url = myTardisUrl + "/api/v1/mydata_uploaderregistrationrequest/"
    headers = {
        "Authorization": "ApiKey %s:%s" % (myTardisUsername,
                                           myTardisApiKey),
        "Content-Type": "application/json",
        "Accept": "application/json"
    }
    uploaderRegistrationRequestJson = \
        {"uploader": self.responseJson['resource_uri'],
         "name": self.name,
         "requester_name": self.contactName,
         "requester_email": self.contactEmail,
         "requester_public_key": keyPair.GetPublicKey(),
         "requester_key_fingerprint": keyPair.GetFingerprint()}
    data = json.dumps(uploaderRegistrationRequestJson)
    response = requests.post(headers=headers, url=url, data=data)
    if response.status_code >= 200 and response.status_code < 300:
        responseJson = response.json()
        response.close()
        return UploaderRegistrationRequest(
            settingsModel=self.settingsModel,
            uploaderRegRequestJson=responseJson)
    else:
        if response.status_code == 404:
            response.close()
            raise DoesNotExist("HTTP 404 (Not Found) received for: " +
                               url)
        logger.error("Status code = " + str(response.status_code))
        logger.error("URL = " + url)
        message = response.text
        response.close()
        raise Exception(message)
def RequestUploadToStagingApproval(self):
    """
    Ask the MyTardis server for approval to upload via SCP to a
    staging area, registering this uploader's SSH public key.  A new
    key pair is generated if the private key is missing.
    """
    try:
        keyPair = self.settingsModel.GetSshKeyPair()
        if not keyPair:
            keyPair = OpenSSH.FindKeyPair("MyData")
    except PrivateKeyDoesNotExist:
        keyPair = OpenSSH.NewKeyPair("MyData")
    self.settingsModel.SetSshKeyPair(keyPair)
    url = self.settingsModel.GetMyTardisUrl() + \
        "/api/v1/mydata_uploaderregistrationrequest/"
    headers = {
        "Authorization": "ApiKey %s:%s"
                         % (self.settingsModel.GetUsername(),
                            self.settingsModel.GetApiKey()),
        "Content-Type": "application/json",
        "Accept": "application/json"}
    requestPayload = {
        "uploader": self.responseJson['resource_uri'],
        "name": self.name,
        "requester_name": self.contactName,
        "requester_email": self.contactEmail,
        "requester_public_key": keyPair.GetPublicKey(),
        "requester_key_fingerprint": keyPair.GetFingerprint()}
    response = requests.post(headers=headers, url=url,
                             data=json.dumps(requestPayload))
    if not (200 <= response.status_code < 300):
        if response.status_code == 404:
            response.close()
            raise DoesNotExist("HTTP 404 (Not Found) received for: " +
                               url)
        logger.error("Status code = " + str(response.status_code))
        logger.error("URL = " + url)
        message = response.text
        response.close()
        raise Exception(message)
    responseJson = response.json()
    response.close()
    return UploaderRegistrationRequest(
        settingsModel=self.settingsModel,
        uploaderRegRequestJson=responseJson)
def UploadUploaderInfo(self):
    """ Uploads info about the instrument PC to MyTardis via HTTP POST """
    settings = self.settingsModel
    url = settings.GetMyTardisUrl() + \
        "/api/v1/mydata_uploader/?format=json" + \
        "&uuid=" + urllib.quote(self.uuid)
    headers = {"Authorization": "ApiKey " + settings.GetUsername() +
                                ":" + settings.GetApiKey(),
               "Content-Type": "application/json",
               "Accept": "application/json"}
    try:
        # Query for an existing uploader record matching our UUID.
        response = requests.get(headers=headers, url=url)
    except Exception as e:
        logger.error(str(e))
        raise
def __init__(self, settingsModel=None, dataViewId=None,
             username=None, name=None,
             email=None, userRecordJson=None,
             userNotFoundInMyTardis=False):
    """
    Build a user model, optionally filling username/name/email and
    group memberships from a MyTardis user record (JSON dict); values
    passed explicitly take precedence over the JSON record.
    """
    # pylint: disable=too-many-arguments
    self.settingsModel = settingsModel
    self.userId = None
    self.dataViewId = dataViewId
    self.username = username
    self.name = name
    self.email = email
    self.groups = []
    self.userRecordJson = userRecordJson
    # True when this model is a placeholder for a user MyTardis
    # couldn't find.
    self.userNotFoundInMyTardis = userNotFoundInMyTardis
    if userRecordJson is not None:
        self.userId = userRecordJson['id']
        # Only fill in fields the caller didn't supply.
        if username is None:
            self.username = userRecordJson['username']
        if name is None:
            self.name = userRecordJson['first_name'] + " " + \
                userRecordJson['last_name']
        if email is None:
            self.email = userRecordJson['email']
        try:
            for group in userRecordJson['groups']:
                self.groups.append(GroupModel(settingsModel=settingsModel,
                                              groupJson=group))
        except KeyError:
            # 'groups' should be available in the user record's JSON
            # if using https://github.com/monash-merc/mytardis/tree/mydata
            message = "Incompatible MyTardis version" \
                "\n\n" \
                "You appear to be connecting to a MyTardis server whose " \
                "MyTardis version doesn't provide some of the " \
                "functionality required by MyData." \
                "\n\n" \
                "Please check that you are using the correct MyTardis " \
                "URL and ask your MyTardis administrator to check " \
                "that an appropriate version of MyTardis is installed."
            logger.error(traceback.format_exc())
            logger.error(message)
            raise IncompatibleMyTardisVersion(message)
def RequestStagingAccess(self):
    """
    This could be called from multiple threads
    simultaneously, so it requires locking.

    Uploads this uploader's info, then fetches an existing
    upload-to-staging request or creates a new one (also when the
    private key has gone missing).  Non-blocking: if another thread
    already holds the lock, this call does nothing.
    """
    # acquire(False) does not block; only one thread performs the
    # request at a time, the others return immediately.
    if self.requestStagingAccessThreadLock.acquire(False):
        try:
            try:
                self.UploadUploaderInfo()
            except:
                print traceback.format_exc()
                logger.error(traceback.format_exc())
                raise
            uploadToStagingRequest = None
            try:
                uploadToStagingRequest = \
                    self.ExistingUploadToStagingRequest()
            except DoesNotExist:
                # No request on record yet: create one.
                uploadToStagingRequest = \
                    self.RequestUploadToStagingApproval()
                logger.debug("Uploader registration request created.")
            except PrivateKeyDoesNotExist:
                logger.debug(
                    "Generating new uploader registration request, "
                    "because private key was moved or deleted.")
                uploadToStagingRequest = \
                    self.RequestUploadToStagingApproval()
                logger.debug(
                    "Generated new uploader registration request, "
                    "because private key was moved or deleted.")
            if uploadToStagingRequest.IsApproved():
                logger.debug("Uploads to staging have been approved!")
            else:
                logger.debug(
                    "Uploads to staging haven't been approved yet.")
            self.settingsModel\
                .SetUploadToStagingRequest(uploadToStagingRequest)
        except:
            logger.error(traceback.format_exc())
            raise
        finally:
            self.requestStagingAccessThreadLock.release()
def StartDataUploads(self):
    """
    Reset controller state, start verification worker threads and
    request staging access, ahead of a data upload run.
    """
    # pylint: disable=too-many-return-statements
    # pylint: disable=too-many-branches
    # pylint: disable=too-many-statements
    fc = self  # pylint: disable=invalid-name
    fc.SetStarted()
    settingsModel = fc.settingsModel
    fc.canceled.clear()
    # Clear results from any previous run.
    fc.verificationsModel.DeleteAllRows()
    fc.uploadsModel.DeleteAllRows()
    fc.verifyDatafileRunnable = {}
    fc.verificationsQueue = Queue.Queue()
    # For now, the max number of verification threads is set to be the
    # same as the max number of upload threads.
    fc.numVerificationWorkerThreads = settingsModel.GetMaxUploadThreads()
    fc.verificationWorkerThreads = []
    for i in range(fc.numVerificationWorkerThreads):
        thread = threading.Thread(name="VerificationWorkerThread-%d"
                                  % (i + 1),
                                  target=fc.VerificationWorker)
        fc.verificationWorkerThreads.append(thread)
        thread.start()
    fc.uploadDatafileRunnable = {}
    fc.uploadsQueue = Queue.Queue()
    fc.numUploadWorkerThreads = settingsModel.GetMaxUploadThreads()
    # Default to HTTP POST; staging/SCP may be chosen later.
    fc.uploadMethod = UploadMethod.HTTP_POST
    # pylint: disable=broad-except
    try:
        settingsModel.GetUploaderModel().RequestStagingAccess()
        uploadToStagingRequest = settingsModel\
            .GetUploadToStagingRequest()
    except Exception, err:
        # MyData app could be missing from MyTardis server.
        logger.error(traceback.format_exc())
        wx.PostEvent(
            self.notifyWindow,
            self.showMessageDialogEvent(
                title="MyData",
                message=str(err),
                icon=wx.ICON_ERROR))
        return
def UploadUploaderInfo(self):
    """ Uploads info about the instrument PC to MyTardis via HTTP POST """
    # pylint: disable=too-many-statements
    baseUrl = self.settingsModel.GetMyTardisUrl()
    queryUrl = baseUrl + "/api/v1/mydata_uploader/?format=json" + \
        "&uuid=" + urllib.quote(self.uuid)
    authHeader = "ApiKey %s:%s" % (self.settingsModel.GetUsername(),
                                   self.settingsModel.GetApiKey())
    headers = {
        "Authorization": authHeader,
        "Content-Type": "application/json",
        "Accept": "application/json"}
    try:
        # Query for an existing uploader record matching our UUID.
        response = requests.get(headers=headers, url=queryUrl)
    except Exception as err:
        logger.error(str(err))
        raise
def ShowDialog(message):
    """ Show error dialog in main thread. """
    # NOTE(review): "event" is taken from an enclosing scope -- this
    # function appears to be nested inside an event handler; confirm.
    logger.error(message)
    # pylint: disable=no-member
    # Otherwise pylint complains about PyAssertionError.
    # pylint: disable=protected-access
    try:
        wx.EndBusyCursor()
        # The wx.Cursor constructor replaced wx.StockCursor in newer
        # (3.0.3.dev) wxPython builds.
        if wx.version().startswith("3.0.3.dev"):
            arrowCursor = wx.Cursor(wx.CURSOR_ARROW)
        else:
            arrowCursor = wx.StockCursor(wx.CURSOR_ARROW)
        event.settingsDialog.dialogPanel.SetCursor(arrowCursor)
    except wx._core.PyAssertionError, err:
        # Only the "cursor wasn't busy" assertion is harmless.
        if "no matching wxBeginBusyCursor()" \
                not in str(err):
            logger.error(str(err))
            raise
def EndBusyCursorIfRequired(event):
    """
    Stop the busy cursor and restore the arrow cursor on the settings
    dialog (if any), tolerating wx's "no matching wxBeginBusyCursor()"
    assertion when the cursor was never made busy.
    """
    # pylint: disable=no-member
    # Otherwise pylint complains about PyAssertionError.
    # pylint: disable=protected-access
    try:
        wx.EndBusyCursor()
        if event.settingsDialog:
            # wx.Cursor replaced wx.StockCursor in 3.0.3.dev builds.
            usesNewCursorApi = wx.version().startswith("3.0.3.dev")
            if usesNewCursorApi:
                arrowCursor = wx.Cursor(wx.CURSOR_ARROW)
            else:
                arrowCursor = wx.StockCursor(wx.CURSOR_ARROW)
            event.settingsDialog.dialogPanel.SetCursor(arrowCursor)
    except wx._core.PyAssertionError as err:
        if "no matching wxBeginBusyCursor()" not in str(err):
            logger.error(str(err))
            raise
def Cancel(self):
    """
    Cancel this upload: stop the verification timer, close the
    buffered reader, and kill the SCP upload process if one is running.
    """
    try:
        self.canceled = True
        if self.verificationTimer:
            try:
                self.verificationTimer.cancel()
            except:  # pylint: disable=bare-except
                logger.error(traceback.format_exc())
        if self.bufferedReader is not None:
            self.bufferedReader.close()
            logger.debug("Closed buffered reader for \"" +
                         self.GetRelativePathToUpload() + "\".")
        if self.scpUploadProcessPid:
            if sys.platform.startswith("win"):
                # NOTE(review): os.kill on Windows only supports
                # CTRL_C_EVENT/CTRL_BREAK_EVENT specially; SIGABRT
                # here calls TerminateProcess -- confirm intent.
                os.kill(self.scpUploadProcessPid, signal.SIGABRT)
            else:
                os.kill(self.scpUploadProcessPid, signal.SIGKILL)
    except:  # pylint: disable=bare-except
        # Best-effort cleanup: failures here are logged, not raised.
        logger.warning(traceback.format_exc())
def RequestStagingAccess(self):
    """
    This could be called from multiple threads simultaneously, so it
    requires locking.

    Uploads this uploader's info, then fetches an existing
    upload-to-staging request or creates a new one (also when the
    private key has gone missing).  Non-blocking: if another thread
    already holds the lock, this call does nothing.
    """
    # NOTE(review): creating the lock lazily via hasattr is itself not
    # thread-safe; two threads racing here could each create a lock.
    if not hasattr(self, "requestStagingAccessThreadingLock"):
        self.requestStagingAccessThreadingLock = threading.Lock()
    # acquire(False) does not block; only one thread performs the
    # request at a time, the others return immediately.
    if self.requestStagingAccessThreadingLock.acquire(False):
        try:
            try:
                self.UploadUploaderInfo()
            except:
                print traceback.format_exc()
                logger.error(traceback.format_exc())
                raise
            uploadToStagingRequest = None
            try:
                uploadToStagingRequest = \
                    self.ExistingUploadToStagingRequest()
            except DoesNotExist:
                # No request on record yet: create one.
                uploadToStagingRequest = \
                    self.RequestUploadToStagingApproval()
                logger.info("Uploader registration request created.")
            except PrivateKeyDoesNotExist:
                logger.info("Generating new uploader registration request, "
                            "because private key was moved or deleted.")
                uploadToStagingRequest = \
                    self.RequestUploadToStagingApproval()
                logger.info("Generated new uploader registration request, "
                            "because private key was moved or deleted.")
            if uploadToStagingRequest.IsApproved():
                logger.info("Uploads to staging have been approved!")
            else:
                logger.info("Uploads to staging haven't been approved yet.")
            self.settingsModel\
                .SetUploadToStagingRequest(uploadToStagingRequest)
        except:
            logger.error(traceback.format_exc())
            raise
        finally:
            self.requestStagingAccessThreadingLock.release()
def OnMyTardis(self, event):
    """
    Called when user clicks the Internet Browser icon on the
    main toolbar.

    Opens the selected folder's dataset view when exactly one folder
    with a dataset is selected; otherwise opens the MyTardis home page.
    """
    # pylint: disable=bare-except
    try:
        dataViewItems = \
            self.foldersView.GetDataViewControl().GetSelections()
        rowIndexes = [self.foldersModel.GetRow(item)
                      for item in dataViewItems]
        if len(rowIndexes) != 1:
            webbrowser.open(self.settingsModel.GetMyTardisUrl())
        else:
            folderRecord = self.foldersModel.GetFolderRecord(rowIndexes[0])
            datasetModel = folderRecord.GetDatasetModel()
            if datasetModel is None:
                webbrowser.open(self.settingsModel.GetMyTardisUrl())
            else:
                webbrowser.open(self.settingsModel.GetMyTardisUrl() +
                                "/" + datasetModel.GetViewUri())
    except:
        logger.error(traceback.format_exc())
def UploadUploaderInfo(self):
    """ Uploads info about the instrument PC to MyTardis via HTTP POST """
    # pylint: disable=too-many-statements
    myTardisUrl = self.settingsModel.GetMyTardisUrl()
    myTardisUsername = self.settingsModel.GetUsername()
    myTardisApiKey = self.settingsModel.GetApiKey()
    # Query for an existing uploader record matching this PC's UUID.
    url = myTardisUrl + "/api/v1/mydata_uploader/?format=json" + \
        "&uuid=" + urllib.quote(self.uuid)
    headers = {
        "Authorization": "ApiKey %s:%s" % (myTardisUsername,
                                           myTardisApiKey),
        "Content-Type": "application/json",
        "Accept": "application/json"
    }
    try:
        response = requests.get(headers=headers, url=url)
    except Exception as err:
        # Fix: "except Exception, err" is Python-2-only syntax;
        # "as" is valid in Python 2.6+ and Python 3.
        logger.error(str(err))
        raise
def GetSettings(self):
    """
    Used to retrieve uploader settings from the
    mytardis-app-mydata's UploaderSettings model on
    the MyTardis server.
    """
    myTardisUrl = self.settingsModel.GetMyTardisUrl()
    myTardisUsername = self.settingsModel.GetUsername()
    myTardisApiKey = self.settingsModel.GetApiKey()
    headers = {
        "Authorization": "ApiKey %s:%s" % (myTardisUsername,
                                           myTardisApiKey),
        "Content-Type": "application/json",
        "Accept": "application/json"}
    # Look up the uploader record for this PC by UUID.
    url = myTardisUrl + "/api/v1/mydata_uploader/?format=json" + \
        "&uuid=" + urllib.quote(self.uuid)
    try:
        # Short timeout: settings retrieval shouldn't block startup.
        response = requests.get(headers=headers, url=url, timeout=3)
    except Exception as err:
        # Fix: "except Exception, err" is Python-2-only syntax;
        # "as" is valid in Python 2.6+ and Python 3.
        logger.error(str(err))
        raise
def GetInstrument(settingsModel, facility, name):
    """
    Look up an instrument by name within the given facility.

    Returns an InstrumentModel, or None when no matching instrument
    exists.  Raises Exception when the server responds with a non-200
    status.
    """
    myTardisUrl = settingsModel.GetMyTardisUrl()
    myTardisUsername = settingsModel.GetUsername()
    myTardisApiKey = settingsModel.GetApiKey()
    url = myTardisUrl + "/api/v1/instrument/?format=json" + \
        "&facility__id=" + str(facility.GetId()) + \
        "&name=" + urllib.quote(name)
    headers = {
        "Authorization": "ApiKey %s:%s" % (myTardisUsername,
                                           myTardisApiKey)
    }
    session = requests.Session()
    try:
        response = session.get(url=url, headers=headers)
        try:
            if response.status_code != 200:
                # Fix: previously the session and response were leaked
                # on this raise path; the finally blocks below now
                # guarantee cleanup on every exit.
                message = response.text
                logger.error(message)
                raise Exception(message)
            instrumentsJson = response.json()
            numInstrumentsFound = \
                instrumentsJson['meta']['total_count']
            if numInstrumentsFound == 0:
                logger.warning("Instrument \"%s\" was not found in MyTardis"
                               % name)
                logger.debug(url)
                logger.debug(response.text)
                return None
            logger.debug("Found instrument record for name \"%s\" "
                         "in facility \"%s\""
                         % (name, facility.GetName()))
            instrumentJson = instrumentsJson['objects'][0]
            return InstrumentModel(settingsModel=settingsModel, name=name,
                                   instrumentJson=instrumentJson)
        finally:
            response.close()
    finally:
        session.close()
def GetInstrument(settingsModel, facility, name):
    """
    Query MyTardis for an instrument record by name within a facility.

    Returns an InstrumentModel on success, None when the instrument is
    not found, and raises Exception on any non-200 server response.
    """
    myTardisUrl = settingsModel.GetMyTardisUrl()
    myTardisUsername = settingsModel.GetUsername()
    myTardisApiKey = settingsModel.GetApiKey()
    url = myTardisUrl + "/api/v1/instrument/?format=json" + \
        "&facility__id=" + str(facility.GetId()) + \
        "&name=" + urllib.quote(name)
    headers = {
        "Authorization": "ApiKey %s:%s" % (myTardisUsername,
                                           myTardisApiKey)}
    session = requests.Session()
    try:
        response = session.get(url=url, headers=headers)
        try:
            if response.status_code != 200:
                message = response.text
                logger.error(message)
                # Fix: the raise used to skip response.close() and
                # session.close(), leaking the connection; try/finally
                # now closes both on every path.
                raise Exception(message)
            instrumentsJson = response.json()
            numInstrumentsFound = \
                instrumentsJson['meta']['total_count']
            if numInstrumentsFound == 0:
                logger.warning("Instrument \"%s\" was not found in MyTardis"
                               % name)
                logger.debug(url)
                logger.debug(response.text)
                return None
            logger.debug("Found instrument record for name \"%s\" "
                         "in facility \"%s\""
                         % (name, facility.GetName()))
            instrumentJson = instrumentsJson['objects'][0]
            return InstrumentModel(
                settingsModel=settingsModel, name=name,
                instrumentJson=instrumentJson)
        finally:
            response.close()
    finally:
        session.close()
def Notify(message, subtitle=None, title="MyData"):
    """ Post notification. """
    # Windows: use the task bar icon's balloon tooltip.
    if sys.platform.startswith("win"):
        wx.GetApp().taskBarIcon.ShowBalloon(title, message)
        return
    # Linux: shell out to notify-send, falling back to stderr.
    if sys.platform.startswith("linux"):
        try:
            icon = MYDATA_ICONS.GetIconPath("favicon", vendor="MyTardis")
            notifyArgs = ["-i", icon, "-t", "3000", title, message]
            notifyProc = subprocess.Popen(["notify-send"] + notifyArgs,
                                          stdout=subprocess.PIPE,
                                          stderr=subprocess.STDOUT)
            output, _ = notifyProc.communicate()
            if notifyProc.returncode != 0:
                logger.error(output)
        except:  # pylint: disable=bare-except
            sys.stderr.write(message + "\n")
        return
    # macOS: run the bundled "MyData Notifications" helper app.
    executable = "MyData Notifications"
    frozen = hasattr(sys, "frozen")
    if frozen:
        path = "../MacOS"
        bundleIdentifier = "org.mytardis.MyData"
    else:
        path = "resources/macosx/MyData Notifications.app/Contents/MacOS"
        bundleIdentifier = "org.python.python"
    args = ["-message", message, "-title", title, "-sound", "Purr"]
    if subtitle:
        args += ["-subtitle", subtitle]
    args += ["-activate", bundleIdentifier]
    proc = subprocess.Popen([os.path.join(path, executable)] + args,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    stdout, _ = proc.communicate()
    if proc.returncode != 0:
        logger.error(stdout)
def Notify(message, subtitle=None, title="MyData"):
    """ Post notification. """
    if sys.platform.startswith("win"):
        # Windows gets a task-bar balloon instead of a subprocess.
        wx.GetApp().taskBarIcon.ShowBalloon(title, message)
        return
    if sys.platform.startswith("linux"):
        # Use notify-send; if anything goes wrong, at least write
        # the message to stderr.
        try:
            icon = MYDATA_ICONS.GetIconPath("favicon", vendor="MyTardis")
            cmd = ["notify-send", "-i", icon, "-t", "3000", title, message]
            child = subprocess.Popen(cmd,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.STDOUT)
            out, _ = child.communicate()
            if child.returncode != 0:
                logger.error(out)
        except:  # pylint: disable=bare-except
            sys.stderr.write(message + "\n")
        return
    # macOS: invoke the bundled notification helper binary.  The helper's
    # location and the bundle to re-activate afterwards both depend on
    # whether we are running from a frozen (py2app) bundle.
    frozen = hasattr(sys, "frozen")
    if frozen:
        path = "../MacOS"
    else:
        path = "resources/macosx/MyData Notifications.app/Contents/MacOS"
    executable = "MyData Notifications"
    args = ["-message", message, "-title", title]
    if subtitle:
        args = args + ["-subtitle", subtitle]
    if frozen:
        args = args + ["-activate", "org.mytardis.MyData"]
    else:
        args = args + ["-activate", "org.python.python"]
    proc = subprocess.Popen([os.path.join(path, executable)] + args,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    stdout, _ = proc.communicate()
    if proc.returncode != 0:
        logger.error(stdout)
def ShutdownForRefreshWorker():
    """
    Shuts down upload threads (in dedicated worker thread)
    before restarting them.

    On success, posts an EVT_SHUTDOWN_FOR_REFRESH_COMPLETE event to the
    main frame; on any failure, logs the traceback and shows an error
    dialog on the main thread.
    """
    logger.debug("Starting run() method for thread %s"
                 % threading.current_thread().name)
    logger.debug("Shutting down for refresh from %s."
                 % threading.current_thread().name)
    # pylint: disable=bare-except
    try:
        # NOTE(review): `event` is not a parameter here; this function
        # appears to rely on `event` being bound in an enclosing scope
        # (i.e. it is a closure) -- confirm at the definition site.
        wx.CallAfter(BeginBusyCursorIfRequired)
        app = wx.GetApp()
        app.GetScheduleController().ApplySchedule(event)
        event.foldersController.ShutDownUploadThreads()
        shutdownForRefreshCompleteEvent = MyDataEvent(
            EVT_SHUTDOWN_FOR_REFRESH_COMPLETE,
            shutdownSuccessful=True)
        wx.PostEvent(app.GetMainFrame(),
                     shutdownForRefreshCompleteEvent)
        wx.CallAfter(EndBusyCursorIfRequired, event)
    except:
        logger.error(traceback.format_exc())
        message = "An error occurred while trying to shut down " \
                  "the existing data-scan-and-upload process in order " \
                  "to start another one.\n\n" \
                  "See the Log tab for details of the error."
        logger.error(message)

        def ShowDialog():
            """ Show error dialog in main thread. """
            dlg = wx.MessageDialog(None, message, "MyData",
                                   wx.OK | wx.ICON_ERROR)
            dlg.ShowModal()
        # GUI work must happen on the main thread.
        wx.CallAfter(ShowDialog)
    logger.debug("Finishing run() method for thread %s"
                 % threading.current_thread().name)
def GetSettings(self):
    """
    Used to retrieve uploader settings from the
    mytardis-app-mydata's UploaderSettings model on
    the MyTardis server.
    """
    myTardisUrl = self.settingsModel.GetMyTardisUrl()
    myTardisUsername = self.settingsModel.GetUsername()
    myTardisApiKey = self.settingsModel.GetApiKey()
    headers = {
        "Authorization": "ApiKey %s:%s" % (myTardisUsername,
                                           myTardisApiKey),
        "Content-Type": "application/json",
        "Accept": "application/json"
    }
    # Look up this PC's uploader record by UUID.
    url = myTardisUrl + "/api/v1/mydata_uploader/?format=json" + \
        "&uuid=" + urllib.quote(self.uuid)
    try:
        # NOTE(review): the other GetSettings variant in this file passes
        # timeout=3 here; confirm whether this one should too.
        response = requests.get(headers=headers, url=url)
    except Exception as err:
        # Fix: "except Exception, err" is Python-2-only syntax;
        # "as" is valid in Python 2.6+ and Python 3.
        logger.error(str(err))
        raise
def SshServerIsReady(username, privateKeyFilePath, host, port):
    """
    Return True if we can authenticate to the SSH server at host:port
    as `username` with the given private key (by running a remote
    "echo Ready"), False otherwise.
    """
    if sys.platform.startswith("win"):
        # The bundled OpenSSH on Windows (Cygwin build) needs a
        # Cygwin-style path to the key file.
        privateKeyFilePath = GetCygwinPath(privateKeyFilePath)
    # Fix: the original had identical if/else branches for Windows and
    # non-Windows building the exact same command; collapsed into one.
    cmdAndArgs = [OPENSSH.DoubleQuote(OPENSSH.ssh),
                  "-p", str(port),
                  "-i", OPENSSH.DoubleQuote(privateKeyFilePath),
                  "-oPasswordAuthentication=no",
                  "-oNoHostAuthenticationForLocalhost=yes",
                  "-oStrictHostKeyChecking=no",
                  "-l", username,
                  host,
                  OPENSSH.DoubleQuote("echo Ready")]
    cmdString = " ".join(cmdAndArgs)
    logger.debug(cmdString)
    proc = subprocess.Popen(cmdString,
                            shell=OPENSSH.preferToUseShellInSubprocess,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            startupinfo=DEFAULT_STARTUP_INFO,
                            creationflags=DEFAULT_CREATION_FLAGS)
    stdout, _ = proc.communicate()
    returncode = proc.returncode
    if returncode != 0:
        logger.error(stdout)
    # Exit status 0 means authentication and the remote echo succeeded.
    return returncode == 0
def CreateInstrument(settingsModel, facility, name):
    """
    Create instrument.

    POSTs a new instrument record to MyTardis and returns the resulting
    InstrumentModel.  Raises Unauthorized on HTTP 401, and Exception on
    HTTP 404 or any other non-2xx response.
    """
    myTardisUrl = settingsModel.GetMyTardisUrl()
    myTardisUsername = settingsModel.GetUsername()
    myTardisApiKey = settingsModel.GetApiKey()
    url = myTardisUrl + "/api/v1/instrument/"
    headers = {
        "Authorization": "ApiKey %s:%s" % (myTardisUsername,
                                           myTardisApiKey),
        "Content-Type": "application/json",
        "Accept": "application/json"
    }
    data = json.dumps({"facility": facility.GetResourceUri(),
                       "name": name})
    response = requests.post(headers=headers, url=url, data=data)
    content = response.text
    if 200 <= response.status_code < 300:
        return InstrumentModel(settingsModel=settingsModel, name=name,
                               instrumentJson=response.json())
    if response.status_code == 401:
        message = "Couldn't create instrument \"%s\" " \
                  "in facility \"%s\"." \
                  % (name, facility.GetName())
        message += "\n\n"
        message += "Please ask your MyTardis administrator to " \
                   "check the permissions of the \"%s\" " \
                   "user account." % myTardisUsername
        raise Unauthorized(message)
    if response.status_code == 404:
        raise Exception("HTTP 404 (Not Found) received for: " + url)
    logger.error("Status code = " + str(response.status_code))
    logger.error("URL = " + url)
    raise Exception(content)
def ScanForUserFolders(self, writeProgressUpdateToStatusBar, shouldAbort):
    """
    Scan for user folders.

    Walks the top level of the data directory (filtered by the user
    filter), looks each folder up in MyTardis as a username or email
    (depending on the configured folder structure), and dispatches into
    the appropriate dataset/experiment scanning method.  Folders with no
    matching MyTardis user are either skipped or uploaded as "invalid
    user" folders, per the settings.  `shouldAbort` is polled between
    steps so a cancelation takes effect quickly.

    Fix: reconstructed the Username/Email debug-logging branch, which had
    been corrupted in the source by a redaction artifact ("******"),
    mirroring the intact Email branch.
    """
    dataDir = self.settingsModel.GetDataDirectory()
    userOrGroupFilterString = '*%s*' % self.settingsModel.GetUserFilter()
    folderStructure = self.settingsModel.GetFolderStructure()
    filesDepth1 = glob(os.path.join(dataDir, userOrGroupFilterString))
    dirsDepth1 = [item for item in filesDepth1 if os.path.isdir(item)]
    userFolderNames = [os.path.basename(d) for d in dirsDepth1]
    for userFolderName in userFolderNames:
        if shouldAbort():
            wx.CallAfter(wx.GetApp().GetMainFrame().SetStatusMessage,
                         "Data scans and uploads were canceled.")
            wx.CallAfter(EndBusyCursorIfRequired)
            return
        if folderStructure.startswith("Username"):
            logger.debug("Found folder assumed to be username: " +
                         userFolderName)
        elif folderStructure.startswith("Email"):
            logger.debug("Found folder assumed to be email: " +
                         userFolderName)
        usersDataViewId = self.usersModel.GetMaxDataViewId() + 1
        try:
            if folderStructure.startswith("Username"):
                userRecord = \
                    UserModel.GetUserByUsername(self.settingsModel,
                                                userFolderName)
            elif folderStructure.startswith("Email"):
                userRecord = \
                    UserModel.GetUserByEmail(self.settingsModel,
                                             userFolderName)
        except DoesNotExist:
            userRecord = None
        if shouldAbort():
            wx.CallAfter(wx.GetApp().GetMainFrame().SetStatusMessage,
                         "Data scans and uploads were canceled.")
            wx.CallAfter(EndBusyCursorIfRequired)
            return
        if userRecord is not None:
            userRecord.SetDataViewId(usersDataViewId)
            self.usersModel.AddRow(userRecord)
            userFolderPath = os.path.join(dataDir, userFolderName)
            logger.debug("Folder structure: " + folderStructure)
            if folderStructure == 'Username / Dataset' or \
                    folderStructure == 'Email / Dataset':
                self.ScanForDatasetFolders(userFolderPath, userRecord,
                                           userFolderName)
            elif folderStructure == \
                    'Username / Experiment / Dataset' or \
                    folderStructure == 'Email / Experiment / Dataset':
                self.ScanForExperimentFolders(userFolderPath, userRecord,
                                              userFolderName)
            elif folderStructure == \
                    'Username / "MyTardis" / Experiment / Dataset':
                # Locate the (case-insensitive) "MyTardis" folder.
                userFolderContents = os.listdir(userFolderPath)
                myTardisFolderName = None
                for item in userFolderContents:
                    if item.lower() == 'mytardis':
                        myTardisFolderName = item
                if not myTardisFolderName:
                    message = 'Didn\'t find "MyTardis" folder in ' \
                              '"%s"' % userFolderPath
                    logger.error(message)
                    raise InvalidFolderStructure(message)
                myTardisFolderPath = os.path.join(userFolderPath,
                                                  myTardisFolderName)
                self.ScanForExperimentFolders(myTardisFolderPath,
                                              userRecord,
                                              userFolderName)
            if shouldAbort():
                wx.CallAfter(wx.GetApp().GetMainFrame()
                             .SetStatusMessage,
                             "Data scans and uploads were canceled.")
                wx.CallAfter(EndBusyCursorIfRequired)
                return
        else:
            message = "Didn't find a MyTardis user record for folder " \
                      "\"%s\" in %s" % (userFolderName, dataDir)
            logger.warning(message)
            if shouldAbort():
                wx.CallAfter(wx.GetApp().GetMainFrame().SetStatusMessage,
                             "Data scans and uploads were canceled.")
                wx.CallAfter(EndBusyCursorIfRequired)
                return
            if not self.settingsModel.UploadInvalidUserOrGroupFolders():
                logger.warning("Skipping %s, because "
                               "'Upload invalid user folders' "
                               "setting is not checked." % userFolderName)
                continue
            # Record the folder against a placeholder user so its data
            # can still be uploaded.
            if folderStructure.startswith("Username"):
                userRecord = UserModel(settingsModel=self.settingsModel,
                                       username=userFolderName,
                                       userNotFoundInMyTardis=True)
            elif folderStructure.startswith("Email"):
                userRecord = \
                    UserModel(settingsModel=self.settingsModel,
                              email=userFolderName,
                              userNotFoundInMyTardis=True)
            userRecord.SetDataViewId(usersDataViewId)
            self.usersModel.AddRow(userRecord)
            if shouldAbort():
                wx.CallAfter(wx.GetApp().GetMainFrame().SetStatusMessage,
                             "Data scans and uploads were canceled.")
                wx.CallAfter(EndBusyCursorIfRequired)
                return
            self.ScanForDatasetFolders(os.path.join(dataDir,
                                                    userFolderName),
                                       userRecord, userFolderName)
    # Status-bar updates must run on the main (GUI) thread.
    if threading.current_thread().name == "MainThread":
        writeProgressUpdateToStatusBar()
    else:
        wx.CallAfter(writeProgressUpdateToStatusBar)
def GetDescription(self):
    """
    Return the 'description' field from this record's cached JSON.
    On any failure (e.g. missing key), log the JSON and the traceback,
    and return None.
    """
    # pylint: disable=bare-except
    try:
        description = self.json['description']
    except:
        logger.error("self.json = " + str(self.json))
        logger.error(traceback.format_exc())
        return None
    return description
class UploaderModel(object):
    """
    Model class for MyTardis API v1's UploaderAppResource.
    See: https://github.com/mytardis/mytardis-app-mydata/blob/master/api.py

    Gathers hardware / OS / network-interface information about this
    instrument PC and registers it as an "uploader" record on the
    MyTardis server.
    """
    # pylint: disable=too-many-instance-attributes
    def __init__(self, settingsModel):
        # pylint: disable=too-many-locals
        # pylint: disable=too-many-branches
        # pylint: disable=too-many-statements
        self.settingsModel = settingsModel
        self.interface = None       # name of active network interface
        self.responseJson = None    # last server response for this uploader
        self.id = None  # pylint: disable=invalid-name
        self.uploaderSettings = None
        # Each MyData instance is identified on the server by a UUID,
        # generated on first run and persisted in settings.
        self.uuid = self.settingsModel.GetUuid()
        if self.uuid is None:
            self.GenerateUuid()
            self.settingsModel.SetUuid(self.uuid)
        self.osUsername = ""
        self.cpus = 0
        self.osPlatform = ""
        self.hostname = ""
        self.machine = ""
        self.osVersion = ""
        self.requestStagingAccessThreadLock = threading.Lock()
        self.memory = ""
        self.osSystem = ""
        self.architecture = ""
        self.osRelease = ""
        self.processor = ""
        # NOTE(review): self.architecture is assigned twice here; the
        # second assignment is redundant.
        self.architecture = ""
        # Here we check connectivity even if we've already done so, because
        # we need to ensure that we get the correct network interface for
        # self.interface, otherwise if the active interface changes,
        # we can get errors like this: KeyError: 'RTC'
        # when accessing things like ipv4Address[self.interface]
        activeInterfaces = UploaderModel.GetActiveNetworkInterfaces()
        if len(activeInterfaces) == 0:
            message = "No active network interfaces." \
                      "\n\n" \
                      "Please ensure that you have an active " \
                      "network interface " \
                      "(e.g. Ethernet or WiFi)."
            raise NoActiveNetworkInterface(message)
        # Sometimes on Windows XP, you can end up with multiple results
        # from "netsh interface show interface"
        # If there is one called "Local Area Connection",
        # then that's the one we'll go with.
        if "Local Area Connection" in activeInterfaces:
            activeInterfaces = ["Local Area Connection"]
        elif "Local Area Connection 2" in activeInterfaces:
            activeInterfaces = ["Local Area Connection 2"]
        elif "Ethernet" in activeInterfaces:
            activeInterfaces = ["Ethernet"]
        elif "Internet" in activeInterfaces:
            activeInterfaces = ["Internet"]
        elif "Wi-Fi" in activeInterfaces:
            activeInterfaces = ["Wi-Fi"]
        # For now, we're only dealing with one active network interface.
        # It is possible to have more than one active network interface,
        # but we hope that the code above has picked the best one.
        # If there are no active interfaces, then we shouldn't have
        # reached this point - we should have already raised an
        # exception.
        self.interface = activeInterfaces[0]

        if sys.platform.startswith("win"):
            # Windows: parse "ipconfig /all" output for the MAC address,
            # IPv4/IPv6 addresses and subnet mask of the active interface.
            proc = subprocess.Popen(["ipconfig", "/all"],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    startupinfo=DEFAULT_STARTUP_INFO,
                                    creationflags=DEFAULT_CREATION_FLAGS)
            stdout, _ = proc.communicate()
            if proc.returncode != 0:
                raise Exception(stdout)
            macAddress = {}
            ipv4Address = {}
            ipv6Address = {}
            subnetMask = {}
            interface = ""
            for row in stdout.split("\n"):
                match = re.match(r"^\S.*adapter (.*):\s*$", row)
                if match:
                    # Start of a new adapter section in the output.
                    interface = match.groups()[0]
                if interface == self.interface:
                    if ': ' in row:
                        key, value = row.split(': ')
                        if key.strip(' .') == "Physical Address":
                            macAddress[interface] = value.strip()
                        if "IPv4 Address" in key.strip(' .'):
                            # Strip Windows address-state suffixes.
                            ipv4Address[interface] = \
                                value.strip().replace("(Preferred)", "")
                            ipv4Address[interface] = \
                                ipv4Address[interface] \
                                .replace("(Tentative)", "")
                        if "IPv6 Address" in key.strip(' .'):
                            ipv6Address[interface] = \
                                value.strip().replace("(Preferred)", "")
                            ipv6Address[interface] = \
                                ipv6Address[interface] \
                                .replace("(Tentative)", "")
                        if "Subnet Mask" in key.strip(' .'):
                            subnetMask[interface] = value.strip()
        elif sys.platform.startswith("darwin"):
            # macOS: parse "ifconfig <interface>" output.
            proc = subprocess.Popen(["ifconfig", self.interface],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    startupinfo=DEFAULT_STARTUP_INFO,
                                    creationflags=DEFAULT_CREATION_FLAGS)
            stdout, _ = proc.communicate()
            if proc.returncode != 0:
                raise Exception(stdout)
            macAddress = {}
            ipv4Address = {}
            ipv6Address = {}
            subnetMask = {}
            for row in stdout.split("\n"):
                # NOTE(review): the non-darwin regex branch below looks
                # unreachable inside this elif-darwin block.
                if sys.platform.startswith("darwin"):
                    match = re.match(r"\s+ether (\S*)\s*$", row)
                else:
                    match = re.match(r".*\s+HWaddr (\S*)\s*$", row)
                if match:
                    macAddress[self.interface] = match.groups()[0]
                match = re.match(r"\s+inet (\S*)\s+netmask\s+(\S*)\s+.*$",
                                 row)
                if match:
                    ipv4Address[self.interface] = match.groups()[0]
                    subnetMask[self.interface] = match.groups()[1]
                match = re.match(r"\s+inet6 (\S*)\s+.*$", row)
                if match:
                    ipv6Address[self.interface] = match.groups()[0]
        else:
            # Other platforms (Linux): use the netifaces package.
            macAddress = {}
            ipv4Address = {}
            ipv6Address = {}
            subnetMask = {}
            interface = self.interface
            macAddress[interface] = \
                netifaces.ifaddresses(interface)[netifaces.AF_LINK][0]['addr']
            ipv4Addrs = netifaces.ifaddresses(interface)[netifaces.AF_INET]
            ipv4Address[interface] = ipv4Addrs[0]['addr']
            subnetMask[interface] = ipv4Addrs[0]['netmask']
            ipv6Addrs = netifaces.ifaddresses(interface)[netifaces.AF_INET6]
            for addr in ipv6Addrs:
                # Link-local IPv6 addresses carry a "%interface" suffix;
                # keep only the address part for this interface.
                match = re.match(r'(.+)%(.+)', addr['addr'])
                if match and match.group(2) == interface:
                    ipv6Address[interface] = match.group(1)
        self.macAddress = macAddress[self.interface]
        if self.interface in ipv4Address:
            self.ipv4Address = ipv4Address[self.interface]
        else:
            self.ipv4Address = ""
        if self.interface in ipv6Address:
            self.ipv6Address = ipv6Address[self.interface]
        else:
            self.ipv6Address = ""
        if self.interface in subnetMask:
            self.subnetMask = subnetMask[self.interface]
        else:
            self.subnetMask = ""
        logger.debug("The active network interface is: " +
                     str(self.interface))
        self.name = self.settingsModel.GetInstrumentName()
        self.contactName = self.settingsModel.GetContactName()
        self.contactEmail = self.settingsModel.GetContactEmail()
        self.userAgentName = "MyData"
        self.userAgentVersion = VERSION
        self.userAgentInstallLocation = ""
        # pylint: disable=bare-except
        if hasattr(sys, 'frozen'):
            # Frozen (bundled) app: report the executable's directory.
            self.userAgentInstallLocation = os.path.dirname(sys.executable)
        else:
            try:
                self.userAgentInstallLocation = \
                    os.path.dirname(pkgutil.get_loader("MyData").filename)
            except:
                self.userAgentInstallLocation = os.getcwd()
        # Build a human-readable disk-usage table to report to the server.
        fmt = "%-17s %8s %8s %8s %5s%% %9s %s\n"
        diskUsage = (fmt % ("Device", "Total", "Used", "Free",
                            "Use ", "Type", "Mount"))
        for part in psutil.disk_partitions(all=False):
            if os.name == 'nt':
                if 'cdrom' in part.opts or part.fstype == '':
                    # skip cd-rom drives with no disk in it; they may raise
                    # ENOENT, pop-up a Windows GUI error for a non-ready
                    # partition or just hang.
                    continue
            usage = psutil.disk_usage(part.mountpoint)
            diskUsage = diskUsage + (fmt % (part.device,
                                            BytesToHuman(usage.total),
                                            BytesToHuman(usage.used),
                                            BytesToHuman(usage.free),
                                            int(usage.percent),
                                            part.fstype,
                                            part.mountpoint))
        self.diskUsage = diskUsage.strip()
        self.dataPath = self.settingsModel.GetDataDirectory()
        self.defaultUser = self.settingsModel.GetUsername()

    def UploadUploaderInfo(self):
        """
        Uploads info about the instrument PC to MyTardis via HTTP POST
        (or PUT, when an uploader record with this UUID already exists).

        Raises MissingMyDataAppOnMyTardisServer when the endpoint is
        absent (HTTP 404), and Exception on any other non-2xx response.
        """
        # pylint: disable=too-many-statements
        myTardisUrl = self.settingsModel.GetMyTardisUrl()
        myTardisUsername = self.settingsModel.GetUsername()
        myTardisApiKey = self.settingsModel.GetApiKey()
        # First, look for an existing uploader record with our UUID.
        url = myTardisUrl + "/api/v1/mydata_uploader/?format=json" + \
            "&uuid=" + urllib.quote(self.uuid)
        headers = {
            "Authorization": "ApiKey %s:%s" % (myTardisUsername,
                                               myTardisApiKey),
            "Content-Type": "application/json",
            "Accept": "application/json"
        }
        try:
            response = requests.get(headers=headers, url=url)
        except Exception, err:
            logger.error(str(err))
            raise
        if response.status_code == 404:
            message = "The MyData app is missing from the MyTardis server."
            logger.error(url)
            logger.error(message)
            raise MissingMyDataAppOnMyTardisServer(message)
        if response.status_code >= 200 and response.status_code < 300:
            existingUploaderRecords = response.json()
        else:
            logger.error("An error occurred while retrieving uploader info.")
            logger.error("Status code = " + str(response.status_code))
            logger.error(response.text)
            raise Exception(response.text)
        numExistingUploaderRecords = \
            existingUploaderRecords['meta']['total_count']
        if numExistingUploaderRecords > 0:
            # Remember the record's server-side id (for PUT) and any
            # server-side settings for this uploader.
            self.id = existingUploaderRecords['objects'][0]['id']
            if 'settings' in existingUploaderRecords['objects'][0]:
                self.uploaderSettings = \
                    existingUploaderRecords['objects'][0]['settings']
        logger.debug("Uploading uploader info to MyTardis...")
        # PUT updates the existing record; POST creates a new one.
        if numExistingUploaderRecords > 0:
            url = myTardisUrl + "/api/v1/mydata_uploader/%d/" % self.id
        else:
            url = myTardisUrl + "/api/v1/mydata_uploader/"
        # Collect current platform / hardware details at upload time.
        self.osPlatform = sys.platform
        self.osSystem = platform.system()
        self.osRelease = platform.release()
        self.osVersion = platform.version()
        self.osUsername = getpass.getuser()
        self.machine = platform.machine()
        self.architecture = str(platform.architecture())
        self.processor = platform.processor()
        self.memory = BytesToHuman(psutil.virtual_memory().total)
        self.cpus = psutil.cpu_count()
        self.hostname = platform.node()
        uploaderJson = {
            "uuid": self.uuid,
            "name": self.name,
            "contact_name": self.contactName,
            "contact_email": self.contactEmail,
            "user_agent_name": self.userAgentName,
            "user_agent_version": self.userAgentVersion,
            "user_agent_install_location": self.userAgentInstallLocation,
            "os_platform": self.osPlatform,
            "os_system": self.osSystem,
            "os_release": self.osRelease,
            "os_version": self.osVersion,
            "os_username": self.osUsername,
            "machine": self.machine,
            "architecture": self.architecture,
            "processor": self.processor,
            "memory": self.memory,
            "cpus": self.cpus,
            "disk_usage": self.diskUsage,
            "data_path": self.dataPath,
            "default_user": self.defaultUser,
            "interface": self.interface,
            "mac_address": self.macAddress,
            "ipv4_address": self.ipv4Address,
            "ipv6_address": self.ipv6Address,
            "subnet_mask": self.subnetMask,
            "hostname": self.hostname,
            "instruments": [self.settingsModel.GetInstrument()
                            .GetResourceUri()]
        }
        data = json.dumps(uploaderJson, indent=4)
        logger.debug(data)
        if numExistingUploaderRecords > 0:
            response = requests.put(headers=headers, url=url, data=data)
        else:
            response = requests.post(headers=headers, url=url, data=data)
        if response.status_code >= 200 and response.status_code < 300:
            logger.debug("Upload succeeded for uploader info.")
            self.responseJson = response.json()
        else:
            logger.error("Upload failed for uploader info.")
            logger.error("Status code = " + str(response.status_code))
            logger.error(response.text)
            raise Exception(response.text)
def UpdateSettings(self, settingsList):
    """
    Used to save uploader settings to the mytardis-app-mydata's
    UploaderSettings model on the MyTardis server.

    If this uploader's server-side id is not yet known, it is first
    looked up by UUID.  If no uploader record exists on the server yet,
    there is nothing to attach settings to, so we return early.

    Raises MissingMyDataAppOnMyTardisServer on HTTP 404, and Exception
    on any other non-2xx response during the lookup.
    """
    myTardisUrl = self.settingsModel.GetMyTardisUrl()
    myTardisUsername = self.settingsModel.GetUsername()
    myTardisApiKey = self.settingsModel.GetApiKey()
    headers = {
        "Authorization": "ApiKey %s:%s" % (myTardisUsername,
                                           myTardisApiKey),
        "Content-Type": "application/json",
        "Accept": "application/json"
    }
    if not self.id:
        url = myTardisUrl + "/api/v1/mydata_uploader/?format=json" + \
            "&uuid=" + urllib.quote(self.uuid)
        try:
            response = requests.get(headers=headers, url=url)
        except Exception as err:
            # Fix: "except Exception, err" is Python-2-only syntax;
            # "as" is valid in Python 2.6+ and Python 3.
            logger.error(str(err))
            raise
        if response.status_code == 404:
            message = "The MyData app is missing from the MyTardis server."
            logger.error(url)
            logger.error(message)
            raise MissingMyDataAppOnMyTardisServer(message)
        if response.status_code >= 200 and response.status_code < 300:
            existingUploaderRecords = response.json()
        else:
            logger.error("An error occurred while retrieving uploader id.")
            logger.error("Status code = " + str(response.status_code))
            logger.error(response.text)
            raise Exception(response.text)
        numExistingUploaderRecords = \
            existingUploaderRecords['meta']['total_count']
        if numExistingUploaderRecords > 0:
            self.id = existingUploaderRecords['objects'][0]['id']
        else:
            logger.debug("Uploader record doesn't exist yet, so "
                         "we can't save settings to the server.")
            return
icon=wx.ICON_ERROR)) return folderModel.SetDatasetModel(datasetModel) self.VerifyDatafiles(folderModel) except requests.exceptions.ConnectionError, err: if not self.IsShuttingDown(): disconnected = \ ConnectionStatus.DISCONNECTED wx.PostEvent( self.notifyWindow, self.connectionStatusEvent( myTardisUrl=myTardisUrl, connectionStatus=disconnected)) return except ValueError, err: logger.error("Failed to retrieve experiment " "for folder " + str(folderModel.GetFolder())) logger.error(traceback.format_exc()) return if experimentModel is None and not fc.testRun: logger.error("Failed to acquire a MyTardis " "experiment to store data in for " "folder " + folderModel.GetFolder()) return if self.IsShuttingDown(): return fc.finishedCountingVerifications[folderModel].set() if self.foldersModel.GetRowCount() == 0 or \ fc.numVerificationsToBePerformed == 0: # For the case of zero folders or zero files, we # can't use the usual triggers (e.g. datafile # upload complete) to determine when to check if
def ImportGroupFolders(self, groupFolderPath, groupModel):
    """
    Scan folders within a user group folder,
    e.g. D:\\Data\\Smith-Lab\\

    For the "User Group / Instrument / Researcher's Name / Dataset"
    structure: finds the configured instrument's folder inside the
    group folder, then adds one FolderModel row per dataset folder
    found under each researcher's folder, associated with `groupModel`.
    Datasets older than the configured ignore interval are skipped.
    """
    # pylint: disable=bare-except
    try:
        logger.debug("Scanning " + groupFolderPath +
                     " for instrument folders...")
        datasetFilterString = \
            '*%s*' % self.settingsModel.GetDatasetFilter()
        instrumentName = self.settingsModel.GetInstrumentName()
        filesDepth1 = glob(os.path.join(groupFolderPath, instrumentName))
        dirsDepth1 = [item for item in filesDepth1 if os.path.isdir(item)]
        instrumentFolders = [os.path.basename(d) for d in dirsDepth1]
        if len(instrumentFolders) > 1:
            message = "Multiple instrument folders found in %s" \
                      % groupFolderPath
            logger.warning(message)
        elif len(instrumentFolders) == 0:
            message = "No instrument folder was found in %s" \
                      % groupFolderPath
            logger.warning(message)
            return
        # Rather than using any folder we happen to find at this level,
        # we will use the instrument name specified in MyData's Settings
        # dialog.  That way, we can run MyData on a collection of data
        # from multiple instruments, and just select one instrument at
        # a time.
        instrumentFolderPath = \
            os.path.join(groupFolderPath,
                         self.settingsModel.GetInstrumentName())
        if not os.path.exists(instrumentFolderPath):
            logger.warning("Path %s doesn't exist." % instrumentFolderPath)
            return
        # For the User Group / Instrument / Researcher's Name / Dataset
        # folder structure, the default owner in MyTardis will always
        # by the user listed in MyData's settings dialog.  An additional
        # ObjectACL will be created in MyTardis to grant access to the
        # User Group.  The researcher's name in this folder structure is
        # used to determine the default experiment name, but it is not
        # used to determine access control.
        owner = self.settingsModel.GetDefaultOwner()
        logger.debug("Scanning " + instrumentFolderPath +
                     " for user folders...")
        # .next() is the Python 2 way to take just the top level of
        # os.walk (immediate subdirectory names).
        userFolders = os.walk(instrumentFolderPath).next()[1]
        for userFolderName in userFolders:
            userFolderPath = os.path.join(instrumentFolderPath,
                                          userFolderName)
            logger.debug("Scanning " + userFolderPath +
                         " for dataset folders...")
            filesDepth1 = glob(os.path.join(userFolderPath,
                                            datasetFilterString))
            dirsDepth1 = [item for item in filesDepth1
                          if os.path.isdir(item)]
            datasetFolders = [os.path.basename(d) for d in dirsDepth1]
            for datasetFolderName in datasetFolders:
                if self.ignoreOldDatasets:
                    # Skip datasets whose creation time is older than
                    # the configured ignore interval.
                    datasetFolderPath = os.path.join(userFolderPath,
                                                     datasetFolderName)
                    ctimestamp = os.path.getctime(datasetFolderPath)
                    ctime = datetime.fromtimestamp(ctimestamp)
                    age = datetime.now() - ctime
                    if age.total_seconds() > self.ignoreIntervalSeconds:
                        message = "Ignoring \"%s\", because it is " \
                                  "older than %d %s" \
                                  % (datasetFolderPath,
                                     self.ignoreIntervalNumber,
                                     self.ignoreIntervalUnit)
                        logger.warning(message)
                        continue
                groupFolderName = os.path.basename(groupFolderPath)
                dataViewId = self.GetMaxDataViewId() + 1
                folderModel = \
                    FolderModel(dataViewId=dataViewId,
                                folder=datasetFolderName,
                                location=userFolderPath,
                                userFolderName=userFolderName,
                                groupFolderName=groupFolderName,
                                owner=owner,
                                foldersModel=self,
                                usersModel=self.usersModel,
                                settingsModel=self.settingsModel)
                folderModel.SetGroup(groupModel)
                folderModel.SetCreatedDate()
                # Default experiment title combines instrument and
                # researcher folder name.
                folderModel.SetExperimentTitle(
                    "%s - %s" % (self.settingsModel.GetInstrumentName(),
                                 userFolderName))
                self.AddRow(folderModel)
    except InvalidFolderStructure:
        # Propagate structural errors to the caller.
        raise
    except:
        logger.error(traceback.format_exc())
def GetDataFiles(self):
    """
    Return this dataset's datafiles as a list of DataFileModel
    objects, fetching them from the MyTardis REST API on first call
    and caching them in self.datafiles.

    Access is serialized with self.getDatasetFilesThreadingLock so
    concurrent callers don't duplicate the download (double-checked
    after acquiring the lock).

    :raises Unauthorized: if the initial request returns 401.
    :raises Exception: for any other non-2xx initial response.
    """
    # pylint: disable=too-many-nested-blocks
    if not self.datafiles:
        # BUG FIX: the previous version released the lock explicitly
        # just before each raise AND again in a finally clause; the
        # second release of an already-released lock raises
        # thread.error, masking the intended exception.  Using the
        # lock as a context manager guarantees exactly one release
        # per acquire on every exit path.
        with self.getDatasetFilesThreadingLock:
            if self.datafiles:
                # Another thread populated the cache while we waited.
                return self.datafiles
            myTardisUrl = self.settingsModel.GetMyTardisUrl()
            myTardisUsername = self.settingsModel.GetUsername()
            myTardisApiKey = self.settingsModel.GetApiKey()
            # limit=0 can still encounter a limit of 1000 unless
            # API_LIMIT_PER_PAGE is set to 0 in MyTardis's settings.py
            limit = 0
            url = "%s/api/v1/dataset/%d/files/?format=json&limit=%d" \
                % (myTardisUrl, self.GetId(), limit)
            headers = {
                "Authorization": "ApiKey %s:%s" % (myTardisUsername,
                                                   myTardisApiKey)
            }
            logger.debug(url)
            response = requests.get(headers=headers, url=url)
            if not 200 <= response.status_code < 300:
                logger.error(url)
                logger.error("response.status_code = " +
                             str(response.status_code))
                logger.error(response.text)
                if response.status_code == 401:
                    message = "Couldn't list files for dataset \"%s\". " \
                        % (self.GetDescription())
                    message += "\n\n"
                    message += "Please ask your MyTardis administrator " \
                               "to check the permissions of the \"%s\" " \
                               "user account." % myTardisUsername
                    raise Unauthorized(message)
                raise Exception(response.text)
            from .datafile import DataFileModel
            self.datafiles = []
            for datafileJson in response.json()['objects']:
                self.datafiles.append(
                    DataFileModel(self.settingsModel, self, datafileJson))
            offset = 0
            while response.json()['meta']['next']:
                # We should be able to use
                # response.json()['meta']['next'] in the URL,
                # instead of manually constructing the next
                # URL using offset.
                # But response.json()['meta']['next'] seems to give
                # the wrong URL for /api/v1/dataset/%d/files/
                # NOTE(review): offset advances by 1 per page here —
                # TODO confirm this matches the server's page size.
                offset += 1
                url = "%s/api/v1/dataset/%d/files/?format=json" \
                      "&limit=%d&offset=%d" % (myTardisUrl, self.GetId(),
                                               limit, offset)
                logger.debug(url)
                response = requests.get(headers=headers, url=url)
                if 200 <= response.status_code < 300:
                    for datafileJson in response.json()['objects']:
                        self.datafiles.append(
                            DataFileModel(self.settingsModel, self,
                                          datafileJson))
                else:
                    # Follow the original behavior: page-fetch errors
                    # are logged but do not raise.
                    logger.error(url)
                    logger.error("response.status_code = " +
                                 str(response.status_code))
                    logger.error(response.text)
    return self.datafiles
def ScanForDatasetFolders(self, pathToScan, owner, userFolderName):
    """
    Scan for dataset folders.

    Each immediate subdirectory of pathToScan whose name matches the
    configured dataset filter becomes a FolderModel row, owned by
    `owner`.  Folders older than the configured ignore interval are
    skipped when self.ignoreOldDatasets is set.  All exceptions are
    logged and swallowed.
    """
    # pylint: disable=bare-except

    def buildExperimentTitle():
        # Derive the default experiment title from the instrument name
        # plus the best available identifier for the owner.
        instrument = self.settingsModel.GetInstrumentName()
        if not owner.UserNotFoundInMyTardis():
            if owner.GetName().strip() != "":
                return "%s - %s" % (instrument, owner.GetName())
            return "%s - %s" % (instrument, owner.GetUsername())
        # Owner wasn't found in MyTardis: fall back through name,
        # username, then email, annotating the title accordingly.
        for identifier in (owner.GetName(), owner.GetUsername(),
                           owner.GetEmail()):
            if identifier != UserModel.userNotFoundString:
                return "%s - %s (%s)" % (instrument, identifier,
                                         UserModel.userNotFoundString)
        return "%s - %s" % (instrument, UserModel.userNotFoundString)

    try:
        logger.debug("Scanning " + pathToScan +
                     " for dataset folders...")
        filterPattern = '*%s*' % self.settingsModel.GetDatasetFilter()
        matches = glob(os.path.join(pathToScan, filterPattern))
        folderNames = [os.path.basename(match) for match in matches
                       if os.path.isdir(match)]
        for folderName in folderNames:
            logger.debug("Found folder assumed to be dataset: " +
                         folderName)
            if self.ignoreOldDatasets:
                folderPath = os.path.join(pathToScan, folderName)
                created = \
                    datetime.fromtimestamp(os.path.getctime(folderPath))
                elapsed = datetime.now() - created
                if elapsed.total_seconds() > self.ignoreIntervalSeconds:
                    logger.warning(
                        "Ignoring \"%s\", because it is "
                        "older than %d %s"
                        % (folderPath, self.ignoreIntervalNumber,
                           self.ignoreIntervalUnit))
                    continue
            folderModel = FolderModel(
                dataViewId=self.GetMaxDataViewId() + 1,
                folder=folderName,
                location=pathToScan,
                userFolderName=userFolderName,
                groupFolderName=None,
                owner=owner,
                foldersModel=self,
                usersModel=self.usersModel,
                settingsModel=self.settingsModel)
            folderModel.SetCreatedDate()
            folderModel.SetExperimentTitle(buildExperimentTitle())
            self.AddRow(folderModel)
    except:
        logger.error(traceback.format_exc())
existingUploaderRecords['meta']['total_count'] if numExistingUploaderRecords > 0: self.id = existingUploaderRecords['objects'][0]['id'] else: logger.debug("Uploader record doesn't exist yet, so " "we can't save settings to the server.") return url = "%s/api/v1/mydata_uploader/%s/" % (myTardisUrl, self.id) patchData = {'settings': settingsList, 'uuid': self.uuid} response = requests.patch(headers=headers, url=url, data=json.dumps(patchData)) if response.status_code != 202: logger.error(url) message = response.text logger.error(message) raise Exception(message) def GetSettings(self): """ Used to retrieve uploader settings from the mytardis-app-mydata's UploaderSettings model on the MyTardis server. """ myTardisUrl = self.settingsModel.GetMyTardisUrl() myTardisUsername = self.settingsModel.GetUsername() myTardisApiKey = self.settingsModel.GetApiKey() headers = { "Authorization": "ApiKey %s:%s" % (myTardisUsername, myTardisApiKey),