# Assumes a Qt for Python binding (PyQt5 here) and that the generated
# Ui_MainWindow class and the project's ModelManager are importable in this module.
from PyQt5 import QtWidgets, QtGui


class MainWindow(QtWidgets.QMainWindow):

    def __init__(self, parent=None):
        super(MainWindow, self).__init__(parent)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.setWindowIcon(QtGui.QIcon("res/dollar-icon.png"))
        self.connectAll()
        self._manager = ModelManager(self)

    def connectAll(self):
        # wire UI buttons and menu actions to their handlers
        self.ui.addButton.pressed.connect(self.addButtonPressed)
        self.ui.deleteButton.pressed.connect(self.deleteButtonPressed)
        self.ui.calcButton.pressed.connect(self.calcButtonPressed)
        self.ui.file_open.triggered.connect(self.open)
        self.ui.file_save.triggered.connect(self.save)

    def addButtonPressed(self):
        cur, flag = QtWidgets.QInputDialog.getText(
            self, "Enter name", "Enter the currency name:")
        if not flag:
            return
        self._manager.addCurrency(cur.strip())

    def deleteButtonPressed(self):
        index = self.ui.listView.currentIndex()
        if not index.isValid():
            QtWidgets.QMessageBox.warning(
                self, "No currency selected",
                "You have not selected a currency to delete.")
        else:
            self._manager.deleteCurrencyNum(index.row())

    def calcButtonPressed(self):
        spec = self._manager.speculation()
        if spec is not None and spec.exists:
            # close the cycle by appending the first currency to the path
            self.ui.pathEdit.setText(" -> ".join(spec.path + spec.path[0:1]))
            self.ui.ratioEdit.setText("{:.5f}".format(spec.ratio))
        else:
            self.ui.pathEdit.clear()
            self.ui.ratioEdit.clear()
            QtWidgets.QMessageBox.information(
                self, "No solution",
                "Speculation is impossible under these conditions.")

    def open(self):
        filename = QtWidgets.QFileDialog.getOpenFileName(
            self, "Open",
            filter="Currency table (*.ctb);;All files (*.*)")[0]
        if filename:
            self._manager.loadModel(filename)

    def save(self):
        filename = QtWidgets.QFileDialog.getSaveFileName(
            self, "Save",
            filter="Currency table (*.ctb);;All files (*.*)")[0]
        if filename:
            self._manager.saveModel(filename)
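For context, a minimal sketch of how this window would typically be launched. The module name "mainwindow" is an assumption for illustration, not taken from the source.

# Hypothetical entry point; the "mainwindow" module name is a placeholder.
import sys
from PyQt5 import QtWidgets
from mainwindow import MainWindow

if __name__ == "__main__":
    app = QtWidgets.QApplication(sys.argv)
    window = MainWindow()
    window.show()
    sys.exit(app.exec_())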
sys_argv += ['--style', 'material']
app = QApplication(sys_argv)
app.setOrganizationName("MantrixSoft")
app.setApplicationName("thesa")
#QQuickStyle.setStyle("Material")
engine = QQmlApplicationEngine()
engine_point = engine
QtCore.qInstallMessageHandler(qt_message_handler)

jchc = QJsonNetwork(app)
jchc.setEngine(engine)
systemnet = SystemNet(jchc)
modelmanager = ModelManager(jchc, engine, app)
mtools = Tools(app)
mDir = QDir.currentPath()

engine.rootContext().setContextProperty("QJsonNetworkQml", jchc)
engine.rootContext().setContextProperty("ModelManagerQml", modelmanager)
engine.rootContext().setContextProperty("SystemNet", systemnet)
engine.rootContext().setContextProperty("ThesaVersion", ThesaVersion)
engine.rootContext().setContextProperty("Tools", mtools)
engine.rootContext().setContextProperty("DirParent", mDir)
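The fragment above stops before any QML is loaded. A minimal, self-contained sketch of the same bootstrap pattern, assuming PyQt5; the "main.qml" file name and the exposed context property are placeholders, not taken from the project above.

# Sketch only: file name and property are assumptions for illustration.
import sys
from PyQt5.QtCore import QUrl
from PyQt5.QtGui import QGuiApplication
from PyQt5.QtQml import QQmlApplicationEngine

app = QGuiApplication(sys.argv)
engine = QQmlApplicationEngine()
engine.rootContext().setContextProperty("AppVersion", "0.1")  # same pattern as above
engine.load(QUrl.fromLocalFile("main.qml"))   # placeholder QML entry file
if not engine.rootObjects():                  # load failed (missing or invalid QML)
    sys.exit(-1)
sys.exit(app.exec_())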
import logging
import tarfile
import time
from math import floor

from gevent import spawn, joinall

# settings, CoordinatorNode, S3Manager and EC2WorkerManager come from the
# surrounding project; their import paths are not shown in this fragment.


class EC2CoordinatorNode(CoordinatorNode):
    """
    EC2 Coordinator implements a coordinator with EC2 infrastructure.
    Worker nodes are dynamically leased and released as required.
    """

    def __init__(self, work_dir, num_workers, host_address, is_verbose,
                 is_daemon, aws_info, workflow_id=None):
        # set the address of the http node
        self.ec2_logger = logging.getLogger('dwf.ec2.EC2CoordinatorNode')

        # create the model manager for database updating
        if settings.DATABASE_UPDATING:
            from modelmanager import ModelManager
            self.model_manager = ModelManager(self, workflow_id)

        self.init_failed = False
        self.s3_manager = S3Manager(aws_info["accesskey"],
                                    aws_info["secretkey"],
                                    aws_info["s3bucket"])

        # download the base file from S3, extract it in the working directory
        #self.retrieve_extract_base(work_dir, aws_info["s3filename"], num_greenlets=1)

        if not self.init_failed:
            super(EC2CoordinatorNode, self).__init__(
                work_dir, num_workers, host_address, is_verbose, is_daemon)

            # set the worker_manager to an ec2 worker manager
            self.set_worker_manager(EC2WorkerManager(
                self, num_workers, aws_info["accesskey"], aws_info["secretkey"]))

            # lease the initial instances
            self.lease_initial_instances(aws_info["initialworkers"])

        self.previous_progress = -1.0

    def lease_initial_instances(self, initial_workers):
        """
        Create the initial workers required in EC2.
        This is example functionality.
        """
        instances_to_lease = []
        for instance_type, number in initial_workers:
            instances_to_lease.extend([instance_type for _ in range(number)])
        self.create_ec2_workers(instances_to_lease)

    #==========================================================================

    def run(self):
        """
        Run the coordinator: handle job completions until the workflow finishes
        or fails, record the runtime, then finalize the node (send the finished
        message to all worker machines, etc.).
        """
        try:
            if self.init_failed:
                self.failed = True
                return

            # create the subsystems
            self._setup_and_synchronize()

            self.time_start = time.time()
            self.started = True

            # ------ no updating -------
            if settings.DATABASE_UPDATING:
                spawn(self.model_manager.workflow_started)

            self.ec2_logger.info("Distributing jobs...")

            # begin job dispatching
            self.job_dispatcher.dispatch(self.dag.jobs["START"])

            # spin until complete or failed
            while not (self.finished or self.failed):
                self.job_completed(self.completed_job_queue.get(True))
                #args = self.completed_job_queue.get(True)
                #job_id, internal_worker_id, run_time, worker_address, output_file_sizes, start_time = args
                #assignment = self.job_dispatcher.assignments[job_id]
                #assignment.date_started = start_time
                #assignment.run_time = run_time
                #spawn(self.model_manager.add_job_assignment, assignment)

                if settings.DATABASE_UPDATING:
                    if settings.UPDATE_ON_PERCENTAGE:
                        self.update_if_progress()
                    else:
                        spawn(self.model_manager.update_jobs_completed)
                #self.job_completed(args)

            self.run_time = time.time() - self.time_start
        except:
            self.logger.exception("Exception caught")
            self.failed = True
        finally:
            if self.failed:
                self.finalize(terminate=False)
            else:
                self.finalize(terminate=True)

    def update_if_progress(self):
        """
        If the specified amount of progress has occurred, update the database.
        """
        if floor(self.get_progress()) == (self.previous_progress + settings.UPDATE_PERCENTAGE):
            if settings.DATABASE_UPDATING:
                spawn(self.model_manager.update_status)
            self.previous_progress = floor(self.get_progress())

    #==========================================================================

    def retrieve_extract_base(self, work_dir, base_filename, num_greenlets):
        """
        Retrieve and extract the base file from S3.
        """
        self.ec2_logger.info("Retrieving base file...")

        # retrieve the montage base from S3
        if settings.DATABASE_UPDATING:
            self.model_manager.update_workflow_status("Retrieving base file...")

        if not work_dir.endswith("/"):
            work_dir += "/"

        try:
            self.s3_manager.get_base_file(
                base_filename, work_dir + base_filename, num_greenlets)
        except KeyboardInterrupt:
            self.init_failed = True
            raise
        except:
            self.logger.exception("Failure retrieving the base file")
            raise

        if settings.DATABASE_UPDATING:
            self.model_manager.update_workflow_status("Extracting file...")
        self.ec2_logger.info("Extracting file...")

        for _ in range(settings.S3_MAX_FILE_REQUEST_ATTEMPTS):
            try:
                # extract all files from the montage base
                tar = tarfile.open(work_dir + base_filename, 'r:gz')
                tar.extractall()
                self.ec2_logger.info("Finished extracting file.")
            except KeyboardInterrupt:
                self.init_failed = True
                raise
            except IOError:
                # extraction failed, re-download the archive and retry
                self.ec2_logger.exception("Exception in extraction")
                self.s3_manager.get_base_file(
                    base_filename, work_dir + base_filename, num_greenlets)
            else:
                self.ec2_logger.info("Successfully extracted file")
                return
        else:
            # all attempts exhausted
            if settings.DATABASE_UPDATING:
                self.model_manager.workflow_failed()
            self.init_failed = True

    def create_ec2_workers(self, instances, is_async=True):
        """
        Create workers in Amazon EC2.
        """
        # note: the parameter was originally named 'async', which is a reserved
        # word in Python 3; renamed to is_async
        self.ec2_logger.debug("Constructing leasing greenlets")
        if is_async:
            for instance_type in instances:
                spawn(self.worker_manager.lease_worker, instance_type)
        else:
            leasing_greenlets = []
            for instance_type in instances:
                leasing_greenlets.append(
                    spawn(self.worker_manager.lease_worker, instance_type))
            joinall(leasing_greenlets)
            self.ec2_logger.debug("All leasing greenlets joined")