Code example #1
 def getAndUpdateFullRevisionList(self, node_test_suite):
     full_revision_list = []
     config = self.config
     log = self.log
     try:
         for vcs_repository in node_test_suite.vcs_repository_list:
             repository_path = vcs_repository['repository_path']
             repository_id = vcs_repository['repository_id']
             branch = vcs_repository.get('branch')
             # Make sure we have local repository
             updater = Updater(
                 repository_path,
                 git_binary=config['git_binary'],
                 branch=branch,
                 log=log,
                 process_manager=self.process_manager,
                 working_directory=node_test_suite.working_directory,
                 url=vcs_repository["url"])
             updater.checkout()
             revision = "-".join(updater.getRevision())
             full_revision_list.append('%s=%s' % (repository_id, revision))
         node_test_suite.revision = ','.join(full_revision_list)
     except SubprocessError, e:
         log("Error while getting repository, ignoring this test suite : %r"
             % (e, ),
             exc_info=sys.exc_info())
         full_revision_list = None
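The revision string assembled above (and parsed back in examples #6 and #25) has the form "<repository_id>=<commit count>-<commit hash>", with one entry per repository joined by commas. A minimal parsing sketch, using a made-up value:

revision = "erp5=1234-0a1b2c3d,slapos=56-9f8e7d6c"
for entry in revision.split(','):
    repository_id, commit = entry.split('=')
    count, sha = commit.split('-', 1)  # commit count, then the git hash
    print(repository_id, int(count), sha)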
Code example #2
    def __init__(self):
        self.updater_client = Updater("Urls.config")
        self.email_content = self.set_up_email_content()

        # set up the SMTP server
        self.email, self.password = self.get_creditionals(
            "Creditionals.config")
        self.server = smtplib.SMTP("smtp-mail.outlook.com", 587)
        #self.server = smtplib.SMTP("smtp.gmail.com", 587)
        self.server.starttls()
        self.server.login(self.email, self.password)
Code example #3
 def get_update():
     instance = int(request.args.get("instance"))
     if instance == 1:
         return to_JSON(
             UpdateAvailable(Updater().check_version(),
                             Updater().last_version))
     else:
         result = APIController().slave_request(instance,
                                                "get_last_version", 10)
         if result is None:
             return to_JSON(UpdateAvailable(False, ""))
         return to_JSON(UpdateAvailable(result[0], result[1]))
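UpdateAvailable and to_JSON are not defined in this snippet; a hypothetical minimal shape, shown only to make the example self-contained (the real MediaPi definitions may differ):

import json

class UpdateAvailable:
    def __init__(self, available, version):
        self.available = available  # whether a newer version exists
        self.version = version      # latest known version string

def to_JSON(obj):
    # serialize the object's attributes as a JSON document
    return json.dumps(obj, default=lambda o: o.__dict__)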
Code example #4
def updatePlugin():
    global sessionId

    stater = Stater(sessionId)
    updater = Updater(sessionId)
    while True:
        time.sleep(updater.getPool())
        info = stater.getCurrentService()
        if "name" in info:  #Update only when watching TV
            updater.updateRequests()
        if sessionId == None:
            break
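The loop only exits once the global sessionId is cleared, so updatePlugin is presumably run on its own thread; a sketch of that usage (the threading shown here is an assumption, not part of the original):

import threading

worker = threading.Thread(target=updatePlugin, daemon=True)
worker.start()
# ... later, to stop polling (the flag is checked after the next sleep interval):
sessionId = None
worker.join()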
Code example #5
	def __init__(self):
		self.client = None
		self.__loadMonitors()
		try:
			app_key, app_secret = self.__getAppKeyAndSecret()
		except Exception as e:
			Dialog(visual).error("Could not contact key server", "Could not contact key server, unable to verify app. \n%s" % e)
			return
		self.flow = dropbox.client.DropboxOAuth2FlowNoRedirect(app_key, app_secret)
		self.loadSettings()
		update = Updater()
		
		update.update(self, visual)
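The DropboxOAuth2FlowNoRedirect created here still has to be driven to completion; a sketch of the usual no-redirect flow, reusing app_key/app_secret from the snippet above and assuming the classic dropbox SDK API (start()/finish() returning an access token):

import dropbox

flow = dropbox.client.DropboxOAuth2FlowNoRedirect(app_key, app_secret)
authorize_url = flow.start()      # user opens this URL and approves the app
auth_code = input("Enter the authorization code: ").strip()
access_token, user_id = flow.finish(auth_code)
client = dropbox.client.DropboxClient(access_token)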
Code example #6
File: testnode.py Project: fedoraisahat/erp5
 def checkRevision(self, test_result, node_test_suite):
     config = self.config
     log = self.log
     if log is None:
         log = self.log
     if node_test_suite.revision != test_result.revision:
         log('Disagreement on tested revision, checking out: %r' %
             ((node_test_suite.revision, test_result.revision), ))
         for i, repository_revision in enumerate(
                 test_result.revision.split(',')):
             vcs_repository = node_test_suite.vcs_repository_list[i]
             repository_path = vcs_repository['repository_path']
             revision = repository_revision.rsplit('-', 1)[1]
             # other testnodes on other boxes are already ready to test another
             # revision
             log('  %s at %s' % (repository_path, node_test_suite.revision))
             updater = Updater(repository_path,
                               git_binary=config['git_binary'],
                               revision=revision,
                               log=log,
                               process_manager=self.process_manager)
             updater.checkout()
             updater.git_update_server_info()
             updater.git_create_repository_link()
             node_test_suite.revision = test_result.revision
Code example #7
 def update():
     instance = int(request.args.get("instance"))
     if instance == 1:
         Updater().update()
     else:
         APIController().slave_command(instance, "updater", "update")
     return "OK"
Code example #8
    def init():
        from Controllers.TradfriManager import TradfriManager
        from MediaPlayer.MediaManager import MediaManager
        from MediaPlayer.Player.VLCPlayer import VLCPlayer
        from Updater import Updater
        from Shared.State import StateManager
        from Shared.Stats import Stats

        APIController.slaves.register_callback(
            lambda old, new: UIWebsocketController.broadcast(
                "slaves", new.data))
        TradfriManager().tradfri_state.register_callback(
            lambda old, new: UIWebsocketController.broadcast("tradfri", new))
        StateManager().state_data.register_callback(
            lambda old, new: UIWebsocketController.broadcast("1.state", new))
        VLCPlayer().player_state.register_callback(
            lambda old, new: UIWebsocketController.broadcast("1.player", new))
        MediaManager().media_data.register_callback(
            lambda old, new: UIWebsocketController.broadcast("1.media", new))
        MediaManager().torrent_data.register_callback(
            lambda old, new: UIWebsocketController.broadcast("1.torrent", new))
        Stats().cache.register_callback(
            lambda old, new: UIWebsocketController.broadcast("1.stats", new))
        Updater().update_state.register_callback(
            lambda old, new: UIWebsocketController.broadcast("1.update", new))
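Every register_callback(lambda old, new: ...) call above implies an observable value that hands its previous and new state to subscribers; a minimal stand-in for such a property (not the actual MediaPi implementation):

class Observable:
    def __init__(self, value=None):
        self._value = value
        self._callbacks = []

    def register_callback(self, callback):
        # subscriber is invoked as callback(old_value, new_value)
        self._callbacks.append(callback)

    def update(self, new_value):
        old_value, self._value = self._value, new_value
        for callback in self._callbacks:
            callback(old_value, new_value)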
Code example #9
File: Engine.py Project: chagge/returnn
 def init_train_from_config(self, config, train_data, dev_data=None, eval_data=None):
   """
   :type config: Config.Config
   :type train_data: Dataset.Dataset
   :type dev_data: Dataset.Dataset | None
   :type eval_data: Dataset.Dataset | None
   """
   self.train_data = train_data
   self.dev_data = dev_data
   self.eval_data = eval_data
   self.start_epoch, self.start_batch = self.get_train_start_epoch_batch(config)
   self.batch_size = config.int('batch_size', 1)
   self.shuffle_batches = config.bool('shuffle_batches', True)
   self.update_batch_size = config.int('update_batch_size', 0)
   self.model_filename = config.value('model', None)
   self.save_model_epoch_interval = config.int('save_interval', 1)
   self.save_epoch1_initial_model = config.bool('save_epoch1_initial_model', False)
   self.learning_rate_control = loadLearningRateControlFromConfig(config)
   self.learning_rate = self.learning_rate_control.defaultLearningRate
   self.initial_learning_rate = self.learning_rate
   self.pretrain_learning_rate = config.float('pretrain_learning_rate', self.learning_rate)
   self.final_epoch = self.config_get_final_epoch(config)  # Inclusive.
   self.max_seqs = config.int('max_seqs', -1)
   self.updater = Updater.initFromConfig(config)
   self.ctc_prior_file = config.value('ctc_prior_file', None)
   self.exclude = config.int_list('exclude', [])
   self.init_train_epoch_posthook = config.value('init_train_epoch_posthook', None)
   self.share_batches = config.bool('share_batches', False)
   self.batch_variance = config.float('batch_variance', 0.0)
   self.max_seq_length = config.float('max_seq_length', 0)
   self.inc_seq_length = config.float('inc_seq_length', 0)
   if self.max_seq_length == 0:
     self.max_seq_length = sys.maxint
   # And also initialize the network. That depends on some vars here such as pretrain.
   self.init_network_from_config(config)
Code example #10
 def init_train_from_config(self,
                            config,
                            train_data,
                            dev_data=None,
                            eval_data=None):
     """
     :type config: Config.Config
     :type train_data: Dataset.Dataset
     :type dev_data: Dataset.Dataset | None
     :type eval_data: Dataset.Dataset | None
     """
     self.train_data = train_data
     self.dev_data = dev_data
     self.eval_data = eval_data
     self.start_epoch, self.start_batch = self.get_train_start_epoch_batch(
         config)
     self.batch_size = config.int('batch_size', 1)
     self.shuffle_batches = config.bool('shuffle_batches', False)
     self.update_batch_size = config.float('update_batch_size', 0)
     self.batch_size_eval = config.int('batch_size_eval',
                                       self.update_batch_size)
     self.model_filename = config.value('model', None)
     self.save_model_epoch_interval = config.int('save_interval', 1)
     self.save_epoch1_initial_model = config.bool(
         'save_epoch1_initial_model', False)
     self.learning_rate_control = load_learning_rate_control_from_config(
         config)
     self.learning_rate = self.learning_rate_control.default_learning_rate
     self.initial_learning_rate = self.learning_rate
     self.pretrain_learning_rate = config.float('pretrain_learning_rate',
                                                self.learning_rate)
     self.final_epoch = self.config_get_final_epoch(config)  # Inclusive.
     self.max_seqs = config.int('max_seqs', -1)
     self.max_seqs_eval = config.int('max_seqs_eval', self.max_seqs)
     self.updater = Updater.initFromConfig(config)
     self.ctc_prior_file = config.value('ctc_prior_file', None)
     self.exclude = config.int_list('exclude', [])
     self.init_train_epoch_posthook = config.value(
         'init_train_epoch_posthook', None)
     self.share_batches = config.bool('share_batches', False)
     self.seq_drop = config.float('seq_drop', 0.0)
     self.seq_drop_freq = config.float('seq_drop_freq', 10)
     self.max_seq_length = config.float('max_seq_length', 0)
     self.inc_seq_length = config.float('inc_seq_length', 0)
     self.max_seq_length_eval = config.int('max_seq_length_eval', 2e31)
     self.output_precision = config.int('output_precision', 12)
     self.reduction_rate = config.float('reduction_rate', 1.0)
     self.batch_pruning = config.float('batch_pruning', 0.0)
     if self.max_seq_length == 0:
         self.max_seq_length = sys.maxsize
     if config.is_typed("seq_train_parallel"):
         self.seq_train_parallel = SeqTrainParallelControl(
             engine=self,
             config=config,
             **config.typed_value("seq_train_parallel"))
     else:
         self.seq_train_parallel = None
     # And also initialize the network. That depends on some vars here such as pretrain.
     self.init_network_from_config(config)
Code example #11
File: Manager.py Project: mystilleef/scribes
 def __init__(self, editor):
     GObject.__init__(self)
     self.__init_attributes(editor)
     from Updater import Updater
     Updater(editor, self)
     from TreeView import TreeView
     TreeView(editor, self)
     from Window import Window
     Window(editor, self)
Code example #12
File: testnode.py Project: esosaja/erp5
 def getAndUpdateFullRevisionList(self, node_test_suite):
   full_revision_list = []
   config = self.config
   log = self.log
   for vcs_repository in node_test_suite.vcs_repository_list:
     repository_path = vcs_repository['repository_path']
     repository_id = vcs_repository['repository_id']
     branch = vcs_repository.get('branch')
     # Make sure we have local repository
     updater = Updater(repository_path, git_binary=config['git_binary'],
        branch=branch, log=log, process_manager=self.process_manager,
        working_directory=node_test_suite.working_directory,
        url=vcs_repository["url"])
     updater.checkout()
     revision = "-".join(updater.getRevision())
     full_revision_list.append('%s=%s' % (repository_id, revision))
   node_test_suite.revision = ','.join(full_revision_list)
   return full_revision_list
Code example #13
File: testnode.py Project: joseanm/erp5
 def checkRevision(self, test_result, node_test_suite):
   config = self.config
   log = self.log
   if node_test_suite.revision != test_result.revision:
     log('Disagreement on tested revision, checking out: %r' % (
         (node_test_suite.revision, test_result.revision),))
     for i, repository_revision in enumerate(test_result.revision.split(',')):
       vcs_repository = node_test_suite.vcs_repository_list[i]
       repository_path = vcs_repository['repository_path']
       revision = repository_revision.rsplit('-', 1)[1]
       # other testnodes on other boxes are already ready to test another
       # revision
       log('  %s at %s' % (repository_path, node_test_suite.revision))
       updater = Updater(repository_path, git_binary=config['git_binary'],
                         revision=revision, log=log,
                         process_manager=self.process_manager)
       updater.checkout()
       node_test_suite.revision = test_result.revision
Code example #14
    def init(self):
        self = objc.super(BlinkAppDelegate, self).init()
        if self:
            self.applicationName = str(NSBundle.mainBundle().infoDictionary().objectForKey_("CFBundleExecutable"))
            self.applicationNamePrint = str(NSBundle.mainBundle().infoDictionary().objectForKey_("CFBundleName"))
            build = str(NSBundle.mainBundle().infoDictionary().objectForKey_("CFBundleVersion"))
            date = str(NSBundle.mainBundle().infoDictionary().objectForKey_("BlinkVersionDate"))

            branding_file = NSBundle.mainBundle().infoDictionary().objectForKey_("BrandingFile")

            try:
                branding = __import__(branding_file)
            except ImportError:
                try:
                    import branding
                except ImportError:
                    branding = Null

            branding.init(self)

            BlinkLogger().log_info("Starting %s %s" % (self.applicationNamePrint, build))

            self.registerURLHandler()
            NSWorkspace.sharedWorkspace().notificationCenter().addObserver_selector_name_object_(self, "computerDidWake:", NSWorkspaceDidWakeNotification, None)
            NSWorkspace.sharedWorkspace().notificationCenter().addObserver_selector_name_object_(self, "computerWillSleep:", NSWorkspaceWillSleepNotification, None)
            NSDistributedNotificationCenter.defaultCenter().addObserver_selector_name_object_suspensionBehavior_(self, "callFromAddressBook:", "CallTelephoneNumberWithBlinkFromAddressBookNotification", "AddressBook", NSNotificationSuspensionBehaviorDeliverImmediately)
            NSDistributedNotificationCenter.defaultCenter().addObserver_selector_name_object_suspensionBehavior_(self, "callFromAddressBook:", "CallSipAddressWithBlinkFromAddressBookNotification", "AddressBook", NSNotificationSuspensionBehaviorDeliverImmediately)

            NotificationCenter().add_observer(self, name="CFGSettingsObjectDidChange")
            NotificationCenter().add_observer(self, name="SIPApplicationDidStart")
            NotificationCenter().add_observer(self, name="SIPApplicationWillEnd")
            NotificationCenter().add_observer(self, name="SIPApplicationDidEnd")
            NotificationCenter().add_observer(self, name="NetworkConditionsDidChange")
            NotificationCenter().add_observer(self, name="SIPEngineTransportDidDisconnect")
            NotificationCenter().add_observer(self, name="SIPEngineTransportDidConnect")
            NotificationCenter().add_observer(self, name="DNSNameserversDidChange")
            NotificationCenter().add_observer(self, name="SystemDidWakeUpFromSleep")

            # remove obsolete settings
            userdef = NSUserDefaults.standardUserDefaults()
            userdef.removeObjectForKey_('SIPTrace')
            userdef.removeObjectForKey_('MSRPTrace')
            userdef.removeObjectForKey_('XCAPTrace')
            userdef.removeObjectForKey_('EnablePJSIPTrace')
            userdef.removeObjectForKey_('EnableNotificationsTrace')

            try:
                from Updater import Updater
            except ImportError:
                pass
            else:
                self.updater = Updater()

            self.purge_temporary_files()

        return self
Code example #15
File: start.py Project: JKorf/MediaPi
 def init_singletons():
     Stats()
     VLCPlayer()
     NextEpisodeManager()
     WiFiController()
     MediaManager()
     Updater()
     ThreadManager()
     PresenceManager()
     RuleManager()
Code example #16
    def run(self):
        """Moves the Updater module to a separate thread and runs it"""
        self.update_label('Starting updater...')
        self.running_gui_shift()

        self.thread = QtCore.QThread()
        self.updater = Updater(self.download_url, self.download_name,
                               self.new_version, self.program_files_location)
        self.cancel_signal.connect(self.updater.stop)
        self.updater.moveToThread(self.thread)
        self.thread.started.connect(self.updater.run_update)
        self.updater.update_label.connect(self.update_label)
        self.updater.setup_progress_bar.connect(self.setup_progress_bar)
        self.updater.update_progress_bar.connect(self.update_progress_bar)
        self.updater.error_signal.connect(self.update_error)
        self.updater.finished.connect(self.thread.quit)
        self.updater.finished.connect(self.updater.deleteLater)
        self.updater.finished.connect(self.finished_gui_shift)
        self.thread.finished.connect(self.thread.deleteLater)
        self.thread.start()
Code example #17
File: testnode.py Project: ccwalkerjm/erp5
 def getAndUpdateFullRevisionList(self, node_test_suite):
   full_revision_list = []
   config = self.config
   log = self.log
   try:
     for vcs_repository in node_test_suite.vcs_repository_list:
       repository_path = vcs_repository['repository_path']
       repository_id = vcs_repository['repository_id']
       branch = vcs_repository.get('branch')
       # Make sure we have local repository
       updater = Updater(repository_path, git_binary=config['git_binary'],
          branch=branch, log=log, process_manager=self.process_manager,
          working_directory=node_test_suite.working_directory,
          url=vcs_repository["url"])
       updater.checkout()
       revision = "-".join(updater.getRevision())
       full_revision_list.append('%s=%s' % (repository_id, revision))
     node_test_suite.revision = ','.join(full_revision_list)
   except SubprocessError, e:
     log("Error while getting repository, ignoring this test suite : %r" % (e,), exc_info=sys.exc_info())
     full_revision_list = None
Code example #18
File: testnode.py Project: fedoraisahat/erp5
 def getAndUpdateFullRevisionList(self, node_test_suite):
     full_revision_list = []
     config = self.config
     log = self.log
     for vcs_repository in node_test_suite.vcs_repository_list:
         repository_path = vcs_repository['repository_path']
         repository_id = vcs_repository['repository_id']
         branch = vcs_repository.get('branch')
         # Make sure we have local repository
         updater = Updater(
             repository_path,
             git_binary=config['git_binary'],
             branch=branch,
             log=log,
             process_manager=self.process_manager,
             working_directory=node_test_suite.working_directory,
             url=vcs_repository["url"])
         updater.checkout()
         revision = "-".join(updater.getRevision())
         full_revision_list.append('%s=%s' % (repository_id, revision))
     node_test_suite.revision = ','.join(full_revision_list)
     return full_revision_list
Code example #19
File: testnode.py Project: joseanm/erp5
 def getAndUpdateFullRevisionList(self, node_test_suite):
   full_revision_list = []
   config = self.config
   log = self.log
   for vcs_repository in node_test_suite.vcs_repository_list:
     repository_path = vcs_repository['repository_path']
     repository_id = vcs_repository['repository_id']
     if not os.path.exists(repository_path):
       parameter_list = [config['git_binary'], 'clone',
                         vcs_repository['url']]
       if vcs_repository.get('branch') is not None:
         parameter_list.extend(['-b',vcs_repository.get('branch')])
       parameter_list.append(repository_path)
       log(subprocess.check_output(parameter_list, stderr=subprocess.STDOUT))
     # Make sure we have local repository
     updater = Updater(repository_path, git_binary=config['git_binary'],
        log=log, process_manager=self.process_manager)
     updater.checkout()
     revision = "-".join(updater.getRevision())
     full_revision_list.append('%s=%s' % (repository_id, revision))
   node_test_suite.revision = ','.join(full_revision_list)
   return full_revision_list
Code example #20
def main(argv):
    def __call__(self):
        pass

    stdscr = curses.initscr()
    curses.noecho()  # @UndefinedVariable
    curses.cbreak()  # @UndefinedVariable
    stdscr.keypad(1)
    curses.start_color()

    try:
        wl = WindowLogic(stdscr)
        #wl.start()
    except Exception as e:
        raise

    try:
        dlog = wl.dlog
        dlog.msg("Logging debug output to " + str(dlog.outputFile))
        dlog.msg("Images will be cached in " +
                 wl.cfg.get('file.image.directory'))

        ci = CommandInterpreter(stdscr, wl)
        ci.start()

        updater = Updater(stdscr, wl)
        updater.start()
    except Exception as e:
        dlog.excpt(e)
        raise

    ci.join()
    dlog.msg("Command Interpreter joined.")
    updater.stop()
    updater.join()
    dlog.msg("Updater joined.")
    #wl.stop()
    #wl.join()
    dlog.msg("Thread Fetcher joined.")

    curses.nocbreak()  # @UndefinedVariable
    stdscr.keypad(0)
    curses.echo()  # @UndefinedVariable
    curses.endwin()  # @UndefinedVariable
    curses.resetty()  # @UndefinedVariable
    dlog.msg("Terminal restored.")
Code example #21
 def updateRevisionList(self, node_test_suite):
     config = self.config
     log = self.log
     revision_list = []
     try:
         for vcs_repository in node_test_suite.vcs_repository_list:
             repository_path = vcs_repository["repository_path"]
             repository_id = vcs_repository["repository_id"]
             branch = vcs_repository.get("branch")
             # Make sure we have local repository
             updater = Updater(
                 repository_path,
                 git_binary=config["git_binary"],
                 branch=branch,
                 log=log,
                 process_manager=self.process_manager,
                 working_directory=node_test_suite.working_directory,
                 url=vcs_repository["url"],
             )
             updater.checkout()
             revision_list.append((repository_id, updater.getRevision()))
     except SubprocessError, e:
         log("Error while getting repository, ignoring this test suite", exc_info=1)
         return False
Code example #22
File: Engine.py Project: rwth-i6/returnn
 def init_train_from_config(self, config, train_data, dev_data=None, eval_data=None):
   """
   :type config: Config.Config
   :type train_data: Dataset.Dataset
   :type dev_data: Dataset.Dataset | None
   :type eval_data: Dataset.Dataset | None
   """
   self.train_data = train_data
   self.dev_data = dev_data
   self.eval_data = eval_data
   self.start_epoch, self.start_batch = self.get_train_start_epoch_batch(config)
   self.batch_size = config.int('batch_size', 1)
   self.shuffle_batches = config.bool('shuffle_batches', False)
   self.update_batch_size = config.float('update_batch_size', 0)
   self.batch_size_eval = config.int('batch_size_eval', self.update_batch_size)
   self.model_filename = config.value('model', None)
   self.save_model_epoch_interval = config.int('save_interval', 1)
   self.save_epoch1_initial_model = config.bool('save_epoch1_initial_model', False)
   self.learning_rate_control = load_learning_rate_control_from_config(config)
   self.learning_rate = self.learning_rate_control.default_learning_rate
   self.initial_learning_rate = self.learning_rate
   self.pretrain_learning_rate = config.float('pretrain_learning_rate', self.learning_rate)
   self.final_epoch = self.config_get_final_epoch(config)  # Inclusive.
   self.max_seqs = config.int('max_seqs', -1)
   self.max_seqs_eval = config.int('max_seqs_eval', self.max_seqs)
   self.updater = Updater.initFromConfig(config)
   self.ctc_prior_file = config.value('ctc_prior_file', None)
   self.exclude = config.int_list('exclude', [])
   self.init_train_epoch_posthook = config.value('init_train_epoch_posthook', None)
   self.share_batches = config.bool('share_batches', False)
   self.seq_drop = config.float('seq_drop', 0.0)
   self.seq_drop_freq = config.float('seq_drop_freq', 10)
   self.max_seq_length = config.float('max_seq_length', 0)
   self.inc_seq_length = config.float('inc_seq_length', 0)
   self.max_seq_length_eval = config.int('max_seq_length_eval', 2e31)
   self.output_precision = config.int('output_precision', 12)
   self.reduction_rate = config.float('reduction_rate', 1.0)
   self.batch_pruning = config.float('batch_pruning', 0.0)
   if self.max_seq_length == 0:
     self.max_seq_length = sys.maxsize
   if config.is_typed("seq_train_parallel"):
     self.seq_train_parallel = SeqTrainParallelControl(engine=self, config=config, **config.typed_value("seq_train_parallel"))
   else:
     self.seq_train_parallel = None
   # And also initialize the network. That depends on some vars here such as pretrain.
   self.init_network_from_config(config)
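Everything in init_train_from_config comes straight from the Config object; a sketch of a config dict supplying the keys the method reads above (values are illustrative only; 'learning_rate' and 'adam' are assumptions about what the learning-rate control and Updater.initFromConfig look at):

train_config = {
    "batch_size": 5000,
    "shuffle_batches": False,
    "max_seqs": 10,
    "max_seq_length": 0,          # 0 is treated as "no limit" (sys.maxsize)
    "model": "net-model/network",
    "save_interval": 1,
    "learning_rate": 0.01,        # assumed key for load_learning_rate_control_from_config
    "adam": True,                 # assumed key for Updater.initFromConfig
}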
Code example #23
    def init():
        if Settings.get_int("log_level") == 0:
            import logging
            logging.getLogger('requests').setLevel(logging.WARNING)
            logging.basicConfig(level=logging.DEBUG)

        from MediaPlayer.MediaManager import MediaManager
        from MediaPlayer.Player.VLCPlayer import VLCPlayer
        from Updater import Updater
        from Shared.State import StateManager
        from Shared.Stats import Stats

        StateManager().state_data.register_callback(lambda old, new: SlaveClientController.broadcast("state", new))
        VLCPlayer().player_state.register_callback(lambda old, new: SlaveClientController.broadcast("player", new))
        MediaManager().media_data.register_callback(lambda old, new: SlaveClientController.broadcast("media", new))
        MediaManager().torrent_data.register_callback(lambda old, new: SlaveClientController.broadcast("torrent", new))
        Stats().cache.register_callback(lambda old, new: SlaveClientController.broadcast("stats", new))
        Updater().update_state.register_callback(lambda old, new: SlaveClientController.broadcast("update", new))
Code example #24
File: main.py Project: SimoneMottadelli/ShoeFinder
def init():
    bot_id = '1437569240:AAEd2sZ0faC1EwPvQGJPPW4xf7ohP1hTzV8'
    updater = Updater(bot_id)
    updater.setPhotoHandler(imageHandler)

    QualityChecker.init()
    ShoeDetector.init()
    FeatureExtractor.init()
    data_structure = Indexer.build_data_structure(config.DATASET_PATH)
    Matcher.init(data_structure)

    print("Bot is running...")
    updater.start()
Code example #25
 def checkRevision(self, test_result, node_test_suite):
     if node_test_suite.revision == test_result.revision:
         return
     log = self.log
     log("Disagreement on tested revision, checking out: %r != %r", node_test_suite.revision, test_result.revision)
     updater_kw = dict(git_binary=self.config["git_binary"], log=log, process_manager=self.process_manager)
     revision_list = []
     for i, revision in enumerate(test_result.revision.split(",")):
         vcs_repository = node_test_suite.vcs_repository_list[i]
         repository_path = vcs_repository["repository_path"]
         count, revision = revision.split("=")[1].split("-")
         revision_list.append((vcs_repository["repository_id"], (int(count), revision)))
         # other testnodes on other boxes are already ready to test another
         # revision
         updater = Updater(repository_path, revision=revision, **updater_kw)
         updater.checkout()
         updater.git_update_server_info()
         updater.git_create_repository_link()
     node_test_suite.revision_list = revision_list
Code example #26
    def on_command(topic, command, args):
        Logger().write(LogVerbosity.Debug, "Master command " + topic + ": " + command)

        method = None
        if topic == "media":
            from MediaPlayer.MediaManager import MediaManager
            method = getattr(MediaManager(), command)

        if topic == "updater":
            from Updater import Updater
            method = getattr(Updater(), command)

        if topic == "system":
            if command == "restart_device":
                os.system('sudo reboot')
            if command == "restart_application":
                python = sys.executable
                os.execl(python, python, *sys.argv)
            if command == "close_application":
                sys.exit()

        if method is not None:
            cb_thread = CustomThread(method, "Master command", args)
            cb_thread.start()
Code example #27
File: main.py Project: cs3235group3/app
    def __init__(self, parent, *args, **kwargs):
        tk.Frame.__init__(self, parent, *args, **kwargs)
        self.parent = parent

        self.uiMenu = UIMenu(self.parent)
        self.parent.config(menu=self.uiMenu.menu)

        # self.toolbar = ttk.Frame(self.parent)
        # self.toolbarButton1 = ttk.Button(self.toolbar, text='Button')
        # self.toolbarButton1.pack(side=LEFT, padx=2, pady=2)
        # self.toolbar.pack(side=TOP, fill=X)

        self.statusbar = Label(self.parent, text='Ready', bd=1, relief=SUNKEN, anchor=W)
        self.statusbar.pack(side=BOTTOM, fill=X)

        self.notebook = ttk.Notebook(self.parent)
        self.frameSniff = ttk.Frame(self.notebook)
        self.frameArp = ttk.Frame(self.notebook)
        self.frameDhcp = ttk.Frame(self.notebook)
        self.frameSysInfo = ttk.Frame(self.notebook)
        self.notebook.add(self.frameSniff, text='Sniffer')
        self.notebook.add(self.frameArp, text='ARP')
        self.notebook.add(self.frameDhcp, text='DHCP Servers')
        self.notebook.add(self.frameSysInfo, text='System Info')
        self.notebook.pack(fill=BOTH)

        self.sniffLabelFrame = ttk.LabelFrame(self.frameSniff, text="Sniffer")
        self.sniffLabelFrame.pack(padx=10, pady=10)
        self.sniffButton = ttk.Button(self.sniffLabelFrame, text="Sniff", command=self.beginSniff)
        self.sniffButton.pack(side=LEFT)
        self.stopSniffButton = ttk.Button(self.sniffLabelFrame, text="Stop sniffing", command=self.stopSniff, state="disabled")
        self.stopSniffButton.pack(side=LEFT)

        self.sniffTv = ttk.Treeview(self.frameSniff)
        ysb = ttk.Scrollbar(self, orient='vertical', command=self.sniffTv.yview)
        xsb = ttk.Scrollbar(self, orient='horizontal', command=self.sniffTv.xview)
        self.sniffTv.configure(yscroll=ysb.set, xscroll=xsb.set)
        self.sniffTv['columns'] = ('senderip', 'sendermac', 'received')
        self.sniffTv.heading('#0', text='Description', anchor='w')
        self.sniffTv.column('#0', anchor='w')
        self.sniffTv.heading('senderip', text='Sender IP')
        self.sniffTv.column('senderip', width=100)
        self.sniffTv.heading('sendermac', text='Sender MAC')
        self.sniffTv.column('sendermac', width=100)
        self.sniffTv.heading('received', text='Received at')
        self.sniffTv.column('received', width=100)
        self.sniffTv.pack(fill=BOTH)

        self.arpLabel = ttk.Label(self.frameArp, text="ARP cache")
        self.arpLabel.pack()
        self.arpTv = ttk.Treeview(self.frameArp)
        self.arpTv['columns'] = ('ip', 'status')
        self.arpTv.heading('#0', text='MAC address', anchor='w')
        self.arpTv.column('#0', anchor='w')
        self.arpTv.heading('ip', text='IP address')
        self.arpTv.column('ip', width=100)
        self.arpTv.heading('status', text='Status')
        self.arpTv.column('status', width=100)
        self.arpTv.pack(fill=X)

        self.addDhcpLabelFrame = ttk.LabelFrame(self.frameDhcp, text="Add trusted server")
        self.addDhcpLabelFrame.pack(padx=10, pady=10)

        self.addDhcpNameLabel = ttk.Label(self.addDhcpLabelFrame, text="Server name")
        self.addDhcpNameLabel.pack()
        self.addDhcpNameEntry = ttk.Entry(self.addDhcpLabelFrame)
        self.addDhcpNameEntry.pack()

        self.addDhcpIpLabel = ttk.Label(self.addDhcpLabelFrame, text="Server IP address")
        self.addDhcpIpLabel.pack()
        self.addDhcpIpEntry = ttk.Entry(self.addDhcpLabelFrame)
        self.addDhcpIpEntry.pack()

        self.addDhcpMacLabel = ttk.Label(self.addDhcpLabelFrame, text="Server Mac address")
        self.addDhcpMacLabel.pack()
        self.addDhcpMacEntry = ttk.Entry(self.addDhcpLabelFrame)
        self.addDhcpMacEntry.pack()
        self.addDhcpButton = ttk.Button(self.addDhcpLabelFrame, text="Add", command=self.addDhcpButtonPressed)
        self.addDhcpButton.pack()
        self.clrDhcpButton = ttk.Button(self.addDhcpLabelFrame, text="Clear", command=self.clrDhcpButtonPressed)
        self.clrDhcpButton.pack()

        self.dhcpTv = ttk.Treeview(self.frameDhcp)
        self.dhcpTv['columns'] = ('ip', 'mac', 'date')
        self.dhcpTv.heading('#0', text='Server name', anchor='w')
        self.dhcpTv.column('#0', anchor='w')
        self.dhcpTv.heading('ip', text='IP address')
        self.dhcpTv.column('ip', width=100)
        self.dhcpTv.heading('mac', text='MAC address')
        self.dhcpTv.column('mac', width=100)
        self.dhcpTv.heading('date', text='Date added')
        self.dhcpTv.column('date', width=100)
        self.dhcpTv.pack(fill=X)

        strVersion = 'Python ' + platform.python_version()
        self.versionLabel = ttk.Label(self.frameSysInfo, text=strVersion)
        self.versionLabel.pack()
        strPlatform = 'Platform: ' + platform.platform()
        self.platformLabel = ttk.Label(self.frameSysInfo, text=strPlatform)
        self.platformLabel.pack()

        self.dhcpDefender = DhcpDefender(self)
        self.arpDefender = ArpDefender(self)

        self.sniffer = Sniffer(1, 'Sniffer-1', 1, self)
        self.sniffer.start()

        self.updater = Updater(1, 'Updater-1', 1, self)
        self.updater.start()
Code example #28
    print('Augmenting dataframe with translation columns... ',
          datetime.now().isoformat())
    pm.execute_notebook('EntityTranslCols.ipynb',
                        outdir + '/EntityTranslCols.out.ipynb',
                        parameters=dict(repo=repo_src,
                                        version=version,
                                        store_data_dir=outdir),
                        kernel_name=kernel)

    print('Generating entity clusters... ', datetime.now().isoformat())
    pm.execute_notebook(
        cluster_nb,
        outdir + '/er.out.ipynb',
        parameters=dict(
            input_df_path=outdir + '/entity_trans_all_' + version + '.h5',
            repo_name=repo_src,
            version=version,
            kg_tab_dir_path=kg_tab_dir_path,
            output_path=outdir + '/entity_clusters_' + version + '.jl'),
        kernel_name=kernel)

    print('Generating event clusters... ', datetime.now().isoformat())
    gen_event_clusters(endpoint_src,
                       outdir + '/event_clusters_' + version + '.jl')

    print('Insert into GraphDB... ', datetime.now().isoformat())
    up = Updater(endpoint_src, endpoint_dst, repo_dst, outdir, graph, True)
    up.run_all(delete_existing_clusters=delete_existing_clusters,
               entity_clusters='entity_clusters_' + version + '.jl',
               event_clusters='event_clusters_' + version + '.jl')
Code example #29
File: main.py Project: cs3235group3/app
class MainApplication(tk.Frame):
    def __init__(self, parent, *args, **kwargs):
        tk.Frame.__init__(self, parent, *args, **kwargs)
        self.parent = parent

        self.uiMenu = UIMenu(self.parent)
        self.parent.config(menu=self.uiMenu.menu)

        # self.toolbar = ttk.Frame(self.parent)
        # self.toolbarButton1 = ttk.Button(self.toolbar, text='Button')
        # self.toolbarButton1.pack(side=LEFT, padx=2, pady=2)
        # self.toolbar.pack(side=TOP, fill=X)

        self.statusbar = Label(self.parent, text='Ready', bd=1, relief=SUNKEN, anchor=W)
        self.statusbar.pack(side=BOTTOM, fill=X)

        self.notebook = ttk.Notebook(self.parent)
        self.frameSniff = ttk.Frame(self.notebook)
        self.frameArp = ttk.Frame(self.notebook)
        self.frameDhcp = ttk.Frame(self.notebook)
        self.frameSysInfo = ttk.Frame(self.notebook)
        self.notebook.add(self.frameSniff, text='Sniffer')
        self.notebook.add(self.frameArp, text='ARP')
        self.notebook.add(self.frameDhcp, text='DHCP Servers')
        self.notebook.add(self.frameSysInfo, text='System Info')
        self.notebook.pack(fill=BOTH)

        self.sniffLabelFrame = ttk.LabelFrame(self.frameSniff, text="Sniffer")
        self.sniffLabelFrame.pack(padx=10, pady=10)
        self.sniffButton = ttk.Button(self.sniffLabelFrame, text="Sniff", command=self.beginSniff)
        self.sniffButton.pack(side=LEFT)
        self.stopSniffButton = ttk.Button(self.sniffLabelFrame, text="Stop sniffing", command=self.stopSniff, state="disabled")
        self.stopSniffButton.pack(side=LEFT)

        self.sniffTv = ttk.Treeview(self.frameSniff)
        ysb = ttk.Scrollbar(self, orient='vertical', command=self.sniffTv.yview)
        xsb = ttk.Scrollbar(self, orient='horizontal', command=self.sniffTv.xview)
        self.sniffTv.configure(yscroll=ysb.set, xscroll=xsb.set)
        self.sniffTv['columns'] = ('senderip', 'sendermac', 'received')
        self.sniffTv.heading('#0', text='Description', anchor='w')
        self.sniffTv.column('#0', anchor='w')
        self.sniffTv.heading('senderip', text='Sender IP')
        self.sniffTv.column('senderip', width=100)
        self.sniffTv.heading('sendermac', text='Sender MAC')
        self.sniffTv.column('sendermac', width=100)
        self.sniffTv.heading('received', text='Received at')
        self.sniffTv.column('received', width=100)
        self.sniffTv.pack(fill=BOTH)

        self.arpLabel = ttk.Label(self.frameArp, text="ARP cache")
        self.arpLabel.pack()
        self.arpTv = ttk.Treeview(self.frameArp)
        self.arpTv['columns'] = ('ip', 'status')
        self.arpTv.heading('#0', text='MAC address', anchor='w')
        self.arpTv.column('#0', anchor='w')
        self.arpTv.heading('ip', text='IP address')
        self.arpTv.column('ip', width=100)
        self.arpTv.heading('status', text='Status')
        self.arpTv.column('status', width=100)
        self.arpTv.pack(fill=X)

        self.addDhcpLabelFrame = ttk.LabelFrame(self.frameDhcp, text="Add trusted server")
        self.addDhcpLabelFrame.pack(padx=10, pady=10)

        self.addDhcpNameLabel = ttk.Label(self.addDhcpLabelFrame, text="Server name")
        self.addDhcpNameLabel.pack()
        self.addDhcpNameEntry = ttk.Entry(self.addDhcpLabelFrame)
        self.addDhcpNameEntry.pack()

        self.addDhcpIpLabel = ttk.Label(self.addDhcpLabelFrame, text="Server IP address")
        self.addDhcpIpLabel.pack()
        self.addDhcpIpEntry = ttk.Entry(self.addDhcpLabelFrame)
        self.addDhcpIpEntry.pack()

        self.addDhcpMacLabel = ttk.Label(self.addDhcpLabelFrame, text="Server Mac address")
        self.addDhcpMacLabel.pack()
        self.addDhcpMacEntry = ttk.Entry(self.addDhcpLabelFrame)
        self.addDhcpMacEntry.pack()
        self.addDhcpButton = ttk.Button(self.addDhcpLabelFrame, text="Add", command=self.addDhcpButtonPressed)
        self.addDhcpButton.pack()
        self.clrDhcpButton = ttk.Button(self.addDhcpLabelFrame, text="Clear", command=self.clrDhcpButtonPressed)
        self.clrDhcpButton.pack()

        self.dhcpTv = ttk.Treeview(self.frameDhcp)
        self.dhcpTv['columns'] = ('ip', 'mac', 'date')
        self.dhcpTv.heading('#0', text='Server name', anchor='w')
        self.dhcpTv.column('#0', anchor='w')
        self.dhcpTv.heading('ip', text='IP address')
        self.dhcpTv.column('ip', width=100)
        self.dhcpTv.heading('mac', text='MAC address')
        self.dhcpTv.column('mac', width=100)
        self.dhcpTv.heading('date', text='Date added')
        self.dhcpTv.column('date', width=100)
        self.dhcpTv.pack(fill=X)

        strVersion = 'Python ' + platform.python_version()
        self.versionLabel = ttk.Label(self.frameSysInfo, text=strVersion)
        self.versionLabel.pack()
        strPlatform = 'Platform: ' + platform.platform()
        self.platformLabel = ttk.Label(self.frameSysInfo, text=strPlatform)
        self.platformLabel.pack()

        self.dhcpDefender = DhcpDefender(self)
        self.arpDefender = ArpDefender(self)

        self.sniffer = Sniffer(1, 'Sniffer-1', 1, self)
        self.sniffer.start()

        self.updater = Updater(1, 'Updater-1', 1, self)
        self.updater.start()

    def addDhcpButtonPressed(self):
        name = self.addDhcpNameEntry.get()
        ip = self.addDhcpIpEntry.get()
        mac = self.addDhcpMacEntry.get().lower()
        isValidIp =re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$",ip)
        macRe = re.compile(r'^([0-9A-F]{1,2})' + '\:([0-9A-F]{1,2})'*5 + '$', re.IGNORECASE)
        isValidMac = macRe.match(mac)
        if isValidIp and isValidMac:
            self.dhcpDefender.add_trusted_server(name, ip, mac)

    def clrDhcpButtonPressed(self):
        self.dhcpDefender.clear_db()

    def beginSniff(self):
        self.sniffButton.config(state="disabled")
        self.stopSniffButton.config(state="normal")
        self.sniffer.resume()

    def stopSniff(self):
        self.stopSniffButton.config(state="disabled")
        self.sniffButton.config(state="normal")
        self.sniffer.pause()

    def updateSniffTv(self, packet):
        if packet[ARP].op == 1:
            response = 'Request: ' + packet[ARP].psrc + ' is asking about ' + packet[ARP].pdst
        elif packet[ARP].op == 2:
            response = 'Response: ' + packet[ARP].hwsrc + ' has address ' + packet[ARP].psrc
        timeStr = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        self.sniffTv.insert("", 0, text=response, values=(packet[ARP].psrc, packet[ARP].hwsrc, timeStr))

    def updateDhcpTv(self, trusted_servers):
        self.dhcpTv.delete(*self.dhcpTv.get_children())
        for server in trusted_servers:
            self.dhcpTv.insert("", 0, text=server['name'], values=(server['ip'], server['mac'], server['date']))

    def import_arp_cache(self):
        self.arpTv.delete(*self.arpTv.get_children())
        out, err = Popen(['arp', '-na'], stdout=PIPE, stderr=PIPE).communicate()
        out = out.splitlines()
        for line in out:
            ip = self.find_between(line, '(', ')')
            mac = self.find_between(line, 'at ', ' on')
            self.arpTv.insert("", 0, text=mac, values=(ip, ""))

    def find_between(self, s, first, last):
        try:
            start = s.index(first) + len(first)
            end = s.index(last, start)
            return s[start:end]
        except ValueError:
            return ""

    def process_arp(self, packet):
        result = self.arpDefender.arp_pkt_callback(packet)
        if result == -1:    # inconsistent headers
            print("inconsistent headers")
            self.statusbar.config(text="Warning: Inconsistent header packet detected.")
            pass
        elif result == 0:   # failed active check
            print("failed active check")
            self.statusbar.config(text="Warning: You might be under ARP spoof attack.")
            pass
        elif result == 1:   # passed active check
            print("passed active check")
            pass

    def process_dhcp(self, packet):
        result = self.dhcpDefender.dhcp_pkt_callback(packet)
        if result == 0:     # unknown dhcp server
            print("Unknown dhcp server")
            self.statusbar.config(text="Warning: Untrusted DHCP server present in network.")
            pass
        elif result == 1:
            print("Trusted dhcp server")
            pass
Code example #30
class UpdaterGUI(Window):
    def __init__( self, parent):
        self.updater = Updater()
        super(UpdaterGUI, self).__init__(parent)
        self.m_dirPicker1.SetPath(self.updater.skin_folder)

    def UpdateButton( self, event ):
        result = False

        if self.updater.skin_folder == "":
            dlg = wx.MessageDialog( self, "The Steam folder has not been specified. Please input the Steam folder location first.", "Input Steam Folder Location", wx.OK)
            dlg.ShowModal()
            dlg.Destroy()
            event.Skip()

        if self.updater.current_version == self.updater.latest_version:
            dlg = wx.MessageDialog( self, "You seem to have the latest version installed already. Do you want to update anyway?", "Latest Version Already Installed", wx.OK | wx.CANCEL)
            answer = dlg.ShowModal() # Show it
            dlg.Destroy() # finally destroy it when finished.
            if answer == wx.ID_OK:
                result = self.updater.UpdateSkin()
        else:
            result = self.updater.UpdateSkin()

        if result:
            dlg = wx.MessageDialog( self, "Updated to {}. Please restart Steam.".format(self.updater.latest_version), "Finished", wx.OK)
            dlg.ShowModal()
            dlg.Destroy()
            self.m_staticText2.SetLabel('Installed Version: {}'.format(self.updater.current_version))


    def OnClose( self, event ):
        self.updater.SaveConfig()
        event.Skip()


    def UpdateSteamFolder( self, event ):
        print "cool"
        print self.m_dirPicker1.GetPath()
        if not self.updater.SetSteamFolder(self.m_dirPicker1.GetPath()):
            dlg = wx.MessageDialog( self, "Couldn't find the folder \"{}\". Please double check this is the where steam is installed.".format(self.m_dirPicker1.GetPath()), "Cannot find Steam Folder", wx.OK)
            dlg.ShowModal()
            dlg.Destroy()
        else:
            self.m_dirPicker1.SetPath(self.updater.skin_folder)
        event.Skip()


    def UpdateCurrentVersion( self, event ):
        self.m_staticText1.SetLabel('Latest Version: {}'.format(self.updater.latest_version))
        event.Skip()


    def UpdateLatestVersion( self, event ):
        self.m_staticText2.SetLabel('Installed Version: {}'.format(self.updater.current_version))
        print self.updater.current_version
        event.Skip()


    def UpdateStatusBar( self, event ):
        self.SetStatusText("Status")
        event.Skip()
Code example #31
File: test_NativeOp.py Project: tazdriver/returnn
def load(lstm_opts=None):
    if not lstm_opts: lstm_opts = {"class": "lstm2"}
    lstm_opts = lstm_opts.copy()
    lstm_opts.update({"n_out": 10, "from": "in"})
    num_inputs = 9
    num_outputs = 2
    net_topo = {
        "in": {
            "class": "dump",
            "filename": "in"
        },
        "lstm": lstm_opts,
        "lstm_dump": {
            "class": "dump",
            "from": "lstm",
            "filename": "lstm"
        },
        "output": {
            "class": "softmax",
            "loss": "ce",
            "from": "lstm_dump"
        }
    }

    collected_data = {}
    DumpLayer.global_debug_container = collected_data

    net = Network.LayerNetwork.from_json(json_content=net_topo,
                                         n_in=num_inputs,
                                         n_out={"classes": (num_outputs, 1)},
                                         train_flag=True)
    net.declare_train_params()

    # Init dataset and prepare one minibatch.
    epoch = 1
    dataset = Task12AXDataset(num_seqs=1000,
                              seq_ordering="random",
                              chunking="200:200")
    dataset.init_seq_order(epoch=epoch)
    batch_gen = dataset.generate_batches(recurrent_net=net.recurrent,
                                         batch_size=5000,
                                         max_seqs=10)
    batches = batch_gen.peek_next_n(1)
    # We need the DummyDevice for assign_dev_data.
    dev = DummyDevice()
    assign_success, _ = EngineUtil.assign_dev_data(device=dev,
                                                   dataset=dataset,
                                                   batches=batches)
    assert assign_success
    dev.initialize(net)
    dev.update_data()
    givens = [(net.y[k], dev.y[k]) for k in dev.used_data_keys]
    givens += [(net.j[k], dev.j[k]) for k in dev.used_data_keys]

    # Now gradients, updates and compile everything.
    gradients = {
        p: T.grad(net.get_objective(), p, known_grads=net.known_grads)
        for p in net.train_params_vars
    }
    updater = Updater(adam=True)
    updater.initVars(net, gradients)
    updater.setLearningRate(learning_rate=0.01)
    trainer = theano.function(inputs=[],
                              outputs=[net.total_cost],
                              givens=givens,
                              updates=updater.getUpdateList(),
                              on_unused_input='warn',
                              name="train_and_updater")

    for p in net.train_params_vars:
        collected_data["param:%s" % p.name] = p.get_value()

    # And finally, run it.
    cost = trainer()
    collected_data["cost"] = cost
    return collected_data
Code example #32
import sys
from Updater import Updater

repo_name_src = 'gaia0531ta1'
repo_name_dst = 'gaia0531ta2'

endpoint_src = 'http://gaiadev01.isi.edu:7200/repositories/' + repo_name_src
endpoint_dst = 'http://gaiadev01.isi.edu:7200/repositories/' + repo_name_dst
output = '/Users/jenniferchen/Documents/AIDA/ta2pipline/store_data/' + repo_name_dst
graph = 'http://www.isi.edu/baseline-20190605-001'
has_jl = True
print('---')
print('your source endpoint: ', endpoint_src)
print('your destination endpoint: ', endpoint_dst)
print('your output: ', output)
print('your graph: ', graph)
print('your has jl: ', has_jl)
print('---')

up = Updater(endpoint_src, endpoint_dst, repo_name_src, output, graph, has_jl)
#up.run_delete_ori()  # run this only if creating the first named graph in the repo
up.run_load_jl(entity_clusters='clusters-baseline-20190605-001.jl')
up.run_entity_nt()
up.run_event_nt()
up.run_relation_nt()
up.run_insert_proto()
up.run_super_edge()
Code example #33
File: test_NativeOp.py Project: atuxhe/returnn
def load(lstm_opts=None):
  if not lstm_opts: lstm_opts = {"class": "lstm2"}
  lstm_opts = lstm_opts.copy()
  lstm_opts.update({"n_out": 10, "from": "in"})
  num_inputs = 9
  num_outputs = 2
  net_topo = {
    "in": {"class": "dump", "filename": "in"},
    "lstm": lstm_opts,
    "lstm_dump": {"class": "dump", "from": "lstm", "filename": "lstm"},
    "output": {"class": "softmax", "loss": "ce", "from": "lstm_dump"}
  }

  collected_data = {}
  DumpLayer.global_debug_container = collected_data

  net = Network.LayerNetwork.from_json(
    json_content=net_topo,
    n_in=num_inputs,
    n_out={"classes": (num_outputs, 1)},
    train_flag=True
  )
  net.declare_train_params()

  # Init dataset and prepare one minibatch.
  epoch = 1
  dataset = Task12AXDataset(num_seqs=1000, seq_ordering="random", chunking="200:200")
  dataset.init_seq_order(epoch=epoch)
  batch_gen = dataset.generate_batches(
    recurrent_net=net.recurrent,
    batch_size=5000,
    max_seqs=10)
  batches = batch_gen.peek_next_n(1)
  # We need the DummyDevice for assign_dev_data.
  dev = DummyDevice()
  assign_success, _ = EngineUtil.assign_dev_data(device=dev, dataset=dataset, batches=batches)
  assert assign_success
  dev.initialize(net)
  dev.update_data()
  givens = [(net.y[k], dev.y[k]) for k in dev.used_data_keys]
  givens += [(net.j[k], dev.j[k]) for k in dev.used_data_keys]

  # Now gradients, updates and compile everything.
  gradients = {p: T.grad(net.get_objective(), p, known_grads=net.known_grads)
               for p in net.train_params_vars}
  updater = Updater(adam=True)
  updater.initVars(net, gradients)
  updater.setLearningRate(learning_rate=0.01)
  trainer = theano.function(
    inputs=[],
    outputs=[net.total_cost],
    givens=givens,
    updates=updater.getUpdateList(),
    on_unused_input='warn',
    name="train_and_updater")

  for p in net.train_params_vars:
    collected_data["param:%s" % p.name] = p.get_value()

  # And finally, run it.
  cost = trainer()
  collected_data["cost"] = cost
  return collected_data
Code example #34
 def __init__( self, parent):
     self.updater = Updater()
     super(UpdaterGUI, self).__init__(parent)
     self.m_dirPicker1.SetPath(self.updater.skin_folder)
Code example #35
 def __init__(self):
     Updater.__init__(self)
Code example #36
detector_model_path = 'models/model_frcnn.hdf5'
model_rpn.load_weights(detector_model_path, by_name=True)
model_classifier.load_weights(detector_model_path, by_name=True)

# COMPILE MODEL
model_rpn.compile(optimizer='sgd', loss='mse')
model_classifier.compile(optimizer='sgd', loss='mse')

# LOAD PREDICTION MODEL
classifier_model_path = 'models/MobileNetV2_192_NoCar.h5'
classifier = load_model(classifier_model_path)

# SETUP BOT AND FOLDERS
alphabeth = [
    'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'L', 'M', 'N', 'O', 'P', 'Q',
    'R', 'S', 'T', 'U', 'V', 'Z'
]

if not os.path.exists("res"):
    os.makedirs("res")

classif_input = 192
marker_true = cv2.imread("V_tick.png", -1)
marker_false = cv2.imread("X_tick.png", -1)

bot_id = '*********'

updater = Updater(bot_id)
updater.setPhotoHandler(imageHandler)
print('---------- BOT IS READY')
updater.start()
Code example #37
class UpdaterWidget(QtWidgets.QWidget, Ui_updater_gui):

    cancel_signal = QtCore.pyqtSignal()

    def __init__(self):
        """
        The GUI window that runs the updater in a separate thread.  Used mainly to show the progress of the updater and
        ensure the user that it is doing something while it downloads the newest version from github
        """
        super().__init__()
        self.setupUi(self)
        self.settings = QtCore.QSettings('SomeGuySoftware', 'dfr_updater')
        self.dfr_settings = QtCore.QSettings('SomeGuySoftware',
                                             'RedditDownloader')
        self.restoreGeometry(
            self.settings.value('geometry', self.saveGeometry()))
        self.download_url = self.settings.value('download_url', None, type=str)
        self.download_name = self.settings.value('download_name',
                                                 None,
                                                 type=str)
        self.new_version = self.settings.value('new_version', None, type=str)
        self.launch_checkbox.setChecked(
            self.settings.value('launch_checkbox', False, type=bool))
        self.save_height = True
        self.label.setOpenExternalLinks(True)
        self.program_files_location = self.settings.value(
            'program_files_location', None, type=str)
        self.running = False
        self.up_to_date = False

        self.button_box.accepted.connect(self.assign_accept)
        self.button_box.rejected.connect(self.assign_reject)

        self.progress_bar.setVisible(False)
        self.launch_checkbox.setVisible(False)

    def run(self):
        """Moves the Updater module to a separate thread and runs it"""
        self.update_label('Starting updater...')
        self.running_gui_shift()

        self.thread = QtCore.QThread()
        self.updater = Updater(self.download_url, self.download_name,
                               self.new_version, self.program_files_location)
        self.cancel_signal.connect(self.updater.stop)
        self.updater.moveToThread(self.thread)
        self.thread.started.connect(self.updater.run_update)
        self.updater.update_label.connect(self.update_label)
        self.updater.setup_progress_bar.connect(self.setup_progress_bar)
        self.updater.update_progress_bar.connect(self.update_progress_bar)
        self.updater.error_signal.connect(self.update_error)
        self.updater.finished.connect(self.thread.quit)
        self.updater.finished.connect(self.updater.deleteLater)
        self.updater.finished.connect(self.finished_gui_shift)
        self.thread.finished.connect(self.thread.deleteLater)
        self.thread.start()

    def assign_accept(self):
        if self.up_to_date:
            self.open_file_location()
        else:
            self.run()

    def assign_reject(self):
        if self.running:
            self.stop_download()
        else:
            self.close()

    def running_gui_shift(self):
        """Changes some GUI behavior so as not to interfere with the updater while providing a way to stop it"""
        self.progress_bar.setVisible(True)
        self.running = True
        self.button_box.button(QtWidgets.QDialogButtonBox.Ok).setVisible(False)
        self.button_box.button(
            QtWidgets.QDialogButtonBox.Cancel).setText('Stop')

    def finished_gui_shift(self):
        """Changes the GUI behaviour again so that certain operations can be performed once the update is complete"""
        self.running = False
        self.up_to_date = True
        self.update_label(
            'Update complete. You are now running the latest version.')
        self.button_box.button(QtWidgets.QDialogButtonBox.Ok).setVisible(True)
        self.button_box.button(
            QtWidgets.QDialogButtonBox.Ok).setText('Open exe location')
        self.button_box.button(
            QtWidgets.QDialogButtonBox.Cancel).setText('Close')
        self.launch_checkbox.setVisible(True)
        self.dfr_settings.setValue('first_run', True)

    def stopped_gui_shift(self):
        """A different GUI shift that provides a way to restart the updater if it is stopped"""
        self.running = False
        self.button_box.button(QtWidgets.QDialogButtonBox.Ok).setVisible(True)
        self.button_box.button(
            QtWidgets.QDialogButtonBox.Cancel).setText('Close')

    def update_label(self, text):
        self.label.setText(text)

    def setup_progress_bar(self, setup):
        if setup > 100:
            minimum = 0 - (setup - 100)
            maximum = 100
        else:
            minimum = 0
            maximum = setup
        self.progress_bar.setMinimum(minimum)
        self.progress_bar.setMaximum(maximum)
        self.progress_bar.setValue(minimum)

    def update_progress_bar(self, length):
        self.progress_bar.setValue(self.progress_bar.value() + length)

    def stop_download(self):
        self.cancel_signal.emit()
        self.finished_gui_shift()

    def open_file_location(self):
        """Opens the source folder of the new program version"""
        if sys.platform == 'win32':
            os.startfile(self.program_files_location)
        else:
            subprocess.call(['xdg-open', self.program_files_location])

    def launch_program(self):
        """Launches the newly installed program version"""
        if sys.platform == 'win32':
            os.startfile(
                os.path.join(self.program_files_location,
                             'DownloaderForReddit.exe'))
        elif sys.platform == 'linux':
            subprocess.Popen([
                os.path.join(self.program_files_location,
                             'DownloaderForReddit'),
                os.path.join(self.program_files_location,
                             'DownloaderForReddit')
            ])

    """
    The methods below are to alert the user to any type of error that may arise during the update process and provide
    them with a way to resolve the error manually
    """

    def update_error(self, code):
        self.stopped_gui_shift()
        if code[0] == 0:
            self.update_label(
                'There was a problem establishing a connection to the GitHub download URL.  Please try '
                'the update again.  If the problem persists, the package can be downloaded manually at: '
                '<a href="https://github.com/MalloyDelacroix/DownloaderForReddit/releases">https://github.com/MalloyDelacroix/DownloaderForReddit/releases</a>'
            )
            self.setMinimumHeight(self.height() + self.label.height())
            self.save_height = False

        elif code[0] == 1:
            self.update_label(
                'There was a problem deleting the outdated files. There was no Downloader for Reddit '
                'executable found in the folder location. Please try the update again. If the '
                'problem persists, the latest update has been downloaded and can be moved manually to '
                'the desired location.')
            self.setMinimumHeight(self.height() + self.label.height())
            self.button_box.button(QtWidgets.QDialogButtonBox.Ok).setText(
                'Open Download Location')
            self.button_box.accepted.connect(
                lambda: self.open_temporary_download_location(code[1]))
            self.save_height = False
        elif code[0] == 2:
            self.update_label(
                'There was a problem extracting or moving the downloaded files to the program directory.  '
                'The original program files have been removed.  You may manually move the downloaded '
                'files to the desired location and run the Downloader for Reddit from there.'
            )
            self.setMinimumHeight(self.height() + self.label.height())
            self.button_box.button(QtWidgets.QDialogButtonBox.Ok).setText(
                'Open Download Location')
            self.button_box.accepted.connect(
                lambda: self.open_temporary_download_location(code[1]))
            self.save_height = False

    def open_temporary_download_location(self, location):
        """
        In case the downloader fails after downloading the new update but before moving it, this will open the
        temporary location
        """
        if sys.platform == 'win32':
            os.startfile(location)
        else:
            subprocess.call(['xdg-open', location])
        self.close()

    def closeEvent(self, QCloseEvent):
        if self.save_height:
            self.settings.setValue('geometry', self.saveGeometry())
        self.settings.setValue('launch_checkbox',
                               self.launch_checkbox.isChecked())
        if self.launch_checkbox.isChecked() and self.launch_checkbox.isVisible(
        ):
            self.launch_program()
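
A widget like this is normally driven by a QApplication event loop. The launcher below is a minimal hypothetical sketch, assuming PyQt5; it is not part of the original example.

# Minimal hypothetical launcher for the widget above (assumes PyQt5).
if __name__ == '__main__':
    import sys
    from PyQt5 import QtWidgets

    app = QtWidgets.QApplication(sys.argv)  # create the Qt event loop
    widget = UpdaterWidget()                # geometry and settings are restored in __init__
    widget.show()
    sys.exit(app.exec_())                   # block until the window is closed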
コード例 #38
0
ファイル: testnode.py プロジェクト: remotesyssupport/slapos
def run(args):
    config = args[0]
    slapgrid = None
    supervisord_pid_file = os.path.join(config["instance_root"], "var", "run", "supervisord.pid")
    subprocess.check_call([config["git_binary"], "config", "--global", "http.sslVerify", "false"])
    previous_revision = None

    run_software = True
    # Write our own software.cfg to use the local repository
    custom_profile_path = os.path.join(config["working_directory"], "software.cfg")
    config["custom_profile_path"] = custom_profile_path
    vcs_repository_list = config["vcs_repository_list"]
    profile_content = None
    assert len(vcs_repository_list), "we must have at least one repository"
    for vcs_repository in vcs_repository_list:
        url = vcs_repository["url"]
        buildout_section_id = vcs_repository.get("buildout_section_id", None)
        repository_id = buildout_section_id or url.split("/")[-1].split(".")[0]
        repository_path = os.path.join(config["working_directory"], repository_id)
        vcs_repository["repository_id"] = repository_id
        vcs_repository["repository_path"] = repository_path
        if profile_content is None:
            profile_content = """
[buildout]
extends = %(software_config_path)s
""" % {
                "software_config_path": os.path.join(repository_path, config["profile_path"])
            }
        if not (buildout_section_id is None):
            profile_content += """
[%(buildout_section_id)s]
repository = %(repository_path)s
branch = %(branch)s
""" % {
                "buildout_section_id": buildout_section_id,
                "repository_path": repository_path,
                "branch": vcs_repository.get("branch", "master"),
            }

    custom_profile = open(custom_profile_path, "w")
    custom_profile.write(profile_content)
    custom_profile.close()
    config["repository_path"] = repository_path
    sys.path.append(repository_path)
    test_suite_title = config["test_suite_title"] or config["test_suite"]

    retry_software = False
    try:
        while True:
            # kill processes from previous loop if any
            try:
                for pgpid in process_group_pid_set:
                    try:
                        os.killpg(pgpid, signal.SIGTERM)
                    except:
                        pass
                process_group_pid_set.clear()
                full_revision_list = []
                # Make sure we have local repository
                for vcs_repository in vcs_repository_list:
                    repository_path = vcs_repository["repository_path"]
                    repository_id = vcs_repository["repository_id"]
                    if not os.path.exists(repository_path):
                        parameter_list = [config["git_binary"], "clone", vcs_repository["url"]]
                        if vcs_repository.get("branch") is not None:
                            parameter_list.extend(["-b", vcs_repository.get("branch")])
                        parameter_list.append(repository_path)
                        subprocess.check_call(parameter_list)
                    # Make sure we have local repository
                    updater = Updater(repository_path, git_binary=config["git_binary"])
                    updater.checkout()
                    revision = "-".join(updater.getRevision())
                    full_revision_list.append("%s=%s" % (repository_id, revision))
                revision = ",".join(full_revision_list)
                if previous_revision == revision:
                    time.sleep(120)
                    if not (retry_software):
                        continue
                retry_software = False
                previous_revision = revision

                print config
                portal_url = config["test_suite_master_url"]
                test_result_path = None
                test_result = (test_result_path, revision)
                if portal_url:
                    if portal_url[-1] != "/":
                        portal_url += "/"
                    portal = xmlrpclib.ServerProxy("%s%s" % (portal_url, "portal_task_distribution"), allow_none=1)
                    master = portal.portal_task_distribution
                    assert master.getProtocolRevision() == 1
                    test_result = safeRpcCall(
                        master.createTestResult,
                        config["test_suite"],
                        revision,
                        [],
                        False,
                        test_suite_title,
                        config["test_node_title"],
                        config["project_title"],
                    )
                print "testnode, test_result : %r" % (test_result,)
                if test_result:
                    test_result_path, test_revision = test_result
                    if revision != test_revision:
                        for i, repository_revision in enumerate(test_revision.split(",")):
                            vcs_repository = vcs_repository_list[i]
                            repository_path = vcs_repository["repository_path"]
                            # other testnodes on other boxes are already ready to test another
                            # revision
                            updater = Updater(
                                repository_path,
                                git_binary=config["git_binary"],
                                revision=repository_revision.split("-")[1],
                            )
                            updater.checkout()

                    # Now prepare the installation of SlapOS and create instance
                    slapos_controler = SlapOSControler(config, process_group_pid_set=process_group_pid_set)
                    for method_name in ("runSoftwareRelease", "runComputerPartition"):
                        stdout, stderr = getInputOutputFileList(config, method_name)
                        slapos_method = getattr(slapos_controler, method_name)
                        status_dict = slapos_method(
                            config,
                            environment=config["environment"],
                            process_group_pid_set=process_group_pid_set,
                            stdout=stdout,
                            stderr=stderr,
                        )
                        if status_dict["status_code"] != 0:
                            break
                    if status_dict["status_code"] != 0:
                        safeRpcCall(master.reportTaskFailure, test_result_path, status_dict, config["test_node_title"])
                        retry_software = True
                        continue

                    partition_path = os.path.join(config["instance_root"], config["partition_reference"])
                    run_test_suite_path = os.path.join(partition_path, "bin", "runTestSuite")
                    if not os.path.exists(run_test_suite_path):
                        raise ValueError("No %r provided" % run_test_suite_path)

                    run_test_suite_revision = revision
                    if isinstance(revision, tuple):
                        revision = ",".join(revision)
                    # Deal with Shebang size limitation
                    file_object = open(run_test_suite_path, "r")
                    line = file_object.readline()
                    file_object.close()
                    invocation_list = []
                    if line[:2] == "#!":
                        invocation_list = line[2:].split()
                    invocation_list.extend(
                        [
                            run_test_suite_path,
                            "--test_suite",
                            config["test_suite"],
                            "--revision",
                            revision,
                            "--test_suite_title",
                            test_suite_title,
                            "--node_quantity",
                            config["node_quantity"],
                            "--master_url",
                            config["test_suite_master_url"],
                        ]
                    )
                    run_test_suite = subprocess.Popen(invocation_list)
                    process_group_pid_set.add(run_test_suite.pid)
                    run_test_suite.wait()
                    process_group_pid_set.remove(run_test_suite.pid)
            except SubprocessError:
                time.sleep(120)
                continue

    finally:
        # Nice way to kill *everything* generated by run process -- process
        # groups working only in POSIX compliant systems
        # Exceptions are swallowed during cleanup phase
        print "going to kill %r" % (process_group_pid_set,)
        for pgpid in process_group_pid_set:
            try:
                os.killpg(pgpid, signal.SIGTERM)
            except:
                pass
        try:
            if os.path.exists(supervisord_pid_file):
                os.kill(int(open(supervisord_pid_file).read().strip()), signal.SIGTERM)
        except:
            pass
コード例 #39
0
class EmailClient:
    def __init__(self):
        self.updater_client = Updater("Urls.config")
        self.email_content = self.set_up_email_content()

        # set up the SMTP server
        self.email, self.password = self.get_creditionals(
            "Creditionals.config")
        self.server = smtplib.SMTP("smtp-mail.outlook.com", 587)
        #self.server = smtplib.SMTP("smtp.gmail.com", 587)
        self.server.starttls()
        self.server.login(self.email, self.password)

    def send_email(self, sender_email, single=True):
        # the single parameter is used in order to dictate if the connection is closed or not.
        self.server.sendmail(self.email, sender_email, self.email_content)
        if (single):
            self.close_connection()

    def get_emails(self):
        file_object = open("Emails.config", "r")
        config_by_line = file_object.readlines()
        config_by_line = [line.rstrip() for line in config_by_line]
        file_object.close()
        return config_by_line

    def send_email_group(self):
        # sends the email to every address listed in Emails.config, then closes the SMTP connection
        email_list = self.get_emails()
        for email in email_list:
            print "Sending email to {}...".format(email)
            self.send_email(email, single=False)
        self.close_connection()

    def set_up_email_content(self):
        updated_list = self.updater_client.get_updated_blog_list()
        #updated_list = ["https://krebsonsecurity.com","https://maxwelldulin.com/blog"]
        msg = """
Hey! Security blogs are amazing! So, here's your custom list of followed blogs that have been updated in the last day:

"""
        blogs_updated = ""
        for blog in updated_list:
            blogs_updated += '\t' + blog + '\n'

        if (blogs_updated == ""):
            msg += "No blogs you are following have been updated. :( "
        else:
            msg += blogs_updated
            msg += "\n Have a wonderful rest of your day; and happy reading!\n"
            msg += "This blog follower list was created by Maxwell Dulin at http://maxwelldulin.com"

        subject = "Security Blog Post Update"
        message = "Subject: {}\n\n{}".format(subject, msg)
        return message

    def close_connection(self):
        self.server.quit()

    def get_creditionals(self, filename):
        file_object = open(filename, "r")
        files_list_per_line = file_object.readlines()

        files_list_per_line = list(
            filter(lambda x: x != '\n', files_list_per_line))
        formatted_file = list(map(lambda x: x.rstrip(), files_list_per_line))

        username = formatted_file[0].replace(" ", "").split(":")[1]
        password = formatted_file[1].strip(" ").replace(" ", "").split(":")[1]
        return username, password
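
A short hypothetical driver for the class above; it only calls methods shown in this example and expects Urls.config, Creditionals.config and Emails.config to exist alongside the script.

# Hypothetical usage of EmailClient; the config file names are the ones the class itself reads.
if __name__ == '__main__':
    client = EmailClient()     # logs in to the SMTP server on construction
    client.send_email_group()  # mails every address in Emails.config, then quits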
コード例 #40
0
 def on_request(request_id, topic, data):
     Logger().write(LogVerbosity.Debug, "Master request: " + topic)
     if topic == "get_last_version":
         SlaveClientController.slave_ns.emit("response", request_id, Updater().check_version(), Updater().last_version)
コード例 #41
0
ファイル: testnode.py プロジェクト: Provab-Solutions/erp5
  def run(self):
    log = self.log
    process_manager = self.process_manager
    config = self.config
    slapgrid = None
    previous_revision = None

    run_software = True
    # Write our own software.cfg to use the local repository
    custom_profile_path = os.path.join(config['working_directory'], 'software.cfg')
    config['custom_profile_path'] = custom_profile_path
    vcs_repository_list = config['vcs_repository_list']
    profile_content = ''
    assert len(vcs_repository_list), "we must have at least one repository"
    try:
      # BBB: Accept global profile_path, which is the same as setting it for the
      # first configured repository.
      profile_path = config.pop(PROFILE_PATH_KEY)
    except KeyError:
      pass
    else:
      vcs_repository_list[0][PROFILE_PATH_KEY] = profile_path
    profile_path_count = 0
    for vcs_repository in vcs_repository_list:
      url = vcs_repository['url']
      buildout_section_id = vcs_repository.get('buildout_section_id', None)
      repository_id = buildout_section_id or \
                                    url.split('/')[-1].split('.')[0]
      repository_path = os.path.join(config['working_directory'],repository_id)
      vcs_repository['repository_id'] = repository_id
      vcs_repository['repository_path'] = repository_path
      try:
        profile_path = vcs_repository[PROFILE_PATH_KEY]
      except KeyError:
        pass
      else:
        profile_path_count += 1
        if profile_path_count > 1:
          raise ValueError(PROFILE_PATH_KEY + ' defined more than once')
        profile_content = """
[buildout]
extends = %(software_config_path)s
""" %  {'software_config_path': os.path.join(repository_path, profile_path)}

      if not(buildout_section_id is None):
        profile_content += """
[%(buildout_section_id)s]
repository = %(repository_path)s
branch = %(branch)s
""" %  {'buildout_section_id': buildout_section_id,
        'repository_path' : repository_path,
        'branch' : vcs_repository.get('branch','master')}

    if not profile_path_count:
      raise ValueError(PROFILE_PATH_KEY + ' not defined')
    custom_profile = open(custom_profile_path, 'w')
    custom_profile.write(profile_content)
    custom_profile.close()
    config['repository_path'] = repository_path
    sys.path.append(repository_path)
    test_suite_title = config['test_suite_title'] or config['test_suite']

    retry = False
    retry_software_count = 0
    same_revision_count = 0
    try:
      while True:
        remote_test_result_needs_cleanup = False
        remote_logger = None
        remote_logger_thread = None
        try:
          # kill processes from previous loop if any
          process_manager.killPreviousRun()
          full_revision_list = []
          # Make sure we have local repository
          for vcs_repository in vcs_repository_list:
            repository_path = vcs_repository['repository_path']
            repository_id = vcs_repository['repository_id']
            if not os.path.exists(repository_path):
              parameter_list = [config['git_binary'], 'clone',
                                vcs_repository['url']]
              if vcs_repository.get('branch') is not None:
                parameter_list.extend(['-b',vcs_repository.get('branch')])
              parameter_list.append(repository_path)
              log(subprocess.check_output(parameter_list, stderr=subprocess.STDOUT))
            # Make sure we have local repository
            updater = Updater(repository_path, git_binary=config['git_binary'],
              log=log, process_manager=process_manager)
            updater.checkout()
            revision = "-".join(updater.getRevision())
            full_revision_list.append('%s=%s' % (repository_id, revision))
          revision = ','.join(full_revision_list)
          if previous_revision == revision:
            log('Same Revision')
            same_revision_count += 1
            if not(retry) and same_revision_count <= 2:
              log('Sleeping a bit since same revision')
              time.sleep(DEFAULT_SLEEP_TIMEOUT)
              continue
            same_revision_count = 0
            log('Retrying install or checking if previous test was cancelled')
          retry = False
          previous_revision = revision
          portal_url = config['test_suite_master_url']
          test_result_path = None
          test_result = (test_result_path, revision)
          if portal_url:
            if portal_url[-1] != '/':
              portal_url += '/'
            portal = xmlrpclib.ServerProxy("%s%s" %
                        (portal_url, 'portal_task_distribution'),
                        allow_none=1)
            assert safeRpcCall(log, portal, "getProtocolRevision", True) == 1
            test_result = safeRpcCall(log, portal, "createTestResult", True,
              config['test_suite'], revision, [],
              False, test_suite_title,
              config['test_node_title'], config['project_title'])
            remote_test_result_needs_cleanup = True
            
          log("testnode, test_result : %r" % (test_result, ))
          if test_result:
            test_result_path, test_revision = test_result
            if config.get('log_file'):
              remote_logger = RemoteLogger(log, config['log_file'],
                                           config['test_node_title'],
                                           process_manager)
              remote_logger.portal = portal
              remote_logger.test_result_path = test_result_path
              remote_logger_thread = threading.Thread(target=remote_logger)
              remote_logger_thread.start()
            if revision != test_revision:
              previous_revision = test_revision
              log('Disagreement on tested revision, checking out:')
              for i, repository_revision in enumerate(test_revision.split(',')):
                vcs_repository = vcs_repository_list[i]
                repository_path = vcs_repository['repository_path']
                revision = repository_revision.rsplit('-', 1)[1]
                # other testnodes on other boxes are already ready to test another
                # revision
                log('  %s at %s' % (repository_path, revision))
                updater = Updater(repository_path, git_binary=config['git_binary'],
                                  revision=revision, log=log,
                                  process_manager=process_manager)
                updater.checkout()

            # Now prepare the installation of SlapOS and create instance
            slapproxy_log = os.path.join(config['log_directory'],
                'slapproxy.log')
            log('Configured slapproxy log to %r' % slapproxy_log)
            log('testnode, retry_software_count : %r' % retry_software_count)
            slapos_controler = SlapOSControler.SlapOSControler(config,
              log=log, slapproxy_log=slapproxy_log, process_manager=process_manager,
              reset_software=(retry_software_count>0 and retry_software_count%10 == 0))
            for method_name in ("runSoftwareRelease", "runComputerPartition",):
              slapos_method = getattr(slapos_controler, method_name)
              status_dict = slapos_method(config,
                environment=config['environment'],
                )
              if status_dict['status_code'] != 0:
                retry = True
                retry_software_count += 1
                raise SubprocessError(status_dict)
              else:
                retry_software_count = 0
            # Give some time so computer partitions may start
            # as partitions can be of any kind, we have no (and likely never will
            # have a) reliable way to check whether they are up or not ...
            time.sleep(20)

            run_test_suite_path_list = glob.glob("%s/*/bin/runTestSuite" %config['instance_root'])
            if not len(run_test_suite_path_list):
              raise ValueError('No runTestSuite provided in installed partitions.')
            run_test_suite_path = run_test_suite_path_list[0]
            run_test_suite_revision = revision
            if isinstance(revision, tuple):
              revision = ','.join(revision)
            # Deal with Shebang size limitation
            line = open(run_test_suite_path, 'r').readline()
            invocation_list = []
            if line[:2] == '#!':
              invocation_list = line[2:].split()
            invocation_list.extend([run_test_suite_path,
                                    '--test_suite', config['test_suite'],
                                    '--revision', revision,
                                    '--test_suite_title', test_suite_title,
                                    '--node_quantity', config['node_quantity'],
                                    '--master_url', portal_url])
            bt5_path_list = config.get("bt5_path")
            if bt5_path_list not in ('', None,):
              invocation_list.extend(["--bt5_path", bt5_path_list])
            # From this point, test runner becomes responsible for updating test
            # result. We only do cleanup if the test runner itself is not able
            # to run.
            process_manager.spawn(*invocation_list,
              cwd=config['test_suite_directory'],
              log_prefix='runTestSuite', get_output=False)
            if remote_logger:
              remote_logger.quit = True
              remote_logger_thread.join()
        except SubprocessError, e:
          log("SubprocessError", exc_info=sys.exc_info())
          if remote_logger:
            remote_logger.finish = True
            remote_logger_thread.join()
          if remote_test_result_needs_cleanup:
            safeRpcCall(log, portal, "reportTaskFailure", True,
              test_result_path, e.status_dict, config['test_node_title'])
          log("SubprocessError, going to sleep %s" % DEFAULT_SLEEP_TIMEOUT)
          time.sleep(DEFAULT_SLEEP_TIMEOUT)
          continue
        except CancellationError, e:
          log("CancellationError", exc_info=sys.exc_info())
          process_manager.under_cancellation = False
          retry = True
          continue
コード例 #42
0
ファイル: testnode.py プロジェクト: smetsjp/erp5
def run(config):
  log = config['logger']
  slapgrid = None
  global supervisord_pid_file
  supervisord_pid_file = os.path.join(config['instance_root'], 'var', 'run',
        'supervisord.pid')
  previous_revision = None

  run_software = True
  # Write our own software.cfg to use the local repository
  custom_profile_path = os.path.join(config['working_directory'], 'software.cfg')
  config['custom_profile_path'] = custom_profile_path
  vcs_repository_list = config['vcs_repository_list']
  profile_content = None
  assert len(vcs_repository_list), "we must have at least one repository"
  try:
    # BBB: Accept global profile_path, which is the same as setting it for the
    # first configured repository.
    profile_path = config.pop(PROFILE_PATH_KEY)
  except KeyError:
    pass
  else:
    vcs_repository_list[0][PROFILE_PATH_KEY] = profile_path
  for vcs_repository in vcs_repository_list:
    url = vcs_repository['url']
    buildout_section_id = vcs_repository.get('buildout_section_id', None)
    repository_id = buildout_section_id or \
                                  url.split('/')[-1].split('.')[0]
    repository_path = os.path.join(config['working_directory'],repository_id)
    vcs_repository['repository_id'] = repository_id
    vcs_repository['repository_path'] = repository_path
    try:
      profile_path = vcs_repository[PROFILE_PATH_KEY]
    except KeyError:
      pass
    else:
      if profile_content is not None:
        raise ValueError(PROFILE_PATH_KEY + ' defined more than once')
      profile_content = """
[buildout]
extends = %(software_config_path)s
""" %  {'software_config_path': os.path.join(repository_path, profile_path)}
    if not(buildout_section_id is None):
      profile_content += """
[%(buildout_section_id)s]
repository = %(repository_path)s
branch = %(branch)s
""" %  {'buildout_section_id': buildout_section_id,
        'repository_path' : repository_path,
        'branch' : vcs_repository.get('branch','master')}

  if profile_content is None:
    raise ValueError(PROFILE_PATH_KEY + ' not defined')
  custom_profile = open(custom_profile_path, 'w')
  custom_profile.write(profile_content)
  custom_profile.close()
  config['repository_path'] = repository_path
  sys.path.append(repository_path)
  test_suite_title = config['test_suite_title'] or config['test_suite']

  retry_software = False
  try:
    while True:
      remote_test_result_needs_cleanup = False
      # kill processes from previous loop if any
      try:
        killPreviousRun()
        process_group_pid_set.clear()
        full_revision_list = []
        # Make sure we have local repository
        for vcs_repository in vcs_repository_list:
          repository_path = vcs_repository['repository_path']
          repository_id = vcs_repository['repository_id']
          if not os.path.exists(repository_path):
            parameter_list = [config['git_binary'], 'clone',
                              vcs_repository['url']]
            if vcs_repository.get('branch') is not None:
              parameter_list.extend(['-b',vcs_repository.get('branch')])
            parameter_list.append(repository_path)
            log(subprocess.check_output(parameter_list, stderr=subprocess.STDOUT))
          # Make sure we have local repository
          updater = Updater(repository_path, git_binary=config['git_binary'],
            log=log, realtime_output=False)
          updater.checkout()
          revision = "-".join(updater.getRevision())
          full_revision_list.append('%s=%s' % (repository_id, revision))
        revision = ','.join(full_revision_list)
        if previous_revision == revision:
          log('Sleeping a bit')
          time.sleep(120)
          if not(retry_software):
            continue
          log('Retrying install')
        retry_software = False
        previous_revision = revision

        portal_url = config['test_suite_master_url']
        test_result_path = None
        test_result = (test_result_path, revision)
        if portal_url:
          if portal_url[-1] != '/':
            portal_url += '/'
          portal = xmlrpclib.ServerProxy("%s%s" %
                      (portal_url, 'portal_task_distribution'),
                      allow_none=1)
          master = portal.portal_task_distribution
          assert safeRpcCall(master.getProtocolRevision) == 1
          test_result = safeRpcCall(master.createTestResult,
            config['test_suite'], revision, [],
            False, test_suite_title,
            config['test_node_title'], config['project_title'])
          remote_test_result_needs_cleanup = True
        log("testnode, test_result : %r" % (test_result, ))
        if test_result:
          test_result_path, test_revision = test_result
          if revision != test_revision:
            log('Disagreement on tested revision, checking out:')
            for i, repository_revision in enumerate(test_revision.split(',')):
              vcs_repository = vcs_repository_list[i]
              repository_path = vcs_repository['repository_path']
              revision = repository_revision.split('-')[1]
              # other testnodes on other boxes are already ready to test another
              # revision
              log('  %s at %s' % (repository_path, revision))
              updater = Updater(repository_path, git_binary=config['git_binary'],
                                revision=revision, log=log,
                                realtime_output=False)
              updater.checkout()

          # Now prepare the installation of SlapOS and create instance
          slapproxy_log = os.path.join(config['log_directory'],
              'slapproxy.log')
          log('Configured slapproxy log to %r' % slapproxy_log)
          slapos_controler = SlapOSControler.SlapOSControler(config,
            process_group_pid_set=process_group_pid_set, log=log,
            slapproxy_log=slapproxy_log)
          for method_name in ("runSoftwareRelease", "runComputerPartition"):
            stdout, stderr = getInputOutputFileList(config, method_name)
            slapos_method = getattr(slapos_controler, method_name)
            status_dict = slapos_method(config,
              environment=config['environment'],
              process_group_pid_set=process_group_pid_set,
              stdout=stdout, stderr=stderr
              )
            if status_dict['status_code'] != 0:
              retry_software = True
              raise SubprocessError(status_dict)

          run_test_suite_path = config['runTestSuite']
          if not os.path.exists(run_test_suite_path):
            raise SubprocessError({
              'command': 'os.path.exists(run_test_suite_path)',
              'status_code': 1,
              'stdout': '',
              'stderr': 'File does not exist: %r' % (run_test_suite_path, ),
            })

          run_test_suite_revision = revision
          if isinstance(revision, tuple):
            revision = ','.join(revision)
          # Deal with Shebang size limitation
          line = open(run_test_suite_path, 'r').readline()
          invocation_list = []
          if line[:2] == '#!':
            invocation_list = line[2:].split()
          invocation_list.extend([run_test_suite_path,
                                  '--test_suite', config['test_suite'],
                                  '--revision', revision,
                                  '--test_suite_title', test_suite_title,
                                  '--node_quantity', config['node_quantity'],
                                  '--master_url', config['test_suite_master_url']])
          # From this point, test runner becomes responsible for updating test
          # result.
          # XXX: is it good for all cases (e.g. the test runner fails too early for
          # any custom code to pick the failure up and react?)
          remote_test_result_needs_cleanup = False
          run_test_suite = subprocess.Popen(invocation_list,
            preexec_fn=os.setsid, cwd=config['test_suite_directory'],
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
          process_group_pid_set.add(run_test_suite.pid)
          log(run_test_suite.communicate()[0])
          process_group_pid_set.remove(run_test_suite.pid)
      except SubprocessError, e:
        if remote_test_result_needs_cleanup:
          safeRpcCall(master.reportTaskFailure,
            test_result_path, e.status_dict, config['test_node_title'])
        time.sleep(120)
        continue

  finally:
    # Nice way to kill *everything* generated by run process -- process
    # groups working only in POSIX compliant systems
    # Exceptions are swallowed during cleanup phase
    killPreviousRun()
コード例 #43
0
import sys
from Updater import Updater

endpoint = input('Enter the endpoint host, e.g. localhost, with or without a trailing / (no route name like "update" or "query")\n')
output = input('Enter the output folder, ending with /\n')
graph = input('Enter the graph name\n')
has_jl = input('Enter "True" if you have jl files, otherwise press enter\n')
print('---')
print('your endpoint: ', endpoint)
print('your output: ', output)
print('your graph: ', graph)
print('your jl flag: ', has_jl)
print('---')

steps = ['run_load_jl', 'run_delete_ori', 'run_entity_nt', 'run_event_nt', 'run_relation_nt', 'run_insert_proto', 'run_super_edge']

print('\n'.join(['Step %d : %s ' % (i, steps[i]) for i in range(len(steps))]))

start, end = input('Enter the step range you want to run, like "0,2" (inclusive)\n').split(',')
print('your start, end: ', start, end)

up = Updater(endpoint, output, graph, True if has_jl == 'True' else False)
runs = [up.run_load_jl, up.run_delete_ori, up.run_entity_nt, up.run_event_nt, up.run_relation_nt, up.run_insert_proto, up.run_super_edge]

for i in range(int(start), int(end)+1):
    runs[i]()
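
For reference, a non-interactive sketch of the same flow; the endpoint, output folder and graph name below are placeholders, not values from the original project.

# Non-interactive sketch; endpoint, output folder and graph name are placeholders.
from Updater import Updater

up = Updater('http://localhost:9999/sparql', 'output/', 'my-graph', False)
for step in (up.run_entity_nt, up.run_event_nt, up.run_relation_nt):
    step()  # run a subset of the steps offered by the interactive script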


コード例 #44
0
     csv format: 'id;followers_count;friends_count;favourites_count;statuses_count;lang;text'
     Writes the retweet count to a new csv file
'''

    if len(argv) < 3:
        print(argv[0] + ' files.csv updatedCsv')
        exit(1)

    csv_files_in = argv[1:-1]
    csv_file_out = argv[len(argv) - 1]
    history = argv[len(argv) - 2]


    for i, csvFileIn in enumerate(csv_files_in):
        dic = dict()
        with open(csvFileIn, 'r') as fIn:
            fIn.readline()
            for line in fIn:
                tab = line[:-1].split(';')
                dic[tab[0]] = tab[1:]
        updater = Updater(dic)
        updater.find()
        for key in updater.dict_of_retweet:
            dic[key].append(updater.dict_of_retweet[key])
        print(i, ' / ', len(csv_files_in))
        saveFile(csv_file_out)
        print("Sleeping for 900 s last fic:" + csvFileIn + ' gathered ', 900 * i, ' / ', 900 * i / len(csv_files_in) * 900 * 100, '%')
        with open('history', 'a+') as h:
            h.write(csvFileIn + '\n')
        time.sleep(900)
コード例 #45
0
ファイル: GameOfLife.py プロジェクト: johnbooch/ConwaysGOF
 def createUpdater(self):
     if self.opts is None:
         raise GOFException("Program options have not been initialized")
     self.updater = Updater(algorithm=self.opts.algorithm)
コード例 #46
0
 def __init__(self):
     Updater.__init__(self)