Example #1
 def _set_daemon_export(self, value):
     filename = os.path.join(self.path, self.DAEMON_EXPORT_FILE)
     fileexists = os.path.exists(filename)
     if value and not fileexists:
         touch(filename)
     elif not value and fileexists:
         os.unlink(filename)
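
These snippets come from different projects, and each ships its own touch helper (touch, utils.touch, ut.touch). As a point of reference, here is a minimal sketch of the os.utime-based idiom most of the calls below appear to assume; the exact project helpers may differ:

import os

def touch(path, times=None):
    # Create the file if it does not exist, then update its access and
    # modification times, as the POSIX touch utility does. `times` is an
    # optional (atime, mtime) tuple forwarded to os.utime; None means "now".
    with open(path, 'a'):
        os.utime(path, times)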
Example #2
 def fix(self, force = False):
     nofix_filename = os.path.join(self.path, 'nofix')
     continue_filename = os.path.join(self.path, 'continue')
     with self.logenv(silent = False):
         if os.path.exists(nofix_filename):
             if force:
                 os.remove(nofix_filename)
             else:
                 return
         if os.path.exists(continue_filename):
             return
         self.logger.info('Trying to fix {}'.format(self.run))
         fixers = ['nbkupmax_fixer',
                   'nstop_fixer',
                   'O_Si_knot_fixer',
                   'Si_Fe_knot_fixer',
                   'O_Fe_knot_fixer',
                   'tqsemin_fixer',
                   ]
         for fixer in fixers:
             if getattr(self, fixer)():
                 self.logger.info('Fixed {}'.format(self.run))
                 break
         else:
             self.logger.info('Could not fix {}'.format(self.run))
             touch(nofix_filename)
Example #3
    def setUpClass(cls):
        cls.maxDiff = 2000
        # we create the state in a different dir from the one we run our tests
        # on, to verify that the saved state does not depend on any absolute
        # paths
        init_dir = tempfile.mkdtemp()
        cls.repo_dir = tempfile.mkdtemp()
        cls.saved_state_dir = tempfile.mkdtemp()

        touch(os.path.join(init_dir, '.hhconfig'))

        cls.files = {}

        cls.files['foo_1.php'] = """
        <?hh
        function f() {
            return g() + 1;
        }
        """

        cls.files['foo_2.php'] = """
        <?hh
        function g(): string {
            return "a";
        }
        """

        cls.files['foo_3.php'] = """
        <?hh
        function h(): string {
            return 1;
        }

        class Foo {}

        function some_long_function_name() {
            new Foo();
            h();
        }
        """

        cls.initial_errors = [
            '{root}foo_1.php:4:20,22: Typing error (Typing[4110])',
            '  {root}foo_1.php:4:20,22: This is a num (int/float) because this is used in an arithmetic operation',
            '  {root}foo_2.php:3:23,28: It is incompatible with a string',
            '{root}foo_3.php:4:20,20: Invalid return type (Typing[4110])',
            '  {root}foo_3.php:3:23,28: This is a string',
            '  {root}foo_3.php:4:20,20: It is incompatible with an int',
        ]

        write_files(cls.files, init_dir)
        write_files(cls.files, cls.repo_dir)

        subprocess.call([
            cls.hh_server,
            '--check', init_dir,
            '--save', os.path.join(cls.saved_state_dir, 'foo'),
        ])

        shutil.rmtree(init_dir)
Example #4
    def get_subproc_env(self):
        if self.reporter.uses_standard_output():
            self.validatelogs = os.path.join(
                tempfile.gettempdir(), 'tmp.validate.logs')
            logfiles = self.validatelogs
            logfiles += os.pathsep + \
                self.reporter.out.name.replace("<", '').replace(">", '')
        else:
            self.validatelogs = self.logfile + '.validate.logs'
            logfiles = self.validatelogs

        subproc_env = os.environ.copy()

        utils.touch(self.validatelogs)
        subproc_env["GST_VALIDATE_FILE"] = logfiles
        self.extra_logfiles.append(self.validatelogs)

        if 'GST_DEBUG' in os.environ and \
                not self.reporter.uses_standard_output():
            gstlogsfile = self.logfile + '.gstdebug'
            self.extra_logfiles.append(gstlogsfile)
            subproc_env["GST_DEBUG_FILE"] = gstlogsfile
        elif self.options.no_color:
            subproc_env["GST_DEBUG_NO_COLOR"] = '1'

        return subproc_env
Example #5
def check_presn(path, base):
    # Hmm, we do not want to check all the time, so we want some
    # record that the run is OK.  We may need some run info data base
    # in the run directory, or somewhere in general.
    dump_filename = os.path.join(path, base + '#presn')
    if not os.path.exists(dump_filename):
        return False
    invalid_tokens = ('broken', )
    for token in invalid_tokens:
        broken_filename = os.path.join(path, token)
        if os.path.exists(broken_filename):
            return False
    presnok_filename = os.path.join(path, 'presn')
    if not os.path.exists(presnok_filename):
        # sometimes file is not quite written to disk, then wait.
        for i in range(10):
            try:
                d = kepdump._load(dump_filename)
            except RecLenError:
                time.sleep(i + 1)
                continue
            else:
                break
        ok = d.is_presn
        if not ok:
            touch(broken_filename)
            print('broken: ', dump_filename)
        else:
            touch(presnok_filename)
            print('ok: ', dump_filename)
        return ok
    return True
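
A caveat in the retry loop above: if all ten loads raise RecLenError, d is never bound and the d.is_presn access fails with a NameError. A guarded variant of that block, as it could sit inside check_presn (a sketch using the same names as above):

        d = None
        for i in range(10):
            try:
                d = kepdump._load(dump_filename)  # as in the example above
                break
            except RecLenError:
                time.sleep(i + 1)
        if d is None:
            return False  # dump never became readable; give up for now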
Example #6
 def handle_query(self, q_output_dir, query_params, method):
     print('Saving query to %s... (This may take a while)' %
           (q_output_dir))
     try:
         os.makedirs(q_output_dir)
     except:
         pass
     arrays = [v for (k, v) in query_params.values()]
     sub_query_keys = [k for (k, v) in query_params.values()]
     all_sub_query_params = list(product(*arrays))
     # convert the full query into all composing base queries
     for i, params in tqdm(enumerate(all_sub_query_params),
                           total=len(all_sub_query_params)):
         sub_query_string = gen_query_string(zip(sub_query_keys, params))
         base_output_name = q_output_dir / (
             '_'.join([str(x) for x in params]) + '_' + str(i))
         output_lck = Path(str(base_output_name) + '.lck')
         # simple lock mechanism to enable running concurrently in multiple processes
         if output_lck.exists():
             continue
         touch(output_lck)
         pages = self.get_base_query(method,
                                     sub_query_string,
                                     fulltext=True)
         for p_num, page in enumerate(pages):
             self.save_page(
                 page,
                 Path(str(base_output_name) + '_' + str(p_num) + '.json'))
     # remove temporary files
     for lck_file in q_output_dir.glob('*.lck'):
         os.remove(lck_file)
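
The exists()/touch() pair above leaves a window between the check and the file creation, so two processes can still grab the same sub-query. Where that matters, lock acquisition can be made atomic; a sketch of a hypothetical helper (not part of the project above):

import os

def try_acquire_lock(lck_path):
    # O_CREAT | O_EXCL makes creation fail atomically if the lock file
    # already exists, closing the check-then-create race.
    try:
        os.close(os.open(str(lck_path), os.O_CREAT | os.O_EXCL | os.O_WRONLY))
        return True
    except FileExistsError:
        return False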
Example #7
 def execute(self,
             purpose,
             sindarin,
             fifo=None,
             proc_id=None,
             options='',
             analysis=''):
     cmd = 'nice -n 9 ' + self.binary + ' ' + sindarin + ' ' + options
     if (purpose == 'histograms'):
         cmd = 'export RIVET_ANALYSIS_PATH=../../rivet; ' + cmd
         yoda_file = '../../rivet/' + fifo.replace('hepmc', 'yoda')
         cmd = cmd + ' & rivet --quiet -H ' + yoda_file + ' -a ' + analysis + ' ' + fifo
     num = ' in ' + str(proc_id) if proc_id is not None else ''
     ut.logger.info('Calling subprocess ' + cmd + num)
     try:
         return_code = self.call(cmd)
     except Exception as e:
         ut.fatal('Exception occurred: ' + str(e) +
                  '; Whizard failed on executing ' + sindarin + num)
         return FAIL
     else:
         if not ut.grep('ERROR', 'whizard.log'):
             ut.logger.info('Whizard finished' + num)
             ut.touch('done')
             return return_code
         else:
             ut.fatal('ERROR in whizard.log of ' + sindarin + num)
             return FAIL
Example #8
 def fix(self, force=False):
     nofix_filename = os.path.join(self.path, 'nofix')
     continue_filename = os.path.join(self.path, 'continue')
     with self.logenv(silent=False):
         if os.path.exists(nofix_filename):
             if force:
                 os.remove(nofix_filename)
             else:
                 return
         if os.path.exists(continue_filename):
             return
         self.logger.info('Trying to fix {}'.format(self.run))
         fixers = [
             'nbkupmax_fixer',
             'nstop_fixer',
             'O_Si_knot_fixer',
             'Si_Fe_knot_fixer',
             'O_Fe_knot_fixer',
             'tqsemin_fixer',
         ]
         for fixer in fixers:
             if getattr(self, fixer)():
                 self.logger.info('Fixed {}'.format(self.run))
                 break
         else:
             self.logger.info('Could not fix {}'.format(self.run))
             touch(nofix_filename)
Example #9
 def _set_daemon_export(self, value):
     filename = os.path.join(self.path, self.DAEMON_EXPORT_FILE)
     fileexists = os.path.exists(filename)
     if value and not fileexists:
         touch(filename)
     elif not value and fileexists:
         os.unlink(filename)
Example #10
def check_presn(path, base):
    # Hmm, we do not want to check all the time, so we want some
    # record that the run is OK.  We may need some run info data base
    # in the run directory, or somewhere in general.
    dump_filename = os.path.join(path, base + '#presn')
    if not os.path.exists(dump_filename):
        return False
    invalid_tokens = ('broken', )
    for token in invalid_tokens:
        broken_filename = os.path.join(path, token)
        if os.path.exists(broken_filename):
            return False
    presnok_filename = os.path.join(path, 'presn')
    if not os.path.exists(presnok_filename):
        # sometimes file is not quite written to disk, then wait.
        for i in range(10):
            try:
                d = kepdump._load(dump_filename)
            except RecLenError:
                time.sleep(i + 1)
                continue
            else:
                break
        ok = d.is_presn
        if not ok:
            touch(broken_filename)
            print('broken: ', dump_filename)
        else:
            touch(presnok_filename)
            print('ok: ', dump_filename)
        return ok
    return True
Example #11
    def setUpClass(cls):
        cls.maxDiff = 2000
        # we create the state in a different dir from the one we run our tests
        # on, to verify that the saved state does not depend on any absolute
        # paths
        init_dir = tempfile.mkdtemp()
        cls.repo_dir = tempfile.mkdtemp()
        cls.saved_state_dir = tempfile.mkdtemp()

        touch(os.path.join(init_dir, '.hhconfig'))

        cls.files = {}

        cls.files['foo_1.php'] = """
        <?hh
        function f() {
            return g() + 1;
        }
        """

        cls.files['foo_2.php'] = """
        <?hh
        function g(): string {
            return "a";
        }
        """

        cls.files['foo_3.php'] = """
        <?hh
        function h(): string {
            return 1;
        }

        class Foo {}

        function some_long_function_name() {
            new Foo();
            h();
        }
        """

        cls.initial_errors = [
            '{root}foo_1.php:4:20,22: Typing error (Typing[4110])',
            '  {root}foo_1.php:4:20,22: This is a num (int/float) because this is used in an arithmetic operation',
            '  {root}foo_2.php:3:23,28: It is incompatible with a string',
            '{root}foo_3.php:4:20,20: Invalid return type (Typing[4110])',
            '  {root}foo_3.php:3:23,28: This is a string',
            '  {root}foo_3.php:4:20,20: It is incompatible with an int',
        ]

        write_files(cls.files, init_dir)
        write_files(cls.files, cls.repo_dir)

        subprocess.call([
            cls.hh_server,
            '--check', init_dir,
            '--save', os.path.join(cls.saved_state_dir, 'foo'),
        ])

        shutil.rmtree(init_dir)
Example #12
 def load_index(self, index_path=None):
     """
       Load the historical index for previously downloaded files
       :param index_path:
       :return:
       [
       IndexItem,
       IndexItem,
       IndexItem,
       ...
       IndexItem,
       ]
       """
     if index_path:
         self.index_path = index_path
     if not destination_exists(self.index_path):
         touch(self.index_path)
         self.index = [IndexItem('')]
         return self.index
     try:
         with open(self.index_path, mode='r') as f:
             index = map(lambda x: IndexItem(json.loads(x)), f.readlines())
             if index:
                 self.index = index
                 return self.index
             else:
                 self.index = [IndexItem('')]
                 return self.index
     except Exception as e:
         logger.exception("LOAD_INDEX_FAILURE:MESSAGE:{}".format(e))
         raise Exception("LOAD_INDEX_FAILURE")
Example #13
    def parse_with_encoding (self, def_f, fn, encoding):
        """Folder object to which the parsed contacts will be added. fn is the
        name of the BBDB file/message store. encoding is a string representing
        a text encoding such as utf-8, latin-1, etc."""

        if not os.path.exists(fn):
            utils.touch(fn)

        with codecs.open(fn, encoding=encoding) as bbf:
            ver = self._parse_preamble(fn, bbf)
            if not ver:
                ## We encountered a blank BBDB file.
                ver = self._set_default_preamble()

            ## Now fetch and set up the parsing routines specific to the file
            ## format 
            self._set_regexes(ver)

            cnt = 0
            while True:
                try:
                    ff = bbf.readline().strip()
                except UnicodeDecodeError, e:
                    ## We got the encoding wrong. We will have to drop
                    ## everything we have done, and start all over again.  At
                    ## a later stage, we could optimize by skipping over
                    ## whatever we have read so far, but then we will need to
                    ## evaluate if the parsed strings will be in the same
                    ## encoding or not. Tricky and shady business, this.
                    raise ASynKBBDBUnicodeError('')

                if re.search('^\s*$', ff):
                    break

                if re.search('^;', ff):
                    self.append_preamble(ff + "\n")
                    continue

                try:
                    c  = BBContact(def_f, rec=ff.rstrip())
                except BBDBParseError, e:
                    logging.error('Could not parse BBDB record: %s', ff)

                    raise BBDBFileFormatError(('Cannot proceed with '
                                              'processing file "%s" ') % fn)

                fon = c.get_bbdb_folder()

                if fon:
                    f = self.get_folder(fon)
                    if not f:
                        f = BBContactsFolder(self.get_db(), fon, self)
                        self.add_folder(f)
                    f.add_contact(c)
                else:
                    def_f.add_contact(c)

                cnt += 1
Example #14
    def parse_with_encoding(self, def_f, fn, encoding):
        """Folder object to which the parsed contacts will be added. fn is the
        name of the BBDB file/message store. encoding is a string representing
        a text encoding such as utf-8, latin-1, etc."""

        if not os.path.exists(fn):
            utils.touch(fn)

        with codecs.open(fn, encoding=encoding) as bbf:
            ver = self._parse_preamble(fn, bbf)
            if not ver:
                ## We encountered a blank BBDB file.
                ver = self._set_default_preamble()

            ## Now fetch and set up the parsing routines specific to the file
            ## format
            self._set_regexes(ver)

            cnt = 0
            while True:
                try:
                    ff = bbf.readline().strip()
                except UnicodeDecodeError, e:
                    ## We got the encoding wrong. We will have to drop
                    ## everything we have done, and start all over again.  At
                    ## a later stage, we could optimize by skipping over
                    ## whatever we have read so far, but then we will need to
                    ## evaluate if the parsed strings will be in the same
                    ## encoding or not. Tricky and shady business, this.
                    raise ASynKBBDBUnicodeError('')

                if re.search('^\s*$', ff):
                    break

                if re.search('^;', ff):
                    self.append_preamble(ff + "\n")
                    continue

                try:
                    c = BBContact(def_f, rec=ff.rstrip())
                except BBDBParseError, e:
                    logging.error('Could not parse BBDB record: %s', ff)

                    raise BBDBFileFormatError(('Cannot proceed with '
                                               'processing file "%s" ') % fn)

                fon = c.get_bbdb_folder()

                if fon:
                    f = self.get_folder(fon)
                    if not f:
                        f = BBContactsFolder(self.get_db(), fon, self)
                        self.add_folder(f)
                    f.add_contact(c)
                else:
                    def_f.add_contact(c)

                cnt += 1
Example #15
 def resume(self):
     stop_tokens = ('broken', 'nofix')
     for token in stop_tokens:
         token_filename = os.path.join(self.path, token)
         try:
             os.remove(token_filename)
         except FileNotFoundError:
             pass
     continue_filename = os.path.join(self.path, 'continue')
     touch(continue_filename)
Example #16
def update_state_file(end_time, file_path):
    utils.touch(file_path)

    fp = open(file_path, 'r')
    last_day = fp.readlines()[-1].split()[0]
    start_time = utils.next_day(last_day)
    fp.close()

    fp = open(file_path, 'a')
    write_state_lines(start_time, end_time, fp)
Example #17
 def resume(self):
     stop_tokens = ('broken', 'nofix')
     for token in stop_tokens:
         token_filename = os.path.join(self.path, token)
         try:
             os.remove(token_filename)
         except FileNotFoundError:
             pass
     continue_filename = os.path.join(self.path, 'continue')
     touch(continue_filename)
Example #18
    def test_hmmpress_task_existing(self, tmpdir, datadir):
        with tmpdir.as_cwd():
            tf = datadir('test-profile.hmm')
            for ext in self.extensions:
                touch(tf + ext)
            task = HMMPressTask().task(tf)
            run_tasks([task], ['run'])
            print(os.listdir(), file=sys.stderr)
            print(task, file=sys.stderr)
            status = check_status(task)

            assert status.status == 'up-to-date'
Example #19
    def test_getset_metadata(self):
        m = meta.Metadata()
        md = m.get_metadata("files/one")
        assert md == "r--------"

        d = self.tmpdir()
        p = os.path.join(d, "test")
        utils.touch(p)

        assert m.get_metadata(p) != md
        m.set_metadata(p, md)
        assert m.get_metadata(p) == md
Example #20
    def test_mtime_update(self):
        """
        Update mtimes of files and check that errors remain unchanged.
        """
        write_load_config(self.repo_dir,
                          os.path.join(self.saved_state_dir, 'foo'))

        self.check_cmd(self.initial_errors)
        touch(os.path.join(self.repo_dir, 'foo_1.php'))
        self.check_cmd(self.initial_errors)
        touch(os.path.join(self.repo_dir, 'foo_2.php'))
        self.check_cmd(self.initial_errors)
Example #21
def deploy_server(**kwargs):
    u"""Основная команда для выкладывания кода на сервер."""
    # Компилируем переводы, если это требуется
    if 'po' in kwargs:
        local('./po_compile.sh')
    # Передаём код на сервер
    if 'rsync' in kwargs:
        target = '%(user)s@%(host)s:%(dir)s' % {
            'user': env.conf.HOST_USER,
            'host': env.conf.HOST_NAME,
            'dir': env.conf.PROJECT_DIR,
        }
        print 'Rsync project with %s' % target
        local(
            'rsync -v --stats --archive --recursive --update %(exclude)s %(src)s %(target)s'
            % {
                'exclude':
                ' '.join(
                    map(lambda x: '--exclude "%s"' % x, [
                        '.git/', '.gitignore', '*.sql', '*.sh', '*.rst',
                        '*.po', '*.pyc', '*.sqlite', '*template', 'cache/',
                        'env/', 'fabfile/', 'logs/', 'sshfs/', 'tmp/',
                        'src/public/', 'src/search', 'wsgi.py',
                        'settings_dump.py', 'test_settings.py',
                        'local_settings.py', 'prod_settings.py'
                    ])),
                'src':
                '.',
                'target':
                target
            })
        put('./src/%s' % env.conf.CONFIG,
            os.path.join(env.conf.PROJECT_DIR, 'src', 'local_settings.py'))
    # Install/update dependencies
    if 'pip' in kwargs:
        options = ''
        if 'u' == kwargs.get('pip', 'i').lower():
            options = '-U'
        pip_install(options=options)
    # Apply migrations if required
    if 'migrate' in kwargs:
        db_dump()
        manage('syncdb --migrate --noinput')

    if 'static' in kwargs:
        manage('collectstatic --noinput')
    if 'i18n' in kwargs:
        manage('update_translation_fields')
    if 'haystack' in kwargs:
        manage('rebuild_index --noinput')
    if 'touch' in kwargs:
        touch()
Example #22
    def test_mtime_update(self):
        """
        Update mtimes of files and check that errors remain unchanged.
        """
        write_load_config(
            self.repo_dir,
            os.path.join(self.saved_state_dir, 'foo'))

        self.check_cmd(self.initial_errors)
        touch(os.path.join(self.repo_dir, 'foo_1.php'))
        self.check_cmd(self.initial_errors)
        touch(os.path.join(self.repo_dir, 'foo_2.php'))
        self.check_cmd(self.initial_errors)
Example #23
    def test_cmpress_task_existing(self, tmpdir, datadir):
        with tmpdir.as_cwd():
            tf = datadir('test-covariance-model.cm')
            for ext in self.extensions:
                touch(tf + ext)

            task = CMPressTask().task(tf, params=self.cmpress_cfg)
            run_tasks([task], ['run'])
            print(os.listdir(), file=sys.stderr)
            print(task, file=sys.stderr)
            status = check_status(task)

            assert status.status == 'up-to-date'
Example #24
 def _read(self, dbname):
     with self._lock:
         path = os.path.join(XVM_DB_DIR, dbname + '.xdb')
         utils.touch(path)
         with open(path, 'r') as f:
             try:
                 try:
                     data = json.load(f)
                 except:
                     data = {}
                 return data
             finally:
                 f.close()
Example #25
File: db.py Project: Mummut/wot-xvm
 def _read(self, dbname):
     with self._lock:
         path = os.path.join(XVM_DB_DIR, dbname + '.xdb')
         utils.touch(path)
         with open(path, 'r') as f:
             try:
                 try:
                     data = json.load(f)
                 except:
                     data = {}
                 return data
             finally:
                 f.close()
Example #26
    def test_mtime_update(self):
        """
        Update mtimes of files and check that errors remain unchanged.
        """
        self.write_load_config()
        server_proc = self.start_hh_server()
        ensure_output_contains(server_proc.stderr,
                'Load state found at %s.' % self.saved_state_path())

        self.check_cmd(self.initial_errors)
        touch(os.path.join(self.repo_dir, 'foo_1.php'))
        self.check_cmd(self.initial_errors)
        touch(os.path.join(self.repo_dir, 'foo_2.php'))
        self.check_cmd(self.initial_errors)
Example #27
    def test_mtime_update(self):
        """
        Update mtimes of files and check that errors remain unchanged.
        """
        state_fn = os.path.join(self.saved_state_dir, "foo")
        write_load_config(self.repo_dir, state_fn)
        server_proc = self.start_hh_server()
        ensure_output_contains(server_proc.stderr, "Load state found at {0!s}.".format(state_fn))

        self.check_cmd(self.initial_errors)
        touch(os.path.join(self.repo_dir, "foo_1.php"))
        self.check_cmd(self.initial_errors)
        touch(os.path.join(self.repo_dir, "foo_2.php"))
        self.check_cmd(self.initial_errors)
Example #28
    def get_subproc_env(self):
        subproc_env = os.environ.copy()

        self.validatelogs = self.logfile + '.validate.logs'
        utils.touch(self.validatelogs)
        subproc_env["GST_VALIDATE_FILE"] = self.validatelogs
        self.extra_logfiles.append(self.validatelogs)

        if 'GST_DEBUG' in os.environ:
            gstlogsfile = self.logfile + '.gstdebug'
            self.extra_logfiles.append(gstlogsfile)
            subproc_env["GST_DEBUG_FILE"] = gstlogsfile

        return subproc_env
Example #29
    def test_hmmpress_task_existing(self):
        with TemporaryDirectory() as td:
            with Move(td):
                with TestData('test-profile.hmm', td) as tf:
                    for ext in self.extensions:
                        touch(tf + ext)

                    task = tasks.get_hmmpress_task(tf, self.hmmpress_cfg)
                    run_tasks([task], ['run'])
                    print(os.listdir(td), file=sys.stderr)
                    print(task, file=sys.stderr)
                    status = check_status(task)

                    self.assertEquals(status.status, 'up-to-date')
Example #30
    def test_hmmpress_task_existing(self):
        with TemporaryDirectory() as td:
            with Move(td):
                with TestData('test-profile.hmm', td) as tf:
                    for ext in self.extensions:
                        touch(tf + ext)

                    task = tasks.get_hmmpress_task(tf, self.hmmpress_cfg)
                    run_tasks([task], ['run'])
                    print(os.listdir(td), file=sys.stderr)
                    print(task, file=sys.stderr)
                    status = check_status(task)
                    
                    self.assertEquals(status.status, 'up-to-date')
Example #31
    def test_mtime_update(self):
        """
        Update mtimes of files and check that errors remain unchanged.
        """
        state_fn = os.path.join(self.saved_state_dir, 'foo')
        write_load_config(self.repo_dir, state_fn)
        server_proc = self.start_hh_server()
        ensure_output_contains(server_proc.stderr,
                               'Load state found at %s.' % state_fn)

        self.check_cmd(self.initial_errors)
        touch(os.path.join(self.repo_dir, 'foo_1.php'))
        self.check_cmd(self.initial_errors)
        touch(os.path.join(self.repo_dir, 'foo_2.php'))
        self.check_cmd(self.initial_errors)
Example #32
def deploy_server(**kwargs):
    u"""Основная команда для выкладывания кода на сервер."""
    # Компилируем переводы, если это требуется
    if 'po' in kwargs:
        local('./po_compile.sh')
    # Передаём код на сервер
    if 'rsync' in kwargs:
        target = '%(user)s@%(host)s:%(dir)s' % {
            'user': env.conf.HOST_USER,
            'host': env.conf.HOST_NAME,
            'dir': env.conf.PROJECT_DIR,
        }
        print 'Rsync project with %s' % target
        local('rsync -v --stats --archive --recursive --update %(exclude)s %(src)s %(target)s' % {
                'exclude': ' '.join(
                    map(
                        lambda x: '--exclude "%s"' % x,
                        ['.git/', '.gitignore', '.vagrant/', 'VAGRANT.txt', 'Vagrantfile',
                         '*~', '*.sql', '*.sql.bz2', '*.gz', '*.sh', '*.rst', '*.po', '*.pdf', '*.deb',
                         '*.pyc', '*.sqlite', '*template', 'SEO.txt',
                         'cache/', 'docs/', 'env/', 'fabfile/', 'dumps/', 'logs/', 'sshfs/', 'tmp/',
                         'src/public/', 'src/search', 'search/',
                         'wsgi.py', 'settings_dump.py', 'test_settings.py',
                         'local_settings.py', 'prod_settings.py'
                         ])),
                'src': '.',
                'target': target
            })
        put('./src/%s' % env.conf.CONFIG, os.path.join(env.conf.PROJECT_DIR, 'src', 'local_settings.py'))
    # Install/update dependencies
    if 'pip' in kwargs:
        options = ''
        if 'u' == kwargs.get('pip', 'i').lower():
            options = '-U'
        pip_install(options=options)
    # Apply migrations if required
    if 'migrate' in kwargs:
        db_dump()
        manage('syncdb --migrate --noinput')

    if 'static' in kwargs:
        manage('collectstatic --noinput')
    if 'i18n' in kwargs:
        manage('update_translation_fields')
    if 'haystack' in kwargs:
        manage('rebuild_index --noinput')
    if 'touch' in kwargs:
        touch()
Example #33
    def create_header_file(self):

        os.mkdir(ENGYN_CONFIG.SRC_DIR + '/' + self.app_stem)

        filepath = os.path.join(
            ENGYN_CONFIG.SRC_DIR, self.app_stem,
            "{}.{}".format(self.app_stem, ENGYN_CONFIG.HEADER_EXT))

        # create empty
        status = utils.touch(filepath)
        if (status != 0):
            self.err = status
            return status

        # parse content
        content = utils.parse(ENGYN_CONFIG.DEFAULT_HEADER)

        # inject class name
        content = content.replace(ENGYN_CONFIG.DEFAULT_APP_STEM, self.app_stem)\
                         .replace(ENGYN_CONFIG.DEFAULT_APP_STEM.upper(), self.app_stem.upper())

        # write content
        status = utils.write_content(content, filepath)
        if (status != 0):
            self.err = status

        return status
Example #34
    def create_cpp_file(self):

        # create empty
        filepath = os.path.join(
            ENGYN_CONFIG.SRC_DIR, self.app_stem,
            "{}.{}".format(self.app_stem, ENGYN_CONFIG.CPP_EXT))
        status = utils.touch(filepath)

        if (status != 0):
            self.err = status
            return status

        # parse content
        content = utils.parse(ENGYN_CONFIG.DEFAULT_CPP)

        # inject class name
        content = content.replace(ENGYN_CONFIG.DEFAULT_APP_STEM, self.app_stem)

        # write content
        status = utils.write_content(content, filepath)

        if (status != 0):
            self.err = status

        return status
Example #35
    def write_load_config(self, *changed_files):
        with open(os.path.join(self.repo_dir, 'server_options.sh'), 'w') as f:
            f.write(r"""
#! /bin/sh
echo %s
""" % self.saved_state_path())
            os.fchmod(f.fileno(), 0o700)

        with open(os.path.join(self.repo_dir, 'hh.conf'), 'w') as f:
            # we can't just write 'echo ...' inline because Hack server will
            # be passing this command some command-line options
            f.write(r"""
# some comment
load_mini_script = %s
""" % os.path.join(self.repo_dir, 'server_options.sh'))

        touch(os.path.join(self.repo_dir, '.hhconfig'))
Example #36
    def write_load_config(self, *changed_files):
        with open(os.path.join(self.repo_dir, 'server_options.sh'), 'w') as f:
            f.write(r"""
#! /bin/sh
echo %s
""" % self.saved_state_path())
            os.fchmod(f.fileno(), 0o700)

        with open(os.path.join(self.repo_dir, 'hh.conf'), 'w') as f:
            # we can't just write 'echo ...' inline because Hack server will
            # be passing this command some command-line options
            f.write(r"""
# some comment
load_mini_script = %s
""" % os.path.join(self.repo_dir, 'server_options.sh'))

        touch(os.path.join(self.repo_dir, '.hhconfig'))
Example #37
    def setUpClass(cls):
        cls.maxDiff = 2000
        cls.repo_dir = tempfile.mkdtemp()
        cls.saved_state_dir = tempfile.mkdtemp()

        touch(os.path.join(cls.repo_dir, '.hhconfig'))

        cls.files = {}

        cls.files['foo_1.php'] = """
        <?hh
        function f() {
            return g() + 1;
        }
        """

        cls.files['foo_2.php'] = """
        <?hh
        function g(): string {
            return "a";
        }
        """

        cls.files['foo_3.php'] = """
        <?hh
        function h(): string {
            return 1;
        }
        """

        cls.initial_errors = [
            'foo_1.php:4:20,22: Typing error (Typing[4110])',
            '  foo_1.php:4:20,22: This is a num (int/float) because this is used in an arithmetic operation',
            '  foo_2.php:3:23,28: It is incompatible with a string',
            'foo_3.php:4:20,20: Invalid return type (Typing[4110])',
            '  foo_3.php:3:23,28: This is a string',
            '  foo_3.php:4:20,20: It is incompatible with an int',
        ]

        write_files(cls.files, cls.repo_dir)

        subprocess.call([
            cls.hh_server,
            '--check', cls.repo_dir,
            '--save', os.path.join(cls.saved_state_dir, 'foo'),
        ])
Example #38
    def run(self):
        Log.info("Initilizing empty migrations file into app..")
        migrations_path = os.path.join(paths.APP_PATH, 'migrations.py')
        result = touch(migrations_path)
        Log.info("Creating glim_migrations table on db..")

        engine = DB.engine('default')
        Migration.metadata.create_all(engine)
        Log.info("Done.")
Example #39
File: main.py Project: floor66/fsr
    def rec_start(self):
        self.recording = True
        self.status("Initiating connection")

        self.rec_start_btn.configure(state="disabled")
        self.rec_stop_btn.configure(state="normal")
        # Remove focus from the start button; it could cause problems when annotating
        self.root.focus()

        # Check if we can initiate the serial communication
        if self.init_serial():
            self.status("Connection initiated (COM port: %s)" % self.COM_PORT)
            self.recordings += 1
            self.SAVE_FILE = "sensordata/data_%i_%i.txt" % (self.__start__,
                                                            self.recordings)
            self.ANNOTATION_FILE = "sensordata/annotations_%i_%i.txt" % (
                self.__start__, self.recordings)

            # Generate new, empty data files
            touch(self.SAVE_FILE)
            touch(self.ANNOTATION_FILE)

            self.logger.log(
                "Arduino initialized, starting recording #%i of this session" %
                self.recordings)
            self.logger.log("Currently recording to file: %s" % self.SAVE_FILE)
            self.save_data("; Recording @ %i Hz, Baud rate %i\n" %
                           (self.MEASURE_FRQ, self.BAUD_RATE.get()))
            self.save_data("; Vcc = %.02f V, pulldown = %i Ohm\n" %
                           (self.Vcc.get(), self.pulldown.get()))
            self.save_data("; Key: time (ms), pin (A0-5), readout (0-1023)\n")

            self.check_rec_pins()
            self.__rec_start__ = time.time()
            self.record()
        else:
            self.recording = False

            self.rec_start_btn.configure(state="normal")
            self.rec_stop_btn.configure(state="disabled")

            self.status("Connection failed")
            self.logger.log("Connection failed")
Example #40
def download_caida_restricted_worker(url, file_path, resource=""):
  opener = utils.get_caida_opener(utils.caida_trace_base_url)

  if not os.path.exists(file_path):
    utils.touch(file_path)

  res = True
  ex = ''
  try:
    utils.log("downloading: " + url)
    if os.path.exists(file_path):
      f = opener.open(url, timeout=10)
      fp = open(file_path, 'wb')
      fp.write(f.read())
      fp.close();f.close()
  except Exception, e:
    utils.log(str(e))
    res = False
    ex = e
    if os.path.exists(file_path):
      os.remove(file_path)
Example #41
def test_nchash():

    for f in glob(os.path.join(tmpdir,"*.nc")):

        hashtime = NCDataHash(f).gethash()
        hashnotime = NCDataHash(f,nomtime=True).gethash()
        hashnoname = NCDataHash(f,noname=True,nomtime=True).gethash()

        # Hash should not change if file is unchanged
        assert(NCDataHash(f).gethash() == hashtime)

        mtime = os.path.getmtime(f)
        touch(f,(mtime+1,mtime+1))

        # mtime dependent hash should not match
        assert(NCDataHash(f).gethash() != hashtime)

        # mtime independent hashes should match
        assert(NCDataHash(f,nomtime=True).gethash() == hashnotime)

        # mtime independent hash with different name should match hashnoname
        assert(NCDataHash(f+'.notsamename',noname=True,nomtime=True).gethash() == hashnoname)
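
Note the two-argument call touch(f, (mtime + 1, mtime + 1)) above: the test bumps the file's mtime by one second without rewriting its contents, which matches the os.utime-style (atime, mtime) signature sketched after Example #1.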
Example #42
def download_date(date, file_path, seg_size=20 * 1024 * 1024, mt_num=-1):
    utils.touch(file_path)

    url = construct_url_fromtime(date)
    opener = utils.get_iplane_opener()

    #get the size first.
    file_size = get_iplane_file_size(url, opener)
    if file_size == -1:
        return
    file_num = int(math.ceil(float(file_size) / seg_size))
    if file_num == 0:
        return

    #to get the range list.
    range_list = []
    for i in range(0, file_num - 1):
        range_list.append((i * seg_size, (i + 1) * seg_size - 1))
    if (file_num == 1):
        i = -1
    range_list.append(((i + 1) * seg_size, file_size))

    #resources
    resources = ['']

    #build argv_list.
    argv = []
    for i in range(len(range_list)):
        r = range_list[i]
        arg = (url, r[0], r[1], file_path + '.' + str(i), opener)
        argv.append(arg)

    #run with multi thread.
    multi_thread.run_with_multi_thread(download_iplane_restricted_wrapper,
                                       argv, resources, mt_num)

    # assemble segments.
    assemble_segements(file_path)
Example #43
    def cross_validate(self, feature_selection_func=None, **kwargs):
        if self.save is True:
            if os.path.exists(self.save_path):
                try:
                    pred = pickle.load(open(self.save_path, 'rb'))
                    oof_gini = pred['normalized_gini']
                    print '`{}` exists. CV: {}'.format(
                        self.save_path.split('/')[-1],
                        np.round(oof_gini, 4)
                    )
                    return
                except EOFError:
                    pass

            # reserve the filename
            touch(self.save_path)

        X_train, y_train, X_test = self._load_data()
        oof_predictions, oof_gini, cv_scores = self._generate_oof_predictions(X_train, y_train)
        lb_predictions = self._generate_lb_predictions(X_train, y_train, X_test)

        # save
        if self.save is True:
            to_save = {
                'oof_predictions': oof_predictions,
                'lb_predictions': lb_predictions,
                'normalized_gini': oof_gini,
                'normalized_gini_cv': cv_scores,
                'model_params': self._extract_model_parameters(),
                'model_name': self._get_model_name(),
                'dataset_func': self._get_dataset_func_name(),
                'dataset_params': self.dataset.kwargs,
                'target_transform': self._get_target_transform_name(),
                'feature_selector': self._get_feature_selector_name(),
                'feature_selector_params': self._get_init_args_from_class(self.feature_selector)
            }
            pickle.dump(to_save, open(self.save_path, 'wb'))
Example #44
def minion_opts(request, tmpdir_factory, file_roots):
    tmpdir = tmpdir_factory.mktemp('root')
    cachedir = tmpdir.mkdir('var').mkdir('cache')
    cachedir.join('.touch').write('')
    config = touch('/etc/salt/minion', tmpdir)
    __opts__ = salt.config.minion_config(str(config))
    __opts__.update({
        'root_dir': str(tmpdir),
        'file_client': 'local',
        'file_roots': {
            'base': file_roots
        },
        'cachedir': str(cachedir),
        'id': 'test-minion',
    })
    if request.config.getini('SALT_MODULE_DIRS'):
        __opts__['module_dirs'] = request.config.getini('SALT_MODULE_DIRS')
    return __opts__
Example #45
def check_all_presn(
    path=default_path,
    fix=False,
):
    paths = glob.glob(os.path.join(path, '*'))
    for x in paths:
        print('.', end='', flush=True)
        run = os.path.basename(x)
        dump_filename = os.path.join(x, run + '#presn')
        presn_filename = os.path.join(x, 'presn')
        broken_filename = os.path.join(x, 'broken')
        if os.path.exists(dump_filename):
            b = os.path.exists(broken_filename)
            p = os.path.exists(presn_filename)
            d = kepdump._load(dump_filename)
            if d.is_presn:
                if b:
                    print('presn seems OK, but marked broken: {}'.format(run))
                    if fix:
                        os.remove(broken_filename)
                if not p:
                    print(
                        'presn seems OK, but not marked presn: {}'.format(run))
                    if fix:
                        touch(presn_filename)
            else:
                if p:
                    print('presn not OK, but marked presn: {}'.format(run))
                    if fix:
                        os.remove(presn_filename)
                        os.remove(dump_filename)
                        touch(broken_filename)
        else:
            b = os.path.exists(broken_filename)
            p = os.path.exists(presn_filename)
            if p:
                print('presn not present, but marked presn: {}'.format(run))
                if fix:
                    os.remove(presn_filename)
                    touch(broken_filename)
Example #46
def check_all_presn(path = default_path,
                    fix = False,
                    ):
    paths = glob.glob(os.path.join(path, '*'))
    for x in paths:
        print('.', end = '', flush = True)
        run = os.path.basename(x)
        dump_filename = os.path.join(x, run + '#presn')
        presn_filename = os.path.join(x, 'presn')
        broken_filename = os.path.join(x, 'broken')
        if os.path.exists(dump_filename):
            b = os.path.exists(broken_filename)
            p = os.path.exists(presn_filename)
            d = kepdump._load(dump_filename)
            if d.is_presn:
                if b:
                    print('presn seems OK, but marked broken: {}'.format(run))
                    if fix:
                        os.remove(broken_filename)
                if not p:
                    print('presn seems OK, but not marked presn: {}'.format(run))
                    if fix:
                        touch(presn_filename)
            else:
                if p:
                    print('presn not OK, but marked presn: {}'.format(run))
                    if fix:
                        os.remove(presn_filename)
                        os.remove(dump_filename)
                        touch(broken_filename)
        else:
            b = os.path.exists(broken_filename)
            p = os.path.exists(presn_filename)
            if p:
                print('presn not present, but marked presn: {}'.format(run))
                if fix:
                    os.remove(presn_filename)
                    touch(broken_filename)
Example #47
def check_aboarded(path = default_path,
                   filename = None,
                   fix = False,
                   paths = None,
                   ):
    if filename is None:
        timestamp = 0
    else:
        timestamp = os.path.getmtime(filename)
    dirs = dict()
    if paths is None:
        paths = glob.glob(os.path.join(path, '*'))
    for x in paths:
        run = os.path.basename(x)
        # filter out runs just set up
        if not (os.path.exists(os.path.join(x, run + '.cnv')) or
                os.path.exists(os.path.join(x, run + '.wnd')) or
                os.path.exists(os.path.join(x, run + '.log')) or
                os.path.exists(os.path.join(x, run + 'z')) or
                os.path.exists(os.path.join(x, run + 'z1'))):
            continue
        # filter out runs with online status
        if (os.path.exists(os.path.join(x, 'presn')) or
            os.path.exists(os.path.join(x, 'continue')) or
            os.path.exists(os.path.join(x, 'broken')) or
            os.path.exists(os.path.join(x, 'nofix'))):
            continue
        if os.path.getmtime(x) < timestamp:
            continue
        dirs[x] = dict()
    #if fix is not True:
    #    return dirs
    # find last OK dumps
    logs = {
        'cnv': convdata._load,
        'wnd': winddata._load,
        'log': logdata._load,
            }
    for x, v in dirs.items():
        run = os.path.basename(x)
        backup = os.path.join(os.path.dirname(x), 'backup', run)
        if not os.path.exists(backup):
            print('copying directory {} --> {}'.format(x, backup))
            if fix:
                shutil.copytree(x, backup)
        print('checking {} ... '.format(run), end = '', flush = True)
        dumps = [os.path.join(x, run + 'z'),
                 os.path.join(x, run + 'z1')]
        for f in glob.glob(os.path.join(x, run + '#*')):
            dumps += [f]
        dumps_broken = []
        dumps_ok = []
        for f in dumps:
            if not os.path.exists(f):
                continue
            try:
                d = kepdump._load(f)
            except:
                dumps_broken += [f]
            else:
                dumps_ok += [(d.ncyc, f)]
        print(len(dumps_broken), len(dumps_ok))
        dumps_ok = sorted(dumps_ok, key = lambda x: x[0])
        v['dumps_ok'] = dumps_ok
        v['dumps_broken'] = dumps_broken
        for d in dumps_broken:
            print('Removing: ', d, os.path.getsize(d))
            if fix:
                os.remove(d)
        print('last OK dump ', dumps_ok[-1])

        # find last OK history file
        for ext, loader in logs.items():
            data = loader(os.path.join(x, run + '.' + ext),
                          silent = 40,
                          raise_exceptions = False)
            u = data.ncyc
            jj, = np.where(np.not_equal(u[1:], u[:-1]+1))
            if len(jj) == 0:
                ncyc = u[-1]
                print(ext, ncyc)
            else:
                ncyc = u[jj[0]]
                print(ext, 'CORRUPT', ncyc, u[-1], '{:5.2f}%'.format(100* ncyc/ u[-1]))
            v[ext] = ncyc

        max_seq = min([v[ext] for ext in logs.keys()])
        restart_file = None
        for ncyc, f in dumps_ok[::-1]:
            if ncyc <= max_seq:
                restart_file = f
                break
            else:
                print('Removing: ', ncyc, f)
                if fix:
                    os.remove(f)
        print('*'*72)
        print('*** Last complete model: {:d} {}'.format(ncyc, restart_file))
        print('*'*72)

        z_file = os.path.join(x, run + 'z')
        if restart_file != z_file:
            try:
                print('Removing {}'.format(z_file))
                if fix:
                    os.remove(z_file)
            except FileNotFoundError:
                pass
            print('Copying {} --> {}'.format(restart_file, z_file))
            if fix:
                shutil.copy2(restart_file, z_file)
        if fix:
            touch(os.path.join(x, 'continue'), verbose = True)
Example #48
def checkout_and_build_libmecab(basedir):
    from os import environ
    from os.path import isdir, isfile, join as p_join, exists
    from sys import stderr
    from time import sleep
    from utils import get_parallel_jobs, run, chdir, mkdir_p, touch, symlink

    if basedir:
        chdir(basedir)

    if environ.get('LIBMECAB_DIR'):
        # This lets you point at an extracted tarball
        run("ls", "-l", environ.get("LIBMECAB_DIR"))
        assert exists(
            p_join(environ.get("LIBMECAB_DIR"), "mecab", "aclocal.m4"))
        assert not exists(LIBMECAB_DIR)
        run("rsync", "-avP", "--delete-during",
            environ.get('LIBMECAB_DIR') + "/", LIBMECAB_DIR)
        run("ls", "-l", LIBMECAB_DIR)
    else:
        mkdir_p(LIBMECAB_DIR)

    if exists(p_join(LIBMECAB_DIR, "mecab", "aclocal.m4")):
        # assume already checked out/extracted
        chdir(p_join(LIBMECAB_DIR, "mecab"))

    else:
        chdir(LIBMECAB_DIR)
        # You would think git clone would have an option to clone an
        # arbitrary <treeish>, but you would be wrong.
        run("git", "init")
        run("git", "remote", "add", "origin", LIBMECAB_REPO)
        run("git", "fetch", "origin", "--depth", "1", LIBMECAB_REV)
        run("git", "reset", "--hard", "FETCH_HEAD")
        chdir("mecab")

    CC, CXX, CFLAGS, CXXFLAGS, MDT = compile_flags_for_platform()

    stderr.write(
        "+ compile_flags_for_platform\n"
        "Building MeCab with:\n       {}\n      {}\n   {}\n {}\n".format(
            CC, CXX, CFLAGS, CXXFLAGS))

    if MDT is not None:
        stderr.write(" MACOSX_DEPLOYMENT_TARGET={}\n".format(MDT))
        environ["MACOSX_DEPLOYMENT_TARGET"] = MDT

    if not isfile("mecab-config"):
        # Not yet configured.
        # Adjust time stamps to make sure that Make doesn't think it
        # needs to re-run autoconf or automake.
        for f in [
                "aclocal.m4", "config.h.in", "configure", "Makefile.in",
                "src/Makefile.in"
        ]:
            touch(f)
            sleep(1)

        # We build with the default charset set to UTF-8, but we don't
        # disable support for EUC-JP or Shift-JIS.
        run("./configure", "--enable-static", "--disable-shared",
            "--with-charset=utf8", CC, CXX)

    # Only build the actual library, not the utilities.
    chdir("src")

    run("make", "-j{}".format(get_parallel_jobs()), "libmecab.la", CC, CXX,
        CFLAGS, CXXFLAGS)

    # Bypass libtool.
    if not isfile("libmecab.a"):
        symlink(".libs/libmecab.a", "libmecab.a")
Example #49
 def init(self, args):
     g = self._git()
     utils.touch(self.manifest)
     g.init()
     g.add(self.manifest)
Example #50
                if len(runs) > n_runs0:
                    xprint('skipping runs=%d n_runs0=%d' % (len(runs), n_runs0))
                    continue

                set_random_seed(random_seed + n_runs0)
                evaluator = Evaluator(n=1)
                ok, auc_reductions, best_method = evaluator.evaluate_reductions(get_clf,
                    PREDICT_METHODS_GOOD)
                assert ok

                for predict_method in sorted(auc_reductions):
                    auc = auc_reductions[predict_method]
                    xprint('<->.' * 25)
                    xprint('predict_method=%s' % predict_method)
                    if predict_method == 'BEST':
                        xprint('best_method=%s' % best_method)
                    assert auc.all() > 0.0, auc

                    auc_list.append((auc, get_clf.__name__, str(get_clf())))
                    show_results(auc_list)

                    runs.append(auc_score_list(auc))
                    completed_tests[str(get_clf())] = runs
                    save_json(run_summary_path, completed_tests)
                    xprint('n_completed=%d = %d + %d' % (len(completed_tests), n_completed0,
                        len(completed_tests) - n_completed0))
                xprint('&' * 100)

touch('completed.spacy_lstm121.txt')
xprint('$' * 100)
Example #51
            is_succeeded = True
        except Exception, e:
            utils.log(str(e))
            is_succeeded = False
            round_cnt = round_cnt + 1
            time.sleep(1 * round_cnt)

    url_list = []
    for r in result_list:
        url_list.append(r["result"])

    #temp_list to contain result content of url_list
    temp_list = ["" for i in range(len(url_list))]

    #destination file_path.
    utils.touch(file_path)

    #resources.
    resources = ['']

    #build argv
    argv = []
    #for i in range(len(url_list)):
    for i in range(40):  #debug
        url = url_list[i]
        arg = (url, temp_list, i)
        argv.append(arg)

    #run with multi thread.
    multi_thread.run_with_multi_thread(
        download_ripe_atlas_detail_worker_wrapper, argv, resources, mt_num)
Example #52
def main():

    settingsf = param_file.split('.')[-2]
    expt = countModel(modelFamily,
                      nlaws,
                      settingsf, [dataset],
                      floatNoise,
                      doRedshiftSlices=doRedshiftSlices)

    # Set up MPI
    world = MPI.COMM_WORLD
    rank = world.rank
    size = world.size
    master = rank == 0

    if master:
        set_module = importlib.import_module(settingsf)
        globals().update(set_module.__dict__)

    note = 'MPI processors checked in: rank/size = (%i/%i)' % (rank, size)
    print note

    if master:
        try:
            os.mkdir(outdir)
            # Fix permissions
            os.chmod(outdir, 0755)
        except OSError:
            pass

        logf = os.path.join(outdir, logfile)
        if master and os.path.exists(logf): os.remove(logf)
        log = open(logf, 'w')
        remark(log, note)

    # Wait here after check-in...
    world.Barrier()
    if master: print 'All %i processors checked in...' % size

    # Broadcast global settings variables
    if master:
        set_dict = set_module.__dict__
    else:
        set_dict = None
    set_dict = world.bcast(set_dict, root=0)

    if not master:
        globals().update(set_dict)
        #print globals()

    # Wait here after broadcast...
    world.Barrier()
    if master: print 'All %i processors received OK...\n' % size

    # Write settings variables to file
    if master:
        variablesf = os.path.join(outdir, variablesfile)
        dump_variable_values(set_module, variablesf, verbose=False)

        startTime = time.strftime('%X %x %Z')
        shutil.copy(param_file, os.path.join(outdir, 'bayestack_settings.py'))
        if doRedshiftSlices:
            for datafile in datafiles:
                shutil.copy(datafile, outdir)
        else:
            shutil.copy(datafile, outdir)
        shutil.copy('bayestackClasses.py', outdir)
        shutil.copy('bayestack.py', outdir)
        shutil.copy('lumfuncUtils.py', outdir)

        notes=['Time now is %s' % startTime,\
               'Settings file: %s' % param_file,\
               'Data file: %s' % datafile]
        remarks(log, notes)

        # This is to allow import of settings from outdir
        # i.e. from outdir import * [or whatever]
        init_file = '__init__.py'
        initf = os.path.join(outdir, init_file)
        touch(initf)

        notes=['Bins taken from %s' % datafile,\
               '# Bin occupancies [i uJy uJy field^-1]:']
        remarks(log, notes)
        if doRedshiftSlices:
            print 'Multi data'
            for df in expt.survey.datafiles[0]:
                print df
                for ibin in xrange(expt.fnbins[df]):
                    try:
                        line = '%i %f %f %f' % (ibin + 1, expt.fbins[df][ibin],
                                                expt.fbins[df][ibin + 1],
                                                expt.fdata[df][ibin])
                    except IndexError:
                        print "Probably your binstyle doesn't match the datafile bins"
                        sys.exit(0)
                    remark(log, line)

        else:
            for ibin in xrange(expt.nbins):
                try:
                    line = '%i %f %f %f' % (ibin + 1, expt.bins[ibin],
                                            expt.bins[ibin + 1],
                                            expt.data[ibin])
                except IndexError:
                    print "Probably your binstyle doesn't match the datafile bins"
                    sys.exit(0)
                remark(log, line)

    # Run MultiNest
    if master:
        t0 = time.time()
        print outdir
    try:
        pymultinest.run(expt.loglike,expt.logprior,expt.nparams,\
                    resume=RESUME,verbose=True,\
                    multimodal=multimodal,max_modes=max_modes,write_output=True,\
                    n_live_points=n_live_points,\
                    evidence_tolerance=evidence_tolerance,\
                    # mode_tolerance=-1e90 bugfix for earlier versions
                    # of PyMultiNest


                    mode_tolerance=-1e90,seed=SEED_SAMP,max_iter=max_iter,\
                    importance_nested_sampling=do_INS,\
                    sampling_efficiency=sampling_efficiency,\
                    outputfiles_basename=os.path.join(outdir,outstem),\
        # NB MPI is already init'ed by mpi4py (crashes otherwise)

                    init_MPI=False)
    except:
        return 1

    if master:
        stopTime = time.strftime('%X %x %Z')
        t1 = time.time()
        dt = t1 - t0

        # Touch the output dir so Dropbox picks it up
        touch(outdir)

        notes=['Time then was %s' % startTime,\
               'Time now is %s' % stopTime,\
               'Execution took %6.4f sec (~ %i hours %i min) with %i cores' % \
                                         (dt,divmod(dt,3600)[0],int(divmod(dt,3600)[1]/60),size),\
               'Arguments:  %s' % ' '.join(sys.argv),\
               'Model used: %s'%modelFamily,\
               'INS   = %s' % do_INS,\
               'nlive = %i' % n_live_points,\
               'Run comment: %s' % comment,\
               'Now execute:',\
               '\n./plot.py %s' % outdir,\
               'and\n./reconstruct.py %s' % outdir]

        remarks(log, notes)
        log.close()

        print 'Parameters were:', expt.parameters

        # Copy the stats file so it's legible on my iPhone, Google, email etc.
        stats_dotdat = '%(od)s/%(os)sstats.dat' % {'od': outdir, 'os': outstem}
        stats_dottxt = '%(od)s/%(os)sstats.txt' % {'od': outdir, 'os': outstem}
        shutil.copy(stats_dotdat, stats_dottxt)

        # Now make all the files world readable
        globlist = glob.glob(os.path.join(outdir, '*'))
        [os.chmod(f, 0644) for f in globlist]
Example #53
    for lstm_type in lstm_list:
        for get_clf in clf_list:
            for frozen in frozen_list:
                xprint('#' * 80)
                predict_method = PREDICT_METHODS_GOOD[0]
                clf_str = str(get_clf())
                xprint(clf_str)
                runs = completed_tests.get(clf_str, [])
                if len(runs) > n_runs0:
                    xprint('skipping runs=%d n_runs0=%d' % (len(runs), n_runs0))
                    continue

                set_random_seed(random_seed + n_runs0)
                evaluator = Evaluator(n=1)
                ok, auc_reductions = evaluator.evaluate_reductions(get_clf, PREDICT_METHODS_GOOD)
                assert ok

                for predict_method, auc in auc_reductions.items():
                    auc_list.append((auc, get_clf.__name__, str(get_clf())))
                    show_results(auc_list)

                    runs.append(auc_score_list(auc))
                    completed_tests[str(get_clf())] = runs
                    save_json(run_summary_path, completed_tests)
                    xprint('n_completed=%d = %d + %d' % (len(completed_tests), n_completed0,
                        len(completed_tests) - n_completed0))
                xprint('&' * 100)

touch('completed.spacy_lstmx_100.txt')
xprint('$' * 100)
Example #54
        s = self.time_used / 1e6
        m = int(s / 60) % 60
        h = int(s / 3600) % 24
        d = int(s / 86400)
        s = s % 60

        if d == 0:
            content += " Time: %02d:%02d:%02d" % (h, m, s)
        else:
            content += " Time: %d d %02d:%02d:%02d" % (d, h, m, s)

        if cr:
            content = "\r" + content

        sys.stdout.write(content)
        sys.stdout.flush()

    def start(self, arg):
        f = open(self.arg_id(arg) + ".todo", "w")
        content = json.dumps(arg)
        f.write(content)
        f.close()

    def get_result(self):
        return self.result


if __name__ == "__main__":
    touch("test")
Example #55
do_submission = False
epochs = 8



def get_clf():
    return ClfSpacy(n_hidden=512, max_length=100,  # Shape
                    dropout=0.5, learn_rate=0.001,  # General NN config
                    epochs=epochs, batch_size=300, frozen=True,
                    lstm_type=6, predict_method=PREDICT_METHODS_GOOD[0])


xprint_init(submission_name, do_submission)
xprint('#' * 80)
xprint(get_clf())
set_random_seed(seed=1234)

if do_submission:
    make_submission_reductions(get_clf, submission_name, PREDICT_METHODS_GOOD)
    touch('completed.spacy_lstm20.txt')
else:
    evaluator = Evaluator(n=1)
    ok, auc = evaluator.evaluate_reductions(get_clf, PREDICT_METHODS_GOOD)
xprint('$' * 80)

"""
instance5/spacy_lstm20s.ALL.LINEAR2.csv
Your submission scored 0.9723, which is not an improvement of your best score. Keep trying!

"""
Example #56
                if len(runs) > n_runs0:
                    xprint('skipping runs=%d n_runs0=%d' % (len(runs), n_runs0))
                    continue

                set_random_seed(random_seed + n_runs0)
                evaluator = Evaluator(n=1)
                ok, auc_reductions, best_method = evaluator.evaluate_reductions(get_clf,
                    PREDICT_METHODS_GOOD)
                assert ok

                for predict_method in sorted(auc_reductions):
                    auc = auc_reductions[predict_method]
                    xprint('<->.' * 25)
                    xprint('predict_method=%s' % predict_method)
                    if predict_method == 'BEST':
                        xprint('best_method=%s' % best_method)
                    assert auc.all() > 0.0, auc

                    auc_list.append((auc, get_clf.__name__, str(get_clf())))
                    show_results(auc_list)

                    runs.append(auc_score_list(auc))
                    completed_tests[str(get_clf())] = runs
                    save_json(run_summary_path, completed_tests)
                    xprint('n_completed=%d = %d + %d' % (len(completed_tests), n_completed0,
                        len(completed_tests) - n_completed0))
                xprint('&' * 100)

touch('completed.spacy_lstm130_flip.txt')
xprint('$' * 100)