Example #1
def test_LogicalFile(tmpdir):
    prefix = os.path.join(tmpdir, 'log')
    L = util.LogicalFile(prefix, 2, 6)
    with pytest.raises(FileNotFoundError):
        L.open_file(0, create=False)

    # Check L.open_file creates the underlying file on demand
    with L.open_file(8, create=True) as f:
        pass
    with util.open_file(prefix + '01') as f:
        pass

    L.write(0, b'987')
    assert L.read(0, -1) == b'987'
    assert L.read(0, 4) == b'987'
    assert L.read(1, 1) == b'8'

    L.write(0, b'01234567890')
    assert L.read(0, -1) == b'01234567890'
    assert L.read(5, -1) == b'567890'
    with util.open_file(prefix + '01') as f:
        assert f.read(-1) == b'67890'

    # Test file boundary
    L.write(0, b'957' * 6)
    assert L.read(0, -1) == b'957' * 6
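
Taken together these assertions pin down LogicalFile's contract: one logical byte stream split across physical files named prefix plus a zero-padded index (2 digits here), each capped at file_size bytes (6 here), which is why the 11-byte write spills its tail into log01. A minimal sketch consistent with the test, leaning on util.open_file as exercised in Example #4 (the project's real implementation may differ):

class LogicalFile:
    # Sketch reconstructed from the assertions above, not the project's code.

    def __init__(self, prefix, digits, file_size):
        self.filename_fmt = prefix + '{:0' + str(digits) + 'd}'  # log00, log01, ...
        self.file_size = file_size

    def open_file(self, start, create):
        # Open the physical file covering logical offset `start`,
        # seeked to the matching position inside it.
        file_num, offset = divmod(start, self.file_size)
        f = util.open_file(self.filename_fmt.format(file_num), create)
        f.seek(offset)
        return f

    def write(self, start, b):
        # Split writes at physical file boundaries.
        while b:
            size = min(len(b), self.file_size - start % self.file_size)
            with self.open_file(start, create=True) as f:
                f.write(b[:size])
            b = b[size:]
            start += size

    def read(self, start, size=-1):
        # Read across file boundaries; size == -1 means read to the end.
        parts = []
        while size != 0:
            try:
                with self.open_file(start, create=False) as f:
                    part = f.read(size)
            except FileNotFoundError:
                break
            if not part:
                break
            parts.append(part)
            start += len(part)
            if size > 0:
                size -= len(part)
        return b''.join(parts)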
Example #2
File: fabfile.py  Project: andresvia/kfc
def xlsx_report(header='1', sort='1', report_path=None,
                launch='1'):
    if report_path is None:
        report_path = tempfile.mktemp('.xlsx')
    workbook = xlsxwriter.Workbook(report_path)
    worksheet = workbook.add_worksheet()
    col = 0
    row = 0
    bold = workbook.add_format({'bold': True})
    if sort == '1':
        report.sort()  # 'report' is defined at the fabfile's module scope
    if header == '1':
        for column_title in report.header:
            worksheet.write(row, col, column_title, bold)
            col += 1
        col = 0
        row += 1
    for report_row in report:
        for report_cell in report_row:
            worksheet.write(row, col, report_cell)
            col += 1
        col = 0
        row += 1
    workbook.close()
    if launch == '1':
        util.open_file(report_path)
    else:
        print("%s generated" % report_path)
    serverinfo.server_info_cache.save()
Example #3
File: fabfile.py  Project: andresvia/kfc
def save_report(header='1', sort='1', separator='\t', vseparator='\n',
                report_path=None, launch='0'):
    if report_path is None:
        report_path = tempfile.mktemp('.tsv')
    # use a context manager so the handle is flushed and closed deterministically
    with codecs.open(report_path, mode='w', encoding='utf-8') as report_file:
        report_file.write(gen_report(header, sort, separator, vseparator))
    if launch == '1':
        util.open_file(report_path)
    else:
        print("%s generated" % report_path)
    serverinfo.server_info_cache.save()
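
Both report helpers pick their default path with tempfile.mktemp, which the standard library deprecates: another process can claim the returned name before the workbook or the codecs.open call creates it. A race-free alternative, sketched as a hypothetical hardening rather than project code:

import os
import tempfile

def fresh_report_path(suffix):
    # mkstemp creates the file atomically and returns an open descriptor;
    # close the descriptor and hand the now-reserved path to the writer.
    fd, path = tempfile.mkstemp(suffix=suffix)
    os.close(fd)
    return path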
Example #4
def test_open_fns(tmpdir):
    tmpfile = os.path.join(tmpdir, 'file1')
    with pytest.raises(FileNotFoundError):
        util.open_file(tmpfile)
    with util.open_file(tmpfile, create=True) as f:
        f.write(b'56')
    with util.open_file(tmpfile) as f:
        assert f.read(3) == b'56'

    # Test open_truncate truncates and creates
    with util.open_truncate(tmpfile) as f:
        assert f.read(3) == b''
    tmpfile = os.path.join(tmpdir, 'file2')
    with util.open_truncate(tmpfile) as f:
        assert f.read(3) == b''
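
This test is effectively a specification for the two helpers: open_file opens an existing file for binary read/write and raises FileNotFoundError unless create=True, while open_truncate always hands back an empty file. A sketch that satisfies every assertion above (the project's actual helpers may differ):

def open_file(filename, create=False):
    # Open for binary read/write; only create the file when asked to.
    try:
        return open(filename, 'rb+')
    except FileNotFoundError:
        if create:
            return open(filename, 'wb+')  # creates an empty file
        raise

def open_truncate(filename):
    # Truncate to zero length, creating the file if necessary.
    return open(filename, 'wb+')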
Example #5
    def open_dbs(self):
        '''Open the databases.  If already open they are closed and re-opened.

        When syncing we want to reserve a lot of open files for the
        synchronization.  When serving clients we want the open files
        available for network connections.
        '''
        def log_reason(message, is_for_sync):
            reason = 'sync' if is_for_sync else 'serving'
            self.logger.info('{} for {}'.format(message, reason))

        # Assume we're serving until we find out otherwise: if
        # read_utxo_state below shows we are still first-syncing,
        # loop again and re-open everything with for_sync=True.
        for for_sync in [False, True]:
            if self.utxo_db:
                if self.utxo_db.for_sync == for_sync:
                    return
                log_reason('closing DB to re-open', for_sync)
                self.utxo_db.close()
                self.hist_db.close()
                self.eventlog_db.close()
                self.hashY_db.close()

            # Open DB and metadata files.  Record some of its state.
            self.utxo_db = self.db_class('utxo', for_sync)
            self.hist_db = self.db_class('hist', for_sync)
            self.eventlog_db = self.db_class('eventlog', for_sync)
            self.hashY_db = self.db_class('hashY', for_sync)

            if self.utxo_db.is_new:
                self.logger.info('created new database')
                self.logger.info('creating metadata directory')
                os.mkdir('meta')
                with util.open_file('COIN', create=True) as f:
                    f.write(
                        'ElectrumX databases and metadata for {} {}'.format(
                            self.coin.NAME, self.coin.NET).encode())
            else:
                log_reason('opened DB', self.utxo_db.for_sync)

            self.read_utxo_state()
            if self.first_sync == self.utxo_db.for_sync:
                break

        self.read_history_state()
        self.read_eventlog_state()

        self.logger.info('DB version: {:d}'.format(self.db_version))
        self.logger.info('coin: {}'.format(self.coin.NAME))
        self.logger.info('network: {}'.format(self.coin.NET))
        self.logger.info('height: {:,d}'.format(self.db_height))
        self.logger.info('tip: {}'.format(hash_to_str(self.db_tip)))
        self.logger.info('tx count: {:,d}'.format(self.db_tx_count))
        self.logger.info('flush count: {:,d}'.format(self.flush_count))
        self.logger.info('eventlog flush count: {:,d}'.format(
            self.eventlog_flush_count))
        if self.first_sync:
            self.logger.info('sync time so far: {}'.format(
                util.formatted_time(self.wall_time)))
Example #6
File: peers.py  Project: xHire/electrumx
def read_peers_file(self):
    try:
        with util.open_file(PEERS_FILE, create=True) as f:
            data = f.read(-1).decode()
    except Exception as e:
        self.logger.error('error reading peers file {}'.format(e))
    else:
        if data:
            version, items = ast.literal_eval(data)
            if version == 1:
                peers = [Peer.deserialize(item) for item in items]
                self.add_peers(peers, source='peers file', limit=None)
Example #7
def read_peers_file(self):
    try:
        with util.open_file(PEERS_FILE, create=True) as f:
            data = f.read(-1).decode()
    except Exception as e:
        self.logger.error('error reading peers file {}'.format(e))
    else:
        if data:
            version, items = ast.literal_eval(data)
            if version == 1:
                peers = []
                for item in items:
                    # migrate the old 'last_connect' key to 'last_good'
                    if 'last_connect' in item:
                        item['last_good'] = item.pop('last_connect')
                    try:
                        peers.append(Peer.deserialize(item))
                    except Exception:
                        pass
                self.add_peers(peers, source='peers file', limit=None)
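
Both versions expect the file to hold a repr'd (version, items) tuple that ast.literal_eval can parse back. A plausible counterpart saver, assuming Peer.serialize() round-trips with the Peer.deserialize() used above and that self.peers holds the known peers (both are assumptions; the project's writer may differ):

def write_peers_file(self):
    # Assumed: Peer.serialize() is the inverse of Peer.deserialize(),
    # and self.peers is the in-memory peer set.
    data = repr((1, [peer.serialize() for peer in self.peers]))
    with util.open_truncate(PEERS_FILE) as f:  # truncate so stale tails never survive
        f.write(data.encode())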
Example #8
def load_one_file(args, cxn, file_name, ends, seq_end_clamp=''):
    """Load sequences from a fasta/fastq file into the atram database."""
    log.info('Loading "{}" into sqlite database'.format(file_name))

    parser = get_parser(args, file_name)

    with util.open_file(args, file_name) as sra_file:
        batch = []

        for rec in parser(sra_file):
            title = rec[0].strip()
            seq = rec[1]
            seq_name, seq_end = blast.parse_fasta_title(
                title, ends, seq_end_clamp)

            batch.append((seq_name, seq_end, seq))

            if len(batch) >= db.BATCH_SIZE:
                db.insert_sequences_batch(cxn, batch)
                batch = []

        # flush the final, possibly partial, batch
        db.insert_sequences_batch(cxn, batch)
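
The loop batches rows so sqlite sees inserts in db.BATCH_SIZE chunks, with the trailing call flushing whatever partial batch remains. The same accumulation pattern as a standalone helper, a generic sketch rather than atram code:

from itertools import islice

def batched(iterable, size):
    # Yield successive lists of up to `size` items; the last may be shorter.
    it = iter(iterable)
    while chunk := list(islice(it, size)):
        yield chunk

With it, the loading loop reduces to a single pass: for chunk in batched(rows, db.BATCH_SIZE): db.insert_sequences_batch(cxn, chunk).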
Example #9
def OnOpenFile(self, event):
    obj = self.myOlv.GetSelectedObject()
    open_file(obj.get_media_path())