def _make_index_hashsum(self, total_files):
    """Build an md5 checksum fingerprinting the index configuration.

    The hash combines the extension's search paths, its skip-tests
    flag, the running Python version, the DB schema version, and the
    total file count, so a change to any of them produces a new hash.
    """
    components = [
        '+'.join(self._extension.paths),
        str(self._extension.skip_tests),
        sys.version,
        str(DB_VERSION),
        str(total_files),
    ]
    return md5_hash('+'.join(components))
def commit(self, total_files=None):
    """Commit the active writer and optionally persist a db checksum.

    When ``total_files`` is given, a checksum of the current project
    configuration is written so a later open() can detect staleness.

    Raises:
        Exception: if there is no active writer to commit.
    """
    writer = self._writer
    if not writer:
        raise Exception('Writer is empty')
    writer.commit()
    self._writer = None
    if total_files is None:
        return
    # NOTE(review): this formula omits skip_tests, unlike
    # _make_index_hashsum — confirm the difference is intentional.
    checksum_source = '%s+%s+%s+%s' % (
        '+'.join(self._extension.paths),
        sys.version,
        DB_VERSION,
        total_files,
    )
    self._write_checksum(md5_hash(checksum_source))
def flush(self, ):
    """Serialize the current client (with its pending transaction)
    to its shard's output stream, then clear the in-progress state.

    No-op when there is no active client.
    """
    client = self._client
    if client is None:
        return
    client.add_transaction(self._transaction.as_dict())
    # Rows are sharded by client_id so one client's records land in
    # the same output shard.
    shard = md5_hash(client.client_id()) % self.n_shards
    record = json.dumps(client.as_dict())
    self.outs[shard].write(record + "\n")
    self._client = None
    self._transaction = None
def __init__(self, extension, workspace_name):
    """Initialize indexer state and ensure the data directory exists.

    Args:
        extension: object exposing the project configuration read here
            (at minimum it is stored for later use by other methods).
        workspace_name: workspace identifier; an 8-char md5 prefix of
            it names this workspace's data on disk.
    """
    self._extension = extension
    self._workspace_hash_name = md5_hash(workspace_name)[:8]
    self._report_listener = None
    self._last_report_time = 0
    self._total_items = 0
    self._writer = None
    # Create target temp path. EAFP instead of exists()+mkdir: the
    # old check-then-create had a race where another process could
    # create the directory between the check and the mkdir, making
    # mkdir raise. Tolerating FileExistsError closes that window
    # while still propagating real failures (e.g. missing parent).
    data_path = self._get_path()
    try:
        mkdir(data_path)
    except FileExistsError:
        pass
def __init__(self, extension, workspace_name):
    """Initialize indexer state and ensure the data directory exists.

    Args:
        extension: object exposing the project configuration; stored
            for later use by other methods.
        workspace_name: workspace identifier; an 8-char md5 prefix of
            it names this workspace's data on disk.
    """
    self._extension = extension
    self._workspace_hash_name = md5_hash(workspace_name)[:8]
    self._report_listener = None
    self._last_report_time = 0
    self._total_items = 0
    self._writer = None
    # Create target temp path. exist_ok=True tolerates an already
    # present directory, but — unlike the previous blanket
    # `except OSError: pass` — no longer hides real failures such as
    # permission errors or a read-only filesystem.
    data_path = self._get_path()
    makedirs(data_path, exist_ok=True)
def open(self):
    """Open the existing index if its stored checksum is still valid.

    Returns True when the index was opened; returns None (falsy) when
    the stored checksum is stale or opening the index fails.
    """
    # Quickly count files in project (include system files)
    indexer = QuickIndexer(self._extension.paths, self._extension.skip_tests)
    indexer.build()
    expected = md5_hash('%s+%s+%s+%s' % (
        '+'.join(self._extension.paths),
        sys.version,
        DB_VERSION,
        indexer.total_files,
    ))
    if self._read_checksum() != expected:
        return
    try:
        self._open_index()
    except Exception:
        # Any failure while opening yields the same falsy result as a
        # stale checksum.
        return
    return True