def get_prover_options(prover_round_tag='manual',
                       prover_round=-1) -> deephol_pb2.ProverOptions:
  """Returns a ProverOptions proto based on FLAGS.

  Args:
    prover_round_tag: Tag stored in the ProverRound of the returned options.
    prover_round: Round number stored in the ProverRound of the returned
      options.

  Returns:
    A deephol_pb2.ProverOptions parsed from the --prover_options file, with
    flag-based overrides (--max_theorem_parameters, --timeout_seconds)
    applied on top.
  """
  if not FLAGS.prover_options:
    tf.logging.fatal('Mandatory flag --prover_options is not specified.')
  if not tf.gfile.Exists(FLAGS.prover_options):
    tf.logging.fatal('Required prover options file "%s" does not exist.',
                     FLAGS.prover_options)
  prover_options = deephol_pb2.ProverOptions()
  with tf.gfile.Open(FLAGS.prover_options) as f:
    text_format.MergeLines(f, prover_options)
  # Apply the flag override *after* merging the file; merging first would let
  # a value in the options file silently win over the flag, contradicting the
  # warning below.
  if FLAGS.max_theorem_parameters is not None:
    tf.logging.warning(
        'Overriding max_theorem_parameters in prover options to %d.',
        FLAGS.max_theorem_parameters)
    prover_options.action_generator_options.max_theorem_parameters = (
        FLAGS.max_theorem_parameters)
  if prover_options.builtin_library:
    tf.logging.warning('builtin_library is deprecated. Do not provide.')
    if str(prover_options.builtin_library) not in ['core']:
      tf.logging.fatal('Unsupported built in library: %s',
                       prover_options.builtin_library)
  if FLAGS.timeout_seconds is not None:
    prover_options.timeout_seconds = FLAGS.timeout_seconds
  if not FLAGS.output:
    tf.logging.fatal('Missing flag --output [recordio_pattern]')
  # Composite (message) fields cannot be assigned directly in the python
  # protobuf API; use CopyFrom to populate prover_round.
  prover_options.prover_round.CopyFrom(
      deephol_pb2.ProverRound(
          start_seconds=int(round(time.time())),
          tag=prover_round_tag,
          round=prover_round))
  _verify_prover_options(prover_options)
  # Log prover options.
  tf.logging.info('Using prover_options:\n %s', str(prover_options))
  return prover_options
def _run_inlining(self, file_paths, tf_policy_path, size_only):
  """Run inlining for the given IR file under the given policy.

  Args:
    file_paths: path to files needed for inlining, Tuple of (.bc, .cmd).
    tf_policy_path: path to TF policy directory on local disk.
    size_only: whether only return native size.

  Returns:
    A tuple containing:
      sequence_example: A tf.SequenceExample proto describing inlining trace
        (None when size_only is True).
      native_size: Native size of the final native code.

  Raises:
    subprocess.CalledProcessError if process fails.
  """
  working_dir = tempfile.mkdtemp()
  log_path = os.path.join(working_dir, 'log')
  output_native_path = os.path.join(working_dir, 'native')
  input_ir_path, cmd_path = file_paths
  # The .cmd file holds NUL-separated compiler arguments for this module.
  with open(cmd_path) as f:
    cmds = f.read().split('\0')
  try:
    command_line = [self._clang_path] + cmds + [
        '-mllvm', '-enable-ml-inliner=development', input_ir_path, '-mllvm',
        '-training-log=' + log_path, '-o', output_native_path
    ]
    if tf_policy_path:
      command_line.extend(
          ['-mllvm', '-ml-inliner-model-under-training=' + tf_policy_path])
    subprocess.check_call(command_line)
    # llvm-size output is expected to be exactly: header, one data row,
    # trailing newline; the size is the first tab-separated field of the row.
    command_line = [self._llvm_size_path, output_native_path]
    output = subprocess.check_output(command_line).decode('utf-8')
    output_lines = output.split('\n')
    if len(output_lines) != 3:
      raise RuntimeError('Wrong llvm-size output %s' % output)
    native_size = int(output_lines[1].split('\t')[0])
    if size_only:
      return None, native_size
    with io.open(log_path, 'r') as f:
      sequence_example = text_format.MergeLines(f,
                                                tf.train.SequenceExample())
    return sequence_example, native_size
  finally:
    # Always remove the scratch dir — the original leaked it whenever the
    # compile or parse raised, since cleanup only ran on the success paths.
    tf.io.gfile.rmtree(working_dir)
def testMergeLinesGolden(self):
  """Parses the golden text-format data file and checks the result."""
  golden_lines = self.ReadGolden('text_format_unittest_data.txt')
  parsed = unittest_pb2.TestAllTypes()
  returned = text_format.MergeLines(golden_lines, parsed)
  # MergeLines hands back the very message instance it was given.
  self.assertIs(returned, parsed)
  expected = unittest_pb2.TestAllTypes()
  test_util.SetAllFields(expected)
  self.assertEqual(expected, parsed)
def load_theorem_database_from_file(
    filename: Text) -> proof_assistant_pb2.TheoremDatabase:
  """Load a theorem database from a text protobuf file.

  Args:
    filename: Path of the text-format TheoremDatabase file.

  Returns:
    The parsed TheoremDatabase proto.
  """
  database = proof_assistant_pb2.TheoremDatabase()
  with tf.gfile.Open(filename) as proto_file:
    text_format.MergeLines(proto_file, database)
  tf.logging.info(
      'Successfully read theorem database from %s (%d theorems).', filename,
      len(database.theorems))
  return database
def load_text_proto(filename, proto_constructor=None):
  """Load a protobuf from a text format file.

  Args:
    filename: Name of the file to be read.
    proto_constructor: Optional constructor for the proto object to parse
      into. Defaults to deephol_pb2.ProofLog for backward compatibility.
      (The original docstring advertised this parameter but the signature
      did not accept it.)

  Returns:
    A protobuf parsed from the text file.
  """
  if proto_constructor is None:
    proto_constructor = deephol_pb2.ProofLog
  proto = proto_constructor()
  with tf.gfile.Open(filename) as f:
    text_format.MergeLines(f, proto)
  return proto
def load_theorem_database_from_file(
    filename: Text) -> proof_assistant_pb2.TheoremDatabase:
  """Load a theorem database from a text protobuf file."""
  theorem_database = proof_assistant_pb2.TheoremDatabase()
  if filename.endswith('.recordio'):
    # A recordio file may hold several databases; take the first one.
    databases = list(
        recordio_util.read_protos_from_recordio(
            filename, proof_assistant_pb2.TheoremDatabase))
    theorem_database = databases[0]
  else:
    with tf.gfile.Open(filename) as proto_file:
      text_format.MergeLines(proto_file, theorem_database)
  tf.logging.info(
      'Successfully read theorem database from %s (%d theorems).', filename,
      len(theorem_database.theorems))
  return theorem_database
def load_text_proto(filename: Text,
                    proto_constructor,
                    description: Optional[Text] = None):
  """Load a protobuf from a text format file.

  Args:
    filename: Name of the file to be read.
    proto_constructor: The constructor method for the proto object.
    description: Optional string describing the content of the proto file.

  Returns:
    A protobuf parsed from the text file.
  """
  message = proto_constructor()
  with tf.gfile.Open(filename) as proto_file:
    text_format.MergeLines(proto_file, message)
  if description:
    tf.logging.info('Successfully read %s from "%s"', description, filename)
  return message
def from_pbtxt_file(spec_path):
  """Loads a spec encoded as a struct_pb2.StructuredValue from a pbtxt file."""
  spec = struct_pb2.StructuredValue()
  with tf.io.gfile.GFile(spec_path, "rb") as spec_file:
    # text_format accepts the binary-mode line iterator directly.
    text_format.MergeLines(spec_file, spec)
  return from_proto(spec)
def load_tactics(filename) -> List[deephol_pb2.Tactic]:
  """Reads the list of tactics from a text-format TacticsInfo file."""
  info = deephol_pb2.TacticsInfo()
  with tf.gfile.GFile(filename, 'r') as tactics_file:
    text_format.MergeLines(tactics_file, info)
  return info.tactics