Beispiel #1
0
def expandMotifLayer(motif_layer, motif) :
      expanded_layers = []

      a = motif.name
      a = a.replace('motif', '')
      base_name = motif_layer.name

      old_to_new_name = {}

      #hash the variables
      variables = {}
      skip_me = {}
      for v in motif_layer.motif_layer.variable :
        t = v.split()
        name = t[0]
        if name == 'do_not_use' :
          skip_me[t[1]] = None
        else :  
          if not variables.has_key(name) :
            variables[name] = set()
          variables[t[0]].add(v)

      for layer in motif.layer :
        if skip_me.has_key(layer.name) :
          skip_me[layer.name] = layer

      for x in skip_me.keys() :
        assert(skip_me[x] != None)
  
      #loop over the layers in the motif; turn each one into an actual layer
      #in the model
      for layer in motif.layer :
        if skip_me.has_key(layer.name) :
          print 'not using optional layer:', layer.name
        else :
          new_layer = lbann_pb2.Layer()
          new_layer.CopyFrom(layer)
          print 'constructing layer:', layer.name
  
          #get the variables for this layer, if any
          fake_name = new_layer.name
          vv = None
          if variables.has_key(fake_name) :
            vv = variables[fake_name]
            print '  layer has these variables:', vv
  
          #set a unique name for the layer
          org_name = new_layer.name
          name = base_name + '_' + org_name
          old_to_new_name[org_name] = name
          new_layer.name = name
          if new_layer.parents == "" :
            new_layer.parents = motif_layer.parents
          if new_layer.children == "" :
            new_layer.children = motif_layer.children
  
          #deal with layers that have variables
          if not vv :
            expanded_layers.append(new_layer)
          else :
            string_layer = txtf.MessageToString(new_layer)
            list_layer = string_layer.split('\n')
            for j in range(len(list_layer)) :
              for tuple in vv :
                t = tuple.split()
                field = t[1]
                val = t[2]
                if list_layer[j].find(field) != -1 :
                  h = list_layer[j].rfind(':')
                  assert(h != -1)
                  list_layer[j] = list_layer[j][:h+1] + ' ' + val
                  #assert(list_layer[j].find('-1') != -1)
                  #list_layer[j] = list_layer[j].replace('-1', val)
            string_layer = '\n'.join(list_layer)
            tmp = lbann_pb2.LbannPB()
            tmp = lbann_pb2.Layer()
            txtf.Merge(string_layer, tmp)
            expanded_layers.append(tmp)
      
      #fix parent and child names
      for layer in expanded_layers :
        t = layer.parents.split()
        parents = []
        for p in t :
          if skip_me.has_key(p) :
            n = p
            while skip_me.has_key(n) :
              pp = skip_me[n].parents.split()
              assert(len(pp) == 1)
              n = pp[0]
            assert(old_to_new_name.has_key(n))
            parents.append(old_to_new_name[n])
          elif old_to_new_name.has_key(p) :
            parents.append(old_to_new_name[p])
          else :
            parents.append(p)
        pp = ' '.join(parents)
        layer.parents = pp

        t = layer.children.split()
        children = []
        for p in t :
          if skip_me.has_key(p) :
            n = p
            while skip_me.has_key(n) :
              pp = skip_me[n].children.split()
              assert(len(pp) == 1)
              n = pp[0]
            assert(old_to_new_name.has_key(n))
            parents.append(old_to_new_name[n])
          if old_to_new_name.has_key(p) :
            children.append(old_to_new_name[p])
          else :
            pass
            #print 'xxxxx layer:', layer.name, ' old_to_new_name not found for child:', p
        pp = ' '.join(children)
        layer.children = pp
      return expanded_layers
Beispiel #2
0
def load_prototext(fn) :
  a = lbann_pb2.LbannPB()
  f = open(fn).read()
  print '\n in load_prototext\n'
  pb = txtf.Merge(f, a)
  return pb
Beispiel #3
0
    # Make sure protobuf Python implementation is built
    # NOTE(review): .strip('\n1234567890') removes the trailing newline AND
    # any trailing digits from the hostname -- presumably reducing a node
    # name like 'surface86' to the cluster base name used in the build
    # directory path; confirm against the site's naming scheme
    host = subprocess.check_output('hostname').strip('\n1234567890')
    protoc = lbann_dir + '/build/gnu.' + host + '.llnl.gov/install/bin/protoc'
    proto_python_dir = lbann_dir + '/build/gnu.' + host + '.llnl.gov/protobuf/src/python'
    # tell protobuf's setup.py which protoc binary to use
    os.putenv('PROTOC', protoc)
    # build the protobuf python package in place; shell=True because the
    # command string chains 'cd ...;' with the setup.py invocation
    subprocess.call('cd ' + proto_python_dir + '; ' + sys.executable + ' ' +
                    proto_python_dir + '/setup.py build',
                    shell=True)
    sys.path.append(proto_python_dir)
    import google.protobuf.text_format as txtf

    # Compile LBANN protobuf into a python module under work_dir
    subprocess.call([
        protoc, '-I=' + lbann_proto_dir, '--python_out=' + work_dir,
        lbann_proto_dir + '/lbann.proto'
    ])
    sys.path.append(work_dir)
    # expose the freshly generated module to other functions in this file
    global lbann_pb2
    import lbann_pb2

    # Load template prototext
    with open(template_proto, 'r') as f:
        pb = txtf.Merge(f.read(), lbann_pb2.LbannPB())

    # Configure prototext model
    configure_model(pb.model)

    # Export prototext
    with open(output_proto, 'w') as f:
        f.write(txtf.MessageToString(pb))
Beispiel #4
0
def main(argv) :
  global usage
  if len(argv) != 3 :
    print usage
    exit(9)

  compile_lbann_proto()
  p_path = set_python_search_path()
  build_descriptor_pb2(p_path)
  
  pb = load_prototext(argv[1])
  model = pb.model
  if not has_motifs(model) :
    print 'The input prototext file does not contain motifs; the output file'
    print '"' + argv[2] + '" will contain identical information as the input file: "' + argv[1] + '"'
    write_output(pb, argv[2])
    exit(0)
  
  #build table: motif name -> motif
  motif_defs = pb.motif_definitions
  motifs = {}
  for m in motif_defs.motif :
    motifs[m.name] = m
  
  #make copy of prototext, then delete the layers in the copy
  b = lbann_pb2.LbannPB()
  b.CopyFrom(pb)
  del b.model.layer[:]

  #error check
  model_name = model.name
  known_models = set(['dag_model', 'sequential_model'])
  print 'model_name:', model_name
  if model_name not in known_models :
    print 'nothing known about the model named:', model_name
    print 'please update this code, or fix the prototext file'
    print 'known models:',
    for m in known_models : 
      print m
    exit(9)

  is_sequential = False
  if model_name == 'sequential_model' :
    is_sequential = True

  #fix the names for layers that are not motif_layers, but whose
  #parents and/or children are motif_layers
  fixNames(motifs, model)

  #loop over the layers in the input prototext; expand the motif layers
  for layer in model.layer :
    if not layer.HasField("motif_layer") :
      b.model.layer.extend([layer])
    else :
      #get the requested motif
      id = layer.motif_layer.motif_id
      assert(motifs.has_key(id))
      motif = motifs[id]
      expanded_motif_layers = expandMotifLayer(layer, motif)
      for x in expanded_motif_layers :
        b.model.layer.extend([x])

  b.motif_definitions.Clear()
  print 'calling write_output'
  write_output(b, argv[2])
Beispiel #5
0
import math, os, sys
import lbann_pb2
import google.protobuf.text_format as txtf
# Module-level protobuf message that keras_to_lbann populates
pb = lbann_pb2.LbannPB()

# These two globals are used to handle implicit activations in keras layers
prev_layer = ''
activations = {'relu' : 0, 'sigmoid' : 0,'softmax' : 0,  'tanh' : 0}
# Keras layers that do not exist as single layers in lbann, but can be
# constructed from multiple lbann layers
complex_layers = ['LSTM']

# Main driving function: applies the user-supplied parameters to the
# module-level protobuf 'pb', then walks the keras model object to build
# the protobuf model. (The function body continues beyond this chunk.)
def keras_to_lbann(model, num_classes,
        model_type='directed_acyclic_graph_model', data_layout="data_parallel",
        block_size=256, epochs=20,
        batch_size=64, num_parallel_readers=0,
        procs_per_model=0, callbacks=['timer','print'], target='target'):
    # NOTE(review): 'callbacks' uses a mutable default list -- safe only if
    # the function never mutates it; confirm in the rest of the body
    # set user passed parameters (currently set once for the entire model)
    pb.model.type = model_type
    pb.model.data_layout = data_layout
    pb.model.mini_batch_size = batch_size
    pb.model.block_size = block_size
    pb.model.num_epochs = epochs
    pb.model.num_parallel_readers = num_parallel_readers
    pb.model.procs_per_model = procs_per_model

    # if keras did not provide the default input layer, add an explicit
    # lbann input layer named after the model's first input
    if model.layers[0].name != 'input_1':
        l = pb.model.layer.add()
        l.name = model.input_names[0]
        # NOTE(review): exec appears unnecessary -- l.input.SetInParent()
        # would work directly; left unchanged in this doc-only pass
        exec('l.input.SetInParent()')
        l.input.io_buffer = "partitioned"