Ejemplo n.º 1
0
                       "returned %s" % (str(searchDef)))
  

  # ------------------------------------------------------------------------
  # Figure out the output directory if not provided
  if options.outDir is None:
    options.outDir = os.path.dirname(searchFileName)
      
  
  # ------------------------------------------------------------------------
  # Run through expGenerator
  expGenArgs = ['--description=%s' % (json.dumps(searchDef)),
                '--version=v2',
                '--outDir=%s' % (options.outDir)]
  print "Running ExpGenerator with the following arguments: ", expGenArgs
  expGenerator(expGenArgs)

  
  # Get the permutations file name
  permutationsFilename = os.path.join(options.outDir, 'permutations.py')
  
  print "Successfully generated permutations file: %s" % (permutationsFilename)
  

  
  
  
  
  
  
  
Ejemplo n.º 2
0
  def createProceduralDataset(self, fnText, iterations):
    """ Create a dataset from the text of a function.

    The user-supplied function body in fnText is wrapped into a generated
    function ``def foo(t)`` whose body fills a SamplingDict; any ``return``
    lines in the user text are stripped and replaced by the wrapper's own
    ``return fields``.  The wrapper is compiled and executed to obtain the
    callable, a ProceduralDataset is built from it and appended to
    self.datasets (becoming the current dataset), and an experiment
    description is generated via expGenerator into a temp directory whose
    description.py text is stored on self.descriptionText.

    Parameters:
    -----------------------------------------------------------------------
    fnText:      source text of the data-generating function body (one
                 statement per line); lines containing 'return' are dropped
    iterations:  number of iterations passed to ProceduralDataset

    retval:      True on success
    """
    # Wrap the user code into "def foo(t):" -- each user line is indented
    # one tab so it lands inside foo's body.
    fnLines = fnText.split('\n')

    filteredText = ['def foo(t):',
                    '\tfields=SamplingDict()'
                    ]

    # Filter out lines with return statements; the wrapper supplies its own
    # "return fields" at the end so foo always yields the SamplingDict.
    for line in fnLines:
      if line.find('return') >= 0:
        continue
      filteredText.append('\t'+line)

    filteredText.append('\treturn fields')

    fnText = '\n'.join(filteredText)
    code = compile(fnText, "<string>", "exec")


    # -----------------------------------------------------------------------
    # Import global modules available to the function
    import random
    import numpy
    import string
    import math
    # Shared, bounded history (last 20 entries) visible to foo and also
    # handed to the ProceduralDataset below.
    history = deque([], 20)

    globs={'random': random,
           'numpy':numpy,
           'string':string,
           "math": math,
           'SamplingDict':SamplingDict,
           'history':history
           }
    locs = {}
    # SECURITY NOTE: this executes arbitrary caller-supplied code; fnText
    # must come from a trusted source.
    eval(code, globs, locs)


    foo = locs['foo']

    dataset = ProceduralDataset(foo, iterations, history)
    self.datasets.append(dataset)
    self.currentDataset = len(self.datasets) - 1

    # Build the includedFields list for the experiment description from the
    # dataset's field metadata.
    datasetInfo = self.datasets[self.currentDataset].getDatasetFieldMetaData()
    includedFields = []
    for fieldInfo in datasetInfo:
      includedFields.append({'fieldName': fieldInfo.name,
                             'fieldType':fieldInfo.type})


    # The first field is used as the predicted field (1-step temporal
    # multistep inference).
    expDesc = json.dumps(dict( environment = "grok",
                    inferenceType=InferenceType.TemporalMultiStep,
                    inferenceArgs={"predictedField":datasetInfo[0].name,
                                    "predictionSteps":[1]},
                    includedFields=includedFields,
                    streamDef=self._DUMMY_STREAMDEF,
                   ))

    # Generate the experiment files into a fresh temp directory.
    # NOTE(review): the temp directory is not removed here -- presumably
    # __getCurrentModelFromDir needs it to persist; confirm before cleaning up.
    tempDir = tempfile.mkdtemp()
    expGenerator(["--description=%s"%expDesc, "--outDir=%s"%tempDir, "--version=v2"])

    descFile = os.path.join(tempDir, "description.py")

    # Use a context manager so the handle is closed even if read() raises
    # (the original open/read/close leaked the handle on error).
    with open(descFile) as f:
      self.descriptionText = f.read()

    self.__getCurrentModelFromDir(tempDir)
    return True