Beispiel #1
0
class _SpecPluginTestCase(PluginTester, unittest.TestCase):
    """Base test case for exercising the Spec nose plugin.

    Concrete subclasses set ``suitename`` to pick a fixture module from
    ``_spec_test_cases/`` and then assert on the captured plugin output.
    """

    activate = '--with-spec'
    args = ['--no-spec-color']
    plugins = [Spec()]

    def _get_suitepath(self):
        # Resolve the fixture module path from the subclass's suitename.
        return '_spec_test_cases/%s.py' % self.suitename

    suitepath = property(_get_suitepath)

    def assertContains(self, needle, haystack):
        """Fail unless ``needle`` occurs somewhere inside ``haystack``."""
        message = "Failed to find:\n\n%s\ninside\n%s\n" % (
            _prepend_in_each_line(needle), _prepend_in_each_line(haystack))
        assert needle in haystack, message

    def assertContainsInOutput(self, string):
        """Assert that ``string`` appears in the captured test-run output."""
        self.assertContains(string, six.text_type(self.output))

    def failIfContains(self, needle, haystack):
        """Fail if ``needle`` occurs anywhere inside ``haystack``."""
        message = "Found:\n\n%s\ninside\n%s\n" % (
            _prepend_in_each_line(needle), _prepend_in_each_line(haystack))
        assert needle not in haystack, message

    def failIfContainsInOutput(self, string):
        """Assert that ``string`` does NOT appear in the captured output."""
        self.failIfContains(string, six.text_type(self.output))
Beispiel #2
0
class TestPluginSpecWithFoobazAndStandardPluginsEnabled(
        TestPluginSpecWithFoobaz):
    """Same foobaz suite, but with nose's stock Skip and Deprecated
    plugins active alongside the Spec plugin."""

    plugins = [Spec(),
               nose.plugins.skip.Skip(),
               nose.plugins.deprecated.Deprecated()]
Beispiel #3
0
def train(args):
    """Train a Caspar parser model from the corpora named in ``args``.

    Required ``args`` fields: ``train_corpus``, ``output_folder``,
    ``dev_corpus``, ``train_shuffle_seed`` (plus ``commons`` and
    ``word_embeddings``, read below).  Writes the commons store, an
    optional shuffled copy of the training corpus, and all model output
    under ``args.output_folder``.
    """
    check_present(
        args,
        ["train_corpus", "output_folder", "dev_corpus", "train_shuffle_seed"])

    # Optionally shuffle the training corpus deterministically; the shuffled
    # copy is written under the output folder and used instead of the original.
    train_corpus_path = args.train_corpus
    if args.train_shuffle_seed > 0:
        reader = sling.RecordReader(args.train_corpus)
        items = [(key, value) for key, value in reader]
        reader.close()
        r = random.Random(args.train_shuffle_seed)
        r.shuffle(items)
        train_corpus_path = os.path.join(args.output_folder,
                                         "train_shuffled.rec")
        writer = sling.RecordWriter(train_corpus_path)
        try:
            for key, value in items:
                writer.write(key, value)
        finally:
            # Close the writer even if a write fails, so the partial file
            # is flushed and the handle is released.
            writer.close()
        print("Wrote shuffled train corpus to %s using seed %d" %
              (train_corpus_path, args.train_shuffle_seed))

    # Setting an explicit seed for the sake of determinism.
    torch.manual_seed(1)

    # Make commons store if needed.
    if args.commons == '' or not os.path.exists(args.commons):
        if args.commons == '':
            fname = os.path.join(args.output_folder, "commons")
            print("Will create a commons store at", fname)
            args.commons = fname
        else:
            print("No commons found at", args.commons, ", creating it...")
        _, symbols = commons_builder.build(
            [train_corpus_path, args.dev_corpus], args.commons)
        print("Commons created at", args.commons, "with", len(symbols),
              "symbols besides the usual ones.")

    # Make the training spec.
    spec = Spec()
    spec.build(args.commons, train_corpus_path)

    # Initialize the model with the spec and any word embeddings.
    caspar = Caspar(spec)
    embeddings_file = args.word_embeddings
    if embeddings_file == '':
        # Empty string means "no pretrained embeddings".
        embeddings_file = None
    caspar.initialize(embeddings_file)

    # Scratch folder for evaluation outputs.
    tmp_folder = os.path.join(args.output_folder, "tmp")
    if not os.path.exists(tmp_folder):
        os.makedirs(tmp_folder)

    evaluator = partial(dev_accuracy, args.dev_corpus, tmp_folder)

    output_file_prefix = os.path.join(args.output_folder, "caspar")
    hyperparams = Hyperparams(args)
    print("Using hyperparameters:", hyperparams)

    trainer = Trainer(caspar, hyperparams, evaluator, output_file_prefix)
    # Renamed from `train` to avoid shadowing this function's own name.
    train_data = Corpora(train_corpus_path, spec.commons, gold=True)
    trainer.train(train_data)
Beispiel #4
0
class TestPluginSpecWithDoctestsButDisabled(_SpecPluginTestCase):
    """Spec plugin with doctests collected but --spec-doctests NOT given."""

    activate = '--with-spec'

    # Note: --spec-doctests is deliberately absent from args.
    args = ['--with-doctest', '--doctest-tests', '--no-spec-color']
    plugins = [Spec(), nose.plugins.doctests.Doctest()]
    suitename = 'doctests'

    def test_doesnt_build_specifications_for_doctests_when_spec_doctests_option_wasnt_set(
            self):
        # Neither the doctest module name nor any example description
        # should show up in the spec output.
        self.failIfContainsInOutput("test_doctests")
        self.failIfContainsInOutput("2 + 3 returns 5")
Beispiel #5
0
class TestPluginSpecWithDoctests(_SpecPluginTestCase):
    """Spec plugin with doctest collection and --spec-doctests enabled."""

    activate = '--with-spec'
    args = ['--with-doctest', '--doctest-tests', '--spec-doctests',
            '--no-spec-color']
    plugins = [Spec(), nose.plugins.doctests.Doctest()]
    suitename = 'doctests'

    # Exact specification text expected for the doctest fixture suite.
    expected_test_doctests_output = (
        "doctests\n"
        "- 2 + 3 returns 5\n"
        "- None is nothing\n"
        "- foobar throws \"NameError: name 'foobar' is not defined\"\n"
    )

    def test_builds_specifications_for_doctests(self):
        self.assertContainsInOutput(self.expected_test_doctests_output)
Beispiel #6
0
    def generateVideo(self, scriptFile):
        """Render the video described by the YAML script at ``scriptFile``.

        Loads global settings (frame size, framerate, output file) from the
        root spec, renders frames with a pool of worker threads, writes them
        to a temporary mp4, and finally muxes in audio when an 'audio' spec
        is present in the script.
        """
        with open(scriptFile) as t:
            self.rootSpec = Spec(yaml.safe_load(t), None)

        rootSpec = self.rootSpec
        self.framerate = rootSpec.get('framerate', 30)
        self.frameWidth = rootSpec.get('framewidth', 1440)
        self.frameHeight = rootSpec.get('frameheight', 1080)
        self.outputFrames = rootSpec.get('outputframes')
        self.limitFrames = rootSpec.get('limitframes')
        # Seed the RNG from the script so renders are reproducible.
        random.seed(rootSpec.get('randomseed'))

        outputFile = rootSpec.get('outputfile', 'video.mp4')
        # Video is written without audio first; audio (if any) is muxed below.
        videoOut = outputFile + '.temp.mp4'

        # Initialize data structures
        self.imageSpecQueue = queue.Queue()
        self.imageFrameQueue = queue.Queue()
        self.resultQueue = queue.Queue()
        self.prevSpec = None
        self.allImageSpecsInitialized = False

        # Prepare data structures for processing
        images = rootSpec.get('images', [])
        self.prepareImageSpecs(images, rootSpec)

        # Start one thread to initialize image specs
        threading.Thread(target=self.runnableInitImageSpecs).start()

        # Start processing image specs by launching worker threads
        self.globalFrameN = 0
        for _ in range(self.rootSpec.get('threads', 16)):
            threading.Thread(target=self.runnableProcessFrame).start()

        # In the current thread, wait for and write the results
        self.writer = imageio.get_writer(videoOut,
                                         fps=self.framerate,
                                         macro_block_size=8)
        self.processResults()
        self.writer.close()

        # Mux in the audio track when the script defines one.
        # Fixed idiom: `audioSpec is not None` (was `not audioSpec is None`).
        audioSpec = rootSpec.getSpec('audio')
        if audioSpec is not None:
            self.combineVideoWithAudio(audioSpec, videoOut, outputFile)
Beispiel #7
0
def train(args):
  """Train a Caspar parser model from the corpora named in ``args``.

  Required ``args`` fields: ``train_corpus``, ``output_folder``,
  ``dev_corpus`` (plus ``commons`` and ``word_embeddings``, read below).
  Writes the commons store and all model output under ``args.output_folder``.

  Fixed: the original used Python 2 print statements, which are a
  SyntaxError in Python 3; converted to print() calls for consistency
  with the Python 3 variant of this function.
  """
  check_present(args, ["train_corpus", "output_folder", "dev_corpus"])

  # Setting an explicit seed for the sake of determinism.
  torch.manual_seed(1)

  # Make commons store if needed.
  if args.commons == '' or not os.path.exists(args.commons):
    if args.commons == '':
      fname = os.path.join(args.output_folder, "commons")
      print("Will create a commons store at", fname)
      args.commons = fname
    else:
      print("No commons found at", args.commons, ", creating it...")
    _, symbols = commons_builder.build(
      [args.train_corpus, args.dev_corpus], args.commons)
    print("Commons created at", args.commons, "with", len(symbols),
          "symbols besides the usual ones.")

  # Make the training spec.
  spec = Spec()
  spec.build(args.commons, args.train_corpus)

  # Initialize the model with the spec and any word embeddings.
  caspar = Caspar(spec)
  embeddings_file = args.word_embeddings
  if embeddings_file == '':
    # Empty string means "no pretrained embeddings".
    embeddings_file = None
  caspar.initialize(embeddings_file)

  # Scratch folder for evaluation outputs.
  tmp_folder = os.path.join(args.output_folder, "tmp")
  if not os.path.exists(tmp_folder):
    os.makedirs(tmp_folder)

  evaluator = partial(dev_accuracy,
                      args.commons,
                      args.dev_corpus,
                      tmp_folder)

  output_file_prefix = os.path.join(args.output_folder, "caspar")
  hyperparams = Hyperparams(args)
  print("Using hyperparameters:", hyperparams)

  trainer = Trainer(caspar, hyperparams, evaluator, output_file_prefix)
  # Renamed from `train` to avoid shadowing this function's own name.
  train_data = Corpora(args.train_corpus, spec.commons, gold=True)
  trainer.train(train_data)
Beispiel #8
0
def run(args):
    """Annotate ``args.input`` with the trained parser flow at ``args.parser``
    and write the annotated documents to ``args.output``.

    When ``args.evaluate`` is set, also runs frame evaluation against the
    gold input and returns its result; otherwise returns None.

    Fixed: the original used Python 2 print statements (a SyntaxError in
    Python 3); converted to print() calls.
    """
    check_present(args, ["input", "parser", "output"])
    assert os.path.exists(args.input), args.input
    assert os.path.exists(args.parser), args.parser

    # Read parser flow.
    flow = Flow()
    flow.load(args.parser)

    # Initialize the spec from the flow.
    spec = Spec()
    spec.from_flow(flow)

    # Initialize the model from the flow.
    caspar = Caspar(spec)
    caspar.from_flow(flow)

    corpus = Corpora(args.input, caspar.spec.commons)
    writer = sling.RecordWriter(args.output)
    count = 0
    for document in corpus:
        state, _, _, trace = caspar.forward(document,
                                            train=False,
                                            debug=args.trace)
        state.write()
        if trace:
            trace.write()
        writer.write(str(count), state.encoded())
        count += 1
        if count % 100 == 0:
            # Progress report every 100 documents.
            print("Annotated", count, "documents", now(), mem())
    writer.close()
    print("Annotated", count, "documents", now(), mem())
    print("Wrote annotated documents to", args.output)

    if args.evaluate:
        # The evaluator needs the commons on disk: save to a named temp
        # file, evaluate, then remove it.
        f = tempfile.NamedTemporaryFile(delete=False)
        fname = f.name
        caspar.spec.commons.save(fname, binary=True)
        f.close()
        eval_result = frame_evaluation(gold_corpus_path=args.input,
                                       test_corpus_path=args.output,
                                       commons=caspar.spec.commons)
        os.unlink(fname)
        return eval_result
 def __init__(self):
     """Prompt for per-difficulty marks and build the question spec.

     Reads three integers from stdin (marks for easy, medium, and hard
     questions), wraps them in a Spec, and creates a QuestionStore.
     Spec and QuestionStore are project types — presumably the marking
     scheme and the question container; confirm against their definitions.
     """
     # Marks awarded per question at each difficulty level (read from stdin).
     self.easy = int(input("Enter the marks for easy questions > "))
     self.medium = int(input("Enter the marks for medium questions > "))
     self.hard = int(input("Enter the marks for hard questions > "))
     # Bundle the three mark values into the spec used by the rest of the app.
     self.entry = Spec(self.easy, self.medium, self.hard)
     self.store = QuestionStore()
Beispiel #10
0
# Two machines, identified by id 1 and 2.
machines = [Machine(1), Machine(2)]

# Setup times from Table 2 of the paper.
setup_times = {
    "length": 60,
    "width": 15,
    "thickness": 10,
    "hardness": 20,
    "colour": 15,
}

# Creation of the specs for each job, from Table 2 of the paper.
specs_list = {
    1: [
        Spec("length", 240, setup_times["length"]),
        Spec("width", 12, setup_times["width"]),
        Spec("thickness", 1.5, setup_times["thickness"]),
        Spec("hardness", 7, setup_times["hardness"]),
        Spec("colour", 1, setup_times["colour"])
    ],
    2: [
        Spec("length", 96, setup_times["length"]),
        Spec("width", 36, setup_times["width"]),
        Spec("thickness", 5.0, setup_times["thickness"]),
        Spec("hardness", 8, setup_times["hardness"]),
        Spec("colour", 1, setup_times["colour"])
    ],
    3: [
        Spec("length", 96, setup_times["length"]),
        Spec("width", 24, setup_times["width"]),