def run(rule_number): automaton = automatatron.Engine(rule_number) def stream_handler(row, _): print automatatron.default_string_formatter(row) time.sleep(0.05) automaton.run(handler=stream_handler, width=101)
def create_swarm_input(rule_number): this_dir = os.path.dirname(os.path.realpath(__file__)) data_dir = os.path.join(this_dir, DATA_DIR) swarm_input_file = os.path.join(data_dir, SWARM_INPUT_FILENAME % rule_number) swarm_desc_file = os.path.join(data_dir, SWARM_DESC_FILENAME) with open(swarm_desc_file, "r") as swarm_desc_tmpl: swarm_desc = swarm_desc_tmpl.read() incl_fields = [] midpoint = BITS / 2 for i in xrange(BITS): incl_fields.append(dict( fieldName="bit_%i" % i, fieldType="string" )) swarm_desc = swarm_desc.replace("<INCLUDED_FIELDS>", str(incl_fields)) swarm_desc = swarm_desc.replace("<PREDICTED_FIELD>", "bit_%i" % midpoint) swarm_desc = swarm_desc.replace("<RULE_NUMBER>", str(rule_number)) swarm_desc = swarm_desc.replace("<SOURCE_FILE>", swarm_input_file) swarm_desc_out = os.path.join(data_dir, "swarm_description_%s.py" % rule_number) print "Creating swarm description at %s..." % swarm_desc_out with open(swarm_desc_out, "w") as swarm_desc_out: swarm_desc_out.write(swarm_desc) print "Creating swarm input file at %s..." % swarm_input_file with open(swarm_input_file, "w") as input_file: writer = csv.writer(input_file) names = [] types = [] flags = [] for i in xrange(BITS): names.append("bit_%i" % i) types.append("string") flags.append("") writer.writerow(names) writer.writerow(types) writer.writerow(flags) automaton = automatatron.Engine(int(rule_number)) def stream_handler(row, _): writer.writerow(row) automaton.run(iterations=BITS) automaton.run(handler=stream_handler, width=BITS, iterations=3000)
def test_rule30_prediction_is_perfect_after_600_iterations(self):
  """
  Generates Rule 30 elementary cellular automaton and passes it through
  NuPIC. Asserts that predictions are perfect after X rows of data.
  """
  iterations = 600
  model = ModelFactory.create(rule_30_model_params.MODEL_PARAMS)
  model.enableInference({"predictedField": PREDICTED_FIELD})
  # Rolling window of 1.0/0.0 correctness scores for the last 500 rows.
  prediction_history = deque(maxlen=500)
  # One-element lists give the closure mutable state (no `nonlocal` in Py2).
  counter = [0]
  last_prediction = [None]

  def stream_handler(row, _):
    counter[0] += 1
    # Convert the automaton row into the model's dict input format.
    input_row = {}
    for index, field in enumerate(row):
      input_row["bit_%i" % index] = str(field)
    # Compare this row's actual value against the previous step's prediction.
    prediction = last_prediction[0]
    predicted_index = int(PREDICTED_FIELD.split("_").pop())
    value = str(row[predicted_index])
    correct = (value == prediction)
    count = counter[0]
    if correct:
      prediction_history.append(1.0)
    else:
      prediction_history.append(0.0)
    # Fraction of correct predictions over the rolling window.
    correctness = reduce(lambda x, y: x + y,
                         prediction_history) / len(prediction_history)
    if count == iterations:
      unittest.TestCase.assertEqual(
          self, 1.0, correctness,
          "Predictions should be 100 percent correct after reaching %i "
          "iterations." % iterations)
    # Run the model on this row and remember its prediction for next time.
    result = model.run(input_row)
    prediction = result.inferences["multiStepBestPredictions"][1]
    last_prediction[0] = prediction

  automaton = automatatron.Engine(RULE_NUMBER)
  automaton.run(handler=stream_handler, width=21, iterations=iterations)
def run_io_through_nupic(model, rule_number):
  """Feed automaton rows into a NuPIC model, printing each row against the
  model's previous prediction."""
  prediction_history = deque(maxlen=500)
  # Single-element lists act as mutable cells shared with the closure below.
  row_count = [0]
  previous_prediction = [None]

  def handle_row(row, _):
    row_count[0] += 1
    # Translate the automaton row into the model's dict input format.
    model_input = {}
    for position, cell in enumerate(row):
      model_input["bit_%i" % position] = str(cell)
    # Show this input row compared with the last prediction
    print_current_row_with_last_prediction(
        row, previous_prediction[0],
        int(PREDICTED_FIELD.split("_").pop()),
        prediction_history)
    outcome = model.run(model_input)
    previous_prediction[0] = outcome.inferences["multiStepBestPredictions"][1]

  automaton = automatatron.Engine(rule_number)
  # First pass has no handler; its output is discarded.
  automaton.run(iterations=21)
  automaton.run(handler=handle_row, width=21)
# Build the list of swarm input fields, one string field per automaton bit.
incl_fields = []
# Predict the middle bit of the row (integer division in Python 2).
midpoint = BITS / 2
for i in xrange(BITS):
  incl_fields.append(dict(fieldName="bit_%i" % i, fieldType="string"))
# Fill the template placeholders in the swarm description text.
# NOTE(review): `swarm_desc` is read from a template earlier in this file.
swarm_desc = swarm_desc.replace("<INCLUDED_FIELDS>", str(incl_fields))
swarm_desc = swarm_desc.replace("<PREDICTED_FIELD>", "bit_%i" % midpoint)
with open("swarm_description.py", "w") as swarm_desc_out:
  swarm_desc_out.write(swarm_desc)
with open("swarm_input.csv", "w") as input_file:
  writer = csv.writer(input_file)
  # Three CSV header rows: field names, field types, special flags.
  names = []
  types = []
  flags = []
  for i in xrange(BITS):
    names.append("bit_%i" % i)
    types.append("string")
    flags.append("")
  writer.writerow(names)
  writer.writerow(types)
  writer.writerow(flags)
  # Rule 30 automaton streams its rows straight into the CSV writer.
  automaton = automatatron.Engine(30)
  def stream_handler(row, _):
    writer.writerow(row)
  # Advance BITS iterations without a handler first (output discarded),
  # then record 3000 rows of width BITS.
  automaton.run(iterations=BITS)
  automaton.run(handler=stream_handler, width=BITS, iterations=3000)
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. import automatatron import time print "EXAMPLE 1:" print "Print the first 10 rows of all possible automaton:" for rule in range(1, 257): automaton = automatatron.Engine(rule) automaton.run(iterations=10) print automaton print "EXAMPLE 2:" print "Print the first 50 rows of Rule 30" automaton = automatatron.Engine(30) automaton.run(iterations=50) print automaton print "EXAMPLE 3:" print "Run the next 10 iterations, and pass results into specified handler." def row_handler(row, _): print row