def test_flip(self):
    """util.flip must leave no ascending (first < second) trailing pairs and keep rank."""

    def check(flipped, expected_rank):
        pairs = flipped.view(-1, 2)
        # no pair may have its first element smaller than its second
        self.assertEqual((pairs[:, 0] < pairs[:, 1]).sum(), 0)
        self.assertEqual(len(flipped.size()), expected_rank)

    check(util.flip(torch.rand(6, 3, 9, 3, 2)), 5)
    check(util.flip(torch.randint(10, size=(3, 128, 2))), 3)
def load_network(configs):
    """Read a signed edge list from configs['network_file'] into a directed graph.

    The first line of the file is a header and is skipped. Each remaining line
    holds at least "source target [sign]"; '+' maps to sign=1, '-' to sign=-1,
    and a missing sign is drawn at random via util.flip(). Node tokens are
    stripped of quotes and parentheses.

    Returns an nx.DiGraph whose edges carry a 'sign' attribute.
    Exits the process on a malformed sign token.
    """
    # note: with nx.Graph (undirected), there are 2951 edges,
    # with nx.DiGraph (directed), there are 3272 edges

    def _clean(token):
        # strip whitespace plus the quote/paren decoration around node names
        return str(token).strip().replace("'", '').replace('(', '').replace(')', '')

    M = nx.DiGraph()
    # with-statement: the original leaked the file handle
    with open(configs['network_file'], 'r') as edges_file:
        next(edges_file)  # ignore the header line
        for e in edges_file:
            interaction = e.split()
            assert len(interaction) >= 2
            source, target = _clean(interaction[0]), _clean(interaction[1])
            if len(interaction) > 2:
                sign_token = str(interaction[2])
                if sign_token == '+':
                    Ijk = 1
                elif sign_token == '-':
                    Ijk = -1
                else:
                    # FIX: previously printed str(edges_file) — the file-object
                    # repr — instead of the offending file's path
                    print("Error: bad interaction sign in file "
                          + str(configs['network_file']) + "\nExiting...")
                    sys.exit()
            else:
                # unsigned edge: random sign (util.flip presumably returns +/-1 — confirm)
                Ijk = util.flip()
            M.add_edge(source, target, sign=Ijk)
    return M
def randomTerm( self ):
    """Build a random terminal: (player, memory-slot) tuple."""
    # coin flip decides whose move history the terminal references
    who = srctype.OPPONENT if util.flip() else srctype.PLAYER
    # -1 here since this will reference our list blindly
    slot = random.randint(0, self.agent.mem - 1)
    return (who, slot,)
def act(self, policy, verbose=False):
    """Pick an action: explore uniformly with probability epsilon, otherwise
    Thompson-style — sample once from each action's value distribution and
    act greedily on the samples.

    policy: 1-element container holding an iterable of (mean, std) pairs,
            one per action — TODO confirm against caller.
    verbose: print the distributions before sampling.
    Returns an int action index.
    """
    # NOTE(review): `random.normal` does not exist in the stdlib `random`
    # module, and the randint upper bound is exclusive only in numpy —
    # `random` here is presumably numpy.random; confirm the file's imports.
    if util.flip(self.epsilon):
        return random.randint(0, self.action_size)
    # unpack the single element; raises if policy has any other length
    [qDists] = policy
    if verbose:
        print("qDists", qDists)
    # one Gaussian draw per action, then greedy argmax over the draws
    samples = [random.normal(mean, std) for mean, std in qDists]
    return argmax(samples)
def sample_tftd():
    """Create one randomly-populated TFTD record (e.g. for testing)."""

    def _mask(length):
        # boolean mask: True wherever the 0/1 draw came up 0
        return np.random.randint(2, size=(length), dtype=np.int32) < 1

    # draw the sizes first — keeps the RNG call order identical
    num_vars = np.random.randint(10000)
    num_clauses = np.random.randint(100000)
    num_cells = np.random.randint(1000000)
    return TFTD(dp_id=np.random.randint(10000),
                is_train=util.flip(0.5),
                n_vars=num_vars,
                n_clauses=num_clauses,
                CL_idxs=np.random.randint(num_clauses,
                                          size=(num_cells, 2),
                                          dtype=np.int32),
                core_var_mask=_mask(num_vars),
                core_clause_mask=_mask(num_clauses))
def submit_share(self, job_id, worker_name, session, extranonce1_bin, data,
                 difficulty):
    """Validate a submitted share for `job_id` and, if it meets the network
    target, submit the finalized block to the coin daemon.

    data: full hex-serialized block header+payload from the miner; the ntime
          field is taken from data[136:144] and the header from data[0:168].
    difficulty: session share target the hash must meet.
    Returns (block_hash_hex, share_diff, on_submit-or-None).
    Raises SubmitException on any validation failure.
    """
    # Look up the job this share claims to solve.
    job = self.get_job(job_id)
    if job == None:
        raise SubmitException("Job '%s' not found" % job_id)

    # ntime is embedded in the submitted data; util.flip presumably
    # byte-reverses the hex — TODO confirm against util.
    ntime = util.flip(data[136:144])
    if not job.check_ntime(int(ntime, 16)):
        raise SubmitException("Ntime out of range")

    # Reject duplicate submissions of the same data.
    if not job.register_submit(data):
        log.info("Duplicate from %s, (%s %s)" % \
            (worker_name, binascii.hexlify(extranonce1_bin), data))
        raise SubmitException("Duplicate share")

    # Proof-of-work difficulty of this share (gapcoin's prime-gap PoW).
    hash_int = gapcoin_hash.getpowdiff(str(data))
    block_hash_bin = util.doublesha(binascii.unhexlify(data[0:168]))
    block_hash_hex = util.rev(binascii.hexlify(block_hash_bin))
    '''log.info("block_hash_hex %s" % block_hash_hex)
    log.info("shrint %s" % hash_int)
    log.info("jobint %s" % job.target)%f
    log.info("target %s" % difficulty)'''

    # The share must at least meet the session difficulty.
    if hash_int < difficulty:
        raise SubmitException("Share less than target")

    # Normalized share difficulty (2^48 scaling).
    share_diff = float(float(hash_int) / float(pow(2, 48)))

    # Meets the job (network) target: build and submit the actual block.
    if hash_int >= job.target:
        log.info("BLOCK CANDIDATE! %s" % block_hash_hex)
        extranonce2_bin = struct.pack('>L', 0)
        #self.last_block.vtx[0].set_extranonce(extranonce1_bin + extranonce2_bin)
        #txs = binascii.hexlify(util.ser_vector(self.last_block.vtx))
        job.vtx[0].set_extranonce(extranonce1_bin + extranonce2_bin)
        txs = binascii.hexlify(util.ser_vector(job.vtx))
        serialized = str(data) + str(txs)
        on_submit = self.bitcoin_rpc.submitblock(str(data), str(txs),
                                                 block_hash_hex)
        if on_submit:
            self.update_block()
        return (block_hash_hex, share_diff, on_submit)

    return (block_hash_hex, share_diff, None)
def breed( self, p1, p2 ):
    """Populate this board by inheriting bulb placements from parents p1/p2.

    Each unlit square flips a coin to choose a donor parent; if the donor
    has a bulb there, the bulb is copied onto this board.
    """
    candidates = self.graph.sqgt[gt.UNLIT].copy( )
    random.shuffle( candidates )
    self.birth = self.gen.num
    while len( candidates ) > 0:
        sqr = candidates.pop( )
        if sqr.isBlack( ):
            continue
        # the coin flip picks which parent this square inherits from
        donor = p1 if flip( ) else p2
        if donor.graph.data[sqr.x][sqr.y].type == gt.BULB:
            self.graph.addLight( sqr.x, sqr.y, False )
            # sanity check: the light must actually have been placed
            if self.graph.data[sqr.x][sqr.y].type != gt.BULB:
                raise TypeError("NOT A BULB!")
def map_callback(self, data):
    """Convert an incoming occupancy-grid message into self.cv_map (a grayscale
    image: unknown=127, occupied=0, free=255), then generate and transform the
    navigation goals.

    data: occupancy-grid-style message with .info.width/.height/.resolution/
          .origin and a flat .data array — TODO confirm message type.
    """
    print("--------Map callback--------")
    size_x = data.info.width
    size_y = data.info.height
    self.cv_map = np.zeros(shape=(size_y, size_x))
    if size_x < 3 or size_y < 3:
        print(
            "Map size is only x: {}, y: {}. Not running map to image conversion."
            .format(size_x, size_y))
        # FIX: previously fell through and processed the degenerate map anyway
        return
    rows, columns = self.cv_map.shape
    # FIX: was `and` plus np.array([size_y, size_x]) (a 2-element vector);
    # reallocate a zeroed image on any dimension mismatch. (The branch is
    # currently unreachable since cv_map was just allocated above.)
    if rows != size_y or columns != size_x:
        self.cv_map = np.zeros(shape=(size_y, size_x))
    self.map_resolution = data.info.resolution
    self.map_transform = data.info.origin
    # row 0 of the message is the map's bottom row, hence the vertical flip
    grid = flip(np.reshape(data.data, (size_y, size_x)), 0)
    for i in range(size_y):
        for j in range(size_x):
            if grid[i][j] == -1:       # unknown
                self.cv_map[i][j] = 127
            elif grid[i][j] == 100:    # occupied
                self.cv_map[i][j] = 0
            elif grid[i][j] == 0:      # free
                self.cv_map[i][j] = 255
            else:
                print('Error at i:' + str(grid[i][j]))
    print("Map successfully saved.")
    pixel_goals = self.goal_generator.generate_points()
    self.viewpoints = [self.transform_map_point(p) for p in pixel_goals]
    self.goals_left = self.viewpoints[:]
    print("Transformed goals to map coordinates")
    print("Viewpoints: ", self.viewpoints)
    self.state = states.READY_FOR_GOAL
def __init__( self, agent, maxdepth, method ):
    """Build a random program tree for `agent` using GROW, FULL, or
    ramped half-and-half initialization."""
    self.agent = agent
    self.meth = method
    # The max depth our tree should have
    # FIXME: This should be generation-level
    self.maxdepth = maxdepth
    self.nodes = []   # every node in the tree
    self.root = None  # set by populate()
    self.depth = 0    # our children update our depth as they're added
    # Half-and-half flips a coin for GROW; flip() fires only in that mode.
    use_grow = ( self.meth == HALFANDHALF and util.flip( ) ) or self.meth == GROW
    if use_grow:
        self.populate( GROW )
    elif self.meth in ( HALFANDHALF, FULL ):
        # Otherwise we do full initialization
        self.populate( FULL )
def load_network (configs):
    """Read a signed edge list from configs['network_file'] into a directed
    graph, printing diagnostic counts along the way.

    Each data line holds "source target [sign]"; '+' -> sign=1, '-' -> sign=-1,
    missing sign -> random via util.flip(). Returns the nx.DiGraph, or the
    conservation-score-weighted graph when configs['biased'] is truthy.
    Exits the process on a malformed sign token.
    """
    # note: with nx.Graph (undirected), there are 2951 edges,
    # with nx.DiGraph (directed), there are 3272 edges
    M = nx.DiGraph()
    i = 0
    set_nodes = []
    # with-statement: the original leaked the file handle
    with open(configs['network_file'], 'r') as edges_file:
        next(edges_file)  # ignore the first line
        for e in edges_file:
            i += 1
            interaction = e.split()  # split once, not three times
            set_nodes.append(interaction[0])
            set_nodes.append(interaction[1])
            assert len(interaction) >= 2
            source, target = str(interaction[0]), str(interaction[1])
            if source == target:
                print ("source == target")
            if (len(interaction) > 2):
                if (str(interaction[2]) == '+'):
                    Ijk = 1
                elif (str(interaction[2]) == '-'):
                    Ijk = -1
                else:
                    # FIX: `network_edge_file` was undefined here (NameError
                    # on the error path); report the configured path instead
                    print ("Error: bad interaction sign in file "
                           + str(configs['network_file']) + "\nExiting...")
                    sys.exit()
            else:
                Ijk = util.flip()
            M.add_edge(source, target, sign=Ijk)
    print ("lines "+str(i))
    print ("nx edges "+str(len(M.edges())))
    print ("nx nodes "+str(len(M.nodes())))
    print ("set nodes "+str(len(set(set_nodes))))
    # conservation scores:
    if not configs['biased']:
        return M
    else:
        return conservation_scores (M, configs)
def submit_share(self, job_id, worker_name, session, extranonce1_bin, extranonce2, ntime, nonce, difficulty):
    '''Check parameters and finalize the block template. If it leads to a valid
    block candidate, asynchronously submits the block back to the bitcoin
    network, and likewise submits an aux-pow to the merged-mining network.

    - extranonce1_bin is binary. No checks performed, it should be from session data
    - job_id, extranonce2, ntime, nonce - in hex form sent by the client
    - difficulty - decimal number from session, again no checks performed

    Returns (header_hex, hash_hex, mm_hash, share_diff, on_submit, mm_submit);
    raises SubmitException on any validation failure.
    '''
    # Check if extranonce2 looks correctly. extranonce2 is in hex form...
    if len(extranonce2) != self.extranonce2_size * 2:
        raise SubmitException("Incorrect size of extranonce2. Expected %d chars" % (self.extranonce2_size*2))

    # Check for job
    job = self.get_job(job_id)
    if job == None:
        raise SubmitException("Job '%s' not found" % job_id)

    # Check if ntime looks correct
    if len(ntime) != 8:
        raise SubmitException("Incorrect size of ntime. Expected 8 chars")
    if not job.check_ntime(int(ntime, 16)):
        raise SubmitException("Ntime out of range")

    # Check nonce
    if len(nonce) != 8:
        raise SubmitException("Incorrect size of nonce. Expected 8 chars")

    # Check for duplicated submit
    if not job.register_submit(extranonce1_bin, extranonce2, ntime, nonce):
        log.info("Duplicate from %s, (%s %s %s %s)" % \
            (worker_name, binascii.hexlify(extranonce1_bin), extranonce2, ntime, nonce))
        raise SubmitException("Duplicate share")

    # Now let's do the hard work!
    # ---------------------------

    # 0. Some sugar
    extranonce2_bin = binascii.unhexlify(extranonce2)
    ntime_bin = binascii.unhexlify(ntime)
    nonce_bin = binascii.unhexlify(nonce)

    # 1. Build coinbase
    coinbase_bin = job.serialize_coinbase(extranonce1_bin, extranonce2_bin)
    coinbase_hash = util.doublesha(coinbase_bin)

    # 2. Calculate merkle root
    merkle_root_bin = job.merkletree.withFirst(coinbase_hash)
    merkle_root_int = util.uint256_from_str(merkle_root_bin)

    # 3. Serialize header with given merkle, ntime and nonce
    header_bin = job.serialize_header(merkle_root_int, ntime_bin, nonce_bin)

    # 4. Reverse header and compare it with target of the user
    # (the join word-swaps the 80-byte header into the hash input order)
    if settings.COINDAEMON_ALGO == 'scrypt':
        hash_bin = ltc_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
    elif settings.COINDAEMON_ALGO == 'scrypt-jane':
        hash_bin = yac_scrypt.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]), int(ntime, 16))
    elif settings.COINDAEMON_ALGO == 'quark':
        hash_bin = quark_hash.getPoWHash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
    elif settings.COINDAEMON_ALGO == 'skeinhash':
        hash_bin = skeinhash.skeinhash(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
    else:
        hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
    hash_int = util.uint256_from_str(hash_bin)
    scrypt_hash_hex = "%064x" % hash_int
    header_hex = binascii.hexlify(header_bin)
    # pad the hex header out to the size the daemon expects for these algos
    if settings.COINDAEMON_ALGO == 'scrypt' or settings.COINDAEMON_ALGO == 'scrypt-jane':
        header_hex = header_hex+"000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000"
    elif settings.COINDAEMON_ALGO == 'quark':
        header_hex = header_hex+"000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000"
    else:
        pass

    target_user = self.diff_to_target(difficulty)
    if hash_int > target_user:
        raise SubmitException("Share is above target")

    # Mostly for debugging purposes
    target_info = self.diff_to_target(100000)
    if hash_int <= target_info:
        log.info("Yay, share with diff above 100000")

    # Algebra tells us the diff_to_target is the same as hash_to_diff
    share_diff = int(self.diff_to_target(hash_int))

    on_submit = None
    mm_submit = None

    if settings.SOLUTION_BLOCK_HASH:
        # Reverse the header and get the potential block hash (for scrypt only)
        # only do this if we want to send in the block hash to the shares table
        block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
        block_hash_hex = block_hash_bin[::-1].encode('hex_codec')

    # 5. Compare hash with target of the network
    if hash_int <= job.target:
        # Yay! It is block candidate!
        log.info("We found a block candidate! %s" % scrypt_hash_hex)
        # Reverse the header and get the potential block hash (for scrypt only)
        #if settings.COINDAEMON_ALGO == 'scrypt' or settings.COINDAEMON_ALGO == 'sha256d':
        #   if settings.COINDAEMON_Reward == 'POW':
        block_hash_bin = util.doublesha(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
        block_hash_hex = block_hash_bin[::-1].encode('hex_codec')
        #else:   block_hash_hex = hash_bin[::-1].encode('hex_codec')
        #else:  block_hash_hex = hash_bin[::-1].encode('hex_codec')

        # 6. Finalize and serialize block object
        job.finalize(merkle_root_int, extranonce1_bin, extranonce2_bin, int(ntime, 16), int(nonce, 16))
        if not job.is_valid():
            # Should not happen
            log.exception("FINAL JOB VALIDATION FAILED!(Try enabling/disabling tx messages)")

        # 7. Submit block to the network
        serialized = binascii.hexlify(job.serialize())
        on_submit = self.bitcoin_rpc.submitblock(serialized, block_hash_hex, scrypt_hash_hex)
        if on_submit:
            self.update_block()

    # 8. Compare hash with target of mm network
    # NOTE(review): reconstructed from a whitespace-mangled source; this step
    # is placed at function level (not inside the step-5 branch) — confirm
    # against upstream stratum-mining merged-mining forks.
    if hash_int <= job.mm_target:
        log.info("We found a mm block candidate! %s" % scrypt_hash_hex)
        coinbase_hex = binascii.hexlify(coinbase_bin)
        branch_count = job.merkletree.branchCount()
        branch_hex = job.merkletree.branchHex()
        parent_hash = util.rev("%064x" % hash_int)
        parent_header = util.flip(header_hex)
        submission = coinbase_hex + parent_hash + branch_count + branch_hex + "000000000000000000" + parent_header;
        mm_submit = self.mm_rpc.getauxblock(self.mm_hash,submission)
        log.debug("Coinbase:%s",coinbase_hex)
        log.debug("Branch Count:%s",branch_count)
        log.debug("Branch Hex:%s",branch_hex)
        log.debug("Parent Hash:%s",parent_hash)
        log.debug("Parent Header:%s",parent_header)
        log.debug("MM Hash:%s",self.mm_hash)
        log.debug(" AuxPow:%s",submission)
        log.debug(" Res:"+str(mm_submit))

    if settings.SOLUTION_BLOCK_HASH:
        return (header_hex, block_hash_hex, self.mm_hash, share_diff, on_submit, mm_submit)
    else:
        return (header_hex, scrypt_hash_hex, self.mm_hash, share_diff, on_submit, mm_submit)
def randomNodule( self, parent, meth=None ):
    """Create one random tree node: a terminal leaf when GROW's coin flip
    says stop or when the depth limit forces it; otherwise an internal node."""
    # GROW may randomly stop early; depth limits always force a terminal
    if ( meth == GROW and util.flip( ) ) or self.maxdepth <= 1 \
            or ( parent != None and parent.depth == self.maxdepth - 2 ):
        return node( self, parent, op=self.randomTerm( ), leaf=True )
    return node( self, parent, leaf=False )
def _find_matching_tile(glue, tiles_to_place, tiles, edges, flipped_edges, c, r):
    """Find the unique unplaced tile with an edge equal to `glue`.

    Checks face-up edges first, then flipped edges; if the match is face
    down, flips the tile in place. The asserts enforce that exactly one
    candidate exists. (c, r) are only used for the debug prints.
    Returns the matching tile id.
    """
    found = None
    face_down = False
    for t in tiles_to_place:
        for e in range(4):
            if edges[e][t] == glue:
                assert not found, "uh oh" + str((found, t))
                found = t
                print('found', t, 'face up', c, r)
            if flipped_edges[e][t] == glue:
                assert not found, 'ooopsie'
                found = t
                face_down = True
                print('found', t, 'face down', c, r)
    if face_down:
        util.flip(found, tiles, edges, flipped_edges)
    return found


def construct_grid(tiles):
    """Assemble the jigsaw of tiles into a grid_dim x grid_dim grid.

    Rotates a corner tile into the top-left position, then fills each column
    top-to-bottom, rotating/flipping each tile so adjacent edges match. The
    resulting grid (indexed [column][row]) may still need flipping/rotating
    as a whole by the caller.
    """
    edges, flipped_edges = util.edges(tiles)
    corners, corner_unique_edges = util.find_corners(tiles)
    print('edges', edges)
    print('flipped_edges', flipped_edges)

    # Rotate the first corner until its two unique (outward-facing) edges
    # are its left (3) and top (0) sides — i.e. it sits top-left.
    i = 0
    while not (set(corner_unique_edges[0]) == set(
            [edges[3][corners[0]], edges[0][corners[0]]])):
        util.rotate(corners[0], tiles, edges, flipped_edges)
        i += 1
    print('top left corner is tile', corners[0],
          [edges[3][corners[0]], edges[0][corners[0]]])
    print('Rotated ', corners[0], i, 'times')
    util.print_tile(corners[0], tiles, edges, flipped_edges)

    grid_dim = int(sqrt(len(tiles)))
    assert grid_dim * grid_dim == len(
        tiles), "Can't find grid dimensions, # tiles = " + str(len(tiles))

    # So now corners[0] is rotated to be top-left. We didn't do any check about whether to flip
    # it or not; we just declared that its side that faces up is correct for now.
    #
    # Next we'll assemble the rest of the tiles into a consistent grid, rotating or flipping each
    # as necessary. This consistent grid may still need to be flipped or rotated in its entirety
    # to find sea monsters.

    # grid will be indexed as [x][y]
    grid = [None] * grid_dim
    for i in range(len(grid)):
        grid[i] = [None] * grid_dim

    tiles_to_place = set([t for t in tiles.keys()])
    tiles_to_place.remove(corners[0])
    grid[0][0] = corners[0]

    for c in range(grid_dim):
        # Place the top tile in the column, except for column 0 where it's already placed.
        if c != 0:
            print('top... tiles_to_place', len(tiles_to_place), tiles_to_place)
            glue = util.flip_int(edges[1][grid[c - 1][0]])  # 1 == right edge
            print('right glue', glue)
            print('edges', edges)
            print('flipped_edges', flipped_edges)
            found = _find_matching_tile(glue, tiles_to_place, tiles, edges,
                                        flipped_edges, c, 0)
            while edges[3][found] != glue:  # 3 == left edge
                util.rotate(found, tiles, edges, flipped_edges)
            grid[c][0] = found
            tiles_to_place.remove(found)
        # Place the rest of the column, top to bottom.
        for r in range(1, grid_dim):
            print('tiles_to_place', len(tiles_to_place), tiles_to_place)
            print('r', r)
            prev_tile = grid[c][r - 1]
            print('prev_tile', prev_tile)
            glue = util.flip_int(edges[2][prev_tile])  # 2 == bottom edge
            print('glue', glue)
            found = _find_matching_tile(glue, tiles_to_place, tiles, edges,
                                        flipped_edges, c, r)
            while edges[0][found] != glue:  # 0 == top edge
                util.rotate(found, tiles, edges, flipped_edges)
            grid[c][r] = found
            tiles_to_place.remove(found)
    return grid
def submit_share(self, job_id, worker_name, session, extranonce1_bin,
                 extranonce2, ntime, nonce, difficulty):
    '''Check parameters and finalize the block template. If it leads to a
    valid block candidate, asynchronously submits the block back to the
    bitcoin network, and likewise submits an aux-pow to the merged-mining
    network.

    - extranonce1_bin is binary. No checks performed, it should be from session data
    - job_id, extranonce2, ntime, nonce - in hex form sent by the client
    - difficulty - decimal number from session, again no checks performed

    Returns (header_hex, hash_hex, mm_hash, share_diff, on_submit, mm_submit);
    raises SubmitException on any validation failure.
    '''
    # Check if extranonce2 looks correctly. extranonce2 is in hex form...
    if len(extranonce2) != self.extranonce2_size * 2:
        raise SubmitException(
            "Incorrect size of extranonce2. Expected %d chars" %
            (self.extranonce2_size * 2))

    # Check for job
    job = self.get_job(job_id)
    if job == None:
        raise SubmitException("Job '%s' not found" % job_id)

    # Check if ntime looks correct
    if len(ntime) != 8:
        raise SubmitException("Incorrect size of ntime. Expected 8 chars")
    if not job.check_ntime(int(ntime, 16)):
        raise SubmitException("Ntime out of range")

    # Check nonce
    if len(nonce) != 8:
        raise SubmitException("Incorrect size of nonce. Expected 8 chars")

    # Check for duplicated submit
    if not job.register_submit(extranonce1_bin, extranonce2, ntime, nonce):
        log.info("Duplicate from %s, (%s %s %s %s)" % \
            (worker_name, binascii.hexlify(extranonce1_bin), extranonce2, ntime, nonce))
        raise SubmitException("Duplicate share")

    # Now let's do the hard work!
    # ---------------------------

    # 0. Some sugar
    extranonce2_bin = binascii.unhexlify(extranonce2)
    ntime_bin = binascii.unhexlify(ntime)
    nonce_bin = binascii.unhexlify(nonce)

    # 1. Build coinbase
    coinbase_bin = job.serialize_coinbase(extranonce1_bin, extranonce2_bin)
    coinbase_hash = util.doublesha(coinbase_bin)

    # 2. Calculate merkle root
    merkle_root_bin = job.merkletree.withFirst(coinbase_hash)
    merkle_root_int = util.uint256_from_str(merkle_root_bin)

    # 3. Serialize header with given merkle, ntime and nonce
    header_bin = job.serialize_header(merkle_root_int, ntime_bin, nonce_bin)

    # 4. Reverse header and compare it with target of the user
    # (the join word-swaps the 80-byte header into the hash input order)
    if settings.COINDAEMON_ALGO == 'scrypt':
        hash_bin = ltc_scrypt.getPoWHash(''.join(
            [header_bin[i * 4:i * 4 + 4][::-1] for i in range(0, 20)]))
    elif settings.COINDAEMON_ALGO == 'scrypt-jane':
        hash_bin = yac_scrypt.getPoWHash(
            ''.join(
                [header_bin[i * 4:i * 4 + 4][::-1] for i in range(0, 20)]),
            int(ntime, 16))
    elif settings.COINDAEMON_ALGO == 'quark':
        hash_bin = quark_hash.getPoWHash(''.join(
            [header_bin[i * 4:i * 4 + 4][::-1] for i in range(0, 20)]))
    elif settings.COINDAEMON_ALGO == 'skeinhash':
        hash_bin = skeinhash.skeinhash(''.join(
            [header_bin[i * 4:i * 4 + 4][::-1] for i in range(0, 20)]))
    else:
        hash_bin = util.doublesha(''.join(
            [header_bin[i * 4:i * 4 + 4][::-1] for i in range(0, 20)]))
    hash_int = util.uint256_from_str(hash_bin)
    scrypt_hash_hex = "%064x" % hash_int
    header_hex = binascii.hexlify(header_bin)
    # pad the hex header out to the size the daemon expects for these algos
    if settings.COINDAEMON_ALGO == 'scrypt' or settings.COINDAEMON_ALGO == 'scrypt-jane':
        header_hex = header_hex + "000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000"
    elif settings.COINDAEMON_ALGO == 'quark':
        header_hex = header_hex + "000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000"
    else:
        pass

    target_user = self.diff_to_target(difficulty)
    if hash_int > target_user:
        raise SubmitException("Share is above target")

    # Mostly for debugging purposes
    target_info = self.diff_to_target(100000)
    if hash_int <= target_info:
        log.info("Yay, share with diff above 100000")

    # Algebra tells us the diff_to_target is the same as hash_to_diff
    share_diff = int(self.diff_to_target(hash_int))

    on_submit = None
    mm_submit = None

    if settings.SOLUTION_BLOCK_HASH:
        # Reverse the header and get the potential block hash (for scrypt only)
        # only do this if we want to send in the block hash to the shares table
        block_hash_bin = util.doublesha(''.join(
            [header_bin[i * 4:i * 4 + 4][::-1] for i in range(0, 20)]))
        block_hash_hex = block_hash_bin[::-1].encode('hex_codec')

    # 5. Compare hash with target of the network
    if hash_int <= job.target:
        # Yay! It is block candidate!
        log.info("We found a block candidate! %s" % scrypt_hash_hex)
        # Reverse the header and get the potential block hash (for scrypt only)
        #if settings.COINDAEMON_ALGO == 'scrypt' or settings.COINDAEMON_ALGO == 'sha256d':
        #   if settings.COINDAEMON_Reward == 'POW':
        block_hash_bin = util.doublesha(''.join(
            [header_bin[i * 4:i * 4 + 4][::-1] for i in range(0, 20)]))
        block_hash_hex = block_hash_bin[::-1].encode('hex_codec')
        #else:   block_hash_hex = hash_bin[::-1].encode('hex_codec')
        #else:  block_hash_hex = hash_bin[::-1].encode('hex_codec')

        # 6. Finalize and serialize block object
        job.finalize(merkle_root_int, extranonce1_bin, extranonce2_bin,
                     int(ntime, 16), int(nonce, 16))
        if not job.is_valid():
            # Should not happen
            log.exception(
                "FINAL JOB VALIDATION FAILED!(Try enabling/disabling tx messages)"
            )

        # 7. Submit block to the network
        serialized = binascii.hexlify(job.serialize())
        on_submit = self.bitcoin_rpc.submitblock(serialized, block_hash_hex,
                                                 scrypt_hash_hex)
        if on_submit:
            self.update_block()

    # 8. Compare hash with target of mm network
    # NOTE(review): reconstructed from a whitespace-mangled source; this step
    # is placed at function level (not inside the step-5 branch) — confirm
    # against upstream stratum-mining merged-mining forks.
    if hash_int <= job.mm_target:
        log.info("We found a mm block candidate! %s" % scrypt_hash_hex)
        coinbase_hex = binascii.hexlify(coinbase_bin)
        branch_count = job.merkletree.branchCount()
        branch_hex = job.merkletree.branchHex()
        parent_hash = util.rev("%064x" % hash_int)
        parent_header = util.flip(header_hex)
        submission = coinbase_hex + parent_hash + branch_count + branch_hex + "000000000000000000" + parent_header
        mm_submit = self.mm_rpc.getauxblock(self.mm_hash, submission)
        log.debug("Coinbase:%s", coinbase_hex)
        log.debug("Branch Count:%s", branch_count)
        log.debug("Branch Hex:%s", branch_hex)
        log.debug("Parent Hash:%s", parent_hash)
        log.debug("Parent Header:%s", parent_header)
        log.debug("MM Hash:%s", self.mm_hash)
        log.debug(" AuxPow:%s", submission)
        log.debug(" Res:" + str(mm_submit))

    if settings.SOLUTION_BLOCK_HASH:
        return (header_hex, block_hash_hex, self.mm_hash, share_diff,
                on_submit, mm_submit)
    else:
        return (header_hex, scrypt_hash_hex, self.mm_hash, share_diff,
                on_submit, mm_submit)
from collections import Counter
from itertools import starmap, groupby
from operator import sub
from util import read_input, flip, rolling

# Advent of Code day 10: adapter joltage chains.
# Parse the adapter joltages and sort ascending.
inputs = [int(i) for i in read_input(10).splitlines()]
voltages = sorted(inputs)
voltages.insert(0, 0)                   # the wall outlet
voltages.append(voltages[-1] + 3)       # the device's built-in adapter

# Part A: tally the gaps between consecutive voltages.
# flip(sub) turns (a, b) into b - a for each rolling pair.
c = Counter(starmap(flip(sub), rolling(voltages, 2)))
n1 = c.pop(1)
n3 = c.pop(3)
ansA = n1 * n3
assert ansA > 2628
assert ansA == 2738
assert len(c) == 0  # there were only 1-diff and 3-diff voltages!

# Part B: count valid arrangements. Only runs of 1-gaps add freedom;
# each run length maps to a fixed multiplicative factor.
diffs = starmap(flip(sub), rolling(voltages, 2))
comb_dict = {1: 2**0, 2: 2**1, 3: 2**2, 4: 2**3 - 1}
n_combs = 1
for key, grp in groupby(diffs):
    if key == 3:
        # 3-steps are fixed. no freedom to choose here...
        continue
    # the longest chunk is 4, so the number of choices is precomputed
    n_combs *= comb_dict[sum(1 for _ in grp)]