Example #1
def solve_part_two(day_input: List[str]) -> str:
    lst = convert(day_input)
    # Same as part one, up to building each allergen's set of candidate ingredients
    ingredients = set(flatten(line[0] for line in lst))
    alergens = set(flatten(line[1] for line in lst))
    alergen_ingredients: Dict[str, Set[str]] = {
        alerg: set(ingredients) for alerg in alergens
    }

    for line in lst:
        for alerg in line[1]:
            alergen_ingredients[alerg].intersection_update(line[0])

    # Basic constraint propagation, again
    constraints = [
        alerg for alerg, ingr_lst in alergen_ingredients.items()
        if len(ingr_lst) == 1
    ]
    while constraints:
        alerg = constraints.pop()
        ingr = next(iter(alergen_ingredients[alerg]))
        for k, v in alergen_ingredients.items():
            if k != alerg and ingr in v:
                v.remove(ingr)
                if len(v) == 1:  # New constraint
                    constraints.append(k)
    # Join the result. Lots of manipulations because the structure is somewhat complex
    return ','.join(
        [next(iter(v)) for _, v in sorted(alergen_ingredients.items())])
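Every example on this page leans on a flatten helper (often as common.flatten or cm.flatten) whose definition is never shown, and whose exact behavior varies between the source projects. A minimal sketch covering the usage seen here (arbitrary nesting, strings treated as atoms rather than character sequences) might look like this:

from collections.abc import Iterable

def flatten(items):
    """Recursively yield the atoms of arbitrarily nested iterables,
    keeping strings and bytes whole instead of splitting them."""
    for item in items:
        if isinstance(item, Iterable) and not isinstance(item, (str, bytes)):
            yield from flatten(item)
        else:
            yield item

For the single-level cases, itertools.chain.from_iterable would do as well. A few examples also call .count() on the result, which assumes flatten returns a list; wrapping the call as list(flatten(...)) covers both variants.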
Example #2
def solve_part_one(day_input: List[str]) -> int:
    fields, _, tickets = convert(day_input)

    # Invalid numbers aren't in any of the ranges
    all_ranges = list(flatten(fields.values()))
    res = (n for n in flatten(tickets) if not any(n in r for r in all_ranges))
    return sum(res)
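As a quick self-contained check of the scan above, here is hypothetical toy data in the shape convert presumably returns (field name mapped to a list of ranges), taken from the Advent of Code 2020 day 16 example:

fields = {"class": [range(1, 4), range(5, 8)],
          "row": [range(6, 12), range(33, 45)],
          "seat": [range(13, 41), range(45, 51)]}
tickets = [[7, 3, 47], [40, 4, 50], [55, 2, 20], [38, 6, 12]]

all_ranges = [r for ranges in fields.values() for r in ranges]
print(sum(n for t in tickets for n in t
          if not any(n in r for r in all_ranges)))  # 4 + 55 + 12 = 71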
Example #3
 def sendRaw(self, *data):
     """
     inputs:
         data - characters or strings that get merged and sent over the
                 serial interface
     outputs:
         bool - returns True if the data was written; False otherwise
     """
     byteString = "".join(common.flatten(data))
     try:
         bytesWritten = self.interface.write(byteString)
         self.interface.flush()  # Wait for the output to be written
         if byteString and bytesWritten:
             self.logger.log.debug("Successfully wrote serial data: {}". \
                 format(byteString))
             return True
         else:
             self.logger.log.error("Did not write serial data: {}". \
                 format(byteString))
             return False
     except Exception as e:
         self.logger.log.error("Got exception: {}". \
                 format(e))
         self.logger.log.error("Did not write serial data: {}". \
                 format(byteString))
         return False
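One portability caveat for the example above: under Python 3, pyserial's write() expects bytes, so the joined string would need encoding first. A hedged Python 3 sketch of the same write path, with iface standing in for the serial.Serial object presumably held in self.interface:

def send_raw_py3(iface, *data):
    """Join the pieces, encode, write, and report success. A sketch,
    not the original method; assumes iface behaves like serial.Serial."""
    payload = "".join(str(d) for d in data).encode("utf-8")
    try:
        written = iface.write(payload)
        iface.flush()  # wait for the output to be written
        return bool(payload) and bool(written)
    except Exception:
        return False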
Example #4
def compute(data, results, correct_hash):
    # Compute commitment
    hashed_data = poseidon_hash(flatten(data))
    for x, y in zip(hashed_data, correct_hash):
        x.assert_eq(y)

    # Compute outputs
    output = {}
    for category in data:
        num_pell = sum([i >= 10 for i in data[category]])
        num_non_pell = sum([i < 10 for i in data[category]])
        num_all = num_pell + num_non_pell

        received_bachelors_pell = LinCombFxp(
            sum([i == 11 for i in data[category]]))
        different_institution_pell = LinCombFxp(
            sum([i == 12 for i in data[category]]))
        same_institution_pell = LinCombFxp(
            sum([i == 13 for i in data[category]]))

        received_bachelors_non_pell = LinCombFxp(
            sum([i == 1 for i in data[category]]))
        different_institution_non_pell = LinCombFxp(
            sum([i == 2 for i in data[category]]))
        same_institution_non_pell = LinCombFxp(
            sum([i == 3 for i in data[category]]))

        output[category] = {
            "Pell": {
                "Received Bachelor's":
                received_bachelors_pell / num_pell,
                "Enrolled at same institution":
                same_institution_pell / num_pell,
                "Enrolled at different insitution":
                different_institution_pell / num_pell
            },
            "No Pell": {
                "Received Bachelor's":
                received_bachelors_non_pell / num_non_pell,
                "Enrolled at same institution":
                same_institution_non_pell / num_non_pell,
                "Enrolled at different insitution":
                different_institution_non_pell / num_non_pell
            },
            "All Students": {
                "Received Bachelor's":
                (received_bachelors_pell + received_bachelors_non_pell) /
                num_all,
                "Enrolled at same institution":
                (same_institution_pell + same_institution_non_pell) / num_all,
                "Enrolled at different insitution":
                (different_institution_pell + different_institution_non_pell) /
                num_all
            }
        }

    for category in results:
        for student_type in results[category]:
            for outcome in results[category][student_type]:
                output[category][student_type][outcome].assert_eq(
                    results[category][student_type][outcome])
Example #5
    def generate(self, target_file=None):
        if self.components != []:
            target_file = open(self.file_name, "w")
            target_file.write('ASTRON HEADER\n')
            target_file.write('INCLUDED LIBRARIES\n')
            target_file.write('ENTITY DECLARATION\n')
            target_file.write('ARCHITECTURE str OF %s IS\n' %self.name)
            target_file.write('. CONSTANT DECLARATIONS\n')

            # Get the signal declaration from each internal component's port object
            for component in self.components:
                for port_name, port in component.ports.iteritems():
                    target_file.write(port.signal.vhdl_signal_declaration)

            target_file.write('BEGIN\n')
    
            # Write this component's connections
            for connection in cm.flatten(self.connections):
#                print connection
                target_file.write(connection.vhdl_assignment)
      
            # Iterate through internal components
            for component in self.components:
                component.generate()
                target_file.write(component.get_vhdl_instance())
    
            target_file.write('END str\n')
            target_file.close()
Example #6
 def uniqueAttribs(self, xpath):
     """Return a list of attributes that uniquely distinguish the element at each segment"""
     acceptedAttribs = []
     # select examples which contain the relevant xpath
     docs = [doc for doc in self.docs() if doc.tree().xpath(str(xpath))]
     for i, section in enumerate(xpath.walk()):
         sectionElements = flatten([doc.tree().xpath(section) for doc in docs])
         try:
             siblingElements = flatten([[s for s in e.itersiblings() if s.tag == e.tag] for e in sectionElements])
         except AttributeError:
             pass
         else:
             siblingAttribs = flatten([self.extractAttribs(e) for e in siblingElements])
             proposedAttribs = self.commonAttribs(sectionElements)
             acceptedAttribs.append([a for a in proposedAttribs if a not in siblingAttribs])
     return acceptedAttribs
Example #7
def getModel(x, num_output, K, stages, wd, is_training, transfer_mode= False):
    with tf.variable_scope('conv1'):
        x = common.spatialConvolution(x, 3, 1, 2*K, wd= wd)
        # x = common.batchNormalization(x, is_training= is_training)
        # x = tf.nn.relu(x)
        # x = common.maxPool(x, 3, 2)
    print x
    with tf.variable_scope('block0'):
        x = block(x, stages[0], K, is_training= is_training,  wd= wd)
    print x
    with tf.variable_scope('trans1'):
        x = transition(x, K, wd= wd, is_training= is_training)    
    print x
    with tf.variable_scope('block2'):
        x = block(x, stages[1], K, is_training= is_training, wd= wd)
    print x
    with tf.variable_scope('trans2'):
        x = transition(x, K, wd= wd, is_training= is_training)    
    print x
    with tf.variable_scope('block3'):
        x = block(x, stages[2], K, is_training= is_training, wd= wd)
    print x
    x = common.avgPool(x,8,1, padding='VALID')

    x= common.flatten(x)

    if not transfer_mode:
      with tf.variable_scope('output'):
        x = common.fullyConnected(x, num_output, wd= wd)
    else:
      with tf.variable_scope('transfer_output'):
        x = common.fullyConnected(x, num_output, wd= wd)

    return x
Example #8
def compute(data, results, correct_hash):
    # Compute commitment
    hashed_data = poseidon_hash(flatten(data))
    for x, y in zip(hashed_data, correct_hash):
        x.assert_eq(y)

    # Compute graduation rates by category
    output = {}
    for category in data:
        arr2011 = [x == 1 for x in data[category]]
        arr2013 = [x == 11 for x in data[category]]
        
        graduated2011 = LinCombFxp(sum(arr2011))
        graduated2013 = LinCombFxp(sum(arr2013))
        length2011 = PrivVal(len(arr2011))
        length2013 = PrivVal(len(arr2013))

        gr2011 = graduated2011 * 100 / (length2011 + (length2011 == 0))
        gr2013 = graduated2013 * 100 / (length2013 + (length2013 == 0))

        output[category] = {
            "Began in 2011": gr2011,
            "Began in 2013": gr2013
        }

    # Assert results are correct
    for category in results:
        output[category]["Began in 2011"].assert_eq(results[category]["Began in 2011"])
        output[category]["Began in 2013"].assert_eq(results[category]["Began in 2013"])
Example #9
def solve_part_two(day_input: List[str]) -> int:
    fields, ours, tickets = convert(day_input)

    # Valid tickets have all their numbers in at least one range
    all_ranges = list(flatten(fields.values()))
    valid_tickets = [
        t for t in tickets if all(any(n in r for r in all_ranges) for n in t)
    ]

    # For each field collect which positions are possible for it
    possible: Dict[str, List[int]] = defaultdict(list)
    for field, ranges in fields.items():
        # This could be done in a single list comprehension but would be unreadable
        for pos in range(len(ours)):
            # if all ticket values in this position are in at least one of the ranges of the field,
            # this position is possible for the field
            vals = [t[pos] for t in valid_tickets]
            if all(any(v in r for r in ranges) for v in vals):
                possible[field].append(pos)

    # Basic constraint propagation; this doesn't handle every scenario, only
    # well-behaved inputs where singletons fully determine the assignment
    constraints = [field for field, pos in possible.items() if len(pos) == 1]
    while constraints:
        constr = constraints.pop()
        pos = possible[constr][0]
        for k, v in possible.items():
            if k != constr and pos in v:
                v.remove(pos)
                if len(v) == 1:
                    # New constraint
                    constraints.append(k)
    # Assuming that the previous constraint propagation uniquely defined the fields...
    res = (ours[pos] for pos in (
        [v[0] for k, v in possible.items() if k.startswith("departure")]))
    return prod(res)
Example #10
 def inner(values, mask):
     i, m = mask
     if m == '1':
         values = list(map(lambda v: set_bit(v, i), values))
     elif m == 'X':
         values = flatten([(set_bit(v, i), clear_bit(v, i))
                           for v in values])
     return values
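inner assumes set_bit and clear_bit helpers defined elsewhere in that solution; minimal sketches of the usual implementations:

def set_bit(value, bit):
    """Return value with the given bit forced to 1."""
    return value | (1 << bit)

def clear_bit(value, bit):
    """Return value with the given bit forced to 0."""
    return value & ~(1 << bit)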
Example #11
def possible_names(s):
    parsed = parse(s)
    # list() so the result can be both set()-ed and count()-ed below
    all_ingredients = list(flatten([ings for ings, _ in parsed]))
    ingredient_set = set(all_ingredients)
    possible_allergen_names = possible_allerggens(parsed)
    allergen_set = set.union(*possible_allergen_names.values())
    non_allergens = ingredient_set - allergen_set
    return sum(all_ingredients.count(ingr) for ingr in non_allergens)
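parse is not shown either. For input lines like "mxmxvkd kfcds sqjhc nhms (contains dairy, fish)" (the Advent of Code 2020 day 21 format this appears to target), a minimal hypothetical sketch could be:

def parse(s):
    """Split each line into (ingredient list, allergen list); hypothetical."""
    entries = []
    for line in s.strip().splitlines():
        ings, _, allerg = line.partition(" (contains ")
        entries.append((ings.split(),
                        allerg.rstrip(")").split(", ") if allerg else []))
    return entries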
Example #12
 def do_return(self, inst=[]):
     #if self.last_seen_fn:
     #    self.last_seen_fn.pop()
     ASM = []
     ASM.append("// DO_RETURN")
     #FRAME=LCL
     ASM.append("@LCL")
     ASM.append("D=M")
     ASM.append("@13")  #FRAME in M[13]
     ASM.append("M=D")
     # RET = *(FRAME-5)
     ASM.append("@5")
     ASM.append("A=D-A")
     ASM.append("D=M")
     ASM.append("@14")  #RET in M[14]
     ASM.append("M=D")
     # *ARG = pop()
     ASM.append("@SP")
     ASM.append("AM=M-1")
     ASM.append("D=M")
     ASM.append("@ARG")
     ASM.append("A=M")
     ASM.append("M=D")
     # SP = ARG +1
     ASM.append("@ARG")
     ASM.append("D=M+1")
     ASM.append("@SP")
     ASM.append("M=D")
     # THAT = *(FRAME-1)
     ASM.append("@13")
     ASM.append("AM=M-1")
     ASM.append("D=M")
     ASM.append("@THAT")
     ASM.append("M=D")
     # THIS = *(FRAME-2)
     ASM.append("@13")
     ASM.append("AM=M-1")
     ASM.append("D=M")
     ASM.append("@THIS")
     ASM.append("M=D")
     # ARG = *(FRAME-3)
     ASM.append("@13")
     ASM.append("AM=M-1")
     ASM.append("D=M")
     ASM.append("@ARG")
     ASM.append("M=D")
     # LCL = *(FRAME-4)
     ASM.append("@13")
     ASM.append("AM=M-1")
     ASM.append("D=M")
     ASM.append("@LCL")
     ASM.append("M=D")
     # goto RET
     ASM.append("@14")
     ASM.append("A=M")
     ASM.append("0;JMP")
     return "\n    ".join(common.flatten(ASM))
Example #14
def main():
    parser = OptionParser(
        usage = "usage: %prog [options] dir1 ... dirN",
        option_list= [
            make_option("-c", "--capacity",
                        type = "int",
                        default = 736000000,
                        help = "Storage unit capacity (in bytes)"),

            make_option("-r", "--recursive",
                        action = "store_true",
                        help = "Scan each directory recursively"),

            make_option("-i", "--include",
                        action = "append",
                        default = [],
                        help = "Filename pattern of files to be included"
                               " (more than one can be specified)"),

            make_option("-x", "--exclude",
                        action = "append",
                        default = [],
                        help = "Filename pattern of files to be excluded"
                               " (more than one can be specified)"),

            make_option("-p", "--preserveStructure",
                        action = "store_true",
                        help = "Preserve the directory tree hierarchy "
                               "in the partition"),
                     ])

    options,dirs = parser.parse_args()
    if not dirs:
        dirs = ['.']

    fileLists = getFileLists(dirs, options.include, options.exclude,
                             options.recursive, options.preserveStructure)

    getSize = os.path.getsize
    bins = getBins(fileLists, options.capacity, getSize)
    files = flatten(fileLists)
    totalSize = sum(map(getSize,files))
    minBound = int(math.ceil(totalSize / float(options.capacity)))

    print "*** SUMMARY ***"
    print "* %d files (%s)" % (len(files), _sizeAsString(totalSize))
    print "* %s storage unit capacity" % _sizeAsString(options.capacity)
    print "* %d storage units are required at minimum" % minBound
    print "* %d sections were allocated" % len(bins)
    print
    print "* Listing files per unit"
    for i, unit in enumerate(sorted(bins, key=Bin.size, reverse=True)):
        print "  - Unit %d (%s / %.2f%%)" % (
                        i, _sizeAsString(unit.size()),
                        100 * unit.size() / float(options.capacity))
        for obj in sorted(unit, key=getSize, reverse=True):
            print "    %s (%s)" % (obj, _sizeAsString(getSize(obj)))
Example #16
def hist(data, vocab):
    counts = {key: 0 for key in vocab}
    counts[UNK] = 0
    cnt = Counter(flatten(data))
    for k, v in cnt.items():
        if k in counts:
            counts[k] += v
        else:
            counts[UNK] += v
    return counts
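A toy run of hist, assuming UNK is an out-of-vocabulary sentinel such as "<unk>" and that flatten and collections.Counter are in scope:

UNK = "<unk>"
data = [["the", "cat"], ["the", "dog", "barks"]]
print(hist(data, vocab=["the", "cat", "dog"]))
# {'the': 2, 'cat': 1, 'dog': 1, '<unk>': 1}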
Example #17
    def do_funct(self, inst=[]):
        self.last_seen_fn.append(inst[1])
        ASM = []
        ASM.append("// DO_FUNC")
        nVars = int(inst[2])
        ASM.append("(%s)" % inst[1])
        for i in range(0, nVars):
            ASM.append(self.do_push(["push", "constant", "0"]))

        return "\n    ".join(common.flatten(ASM))
Example #18
def parse_line(line):
    line = line.replace("nil", "None")
    try:
        a = eval(line)
        f = flatten(a)
        # print(a, "--f->", f)
        return f
    except Exception as e:
        print("WARNING: Can't parse line - ", line, e)
        return [0]
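The nil replacement suggests Lua-style nested lists dumped as text. With the recursive flatten sketched near the top of this page, a quick call would be:

print(list(parse_line("[1, [2, nil], 3]")))  # [1, 2, None, 3]

(the list() wrapper is only needed if flatten returns a generator; the source project's version likely returns a list already).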
Example #19
 def load_things(keys, query):
     _things = simplejson.loads(store.get_many(keys))
     xthings.update(_things)
     
     for k, v in query.requested.items():
         k = web.lstrips(k, query.prefix)
         if isinstance(v, Query):
             keys2 = common.flatten([d.get(k) for d in _things.values() if d.get(k)])
             keys2 = [k['key'] for k in keys2]
             load_things(set(keys2), v)
Example #21
def parse(responses):
    timeslots_raw = common.flatten(list(map(parse_response, responses)))

    fmt = '%Y-%m-%dT%H:%M:%S%z'

    timeslots = [
        common.string_to_datetime(fmt, timeslot)
        for timeslot in timeslots_raw
    ]

    return timeslots
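common.string_to_datetime is presumably a thin wrapper over datetime.strptime with the arguments swapped; a minimal sketch under that assumption:

from datetime import datetime

def string_to_datetime(fmt, value):
    """Parse value with a strptime format such as '%Y-%m-%dT%H:%M:%S%z'."""
    return datetime.strptime(value, fmt)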
Example #23
def compute(data, results, correct_hash):
    # Compute commitment
    hashed_data = poseidon_hash(flatten(data))
    for x, y in zip(hashed_data, correct_hash):
        x.assert_eq(y)

    # Compute GPA
    total = sum(data)
    num_students = PrivVal(len(data))
    gpa = total / num_students

    # Assert GPAs match
    gpa.assert_eq(results)
Example #25
def solve_part_one(day_input: List[str]) -> int:
    lst = convert(day_input)
    # Get all the distinct ingredients and allergens
    ingredients = set(flatten(line[0] for line in lst))
    alergens = set(flatten(line[1] for line in lst))

    # Get the candidate ingredients for each allergen: the intersection of the
    # ingredient lists of every food that lists that allergen
    alergen_ingredients: Dict[str, Set[str]] = {
        alerg: set(ingredients) for alerg in alergens
    }
    for line in lst:
        for alerg in line[1]:
            alergen_ingredients[alerg].intersection_update(line[0])

    # Good ingredients never appear in any allergen's candidate set
    bad_ingredients = set(flatten(alergen_ingredients.values()))
    good_ingredients = [
        ingr for ingr in ingredients if ingr not in bad_ingredients
    ]
    res = sum(l[0].count(ingr) for l in lst for ingr in good_ingredients)
    return res
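On the Advent of Code 2020 day 21 sample the expected answer is 5, which makes for a quick sanity check, assuming convert parses raw lines the way the hypothetical parse sketch earlier on this page does:

sample = [
    "mxmxvkd kfcds sqjhc nhms (contains dairy, fish)",
    "trh fvjkl sbzzf mxmxvkd (contains dairy)",
    "sqjhc fvjkl (contains soy)",
    "sqjhc mxmxvkd sbzzf (contains fish)",
]
print(solve_part_one(sample))  # 5: kfcds, nhms, trh once each, sbzzf twice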
Example #26
def inference(x,
              num_output,
              wd,
              dropout_rate,
              is_training,
              transfer_mode=False):

    with tf.variable_scope('conv1'):
        network = common.spatialConvolution(x, 11, 4, 64, wd=wd)
        network = common.batchNormalization(network, is_training=is_training)
        network = tf.nn.relu(network)
        #common.activation_summary(network)
    network = common.maxPool(network, 3, 2)
    with tf.variable_scope('conv2'):
        network = common.spatialConvolution(network, 5, 1, 192, wd=wd)
        network = common.batchNormalization(network, is_training=is_training)
        network = tf.nn.relu(network)
        #common.activation_summary(network)
    network = common.maxPool(network, 3, 2)
    with tf.variable_scope('conv3'):
        network = common.spatialConvolution(network, 3, 1, 384, wd=wd)
        network = common.batchNormalization(network, is_training=is_training)
        network = tf.nn.relu(network)
        #common.activation_summary(network)
    with tf.variable_scope('conv4'):
        network = common.spatialConvolution(network, 3, 1, 256, wd=wd)
        network = common.batchNormalization(network, is_training=is_training)
        network = tf.nn.relu(network)
    with tf.variable_scope('conv5'):
        network = common.spatialConvolution(network, 3, 1, 256, wd=wd)
        network = common.batchNormalization(network, is_training=is_training)
        network = tf.nn.relu(network)
    network = common.maxPool(network, 3, 2)
    network = common.flatten(network)
    with tf.variable_scope('fc1'):
        network = tf.nn.dropout(network, dropout_rate)
        network = common.fullyConnected(network, 4096, wd=wd)
        network = common.batchNormalization(network, is_training=is_training)
        network = tf.nn.relu(network)
    with tf.variable_scope('fc2'):
        network = tf.nn.dropout(network, dropout_rate)
        network = common.fullyConnected(network, 4096, wd=wd)
        network = common.batchNormalization(network, is_training=is_training)
        network = tf.nn.relu(network)
    if not transfer_mode:
        with tf.variable_scope('output'):
            network = common.fullyConnected(network, num_output, wd=wd)
    else:
        with tf.variable_scope('transfer_output'):
            network = common.fullyConnected(network, num_output, wd=wd)

    return network
Example #27
 def translate(self, parsedlines):
     asmlines=[]
     #initialise the stack pointer
     asmlines.append("@256")
     asmlines.append("D=A")
     asmlines.append("@SP")
     asmlines.append("M=D")
     #Call Sys.init
     asmlines.append(self.do_call(["call", "Sys.init", 0]))
     
     for line in parsedlines:
         asmlines.append(self.transfn[line["type"]](line["inst"]))
     return common.flatten(asmlines)
Example #29
def get_turnout():
    try:
        today = datetime.datetime.today()
        turnout = common.flatten(api.get_turnout_data(today))

        string = []

        for i in range(len(turnout)):
            string.append(f"{common.ranges[i]}: {turnout[i]}")

        return "\n".join(string)
    except Exception as ex:
        log.critical(ex)
        return "_Nessun dato disponibile oggi._"
Example #30
def parse_response(resp):
    import json  # sys was imported here but never used

    json_response = json.loads(resp)

    times = json_response["data"]["providerLocations"][0]["availability"]["times"]
    timeslots_raw = [
        [timeslot.get('startTime') for timeslot in time.get('timeslots')]
        for time in times
    ]

    # Keep only days that actually have open slots, then flatten
    timeslots_by_day = [ts for ts in timeslots_raw if len(ts) > 0]

    timeslots = common.flatten(timeslots_by_day)

    return timeslots
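A self-contained check with a minimal (hypothetical) response in the exact shape the function indexes into, assuming common.flatten behaves like the flatten sketch above:

resp = '''{"data": {"providerLocations": [{"availability": {"times": [
    {"timeslots": [{"startTime": "2021-05-01T09:00:00+0000"}]},
    {"timeslots": []}
]}}]}}'''
print(list(parse_response(resp)))  # ['2021-05-01T09:00:00+0000']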
Example #31
def inference(x, num_output, wd, dropout_rate, is_training, transfer_mode= False):
    conv_weight_initializer = tf.truncated_normal_initializer(stddev= 0.1)
    fc_weight_initializer = tf.truncated_normal_initializer(stddev= 0.01)
 
    with tf.variable_scope('conv1'):
      network = common.spatialConvolution(x, 11, 4, 64, wd= wd)
      network = common.batchNormalization(network, is_training= is_training)
      network = tf.nn.relu (network)
      #common.activation_summary(network)
    network = common.maxPool(network, 3, 2)
    with tf.variable_scope('conv2'):
      network = common.spatialConvolution(network, 5, 1, 192, wd= wd)
      network = common.batchNormalization(network, is_training= is_training)
      network = tf.nn.relu(network)
      #common.activation_summary(network)
    network = common.maxPool(network, 3, 2)
    with tf.variable_scope('conv3'):
      network = common.spatialConvolution(network, 3, 1, 384, wd= wd)
      network = common.batchNormalization(network, is_training= is_training)
      network = tf.nn.relu(network)
      #common.activation_summary(network)
    with tf.variable_scope('conv4'):
      network = common.spatialConvolution(network, 3, 1, 256, wd= wd)
      network = common.batchNormalization(network, is_training= is_training)
      network = tf.nn.relu(network)
    with tf.variable_scope('conv5'):
      network = common.spatialConvolution(network, 3, 1, 256, wd= wd)
      network = common.batchNormalization(network, is_training= is_training)
      network = tf.nn.relu(network)
    network = common.maxPool(network, 3, 2)
    network = common.flatten(network)
    with tf.variable_scope('fc1'): 
      network = tf.nn.dropout(network, dropout_rate)
      network = common.fullyConnected(network, 4096, wd= wd)
      network = common.batchNormalization(network, is_training= is_training)
      network = tf.nn.relu(network)
    with tf.variable_scope('fc2'):
      network = tf.nn.dropout(network, dropout_rate)
      network = common.fullyConnected(network, 4096, wd= wd)
      network = common.batchNormalization(network, is_training= is_training)
      network = tf.nn.relu(network)
    output = [None]*len(num_output)
    for o in xrange(0,len(num_output)):
      with tf.variable_scope('output'+str(o)):
        output[o] = common.fullyConnected(network, num_output[o], weight_initializer= fc_weight_initializer, bias_initializer= tf.zeros_initializer, wd= wd)

    return output
Example #32
def solve_part_one(day_input: List[str]) -> int:
    tiles = convert(day_input)
    # Get all tiles that have 2 edges not compatible with any other tile's.
    # This is not a necessary condition for a tile to be in a corner, but it
    # is sufficient: a tile whose 2 edges match nothing must sit in a corner
    corners = []
    for tile in tiles:
        other_edges = list(
            flatten([
                other.edges + other.edges_reversed() for other in tiles
                if other.key != tile.key
            ]))
        found = sum(edge in other_edges for edge in tile.edges)
        if found == 2:
            corners.append(tile)

    return prod(int(t.key) for t in corners)
Example #33
def check_linked_phrases(link_phrases, vocab):
    phrases = set(flatten(link_phrases.values()))

    def oov_rate(vocab):
        tmp = set(vocab)
        cnt = 0
        for phrase in phrases:
            oov_words = [w for w in phrase.split() if w not in tmp]
            if oov_words:
                cnt += 1
                #print phrase.split(), [(w, vocab_dict[w]) for w in oov_words]
        return 1.0 * cnt / len(phrases)

    #sys.stdout.write('Number of Entities (N>=%d): %d\n' % (args.min_qfreq, len(link_phrases)))
    sys.stdout.write('Linked phrase vocab size: %d\n' % (len(phrases)))
    for N in [30000, 50000, 100000, 150000]:
        sys.stdout.write('OOV linked phrase rate (n_vocab=%d): %f\n' %
                         (N, oov_rate(vocab[:N])))
Example #34
def compute(data, results, correct_hash):
    # Compute commitment
    hashed_data = poseidon_hash(flatten(data))
    for x, y in zip(hashed_data, correct_hash):
        x.assert_eq(y)

    #  Begin with just 2016-2017
    output = {}
    for year in data:
        output[year] = {}
        for income in data[year]:
            year_data = data[year][income]
            total = sum(year_data)
            length = PrivVal(len(year_data))
            output[year][income] = total / length

    # Check equality
    for year in results:
        for income in results[year]:
            output[year][income].assert_eq(results[year][income])
Example #35
def inference(x, num_output, wd, dropout_rate, is_training, transfer_mode= False, model_type= 'A'):
   # Create tables describing VGG configurations A, B, D, E
   if model_type == 'A':
      config = [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M']
   elif model_type == 'B':
      config = [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M']
   elif model_type == 'D':
      config = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M']
   elif model_type == 'E':
      config = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M']
   else:
      # A bare print here would leave config undefined and crash below;
      # fail explicitly instead
      raise ValueError('Unknown model type: ' + model_type +
                       ' | Please specify model type A, B, D or E')
   
   network= x

   for k,v in enumerate(config):
     if v == 'M':
       network= common.maxPool(network, 2, 2)
     else:  
       with tf.variable_scope('conv'+str(k)):
         network = common.spatialConvolution(network, 3, 1, v, wd= wd)
         network = tf.nn.relu(network)

   network= common.flatten(network)

   with tf.variable_scope('fc1'): 
     network = common.fullyConnected(network, 4096, wd= wd)
     network = tf.nn.relu(network)
     network = common.batchNormalization(network, is_training= is_training)
     network = tf.nn.dropout(network, dropout_rate)
   with tf.variable_scope('fc2'):
     network = common.fullyConnected(network, 4096, wd= wd)
     network = tf.nn.relu(network)
     network = common.batchNormalization(network, is_training= is_training)
     network = tf.nn.dropout(network, dropout_rate)
   if not transfer_mode:
     with tf.variable_scope('output'):
       network = common.fullyConnected(network, num_output, wd= wd)
   else:
     with tf.variable_scope('transfer_output'):
       network = common.fullyConnected(network, num_output, wd= wd)

   return network
Example #36
def inference(x, num_output, wd, is_training, transfer_mode=False):
    with tf.variable_scope('block1'):
        network = block(x, [11, 4, 96], wd, is_training)
    network = common.maxPool(network, 3, 2)
    with tf.variable_scope('block2'):
        network = block(network, [5, 1, 256], wd, is_training)
    network = common.maxPool(network, 3, 2)
    with tf.variable_scope('block3'):
        network = block(network, [3, 1, 384], wd, is_training)
    network = common.maxPool(network, 3, 2)
    with tf.variable_scope('block4'):
        network = block(network, [3, 1, 1024], wd, is_training)
    network = common.avgPool(network, 7, 1)
    network = common.flatten(network)
    output = [None] * len(num_output)
    for o in xrange(0, len(num_output)):
        with tf.variable_scope('output' + str(o)):
            output[o] = common.fullyConnected(network, num_output[o], wd=wd)

    return output
Example #37
def inference(x, num_output, wd, is_training, transfer_mode=False):
    with tf.variable_scope('block1'):
        network = block(x, [11, 4, 96], wd, is_training)
    network = common.maxPool(network, 3, 2)
    with tf.variable_scope('block2'):
        network = block(network, [5, 1, 256], wd, is_training)
    network = common.maxPool(network, 3, 2)
    with tf.variable_scope('block3'):
        network = block(network, [3, 1, 384], wd, is_training)
    network = common.maxPool(network, 3, 2)
    with tf.variable_scope('block4'):
        network = block(network, [3, 1, 1024], wd, is_training)
    network = common.avgPool(network, 7, 1)
    network = common.flatten(network)
    if not transfer_mode:
        with tf.variable_scope('output'):
            network = common.fullyConnected(network, num_output, wd=wd)
    else:
        with tf.variable_scope('transfer_output'):
            network = common.fullyConnected(network, num_output, wd=wd)

    return network
Example #38
 def generate(self, target_file=None):
     if self.components != []:
         target_file = open(self.file_name, "w")
         target_file.write('ASTRON HEADER\n')
         target_file.write('INCLUDED LIBRARIES\n')
         target_file.write('ENTITY DECLARATION\n')
         target_file.write('ARCHITECTURE str OF %s IS\n' %self.name)
         target_file.write('. CONSTANT DECLARATIONS\n')
         target_file.write('. SIGNAL DECLARATIONS\n')
         target_file.write('BEGIN\n')
 
         # Write this component's connections
         for connection in cm.flatten(self.connections):
             print connection
             target_file.write(connection.vhdl_assignment)
   
         # Iterate through internal components
         for component in self.components:
             component.generate()
             target_file.write(component.get_vhdl_instance())
 
         target_file.write('END str\n')
         target_file.close()
Example #39
 def translate(self, parsedlines):
     asmlines=[]
     for line in parsedlines:
         asmlines.append(self.transfn[line["type"]](line["inst"]))
     return common.flatten(asmlines)
 def do_call(self, inst=[]):
     ASM=[]
     ASM.append("// DO_CALL: "+inst[1])
     self.call_return=self.call_return +1
     call_label="RETFROM_"+inst[1]+str(self.call_return)
     
     numargs= int(inst[2])
     # push return-address
     ASM.append("@%s"%(call_label))
     ASM.append("D=A")
     ASM.append("@SP")
     ASM.append("AM=M+1")
     ASM.append("A=A-1")
     ASM.append("M=D")
     # push LCL
     ASM.append("@LCL")
     ASM.append("D=M")
     ASM.append("@SP")
     ASM.append("AM=M+1")
     ASM.append("A=A-1")
     ASM.append("M=D")
     # push ARG
     ASM.append("@ARG")
     ASM.append("D=M")
     ASM.append("@SP")
     ASM.append("AM=M+1")
     ASM.append("A=A-1")
     ASM.append("M=D")
     # push THIS
     ASM.append("@THIS")
     ASM.append("D=M")
     ASM.append("@SP")
     ASM.append("AM=M+1")
     ASM.append("A=A-1")
     ASM.append("M=D")
     # push THAT
     ASM.append("@THAT")
     ASM.append("D=M")
     ASM.append("@SP")
     ASM.append("AM=M+1")
     ASM.append("A=A-1")
     ASM.append("M=D")
     # ARG = SP-n-5
     ASM.append("@SP")
     ASM.append("D=M")
     ASM.append("@"+str(numargs+5))#we're doing work in machine that could be done in-compiler: numargs+5
     ASM.append("D=D-A")
     ASM.append("@ARG")
     ASM.append("M=D")
     # LCL = SP
     ASM.append("@SP")
     ASM.append("D=M")
     ASM.append("@LCL")
     ASM.append("M=D")
     # goto f
     ASM.append("@%s"%(str(inst[1])))
     ASM.append("0;JMP")
     # set label return-address
     ASM.append("(%s)"%(call_label))
     
     return "\n    ".join(common.flatten(ASM))

def usage():
    print"""
python S1VM.py <source>
    where <source> is a valid vm file or a folder containing vm files.
"""
if __name__=="__main__":
    translator=Translator()
    parser=Parser(instdict)
    
    outASM=[]
    
    try:
        givenpath = sys.argv[1]
    except IndexError:
        usage()
        exit()

    if os.path.isdir(givenpath):
        for file in os.listdir(givenpath):
            if file[-3:] == ".vm":
                outASM.append(translator.translate(parser.parse(os.path.join(givenpath, file))))
    elif os.path.isfile(givenpath) and givenpath[-3:] == ".vm":
        outASM.append(translator.translate(parser.parse(givenpath)))
    else:
        print("Error: Path supplied is invalid!")
        exit()

    print "\n".join(common.flatten(outASM))
Example #40
    def generate(self, target_vhdl_file=None):
        if self.components != []:  # Sub-components determine the contents of this generated file (e.g. top level)
            target_vhdl_file = open(self.vhdl_file_name, "w")
            ###############################################################################
            # ASTRON HEADER, INFO
            ###############################################################################
            target_vhdl_file.write(ASTRON_HEADER)
            target_vhdl_file.write(self.info)
            target_vhdl_file.write("\n")

            ###############################################################################
            # USED LIBRARIES
            ###############################################################################
            target_vhdl_file.write(STANDARD_LIBS)

            for component in self.components:
                target_vhdl_file.write(component.vhdl_lib)
                #                print component.vhdl_lib
                vhdl_lib_str_words = component.vhdl_lib.replace(";", "").split(" ")
                for word in vhdl_lib_str_words:
                    #                    print word
                    if "_lib" in word and not "USE" in word and not "." in word and word not in self.hdllib_used_libs:
                        self.hdllib_used_libs += word.replace("_lib", "").replace("\n", "") + " "
            #            target_vhdl_file.write(STANDARD_LIBS)

            # Paste our default entity with replaced entity name.
            #            target_vhdl_file.write(ENTITY_TEMPPLATE.replace('DESIGN_NAME', self.name))

            ###############################################################################
            # ENTITY DECLARATION
            ###############################################################################
            target_vhdl_file.write(ENTITY_DECLARATION_TOP.replace("DESIGN_NAME", self.name))

            for component in self.components:
                for vhdl_port_declaration in component.vhdl_port_declarations:
                    # Put the port declarations this component needs in the top level entity
                    target_vhdl_file.write(vhdl_port_declaration)

            target_vhdl_file.write(ENTITY_DECLARATION_BOTTOM.replace("DESIGN_NAME", self.name))

            ###############################################################################
            # ARCHITECTURE DECLARATION
            ###############################################################################
            target_vhdl_file.write("ARCHITECTURE str OF %s IS\n\n" % self.name)

            ###############################################################################
            # CONSTANT + SIGNAL DECLARATIONS
            ###############################################################################
            for component in self.components:

                # Paste the parent constants
                #                print component.vhdl_parent_constants
                for vhdl_parent_constant in component.vhdl_parent_constants:
                    #                    print 'debug', vhdl_parent_constant
                    target_vhdl_file.write(vhdl_parent_constant)

                target_vhdl_file.write(component.vhdl_name_comment_block)

                # Paste this component's SIGNAL and CONSTANT declarations
                target_vhdl_file.write(component.vhdl_constants)
                target_vhdl_file.write(component.vhdl_signals)

                # Get the signal declaration from each internal component's port object
                for port_name, port in component.ports.iteritems():
                    target_vhdl_file.write(port.signal.vhdl_signal_declaration)

            target_vhdl_file.write("\n")

            ###############################################################################
            # ARCHITECTURE BEGIN
            ###############################################################################
            target_vhdl_file.write("BEGIN\n")

            ###############################################################################
            # COMPONENT CONNECTIONS
            ###############################################################################
            # Write this component's connections
            for connection in cm.flatten(self.connections):
                #                print connection
                target_vhdl_file.write(connection.vhdl_assignment)

            ###############################################################################
            # COMPONENT INSTANCES
            ###############################################################################
            # Iterate through internal components
            for component in self.components:

                target_vhdl_file.write(component.vhdl_name_comment_block)

                target_vhdl_file.write(component.get_vhdl_instance())

            ###############################################################################
            # ARCHITECTURE END
            ###############################################################################
            target_vhdl_file.write("\nEND str;\n")
            target_vhdl_file.close()

            ###############################################################################
            # hdllib.cfg
            ###############################################################################
            hdllib_file = open("generated/hdllib.cfg", "w")
            hdllib_file.write("hdl_lib_name = %s\n" % self.name)
            hdllib_file.write("hdl_library_clause_name = %s_lib\n" % self.name)
            hdllib_file.write("hdl_lib_uses_synth = %s\n" % self.hdllib_used_libs)
            hdllib_file.write("synth_files =\n")
            for component in self.components:
                if component.vhdl_file_name is not None:
                    hdllib_file.write("    ../%s\n" % component.vhdl_file_name)
            hdllib_file.write("    ../%s\n" % self.vhdl_file_name)

            hdllib_file.write("synth_top_level_entity =\n")

            hdllib_file.write("test_bench_files =\n")
            hdllib_file.write("    tb_%s.vhd\n" % self.name)

            hdllib_file.write("quartus_copy_files =\n")
            hdllib_file.write("    qsys_mm_master.qsys .\n")

            for component in self.components:
                hdllib_file.write(component.hdllib_entries)

            hdllib_file.write("quartus_sdc_files =\n")
            for component in self.components:
                hdllib_file.write(component.hdllib_entries_sdc)

            # FIXME QSYS MM master QIP path depends on board...we should get rid of that.
            # . Put both options here as workaround. We don't want to forward self.name to either mm_master.py or ctrl_unb_common.
            # . Also, we don't want QSYS stuff in ctrl_unb_common or ctrl_unb_common stuff in mm_master.
            hdllib_file.write("quartus_qip_files =\n")
            hdllib_file.write(
                "    $HDL_BUILD_DIR/unb1/quartus/%s/qsys_mm_master/synthesis/qsys_mm_master.qip\n" % self.name
            )
            hdllib_file.write(
                "    $HDL_BUILD_DIR/unb2/quartus/%s/qsys_mm_master/synthesis/qsys_mm_master.qip\n" % self.name
            )

            hdllib_file.write("quartus_tcl_files =\n")
            hdllib_file.write("    %s_pins.tcl\n" % self.name)

            hdllib_file.close()

            ###############################################################################
            # Pin TCL file
            ###############################################################################
            pin_file = open("generated/%s_pins.tcl" % self.name, "w")
            for component in self.components:
                pin_file.write(component.tcl_pin_files)
            pin_file.close()

            for component in self.components:  # Chance for subcomponents to run their own overloaded generate()
                component.generate()