def get_topcities(ctx: context.Context, src_root: str) -> List[Tuple[str, int]]:
    """
    Generates a list of cities, sorted by how many new house numbers they got recently.

    Compares today's per-city counts against the counts from 30 days ago and
    returns (city, delta) pairs, largest delta first.  Returns an empty list
    when either day's ``.citycount`` file is missing.
    """
    ret: List[Tuple[str, int]] = []
    new_day = datetime.date.fromtimestamp(ctx.get_time().now()).strftime("%Y-%m-%d")
    day_delta = datetime.date.fromtimestamp(ctx.get_time().now()) - datetime.timedelta(days=30)
    old_day = day_delta.strftime("%Y-%m-%d")
    old_counts: Dict[str, int] = {}
    counts: List[Tuple[str, int]] = []

    old_count_path = os.path.join(src_root, "%s.citycount" % old_day)
    if not ctx.get_file_system().path_exists(old_count_path):
        return ret
    with ctx.get_file_system().open(old_count_path, "rb") as stream:
        for line_bytes in stream.readlines():
            # Each line is "city<TAB>count".
            line = util.from_bytes(line_bytes).strip()
            city, _, count = line.partition('\t')
            if count:
                old_counts[city] = int(count)

    new_count_path = os.path.join(src_root, "%s.citycount" % new_day)
    if not ctx.get_file_system().path_exists(new_count_path):
        return ret
    with ctx.get_file_system().open(new_count_path, "rb") as stream:
        for line_bytes in stream.readlines():
            # Decode first, then strip: consistent with the old-count loop above.
            line = util.from_bytes(line_bytes).strip()
            city, _, count = line.partition('\t')
            if count and city in old_counts:
                counts.append((city, int(count) - old_counts[city]))
    ret = sorted(counts, key=lambda x: x[1], reverse=True)
    return ret
def sign(priv, message):
    """DSA-sign *message* with the private key tuple (x, p, q, g); returns (r, s).

    NOTE(review): there is no retry when r == 0 here (cf. the retrying variant
    of sign elsewhere in the project) — confirm this is acceptable for callers.
    """
    x, p, q, g = priv
    # Fresh per-signature nonce.
    nonce = random.randint(2, q - 1)
    r = pow(g, nonce, p) % q
    if isinstance(message, int):
        message = util.from_bytes(message)
    elif not isinstance(message, (bytes, bytearray)):
        raise ValueError('Message should be int or bytes')
    hashed = util.from_bytes(hashlib.sha1(message).digest())
    s = util.modinv(nonce, q) * (hashed + x * r) % q
    return r, s
def verify(pub, message, signature):
    """Check a DSA signature (r, s) on *message* against the public key (y, p, q, g).

    Raises ValueError when the signature components are out of range or the
    message has an unsupported type; returns True when the signature matches.
    """
    y, p, q, g = pub
    r, s = signature
    # Both components must lie in [0, q).
    if not (0 <= r < q) or not (0 <= s < q):
        raise ValueError('Invalid signature value')
    if isinstance(message, int):
        message = util.from_bytes(message)
    elif not isinstance(message, (bytes, bytearray)):
        raise ValueError('Message should be int or bytes')
    hashed = util.from_bytes(hashlib.sha1(message).digest())
    w = util.modinv(s, q)
    u1 = hashed * w % q
    u2 = r * w % q
    v = pow(g, u1, p) * pow(y, u2, p) % p % q
    return v == r
def get_missing_housenumbers_txt(ctx: context.Context, relation: areas.Relation) -> str:
    """Gets the cached plain text of the missing housenumbers for a relation."""
    # Fast path: serve the cached plain text when it is still valid.
    if is_missing_housenumbers_txt_cached(ctx, relation):
        with relation.get_files().get_housenumbers_txtcache_stream("rb") as stream:
            return util.from_bytes(stream.read())

    ongoing_streets, _ignore = relation.get_missing_housenumbers()
    table = []
    for result in ongoing_streets:
        range_list = util.get_housenumber_ranges(result[1])
        range_strings = [rng.get_number() for rng in range_list]
        street_name = result[0].get_osm_name()
        # Street name, only_in_reference items.
        if relation.get_config().get_street_is_even_odd(street_name):
            elements = util.format_even_odd(range_list, doc=None)
            row = street_name + "\t[" + "], [".join(elements) + "]"
        else:
            result_sorted = sorted(range_strings, key=util.split_house_number)
            row = street_name + "\t[" + ", ".join(result_sorted) + "]"
        table.append(row)
    table.sort(key=util.get_lexical_sort_key())
    output = "\n".join(table)

    # Refresh the cache for the next request.
    with relation.get_files().get_housenumbers_txtcache_stream("wb") as stream:
        stream.write(util.to_bytes(output))
    return output
def handle_missing_streets(ctx: context.Context, relations: areas.Relations, request_uri: str) -> yattag.doc.Doc:
    """Expected request_uri: e.g. /osm/missing-streets/ujbuda/view-[result|query]."""
    tokens = request_uri.split("/")
    relation_name, action = tokens[-2], tokens[-1]
    relation = relations.get_relation(relation_name)
    osmrelation = relation.get_config().get_osmrelation()

    doc = yattag.doc.Doc()
    toolbar = webframe.get_toolbar(ctx, relations, "missing-streets", relation_name, osmrelation)
    doc.asis(toolbar.getvalue())

    if action == "view-turbo":
        doc.asis(missing_streets_view_turbo(relations, request_uri).getvalue())
    elif action == "view-query":
        # Dump the raw reference street list.
        with doc.tag("pre"):
            with relation.get_files().get_ref_streets_stream(ctx, "rb") as sock:
                doc.text(util.from_bytes(sock.read()))
    elif action == "update-result":
        doc.asis(missing_streets_update(ctx, relations, relation_name).getvalue())
    else:
        # assume view-result
        doc.asis(missing_streets_view_result(ctx, relations, request_uri).getvalue())

    date = streets_diff_last_modified(relation)
    doc.asis(webframe.get_footer(date).getvalue())
    return doc
def update_stats_topusers(ctx: context.Context, today: str) -> None:
    """Counts the top housenumber editors as of today."""
    statedir = ctx.get_abspath("workdir/stats")
    csv_path = os.path.join(statedir, "%s.csv" % today)
    if not ctx.get_file_system().path_exists(csv_path):
        return
    topusers_path = os.path.join(statedir, "%s.topusers" % today)
    usercount_path = os.path.join(statedir, "%s.usercount" % today)

    users: Dict[str, int] = {}
    with ctx.get_file_system().open(csv_path, "rb") as stream:
        for line_bytes in stream.readlines():
            line = util.from_bytes(line_bytes)
            # Only care about the last column.
            user = line[line.rfind("\t"):].strip()
            users[user] = users.get(user, 0) + 1

    # Top 20 editors by edit count, busiest first.
    top_users = sorted(users, key=cast(Callable[[str], int], users.get), reverse=True)[:20]
    with ctx.get_file_system().open(topusers_path, "wb") as stream:
        for user in top_users:
            stream.write(util.to_bytes("%s %s\n" % (users[user], user)))

    with ctx.get_file_system().open(usercount_path, "wb") as stream:
        stream.write(util.to_bytes(str(len(users)) + "\n"))
def update_stats_count(ctx: context.Context, today: str) -> None:
    """Counts the # of all house numbers as of today."""
    statedir = ctx.get_abspath("workdir/stats")
    csv_path = os.path.join(statedir, "%s.csv" % today)
    if not ctx.get_file_system().path_exists(csv_path):
        return
    count_path = os.path.join(statedir, "%s.count" % today)
    city_count_path = os.path.join(statedir, "%s.citycount" % today)

    house_numbers = set()
    cities: Dict[str, Set[str]] = {}
    valid_settlements = util.get_valid_settlements(ctx)
    with ctx.get_file_system().open(csv_path, "rb") as stream:
        for index, line_bytes in enumerate(stream.readlines()):
            if index == 0:
                # Ignore the oneliner header.
                continue
            line = util.from_bytes(line_bytes)
            # postcode, city name, street name, house number, user
            cells = line.split("\t")
            # Ignore last column, which is the user who touched the object last.
            house_numbers.add("\t".join(cells[:4]))
            city_key = util.get_city_key(cells[0], cells[1], valid_settlements)
            city_value = "\t".join(cells[2:4])
            cities.setdefault(city_key, set()).add(city_value)

    write_count_path(ctx, count_path, house_numbers)
    write_city_count_path(ctx, city_count_path, cities)
def challenge43():
    """Recover the DSA private key x from a signature made with a 16-bit nonce.

    For each candidate nonce k, derive x = (s*k - H) / r mod q and accept the
    candidate whose deterministic re-signature reproduces the known (r, s).
    Raises RuntimeError when no nonce in [0, 2**16] works.
    """
    def sign_with_nonce(priv, message, k):
        # DSA signing with an explicit, caller-supplied nonce; None when r == 0.
        x, p, q, g = priv
        r = pow(g, k, p) % q
        if r == 0:
            return None
        if isinstance(message, int):
            message = util.from_bytes(message)
        elif not isinstance(message, (bytes, bytearray)):
            raise ValueError('Message should be int or bytes')
        H = util.from_bytes(hashlib.sha1(message).digest())
        s = (util.modinv(k, q) * (H + x * r)) % q
        return r, s

    p = 0x800000000000000089e1855218a0e7dac38136ffafa72eda7859f2171e25e65eac698c1702578b07dc2a1076da241c76c62d374d8389ea5aeffd3226a0530cc565f3bf6b50929139ebeac04f48c3c84afb796d61e5a4f9a8fda812ab59494232c7d2b4deb50aa18ee9e132bfa85ac4374d7f9091abc3d015efc871a584471bb1
    q = 0xf4f47f05794b256174bba6e9b396a7707e563c5b
    g = 0x5958c9d3898b224b12672c0b98e06c60df923cb8bc999d119458fef538b8fa4046c8db53039db620c094c9fa077ef389b5322a559946a71903f990f1f7e0e025e2d7f7cf494aff1a0470f5b64c36b625a097f1651fe775323556fe00b3608c887892878480e99041be601a62166ca6894bdd41a7054ec89f756ba9fc95302291
    y = 0x84ad4719d044495496a3201c8ff484feb45b962e7302e56a392aee4abab3e4bdebf2955b4736012f21a08084056b19bcd7fee56048e004e44984e2f411788efdc837a0d2e5abb7b555039fd243ac01f0fb2ed1dec568280ce678e931868d23eb095fde9d3779191b8c0299d6e07bbb283e6633451e535c45513b2d33c99ea17
    r = 548099063082341131477253921760299949438196259240
    s = 857042759984254168557880549501802188789837994940
    H = 0xd2d0714f014a9784047eaeccf956520045c45265
    message = b'''For those that envy a MC it can be hazardous to your health
So be friendly, a matter of life and death, just like a etch-a-sketch
'''
    # Sanity-check the reconstructed message against the published hash.
    assert H == util.from_bytes(hashlib.sha1(message).digest())

    for k in range(0, 2**16 + 1):
        if util.egcd(k, q)[0] != 1:
            continue
        # x = (s*k - H) * r^-1 mod q
        x = (s * k - H) * util.modinv(r, q) % q
        priv = x, p, q, g
        if sign_with_nonce(priv, message, k) == (r, s):
            return x
    raise RuntimeError('Cannot find x')
def get_missing_housenumbers_html(ctx: context.Context, relation: areas.Relation) -> yattag.doc.Doc:
    """Gets the cached HTML of the missing housenumbers for a relation."""
    doc = yattag.doc.Doc()
    # Fast path: replay the cached HTML when it is still valid.
    if is_missing_housenumbers_html_cached(ctx, relation):
        with relation.get_files().get_housenumbers_htmlcache_stream("rb") as stream:
            doc.asis(util.from_bytes(stream.read()))
        return doc

    todo_street_count, todo_count, done_count, percent, table = relation.write_missing_housenumbers()

    with doc.tag("p"):
        prefix = ctx.get_ini().get_uri_prefix()
        relation_name = relation.get_name()
        summary = tr("OpenStreetMap is possibly missing the below {0} house numbers for {1} streets.")
        doc.text(summary.format(str(todo_count), str(todo_street_count)))
        progress = tr(" (existing: {0}, ready: {1}).")
        doc.text(progress.format(str(done_count), util.format_percent(str(percent))))
        doc.stag("br")
        with doc.tag("a", href="https://github.com/vmiklos/osm-gimmisn/tree/master/doc"):
            doc.text(tr("Filter incorrect information"))
        doc.text(".")
        doc.stag("br")
        with doc.tag("a", href=prefix + "/missing-housenumbers/{}/view-turbo".format(relation_name)):
            doc.text(tr("Overpass turbo query for the below streets"))
        doc.stag("br")
        with doc.tag("a", href=prefix + "/missing-housenumbers/{}/view-result.txt".format(relation_name)):
            doc.text(tr("Plain text format"))
        doc.stag("br")
        with doc.tag("a", href=prefix + "/missing-housenumbers/{}/view-result.chkl".format(relation_name)):
            doc.text(tr("Checklist format"))

    doc.asis(util.html_table_from_list(table).getvalue())
    doc.asis(util.invalid_refstreets_to_html(relation.get_invalid_refstreets()).getvalue())
    doc.asis(util.invalid_filter_keys_to_html(relation.get_invalid_filter_keys()).getvalue())

    # Refresh the cache for the next request.
    with relation.get_files().get_housenumbers_htmlcache_stream("wb") as stream:
        stream.write(util.to_bytes(doc.getvalue()))
    return doc
def get_ref_streets(self) -> List[str]:
    """Gets streets from reference."""
    with self.get_files().get_ref_streets_stream(self.__ctx, "rb") as sock:
        # Deduplicate while decoding each stripped line.
        unique_streets = {util.from_bytes(raw.strip()) for raw in sock.readlines()}
    return sorted(unique_streets)
def sign(priv, message):
    """DSA-sign *message* with private key (x, p, q, g), retrying the nonce until r != 0.

    The retry loop is bounded (5 * q draws) so a broken RNG cannot hang the
    caller; RuntimeError is raised when no valid r is found.
    """
    x, p, q, g = priv
    k = None
    r = None
    remaining = 5 * q
    while remaining > 0:
        k = random.randint(2, q - 1)
        r = pow(g, k, p) % q
        if r != 0:
            break
        remaining -= 1
    else:
        raise RuntimeError('Cannot find a valid r')
    if isinstance(message, int):
        message = util.from_bytes(message)
    elif not isinstance(message, (bytes, bytearray)):
        raise ValueError('Message should be int or bytes')
    H = util.from_bytes(hashlib.sha1(message).digest())
    s = (util.modinv(k, q) * (H + x * r)) % q
    return r, s
def challenge41(ciphertext, pub, decrypt_once):
    """Recover the plaintext of an unpadded-RSA ciphertext via blinding.

    ciphertext -- the original RSA ciphertext, as bytes
    pub -- the (E, N) public key
    decrypt_once -- oracle that decrypts any ciphertext number except the original

    Blinds the ciphertext as C' = S^E * C mod N (which decrypts to S * P mod N),
    feeds C' to the oracle, then divides by S mod N to unblind the plaintext.
    """
    C = util.from_bytes(ciphertext)
    E, N = pub
    S = random.randint(2, N - 1)
    C2 = (pow(S, E, N) * C) % N
    P2 = decrypt_once(C2)
    # Unblind with S^-1 mod N.  Uses util.modinv for consistency with the
    # other challenges in this module (was rsa.modinv).
    P2 = (P2 * util.modinv(S, N)) % N
    return util.to_bytes(P2)
def challenge46(pub, ciphertext, parity_oracle):
    """Decrypt an RSA ciphertext given only a plaintext-parity oracle.

    Repeatedly doubling the plaintext (by multiplying the ciphertext with
    2^e mod n) and recording each parity bit narrows the plaintext to one
    value after n.bit_length() queries.
    """
    cipher_num = util.from_bytes(ciphertext)
    e, n = pub
    doubler = pow(2, e, n)
    bits = n.bit_length()
    acc = 0
    for _ in range(bits):
        cipher_num = cipher_num * doubler % n
        # Fold the next parity bit into the accumulator.
        acc = (acc << 1) + parity_oracle(cipher_num)
    plain = n * (acc + 1) // (1 << bits)
    return util.to_bytes(plain)
def update_stats(ctx: context.Context, overpass: bool) -> None:
    """Performs the update of country-level stats."""
    # Fetch house numbers for the whole country.
    info("update_stats: start, updating whole-country csv")
    query_path = ctx.get_abspath("data/street-housenumbers-hungary.txt")
    query = util.from_bytes(util.get_content(query_path))
    statedir = ctx.get_abspath("workdir/stats")
    os.makedirs(statedir, exist_ok=True)
    today = time.strftime("%Y-%m-%d")
    csv_path = os.path.join(statedir, "%s.csv" % today)

    if overpass:
        retry = 0
        while should_retry(retry):
            if retry > 0:
                info("update_stats: try #%s", retry)
            retry += 1
            overpass_sleep(ctx)
            response, err = overpass_query.overpass_query(ctx, query)
            if err:
                info("update_stats: http error: %s", err)
                continue
            with open(csv_path, "wb") as stream:
                stream.write(util.to_bytes(response))
            break

    update_stats_count(ctx, today)
    update_stats_topusers(ctx, today)
    update_stats_refcount(ctx, statedir)

    # Remove old CSV files as they are created daily and each is around 11M.
    now = time.time()
    for csv in glob.glob(os.path.join(statedir, "*.csv")):
        age_days = (now - os.path.getmtime(csv)) // (24 * 3600)
        if age_days >= 7:
            os.unlink(csv)
            info("update_stats: removed old %s", csv)

    info("update_stats: generating json")
    json_path = os.path.join(statedir, "stats.json")
    with ctx.get_file_system().open(json_path, "wb") as stream:
        stats.generate_json(ctx, statedir, stream)
    info("update_stats: end")
def get_ref_housenumbers(self) -> Dict[str, List[util.HouseNumber]]:
    """Gets house numbers from reference, produced by write_ref_housenumbers()."""
    ret: Dict[str, List[util.HouseNumber]] = {}
    # Reference lines grouped by street name: "street<TAB>housenumber".
    lines: Dict[str, List[str]] = {}
    with self.get_files().get_ref_housenumbers_stream(self.ctx, "rb") as sock:
        for line_bytes in sock.readlines():
            line = util.from_bytes(line_bytes)
            line = line.strip()
            key, _, value = line.partition("\t")
            if key not in lines:
                lines[key] = []
            lines[key].append(value)
    street_ranges = self.get_street_ranges()
    streets_invalid = self.get_street_invalid()
    # Map every OSM street to its normalized, filtered reference house numbers.
    for osm_street in self.get_osm_streets():
        osm_street_name = osm_street.get_osm_name()
        house_numbers: List[util.HouseNumber] = []
        ref_street_name = get_ref_street_from_osm_street(
            self.get_config(), osm_street_name)
        prefix = ref_street_name + "\t"
        street_invalid: List[str] = []
        if osm_street_name in streets_invalid.keys():
            street_invalid = streets_invalid[osm_street_name]
            # Simplify invalid items by default, so the 42a markup can be used, no matter what
            # is the value of housenumber-letters.
            street_invalid = self.__normalize_invalids(
                osm_street_name, street_invalid)
        if ref_street_name in lines.keys():
            for line in lines[ref_street_name]:
                house_number = line.replace(prefix, '')
                # Expand ranges, then drop entries the per-street invalid list rejects.
                normalized = normalize(self, house_number, osm_street_name,
                                       street_ranges)
                normalized = \
                    [i for i in normalized
                     if not util.HouseNumber.is_invalid(i.get_number(), street_invalid)]
                house_numbers += normalized
        # Deduplicate and keep a numeric ordering for stable output.
        ret[osm_street_name] = util.sort_numerically(set(house_numbers))
    return ret
def get_additional_housenumbers_html(
        ctx: context.Context, relation: areas.Relation) -> yattag.doc.Doc:
    """Gets the cached HTML of the additional housenumbers for a relation."""
    doc = yattag.doc.Doc()
    # Fast path: replay the cached HTML when it is still valid.
    if is_additional_housenumbers_html_cached(ctx, relation):
        with relation.get_files().get_additional_housenumbers_htmlcache_stream("rb") as stream:
            doc.asis(util.from_bytes(stream.read()))
        return doc

    todo_street_count, todo_count, table = relation.write_additional_housenumbers()

    with doc.tag("p"):
        summary = tr("OpenStreetMap additionally has the below {0} house numbers for {1} streets.")
        doc.text(summary.format(str(todo_count), str(todo_street_count)))
        doc.stag("br")
        with doc.tag("a", href="https://github.com/vmiklos/osm-gimmisn/tree/master/doc"):
            doc.text(tr("Filter incorrect information"))

    doc.asis(util.html_table_from_list(table).getvalue())
    doc.asis(util.invalid_refstreets_to_html(relation.get_invalid_refstreets()).getvalue())
    doc.asis(util.invalid_filter_keys_to_html(relation.get_invalid_filter_keys()).getvalue())

    # Refresh the cache for the next request.
    with relation.get_files().get_additional_housenumbers_htmlcache_stream("wb") as stream:
        stream.write(util.to_bytes(doc.getvalue()))
    return doc
def challenge42(pub, message):
    """Forge an e=3 RSA PKCS#1 v1.5 signature for *message* (Bleichenbacher attack).

    Builds a padded block whose cube root (rounded up) still verifies under a
    verifier that does not check the padding runs to the end of the modulus.
    """
    digest = hashlib.sha1(message).digest()
    # 00 01 FF 00 || SHA1(message) || zero filler up to the fixed block size.
    padded = b'\x00\x01\xff\x00' + digest + b'\x00' * (124 - len(digest))
    e = pub[0]
    forged = util.nth_root(util.from_bytes(padded), e) + 1
    return util.to_bytes(forged)
def main(argv):
    """Sweep pin pairs on the measurement board over serial and record ADC waves.

    Command protocol (NOTE(review): reverse-engineered from the commands below,
    confirm against the firmware):
      0x11 -> control mux (0x02 output wave to target row, 0x03 connect ground
              to target row, 0x01 connect adc to target row)
      0x23 -> read wave (40000 byte, 2 byte is int_16)
    """
    ports = list(serial.tools.list_ports.comports())
    for port in ports:
        print(port)
    s = serial.Serial("/dev/cu.SLAB_USBtoUART", 115200,
                      parity=serial.PARITY_EVEN)
    print('bytesize:' + str(s.bytesize))
    s.flushInput()
    s.flushOutput()
    waves = []
    try:
        pin_list = [0, 8]
        for i in pin_list:
            # Connect the ADC to row i.
            s.write(bytes([0x11, 0x01, i]))
            s.read(size=2)
            for j in pin_list:
                if i == j:
                    continue
                # Output the test wave to row j.
                s.write(bytes([0x11, 0x02, j]))
                s.read(size=2)
                for k in pin_list:
                    print('%d, %d, %d' % (i, j, k))
                    if j == k:
                        continue
                    s.write(bytes([0x11, 0x00, k]))
                    s.read(size=2)
                    s.flushInput()
                    s.flushOutput()
                    wave = []
                    for _count in range(1):
                        s.write(bytes([0x23]))
                        for _byte_n in range(2000):
                            raw = s.read(size=2)
                            value = from_bytes(raw)
                            # Assumes a 12-bit ADC centered at 2048 with
                            # 0.00244140625 V per count — TODO confirm.
                            wave.append(float((value - 2048) * 0.00244140625))
                    data = {
                        'wave': wave,
                        'rate': 80000,
                        'volt': 1,
                    }
                    plt.plot(wave)
                    plt.show()
                    print(ct.two_pin_discriminator(data))
                    print(ct.caculate_C(125, data, 1))
                    waves.append(wave)
                    # Persist after every measurement so a crash keeps partial data.
                    with open("result.json", 'w') as output:
                        output.write(json.dumps(waves, indent=4))
    except Exception as exc:
        # Best-effort: report and fall through instead of crashing mid-sweep.
        print(exc)