def sign(self, message, pvt_key):
    """
    Generate a cryptographic signature for the supplied message using a LMOTS
    private key.

    Algorithm 3: Generating a Signature from a Private Key and a Message as
    defined in Hash-Based Signatures draft RFC.

    LMOTS keys are one-time keys: signing consumes the key, so
    ``signatures_remaining`` is set to 0 before returning.

    :param message: message bytes
    :param pvt_key: LMOTS private key object
    :return: serialized LMOTS signature (output of
             LmotsSerializer.serialize_signature), not the raw
             LmotsSignature object
    """
    if not isinstance(pvt_key, LmotsPrivateKey):
        raise ValueError("pvt_key must be of type LmotsPrivateKey")
    if pvt_key.signatures_remaining != 1:
        raise ValueError("private key has no signature operations remaining")
    # c is a fresh randomizer read from the entropy source; it is prepended
    # to the message digest and included in the signature.
    c = self._entropy_source.read(self.lmots_type.n)
    hash_q = digest(self.lmots_type.hash_alg, pvt_key.s + c + message + D_MESG)
    # v = message digest || checksum, split into w-bit coefficients below.
    v = hash_q + Merkle.checksum(hash_q, self.lmots_type.w, self.lmots_type.ls)
    y = list()
    for i, x in enumerate(pvt_key.raw_key):
        tmp = x
        # Walk the i-th hash chain coef(v, i) steps forward from the private
        # element; the verifier completes the chain to recover the public key.
        for j in xrange(0, Merkle.coef(v, i, self.lmots_type.w)):
            tmp = digest(self.lmots_type.hash_alg, pvt_key.s + tmp + u16str(i) + u8str(j) + D_ITER)
        y.append(tmp)
    # One-time key: mark it as used.
    pvt_key.signatures_remaining = 0
    lmots_sig = LmotsSignature(c, y, pvt_key.lmots_type)
    return LmotsSerializer.serialize_signature(lmots_sig)
async def set(self, key, name, value, hash=True):
    """Store ``value`` under ``key`` on the network.

    Validates the value type, hashes the key and delegates the actual
    network store to :meth:`set_digest`.
    """
    if not check_dht_value_type(value):
        raise TypeError(
            "Value must be of type int, float, bool, str, or bytes")
    log.info("setting '%s' = '%s' on network", key, value)
    return await self.set_digest(digest(key), key, name, value, hash)
def __init__(self, path, gerber_lp=None):
    """
    Parse an SVG path string and cache the derived data (relative form,
    parsed form, first point, dimensions) in ``config.pth`` keyed by the
    path's digest, so identical paths are only parsed once per run.

    :param path: SVG path data (the 'd' attribute content)
    :param gerber_lp: optional gerber layer-polarity string
    """
    self._gerber_lp = gerber_lp
    self._original = path
    digest = utils.digest(path)
    self._record = config.pth.get(digest)
    self._svgGrammar = self._makeSVGGrammar()
    # Idiom fix: compare to None with 'is', not '=='.
    if self._record is None:
        # Cache miss: parse the path and derive everything.
        self._original_parsed = self._svgGrammar.parseString(self._original)
        self._original_parsed = self._parseResultsToList(self._original_parsed)
        self._first_point = ([self._original_parsed[0][1][0],
                              self._original_parsed[0][1][1]])
        self._relative = self._makeRelative(self._original_parsed)
        self._relative_parsed = self._svgGrammar.parseString(self._relative)
        self._relative_parsed = self._parseResultsToList(self._relative_parsed)
        self._width, self._height = self._getDimensions(self._relative_parsed)
        # Store all derived values in one literal instead of five
        # separate item assignments.
        config.pth[digest] = {
            'first-point': self._first_point,
            'relative': self._relative,
            'relative-parsed': self._relative_parsed,
            'width': self._width,
            'height': self._height,
        }
        self._record = config.pth[digest]
    else:
        # Cache hit: restore derived data without re-parsing.
        self._first_point = self._record['first-point']
        self._relative = self._record['relative']
        self._relative_parsed = self._record['relative-parsed']
        self._width = self._record['width']
        self._height = self._record['height']
def upload():
    """Accept an uploaded XML file, persist it to a temporary file, queue a
    background processing task, and redirect to the status page for the
    file's token; on validation failure, flash a message and redirect home.
    """
    upload_file = request.files['file']
    if not upload_file.filename:
        flash(u'Не выбран файл')
        return redirect('/')
    if not upload_file.filename.endswith('.xml'):
        flash(u'Неподдерживаемый формат файла')
        return redirect('/')
    # delete=False: the background task owns (and later removes) the file.
    with tempfile.NamedTemporaryFile(prefix=str(os.getpid()),
                                     suffix='.xml',
                                     delete=False,
                                     dir='/tmp') as pers_file:
        upload_file.save(pers_file)
        ProcessFileTask().delay(filepath=pers_file.name,
                                original_filename=upload_file.filename)
        token = digest(pers_file.name)
    return redirect(url_for('uploaded', token=token, _external=True))
def on_failure(self, exc, task_id, args, kwargs, einfo):
    """Mark the upload's token as failed and remove the temporary file."""
    filepath = kwargs['filepath']
    sr.hset(digest(filepath), 'status', 'failure')
    try:
        os.remove(filepath)
    except OSError:
        # Best effort: the file may already be gone.
        pass
def on_success(self, retval, task_id, args, kwargs):
    """Mark the upload's token as complete and remove the temporary file."""
    filepath = kwargs['filepath']
    sr.hset(digest(filepath), 'status', 'complete')
    try:
        os.remove(filepath)
    except OSError:
        # Best effort: the file may already be gone.
        pass
def __init__(self, ksize=20, alpha=3, node_id=None, storage=None):
    """
    Create a Kademlia server instance.

    :param ksize: the k parameter from the paper (bucket size)
    :param alpha: the alpha parameter from the paper (query parallelism)
    :param node_id: this node's id; a random 255-bit digest when omitted
    :param storage: storage backend; a fresh ForgetfulStorage when omitted
    """
    self.ksize = ksize
    self.alpha = alpha
    self.storage = storage or ForgetfulStorage()
    self.node = Node(node_id or digest(random.getrandbits(255)))
    # Networking and periodic-task handles are created later, on listen().
    self.transport = self.protocol = None
    self.refresh_loop = self.save_state_loop = None
def set(self, dkey, key, name, value, hash=True):
    """
    Add ``value`` to the pickled tag-set stored under ``dkey`` and persist
    the value itself via :meth:`set_file`, then cull expired entries.

    :param dkey: digest key the tag-set is stored under
    :param key: original (un-hashed) key
    :param name: name associated with the stored value
    :param value: value to add/store
    :param hash: when True, digest(value) is stored in the tag-set and used
        as the file key; when False the raw value is used
    """
    # Defect fixed: the if/else branches duplicated the load/add/dump
    # sequence; compute the stored entry once and share the rest.
    dvalue = digest(value) if hash else value
    if dkey in self.data_tag:
        tags = pickle.loads(self.data_tag[dkey][1])
    else:
        tags = set()
    tags.add(dvalue)
    self.data_tag[dkey] = (time.monotonic(), pickle.dumps(tags))
    self.set_file(dvalue, value, key, name)
    self.cull()
def _setWithTimestamp(self, existingValue, key, value, requestedTimeStamp, encryptionKey, ttl):
    """
    Sends the command to store the key/value pair on all required nodes.

    :param existingValue: The current (value, timestamp) tuple associated
        with the key, if one exists.
    :param key: The key to store the value under.
    :param value: The value to store.
    :param requestedTimeStamp: An explicit timestamp if desired; if None a
        new timestamp is derived from the existing stored timestamp.
    :param encryptionKey: Key used to decode/encode the stored timestamp.
    :param ttl: Time-to-live forwarded in the store payload.
    :return: A Deferred firing True if any node accepted the store.
    """
    if requestedTimeStamp is None:
        if existingValue:
            # BUG FIX: decode the timestamp of the *existing* stored tuple
            # (existingValue[1]), not of the new value being written; the
            # docstring and the commented-out line below both show the
            # existing tuple is the intended source.
            existingTimestamp = decodeTimestamp(existingValue[1], encryptionKey)
            if not existingTimestamp:
                return defer.succeed(False)
            timestamp = str(existingTimestamp + random.randint(1, 100))
            #timestamp = existingValue[1] + 1
        else:
            timestamp = random.randint(0, 1000)
        self.log.debug(
            "setting '%s' = '%s' on network with automatic timestamp '%s'"
            % (key, value, timestamp))
    else:
        timestamp = requestedTimeStamp
        self.log.debug(
            "setting '%s' = '%s' on network with explicit timestamp '%s'"
            % (key, value, timestamp))
    dkey = digest(key)

    def store(nodes):
        # Fan the store out to every node the crawl found; succeed if any
        # single node responds successfully.
        self.log.info("setting '%s' on %s" % (key, map(str, nodes)))
        ds = [
            self.protocol.callStore(n, dkey, [
                value,
                encodeTimestamp(str(timestamp), encryptionKey),
                encryptionKey, ttl, timestamp
            ]) for n in nodes
        ]
        return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

    node = Node(dkey)
    nearest = self.protocol.router.findNeighbors(node)
    self.log.debug("Found %s neighbours to store values at" % str(nearest))
    if len(nearest) == 0:
        self.log.warning("There are no known neighbors to set key %s" % key)
        return defer.succeed(False)
    spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                             self.alpha)
    return spider.find().addCallback(store)
async def get(self, key, hash=False):
    """Look up ``key`` on the network, hashing it first when ``hash`` is
    True.

    Returns the found value, or None when no neighbors are known.
    """
    log.info("Looking up key %s", key)
    dkey = digest(key) if hash else key
    node = Node(dkey)
    nearest = self.protocol.router.find_neighbors(node)
    if not nearest:
        log.warning("There are no known neighbors to get key %s", key)
        return None
    crawler = ValueSpiderCrawl(self.protocol, node, nearest,
                               self.ksize, self.alpha)
    return await crawler.find()
def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
                link=None, link_error=None, **options):
    """
    Register the upload's initial progress record in redis before queueing
    the task.

    :param kwargs: must contain 'filepath' and 'original_filename'
    :return: the AsyncResult produced by Celery's apply_async
    """
    token = digest(kwargs['filepath'])
    sr.hmset(token, {
        'filename': kwargs['original_filename'],
        'status': 'wait',
        'items_cnt': 0,
        'processed_items': 0,
    })
    # BUG FIX: propagate the AsyncResult; the original dropped the return
    # value, so callers of apply_async()/delay() received None.
    return super(ProcessFileTask, self).apply_async(
        args, kwargs, task_id, producer, link, link_error, **options)
def extract_public_key(self, signature, s, message):
    """
    Extracts a LMOTS public key object from a LMOTS signature and the s value.

    The candidate public key is rebuilt by completing each hash chain from
    the position encoded in the signature up to its end (2^w - 1), then
    hashing all chain tails together.  The result equals the signer's real
    public key only when the signature is valid for ``message``.

    :param signature: serialized LMOTS signature bytes
    :param s: entropy s value
    :param message: original message
    :return: LMOTS public key object
    :raises ValueError: if the signature's type code does not match this
        instance's configured LMOTS type
    """
    lmots_sig = LmotsSerializer.deserialize_signature(signature)
    if lmots_sig.lmots_type != self.lmots_type:
        raise ValueError("signature type code does not match expected value")
    # Recompute the randomized message digest and append its checksum,
    # exactly as the signer did.
    hash_q = digest(self.lmots_type.hash_alg, s + lmots_sig.c + message + D_MESG)
    v = hash_q + Merkle.checksum(hash_q, self.lmots_type.w, self.lmots_type.ls)
    outer_hash = create_digest(self.lmots_type.hash_alg)
    outer_hash.update(s)
    for i, y in enumerate(lmots_sig.y):
        tmp = y
        # Complete chain i: the signer advanced it coef(v, i) steps, so the
        # remaining steps up to 2^w - 1 reproduce the chain's endpoint.
        for j in xrange(Merkle.coef(v, i, self.lmots_type.w), 2 ** self.lmots_type.w - 1):
            tmp = digest(self.lmots_type.hash_alg, s + tmp + u16str(i) + u8str(j) + D_ITER)
        outer_hash.update(tmp)
    outer_hash.update(D_PBLC)
    return LmotsPublicKey(s=s, k=outer_hash.digest(), lmots_type=self.lmots_type)
def run(self, filepath, original_filename):
    """
    Parse the uploaded XML file and record per-item field-completeness
    statistics in redis under the file's token.

    :param filepath: path to the temporary XML file to process
    :param original_filename: the filename as uploaded by the user
    """
    token = digest(filepath)
    with open(filepath, 'rt') as xml_file:
        tree = etree.parse(xml_file)
        root = tree.getroot()
        sr.hset(token, 'items_cnt', len(root))
        sr.hset(token, 'status', 'processing')
        for item in root.iterchildren():
            fields_cnt = len(item)
            filled_fields_cnt = sum(imap(lambda field: bool(field.text), item))
            # Robustness fix: an item with no child fields used to raise
            # ZeroDivisionError and kill the whole task; count it as 0%.
            if fields_cnt:
                item_filling_percentage = filled_fields_cnt / float(fields_cnt) * 100
            else:
                item_filling_percentage = 0
            sr.hincrby(token, 'processed_items')
            # NOTE(review): the list key is the token concatenated directly
            # with 'items_filling_percentages' (no separator) — readers must
            # use the same convention.
            sr.lpush(token + 'items_filling_percentages', item_filling_percentage)
def welcome_if_new(self, node):
    """
    If ``node`` is new to the routing table, replicate to it every stored
    key/value it should be holding, then add it to the table.

    For each stored key: find the k nodes nearest to the key.  The pair is
    sent to the new node when either we know no neighbors for that key, or
    the new node is closer to the key than the farthest known neighbor AND
    this server is the closest — i.e. it is this server's responsibility
    to replicate.
    """
    if not self.router.is_new_node(node):
        return
    log.info("never seen %s before, adding to router", node)
    for key, value in self.storage:
        keynode = Node(digest(key))
        neighbors = self.router.find_neighbors(keynode)
        if neighbors:
            last = neighbors[-1].distance_to(keynode)
            new_node_close = node.distance_to(keynode) < last
            first = neighbors[0].distance_to(keynode)
            this_closest = self.source_node.distance_to(keynode) < first
        # NOTE: when neighbors is empty the 'or' short-circuits, so
        # new_node_close/this_closest are never read while unbound.
        if not neighbors or (new_node_close and this_closest):
            asyncio.ensure_future(self.call_store(node, key, value))
    self.router.add_contact(node)
def upload(self, tmp, last=False):
    # type: (str, bool) -> None
    """Encrypt one chunk, extend the rolling digest over it, and POST it,
    retrying up to ``self.__retry`` times before giving up.

    The final chunk (``last=True``) is padded before encryption.
    """
    payload = self.__cipher.encrypt(
        self.__cipher.pad(tmp) if last else tmp)
    # The digest chains over every uploaded chunk so the server can verify
    # the sequence is intact.
    self.__dig = digest(self.__dig + ":" + payload)
    for _attempt in range(self.__retry):
        status = _session.post_code('raw', json={
            'id': self.__info_id,
            'content': payload,
            'hash': self.__dig,
            'last': last
        })
        if status in _session.res_ok:
            break
    else:
        # All attempts failed.
        raise RuntimeError('inconsistency in the chunk sequence')
def get(self, key):
    """
    Get a key if the network has it.

    Returns:
        :class:`None` if not found, the value otherwise.
    """
    self.log.debug("Finding value at %s" % key)
    node = Node(digest(key))
    nearest = self.protocol.router.findNeighbors(node)
    if not nearest:
        self.log.warning("There are no known neighbors to get key %s" % key)
        return defer.succeed(None)
    crawler = ValueSpiderCrawl(self.protocol, node, nearest,
                               self.ksize, self.alpha)
    return crawler.find()
def run(self, filepath, original_filename):
    """
    Parse the uploaded XML file and record per-item field-completeness
    statistics in redis under the file's token.

    :param filepath: path to the temporary XML file to process
    :param original_filename: the filename as uploaded by the user
    """
    token = digest(filepath)
    with open(filepath, 'rt') as xml_file:
        tree = etree.parse(xml_file)
        root = tree.getroot()
        sr.hset(token, 'items_cnt', len(root))
        sr.hset(token, 'status', 'processing')
        for item in root.iterchildren():
            fields_cnt = len(item)
            filled_fields_cnt = sum(
                imap(lambda field: bool(field.text), item))
            # Robustness fix: an item with no child fields used to raise
            # ZeroDivisionError and kill the whole task; count it as 0%.
            if fields_cnt:
                item_filling_percentage = filled_fields_cnt / float(
                    fields_cnt) * 100
            else:
                item_filling_percentage = 0
            sr.hincrby(token, 'processed_items')
            sr.lpush(token + 'items_filling_percentages',
                     item_filling_percentage)
def __init__(self, ksize=20, alpha=3, id=None, storage=None):
    """
    Create a server instance.  This will start listening on the given port.

    Args:
        ksize (int): The k parameter from the paper
        alpha (int): The alpha parameter from the paper
        id: The id for this node on the network.
        storage: An instance that implements
                 :interface:`~kademlia.storage.IStorage`
    """
    self.ksize = ksize
    self.alpha = alpha
    self.log = Logger(system=self)
    self.storage = storage or ForgetfulStorage()
    # Fall back to a random 255-bit id when none is supplied.
    self.node = Node(id or digest(random.getrandbits(255)))
    self.protocol = KademliaProtocol(self.node, self.storage, ksize)
    # NOTE(review): LoopingCall(...).start(...) returns a Deferred, not the
    # LoopingCall, so these attributes cannot be used later to stop the
    # loops — confirm that is intended.
    self.refreshLoop = LoopingCall(self.refreshTable).start(3600)
    self.ttlLoop = LoopingCall(self.countTTL).start(60, now=False)
async def delete_tag(self, key, value):
    """Ask the nodes nearest to ``key`` to remove ``value`` from its tag
    set.

    Returns True if at least one node accepted the delete, or None when no
    neighbors are known.
    """
    dkey = digest(key)
    node = Node(dkey)
    nearest = self.protocol.router.find_neighbors(node)
    if not nearest:
        log.warning("There are no known neighbors to set key %s", key)
        return None
    crawler = NodeSpiderCrawl(self.protocol, node, nearest,
                              self.ksize, self.alpha)
    nodes = await crawler.find()
    log.info("setting '%s' on %s", dkey.hex(), list(map(str, nodes)))
    calls = [self.protocol.call_delete_tag(n, dkey, key, value)
             for n in nodes]
    # True only if at least one delete_tag call succeeded.
    return any(await asyncio.gather(*calls))
def generate_private_key(self, s, seed=None):
    """
    Generate a LMOTS private key.

    In most cases, a key-pair is generated by calling the
    generate_key_pair method.  Since LMOTS public keys can be derived from
    the private key later, it is also valid to create only the private key
    here and compute the public key at a different time.

    Algorithm 0 as defined in Hash-Based Signatures draft RFC.

    :param s: entropy s value
    :param seed: seed value; if None the key elements are read from the
        entropy source instead of being derived deterministically
    :return: LMOTS private key object
    """
    if seed is None:
        # Random key: p fresh n-byte strings from the entropy source.
        raw_key = [self._entropy_source.read(self.lmots_type.n)
                   for _ in xrange(self.lmots_type.p)]
    else:
        # Deterministic key: derive each element from s, the seed and a
        # 1-based element index.
        raw_key = [digest(self.lmots_type.hash_alg,
                          s + seed + u16str(i + 1) + D_PRG)
                   for i in xrange(self.lmots_type.p)]
    return LmotsPrivateKey(lmots_type=self.lmots_type, raw_key=raw_key,
                           s=s, seed=seed, signatures_remaining=1)
def apply_async(self, args=None, kwargs=None, task_id=None, producer=None,
                link=None, link_error=None, **options):
    """
    Register the upload's initial progress record in redis before queueing
    the task.

    :param kwargs: must contain 'filepath' and 'original_filename'
    :return: the AsyncResult produced by Celery's apply_async
    """
    token = digest(kwargs['filepath'])
    sr.hmset(
        token, {
            'filename': kwargs['original_filename'],
            'status': 'wait',
            'items_cnt': 0,
            'processed_items': 0,
        })
    # BUG FIX: return the AsyncResult; the original discarded it, so
    # callers of apply_async()/delay() received None.
    return super(ProcessFileTask, self).apply_async(
        args, kwargs, task_id, producer, link, link_error, **options)
def generate_public_key(self, s, pvt_key):
    """
    Generate LMOTS public key from a private key.

    In most cases, a key-pair is generated by calling the generate_key_pair
    method.  Alternatively the public key can be derived from the private
    key at any time.

    Algorithm 1 as defined in Hash-Based Signatures draft RFC.

    :param s: entropy s value
    :param pvt_key: LMOTS private key object
    :return: LMOTS public key object
    :raises ValueError: if pvt_key is not a LmotsPrivateKey
    """
    if not isinstance(pvt_key, LmotsPrivateKey):
        raise ValueError("pvt_key must be of type LmotsPrivateKey")
    outer_hash = create_digest(self.lmots_type.hash_alg)
    outer_hash.update(s)
    # Defect fixed: the loop variable used to shadow the pvt_key parameter,
    # clobbering it inside the loop body.
    for i, x in enumerate(pvt_key.raw_key):
        tmp = x
        # Walk hash chain i all the way to its end (2^w - 1 iterations);
        # the public key commits to every chain endpoint.
        for j in xrange(0, 2 ** self.lmots_type.w - 1):
            tmp = digest(self.lmots_type.hash_alg,
                         s + tmp + u16str(i) + u8str(j) + D_ITER)
        outer_hash.update(tmp)
    outer_hash.update(D_PBLC)
    return LmotsPublicKey(s=s, k=outer_hash.digest(),
                          lmots_type=self.lmots_type)
async def delete(self, key, hash=True):
    """Ask the nodes nearest to ``key`` to delete it, hashing the key
    first when ``hash`` is True.

    Returns True if at least one node accepted the delete, or None when no
    neighbors are known.
    """
    dkey = digest(key) if hash else key
    # NOTE: local deletion from self.storage is disabled (it was commented
    # out in the original); only the neighbors are asked to drop the key.
    node = Node(dkey)
    nearest = self.protocol.router.find_neighbors(node)
    if not nearest:
        log.warning("There are no known neighbors to get key %s", key)
        return None
    crawler = NodeSpiderCrawl(self.protocol, node, nearest,
                              self.ksize, self.alpha)
    found_nodes = await crawler.find()
    calls = [self.protocol.call_delete(n, dkey) for n in found_nodes]
    # True only if at least one delete call succeeded.
    return any(await asyncio.gather(*calls))
def extractRouting(svg_in):
    """
    Extracts routing from the the 'routing' SVG layers of each PCB layer.
    Inkscape SVG layers for each PCB ('top', 'bottom', etc.) layer.

    Builds a dict with 'routes' (per-layer, keyed by a digest of each
    route's path + style) and 'vias' (keyed by a digest of the via's
    location), prints extraction stats, and overwrites the project's
    *_routing.json file with the result.
    """

    # Open the routing file if it exists. The existing data is used
    # for stats displayed as PCBmodE is run. The file is then
    # overwritten.
    output_file = os.path.join(config.cfg['base-dir'],
                               config.cfg['name'] + '_routing.json')
    try:
        routing_dict_old = utils.dictFromJsonFile(output_file, False)
    except:
        routing_dict_old = {'routes': {}}

    #---------------
    # Extract routes
    #---------------

    # Store extracted data here
    routing_dict = {}

    # The XPATH expression for extracting routes, but not vias
    xpath_expr = "//svg:g[@pcbmode:pcb-layer='%s']//svg:g[@pcbmode:sheet='routing']//svg:path[(@d) and not (@pcbmode:type='via')]"

    routes_dict = {}

    for pcb_layer in config.stk['layer-names']:
        routes = svg_in.xpath(xpath_expr % pcb_layer,
                              namespaces={'pcbmode':config.cfg['ns']['pcbmode'],
                                          'svg':config.cfg['ns']['svg']})

        for route in routes:
            route_dict = {}
            route_id = route.get('{'+config.cfg['ns']['pcbmode']+'}id')
            path = route.get('d')

            style_text = route.get('style') or ''

            # This hash digest provides a unique identifier for
            # the route based on its path, location, and style
            digest = utils.digest(path+
                                  #str(location.x)+
                                  #str(location.y)+
                                  style_text)

            # EAFP: create the per-layer dict on first use.
            try:
                routes_dict[pcb_layer][digest] = {}
            except:
                routes_dict[pcb_layer] = {}
                routes_dict[pcb_layer][digest] = {}
            routes_dict[pcb_layer][digest]['type'] = 'path'
            routes_dict[pcb_layer][digest]['value'] = path

            stroke_width = utils.getStyleAttrib(style_text, 'stroke-width')
            if stroke_width != None:
                # Sometimes Inkscape will add a 'px' suffix to the stroke-width
                #property pf a path; this removes it
                stroke_width = stroke_width.rstrip('px')
                routes_dict[pcb_layer][digest]['style'] = 'stroke'
                routes_dict[pcb_layer][digest]['stroke-width'] = round(float(stroke_width), 4)

            custom_buffer = route.get('{'+config.cfg['ns']['pcbmode']+'}buffer-to-pour')
            if custom_buffer != None:
                routes_dict[pcb_layer][digest]['buffer-to-pour'] = float(custom_buffer)

            gerber_lp = route.get('{'+config.cfg['ns']['pcbmode']+'}gerber-lp')
            if gerber_lp != None:
                routes_dict[pcb_layer][digest]['gerber-lp'] = gerber_lp

    routing_dict['routes'] = routes_dict

    # Create simple stats and display them
    total = 0
    total_old = 0
    new = 0
    existing = 0

    for pcb_layer in config.stk['layer-names']:
        try:
            total += len(routing_dict['routes'][pcb_layer])
        except:
            pass
        try:
            new_dict = routing_dict['routes'][pcb_layer]
        except:
            new_dict = {}
        try:
            old_dict = routing_dict_old['routes'][pcb_layer]
        except:
            old_dict = {}
        for key in new_dict:
            if key not in old_dict:
                new += 1
            else:
                existing += 1

    # NOTE(review): this loop re-uses 'old_dict' from the last iteration of
    # the loop above for every layer, so total_old counts one layer's old
    # routes len(layer-names) times — looks like a bug, confirm upstream.
    for pcb_layer in config.stk['layer-names']:
        total_old += len(old_dict)

    message = "Extracted %s routes; %s new (or modified), %s existing" % (total, new, existing)
    if total_old > total:
        message += ", %s removed" % (total_old - total)
    msg.subInfo(message)

    #-------------------------------
    # Extract vias
    #-------------------------------

    xpath_expr_place = '//svg:g[@pcbmode:pcb-layer="%s"]//svg:g[@pcbmode:sheet="placement"]//svg:g[@pcbmode:type="via"]'

    vias_dict = {}

    for pcb_layer in config.stk['surface-layer-names']:
        # Find all markers
        markers = svg_in.findall(xpath_expr_place % pcb_layer,
                                 namespaces={'pcbmode':config.cfg['ns']['pcbmode'],
                                             'svg':config.cfg['ns']['svg']})

        for marker in markers:
            transform_data = utils.parseTransform(marker.get('transform'))
            location = transform_data['location']
            # Invert 'y' coordinate
            location.y *= config.cfg['invert-y']

            # Change component rotation if needed
            # NOTE(review): 'rotate' is computed but never stored in
            # vias_dict — confirm whether it should be.
            if transform_data['type'] == 'matrix':
                rotate = transform_data['rotate']
                rotate = utils.niceFloat((rotate) % 360)

            # The via's location digests to a unique identifier.
            digest = utils.digest("%s%s" % (location.x, location.y))

            # Define a via, just like any other component, but disable
            # placement of refdef
            vias_dict[digest] = {}
            vias_dict[digest]['footprint'] = marker.get('{'+config.cfg['ns']['pcbmode']+'}footprint')
            vias_dict[digest]['location'] = [utils.niceFloat(location.x),
                                             utils.niceFloat(location.y)]
            vias_dict[digest]['layer'] = 'top'

    routing_dict['vias'] = vias_dict

    # Display stats
    if len(vias_dict) == 0:
        msg.subInfo("No vias found")
    elif len(vias_dict) == 1:
        msg.subInfo("Extracted 1 via")
    else:
        msg.subInfo("Extracted %s vias" % (len(vias_dict)))

    # Save extracted routing into routing file
    try:
        with open(output_file, 'wb') as f:
            f.write(json.dumps(routing_dict, sort_keys=True, indent=2))
    except:
        msg.error("Cannot save file %s" % output_file)

    return
def transform(self, scale=1, rotate_angle=0, rotate_point=Point(), mirror=False, center=True):
    """
    Transforms a path.

    Applies (optional) centering, rotation about ``rotate_point`` and
    scaling to the stored relative-parsed path, rebuilding the SVG path
    string.  Results (path, horizontally mirrored path, width, height) are
    cached in ``self._record`` keyed by a digest of the path plus all
    transform arguments, so repeated identical transforms are free.

    NOTE(review): ``rotate_point=Point()`` is a mutable default argument —
    presumably Point is never mutated by rotate(); confirm.

    :param scale: uniform scale factor
    :param rotate_angle: rotation angle applied to every coordinate
    :param rotate_point: point to rotate about
    :param mirror: kept for the cache key; mirrored output is always built
    :param center: when True, translate so the path is centered on its
        bounding box before rotating/scaling
    """
    path = self._relative_parsed

    # Cache key covers the path and every transform parameter.
    string = "%s%s%s%s%s%s" % (path,scale,rotate_angle,rotate_point,mirror,center)
    digest = utils.digest(string)

    record = self._record.get(digest)
    if record != None:
        # Cache hit: reuse previously computed results.
        self._transformed = record['path']
        self._transformed_mirrored = record['mirrored']
        self._width = record['width']
        self._height = record['height']
    else:
        width, height = self._getDimensions(path)
        #width = self._width
        #height = self._height

        # first point of path
        first_point = Point(path[0][1][0], path[0][1][1])

        if center is True:
            # center point of path
            origin_point = Point(self._bbox_top_left.x+width/2,
                                 self._bbox_top_left.y-height/2)

            # caluclate what's the new starting point of path based on the new origin
            new_first_point = Point(first_point.x - origin_point.x,
                                    first_point.y - origin_point.y)
        else:
            new_first_point = Point(first_point.x, first_point.y)

        # The starting 'm' command gets the transformed first point.
        new_first_point.rotate(rotate_angle, rotate_point)
        new_first_point.mult(scale)
        new_p = "m %f,%f " % (new_first_point.x, new_first_point.y)

        tmpp = Point()
        origin = Point()

        for n in range(0, len(path)):
            if path[n][0] == 'm' and n == 0:
                # Remaining coordinate pairs of the initial moveto.
                for m in range(2, len(path[n])):
                    tmpp.assign(path[n][m][0], path[n][m][1])
                    tmpp.rotate(rotate_angle, rotate_point)
                    tmpp.mult(scale)
                    new_p += str(tmpp.x) + "," + str(tmpp.y) + " "
            else:
                # 'h'/'v' become 'l' because rotation can turn a purely
                # horizontal/vertical segment into a diagonal one.
                if path[n][0] == 'h' or path[n][0] == 'v':
                    new_p += "l "
                else:
                    new_p += path[n][0] + " "
                for m in range(1, len(path[n])):
                    if path[n][0] == 'h':
                        tmpp.assign(path[n][m][0], 0)
                    elif path[n][0] == 'v':
                        tmpp.assign(0, path[n][m][0])
                    else:
                        tmpp.assign(path[n][m][0], path[n][m][1])

                    tmpp.rotate(rotate_angle, rotate_point)
                    tmpp.mult(scale)
                    new_p += str(tmpp.x) + "," + str(tmpp.y) + " "

        self._transformed = new_p
        parsed = self._svgGrammar.parseString(new_p)
        self._transformed_mirrored = self._mirrorHorizontally(parsed)

        width, height = self._getDimensions(parsed)
        self._width = width
        self._height = height

        # Populate the cache for subsequent identical transforms.
        self._record[digest] = {}
        self._record[digest]['path'] = self._transformed
        self._record[digest]['mirrored'] = self._transformed_mirrored
        self._record[digest]['width'] = self._width
        self._record[digest]['height'] = self._height

    return
def extractRouting(svg_in):
    """
    Extracts routing from the the 'routing' SVG layers of each PCB layer.
    Inkscape SVG layers for each PCB ('top', 'bottom', etc.) layer.

    Builds a dict with 'routes' (per surface layer, keyed by a digest of
    each route's path + style) and 'vias' (keyed by a digest of the via's
    location), prints extraction stats, and overwrites the project's
    *_routing.json file.

    Fix: the fallback for a missing sodipodi location used the undefined
    name ``Pound()``; it now correctly constructs a ``Point()``, so a new
    via without sodipodi coordinates no longer raises NameError.
    """

    # Open the routing file if it exists. The existing data is used
    # for stats displayed as PCBmodE is run. The file is then
    # overwritten.
    output_file = os.path.join(config.cfg['base-dir'],
                               config.cfg['name'] + '_routing.json')
    try:
        routing_dict_old = utils.dictFromJsonFile(output_file, False)
    except:
        routing_dict_old = {'routes': {}}

    #---------------
    # Extract routes
    #---------------

    # Store extracted data here
    routing_dict = {}

    # The XPATH expression for extracting routes, but not vias
    xpath_expr = "//svg:g[@pcbmode:pcb-layer='%s']//svg:g[@pcbmode:sheet='routing']//svg:path[(@d) and not (@pcbmode:type='via')]"

    routes_dict = {'top': {}, 'bottom': {}}

    for pcb_layer in utils.getSurfaceLayers():
        routes = svg_in.xpath(xpath_expr % pcb_layer,
                              namespaces={'pcbmode':config.cfg['ns']['pcbmode'],
                                          'svg':config.cfg['ns']['svg']})

        for route in routes:
            route_dict = {}
            route_id = route.get('{'+config.cfg['ns']['pcbmode']+'}id')
            path = route.get('d')

            style_text = route.get('style') or ''

            # This hash digest provides a unique identifier for
            # the route based on its path, location, and style
            digest = utils.digest(path+
                                  #str(location.x)+
                                  #str(location.y)+
                                  style_text)

            routes_dict[pcb_layer][digest] = {}
            routes_dict[pcb_layer][digest]['type'] = 'path'
            routes_dict[pcb_layer][digest]['value'] = path

            stroke_width = utils.getStyleAttrib(style_text, 'stroke-width')
            if stroke_width != None:
                routes_dict[pcb_layer][digest]['style'] = 'stroke'
                routes_dict[pcb_layer][digest]['stroke-width'] = round(float(stroke_width), 4)

            custom_buffer = route.get('{'+config.cfg['ns']['pcbmode']+'}buffer-to-pour')
            if custom_buffer != None:
                routes_dict[pcb_layer][digest]['buffer-to-pour'] = float(custom_buffer)

            gerber_lp = route.get('{'+config.cfg['ns']['pcbmode']+'}gerber-lp')
            if gerber_lp != None:
                routes_dict[pcb_layer][digest]['gerber-lp'] = gerber_lp

    routing_dict['routes'] = routes_dict

    # Create simple stats and display them
    total = 0
    total_old = 0
    new = 0
    existing = 0

    for pcb_layer in utils.getSurfaceLayers():
        try:
            total += len(routing_dict['routes'][pcb_layer])
        except:
            pass
        try:
            new_dict = routing_dict['routes'][pcb_layer]
        except:
            new_dict = {}
        try:
            old_dict = routing_dict_old['routes'][pcb_layer]
        except:
            old_dict = {}
        for key in new_dict:
            if key not in old_dict:
                new += 1
            else:
                existing += 1

    # NOTE(review): re-uses 'old_dict' from the last layer for every
    # iteration; kept as-is to preserve the displayed stats.
    for pcb_layer in utils.getSurfaceLayers():
        total_old += len(old_dict)

    message = "Extracted %s routes; %s new (or modified), %s existing" % (total, new, existing)
    if total_old > total:
        message += ", %s removed" % (total_old - total)
    msg.subInfo(message)

    #-------------
    # Extract vias
    #-------------

    # XPATH expression for extracting vias
    xpath_expr = "//svg:g[@pcbmode:pcb-layer='%s']//svg:g[@pcbmode:sheet='routing']//svg:path[@pcbmode:type='via']"

    # Get new vias; only search the top layer
    new_vias = svg_in.xpath(xpath_expr % 'top',
                            namespaces={'pcbmode':config.cfg['ns']['pcbmode'],
                                        'svg':config.cfg['ns']['svg']})

    # XPATH expression for extracting vias
    xpath_expr = "//svg:g[@pcbmode:pcb-layer='%s']//svg:g[@pcbmode:sheet='pads']//svg:g[@pcbmode:type='via']"

    # Get new vias; only search the top layer
    vias = svg_in.xpath(xpath_expr % 'top',
                        namespaces={'pcbmode':config.cfg['ns']['pcbmode'],
                                    'svg':config.cfg['ns']['svg']})

    vias_dict = {}

    for via in vias:
        transform = via.get('transform')
        if transform != None:
            transform_data = utils.parseTransform(transform)
            location = transform_data['location']
        else:
            location = Point()

        # Invery 'y' axis if needed
        location.y *= config.cfg['invert-y']

        digest = utils.digest("%s%s" % (location.x, location.y))

        # Define a via, just like any other component, but disable
        # placement of refdef
        vias_dict[digest] = {}
        vias_dict[digest]['footprint'] = via.get('{'+config.cfg['ns']['pcbmode']+'}via')
        vias_dict[digest]['location'] = [location.x, location.y]
        vias_dict[digest]['silkscreen'] = {'refdef': {'show': False }}
        vias_dict[digest]['assembly'] = {'refdef': {'show': False }}
        vias_dict[digest]['layer'] = 'top'

    for via in new_vias:
        # A newly-defined via will have a location set through the
        # 'sodipodi' namespace and possible also through a transform
        try:
            sodipodi_loc = Point(via.get('{'+config.cfg['ns']['sodipodi']+'}cx'),
                                 via.get('{'+config.cfg['ns']['sodipodi']+'}cy'))
        except:
            # BUG FIX: this previously read 'Pound()' (undefined name),
            # which raised NameError whenever the sodipodi coordinates
            # were missing.
            sodipodi_loc = Point()

        transform = via.get('transform')
        if transform != None:
            transform_data = utils.parseTransform(transform)
            location = transform_data['location']
        else:
            location = Point()

        location += sodipodi_loc

        # Invery 'y' axis if needed
        location.y *= config.cfg['invert-y']

        digest = utils.digest("%s%s" % (location.x, location.y))

        # Define a via, just like any other component, but disable
        # placement of refdef
        vias_dict[digest] = {}
        vias_dict[digest]['footprint'] = via.get('{'+config.cfg['ns']['pcbmode']+'}via')
        vias_dict[digest]['location'] = [location.x, location.y]
        vias_dict[digest]['silkscreen'] = {'refdef': {'show': False }}
        vias_dict[digest]['assembly'] = {'refdef': {'show': False }}
        vias_dict[digest]['layer'] = 'top'

    routing_dict['vias'] = vias_dict

    # Display stats
    if len(vias_dict) == 0:
        msg.subInfo("No vias found")
    elif len(vias_dict) == 1:
        msg.subInfo("Extracted 1 via")
    else:
        msg.subInfo("Extracted %s vias" % (len(vias_dict)))

    # Save extracted routing into routing file
    try:
        with open(output_file, 'wb') as f:
            f.write(json.dumps(routing_dict, sort_keys=True, indent=2))
    except:
        msg.error("Cannot save file %s" % output_file)

    return
def extractRouting(svg_in):
    """
    Extracts routing from the the 'routing' SVG layers of each PCB layer.
    Inkscape SVG layers for each PCB ('top', 'bottom', etc.) layer.

    Builds a dict with 'routes' (per surface layer, keyed by a digest of
    each route's path + style) and 'vias' (keyed by a digest of the via's
    location), prints extraction stats, and overwrites the project's
    *_routing.json file.

    Fix: removed a leftover debug statement (``print sodipodi_loc``) that
    printed every new via's sodipodi location to stdout.
    """

    # Open the routing file if it exists. The existing data is used
    # for stats displayed as PCBmodE is run. The file is then
    # overwritten.
    output_file = os.path.join(config.cfg['base-dir'],
                               config.cfg['name'] + '_routing.json')
    try:
        routing_dict_old = utils.dictFromJsonFile(output_file, False)
    except:
        routing_dict_old = {'routes': {}}

    #---------------
    # Extract routes
    #---------------

    # Store extracted data here
    routing_dict = {}

    # The XPATH expression for extracting routes, but not vias
    xpath_expr = "//svg:g[@pcbmode:pcb-layer='%s']//svg:g[@pcbmode:sheet='routing']//svg:path[(@d) and not (@pcbmode:type='via')]"

    routes_dict = {'top': {}, 'bottom': {}}

    for pcb_layer in utils.getSurfaceLayers():
        routes = svg_in.xpath(xpath_expr % pcb_layer,
                              namespaces={
                                  'pcbmode': config.cfg['ns']['pcbmode'],
                                  'svg': config.cfg['ns']['svg']
                              })

        for route in routes:
            route_dict = {}
            route_id = route.get('{' + config.cfg['ns']['pcbmode'] + '}id')
            path = route.get('d')

            style_text = route.get('style') or ''

            # This hash digest provides a unique identifier for
            # the route based on its path, location, and style
            digest = utils.digest(path +
                                  #str(location.x)+
                                  #str(location.y)+
                                  style_text)

            routes_dict[pcb_layer][digest] = {}
            routes_dict[pcb_layer][digest]['type'] = 'path'
            routes_dict[pcb_layer][digest]['value'] = path

            stroke_width = utils.getStyleAttrib(style_text, 'stroke-width')
            if stroke_width != None:
                # Sometimes Inkscape will add a 'px' suffix to the stroke-width
                #property pf a path; this removes it
                stroke_width = stroke_width.rstrip('px')
                routes_dict[pcb_layer][digest]['style'] = 'stroke'
                routes_dict[pcb_layer][digest]['stroke-width'] = round(
                    float(stroke_width), 4)

            custom_buffer = route.get('{' + config.cfg['ns']['pcbmode'] +
                                      '}buffer-to-pour')
            if custom_buffer != None:
                routes_dict[pcb_layer][digest]['buffer-to-pour'] = float(
                    custom_buffer)

            gerber_lp = route.get('{' + config.cfg['ns']['pcbmode'] +
                                  '}gerber-lp')
            if gerber_lp != None:
                routes_dict[pcb_layer][digest]['gerber-lp'] = gerber_lp

    routing_dict['routes'] = routes_dict

    # Create simple stats and display them
    total = 0
    total_old = 0
    new = 0
    existing = 0

    for pcb_layer in utils.getSurfaceLayers():
        try:
            total += len(routing_dict['routes'][pcb_layer])
        except:
            pass
        try:
            new_dict = routing_dict['routes'][pcb_layer]
        except:
            new_dict = {}
        try:
            old_dict = routing_dict_old['routes'][pcb_layer]
        except:
            old_dict = {}
        for key in new_dict:
            if key not in old_dict:
                new += 1
            else:
                existing += 1

    # NOTE(review): re-uses 'old_dict' from the last layer for every
    # iteration; kept as-is to preserve the displayed stats.
    for pcb_layer in utils.getSurfaceLayers():
        total_old += len(old_dict)

    message = "Extracted %s routes; %s new (or modified), %s existing" % (
        total, new, existing)
    if total_old > total:
        message += ", %s removed" % (total_old - total)
    msg.subInfo(message)

    #-------------
    # Extract vias
    #-------------

    # XPATH expression for extracting vias
    xpath_expr = "//svg:g[@pcbmode:pcb-layer='%s']//svg:g[@pcbmode:sheet='routing']//svg:*[@pcbmode:type='via']"

    # Get new vias; only search the top layer
    new_vias = svg_in.xpath(xpath_expr % 'top',
                            namespaces={
                                'pcbmode': config.cfg['ns']['pcbmode'],
                                'svg': config.cfg['ns']['svg']
                            })

    # XPATH expression for extracting vias
    xpath_expr = "//svg:g[@pcbmode:pcb-layer='%s']//svg:g[@pcbmode:sheet='pads']//svg:g[@pcbmode:type='via']"

    # Get nexisting vias; only search the top layer
    vias = svg_in.xpath(xpath_expr % 'top',
                        namespaces={
                            'pcbmode': config.cfg['ns']['pcbmode'],
                            'svg': config.cfg['ns']['svg']
                        })

    vias_dict = {}

    for via in vias:
        transform = via.get('transform')
        if transform != None:
            transform_data = utils.parseTransform(transform)
            location = transform_data['location']
        else:
            location = Point()

        # Invery 'y' axis if needed
        location.y *= config.cfg['invert-y']

        digest = utils.digest("%s%s" % (location.x, location.y))

        # Define a via, just like any other component, but disable
        # placement of refdef
        vias_dict[digest] = {}
        vias_dict[digest]['footprint'] = via.get('{' +
                                                 config.cfg['ns']['pcbmode'] +
                                                 '}via')
        vias_dict[digest]['location'] = [location.x, location.y]
        vias_dict[digest]['silkscreen'] = {'refdef': {'show': False}}
        vias_dict[digest]['assembly'] = {'refdef': {'show': False}}
        vias_dict[digest]['layer'] = 'top'

    for via in new_vias:
        # A newly-defined via will have a location set through the
        # 'sodipodi' namespace and possible also through a transform
        try:
            # The commented lines below wored fro Inkscape prior to 0.91
            #sodipodi_loc = Point(via.get('{'+config.cfg['ns']['sodipodi']+'}cx'),
            #                     via.get('{'+config.cfg['ns']['sodipodi']+'}cy'))
            sodipodi_loc = Point(via.get('cx'), via.get('cy'))
        except:
            sodipodi_loc = Point()

        # (debug print of sodipodi_loc removed)

        transform = via.get('transform')
        if transform != None:
            transform_data = utils.parseTransform(transform)
            location = transform_data['location']
        else:
            location = Point()

        location += sodipodi_loc

        # Invery 'y' axis if needed
        location.y *= config.cfg['invert-y']

        digest = utils.digest("%s%s" % (location.x, location.y))

        # Define a via, just like any other component, but disable
        # placement of refdef
        vias_dict[digest] = {}
        vias_dict[digest]['footprint'] = via.get('{' +
                                                 config.cfg['ns']['pcbmode'] +
                                                 '}via')
        vias_dict[digest]['location'] = [location.x, location.y]
        vias_dict[digest]['silkscreen'] = {'refdef': {'show': False}}
        vias_dict[digest]['assembly'] = {'refdef': {'show': False}}
        vias_dict[digest]['layer'] = 'top'

    routing_dict['vias'] = vias_dict

    # Display stats
    if len(vias_dict) == 0:
        msg.subInfo("No vias found")
    elif len(vias_dict) == 1:
        msg.subInfo("Extracted 1 via")
    else:
        msg.subInfo("Extracted %s vias" % (len(vias_dict)))

    # Save extracted routing into routing file
    try:
        with open(output_file, 'wb') as f:
            f.write(json.dumps(routing_dict, sort_keys=True, indent=2))
    except:
        msg.error("Cannot save file %s" % output_file)

    return
def extractRouting(svg_in):
    """
    Extract routing (routes and vias) from the 'routing'/'placement' SVG
    sheets of each PCB layer and save the result to the board's
    '<name>_routing.json' file.

    :param svg_in: parsed SVG (lxml) element tree of the board
    :return: None; writes the routing JSON file as a side effect
    """

    # Open the routing file if it exists. The existing data is used
    # for stats displayed as PCBmodE is run. The file is then
    # overwritten.
    output_file = os.path.join(config.cfg['base-dir'],
                               config.cfg['name'] + '_routing.json')
    try:
        routing_dict_old = utils.dictFromJsonFile(output_file, False)
    except Exception:
        # Best-effort: a missing or unparsable previous file only means
        # we have no "old" stats to compare against.
        routing_dict_old = {'routes': {}}

    #---------------
    # Extract routes
    #---------------

    # Store extracted data here
    routing_dict = {}

    # The XPATH expression for extracting routes, but not vias
    xpath_expr = "//svg:g[@pcbmode:pcb-layer='%s']//svg:g[@pcbmode:sheet='routing']//svg:path[(@d) and not (@pcbmode:type='via')]"

    routes_dict = {}

    for pcb_layer in config.stk['layer-names']:
        routes = svg_in.xpath(xpath_expr % pcb_layer,
                              namespaces={'pcbmode': config.cfg['ns']['pcbmode'],
                                          'svg': config.cfg['ns']['svg']})

        for route in routes:
            path = route.get('d')
            style_text = route.get('style') or ''

            # This hash digest provides a unique identifier for
            # the route based on its path and style
            digest = utils.digest(path + style_text)

            route_entry = {'type': 'path', 'value': path}

            stroke_width = utils.getStyleAttrib(style_text, 'stroke-width')
            if stroke_width is not None:
                # Sometimes Inkscape will add a 'px' suffix to the
                # stroke-width property of a path; this removes it.
                # NOTE(review): rstrip strips trailing 'p'/'x' characters,
                # not the literal suffix; fine for numeric values.
                stroke_width = stroke_width.rstrip('px')
                route_entry['style'] = 'stroke'
                route_entry['stroke-width'] = round(float(stroke_width), 4)

            custom_buffer = route.get('{' + config.cfg['ns']['pcbmode'] + '}buffer-to-pour')
            if custom_buffer is not None:
                route_entry['buffer-to-pour'] = float(custom_buffer)

            gerber_lp = route.get('{' + config.cfg['ns']['pcbmode'] + '}gerber-lp')
            if gerber_lp is not None:
                route_entry['gerber-lp'] = gerber_lp

            routes_dict.setdefault(pcb_layer, {})[digest] = route_entry

    routing_dict['routes'] = routes_dict

    # Create simple stats and display them
    total = 0
    total_old = 0
    new = 0
    existing = 0

    old_routes = routing_dict_old.get('routes', {})
    for pcb_layer in config.stk['layer-names']:
        new_dict = routes_dict.get(pcb_layer, {})
        old_dict = old_routes.get(pcb_layer, {})
        total += len(new_dict)
        # Bug fix: the old count must be summed per layer; previously the
        # last layer's old_dict was counted once for every layer.
        total_old += len(old_dict)
        for key in new_dict:
            if key in old_dict:
                existing += 1
            else:
                new += 1

    message = "Extracted %s routes; %s new (or modified), %s existing" % (
        total, new, existing)
    if total_old > total:
        message += ", %s removed" % (total_old - total)
    msg.subInfo(message)

    #-------------------------------
    # Extract vias
    #-------------------------------

    xpath_expr_place = '//svg:g[@pcbmode:pcb-layer="%s"]//svg:g[@pcbmode:sheet="placement"]//svg:g[@pcbmode:type="via"]'

    vias_dict = {}

    for pcb_layer in config.stk['surface-layer-names']:
        # Find all via placement markers on this surface layer
        markers = svg_in.findall(xpath_expr_place % pcb_layer,
                                 namespaces={'pcbmode': config.cfg['ns']['pcbmode'],
                                             'svg': config.cfg['ns']['svg']})

        for marker in markers:
            transform_data = utils.parseTransform(marker.get('transform'))
            location = transform_data['location']

            # Invert 'y' coordinate
            location.y *= config.cfg['invert-y']

            # Change component rotation if needed
            if transform_data['type'] == 'matrix':
                rotate = transform_data['rotate']
                rotate = utils.niceFloat((rotate) % 360)
                # NOTE(review): 'rotate' is computed but never stored in
                # the via record — confirm whether it should be.

            digest = utils.digest("%s%s" % (location.x, location.y))

            # Define a via, just like any other component, but disable
            # placement of refdef
            vias_dict[digest] = {
                'footprint': marker.get('{' + config.cfg['ns']['pcbmode'] + '}footprint'),
                'location': [utils.niceFloat(location.x),
                             utils.niceFloat(location.y)],
                'layer': 'top',
            }

    routing_dict['vias'] = vias_dict

    # Display stats
    if len(vias_dict) == 0:
        msg.subInfo("No vias found")
    elif len(vias_dict) == 1:
        msg.subInfo("Extracted 1 via")
    else:
        msg.subInfo("Extracted %s vias" % (len(vias_dict)))

    # Save extracted routing into routing file. Open in text mode:
    # json.dumps returns a str, which cannot be written to a file opened
    # with 'wb' on Python 3.
    try:
        with open(output_file, 'w') as f:
            f.write(json.dumps(routing_dict, sort_keys=True, indent=2))
    except (IOError, OSError):
        msg.error("Cannot save file %s" % output_file)

    return
def login(self, message):
    """
    Handle a client LOGIN message.

    Depending on AUTH_TYPE, verifies either a one-time password
    (AUTH_MEM) or a Citizen Card certificate chain plus a signed nonce
    (AUTH_CC), then runs the access check and replies to the client.

    :param message: decoded LOGIN message dict from the client
    :return: True when authentication and access check both succeed,
             False otherwise
    """
    if self.state != LOGIN:
        logger.warning("Invalid State")
        return False

    logger.info("Logging in")  # fixed typo ("Loging") and needless f-string
    data = message.get("data", None)
    self.state = LOGIN_FINISH

    status = False  # status = False -> if login wasn't a success

    if AUTH_TYPE == AUTH_MEM:
        new_otp = base64.b64decode(data["otp"].encode())
        current_otp_client = digest(new_otp, "SHA256")
        # Default reply; replaced by "OK" only after the access check.
        message = {
            "type": "ERROR",
            "message": "Invalid credentials for logging in"
        }
        if self.current_otp == current_otp_client:
            # success login
            status = True
            if self.clear_credentials:
                logger.info("Clearing old credentials and saving new ones.")
                self.current_otp_index = self.new_index + 1
                self.current_otp_root = self.new_root
                new_otp = self.new_otp
                with open(f"credentials/{self.user_id}_index", "wb") as file:
                    file.write(f"{self.current_otp_index - 1}".encode())
                with open(f"credentials/{self.user_id}_root", "wb") as file:
                    file.write(self.current_otp_root)
                with open(f"credentials/{self.user_id}_otp", "wb") as file:
                    file.write(new_otp)
                logger.info("User logged in with success! Credentials updated.")
        else:
            # fixed typo: "where given" -> "were given"
            logger.info("User not logged in! Wrong credentials were given.")
    elif AUTH_TYPE == AUTH_CC:
        cc_certificate = certificate_object(
            base64.b64decode(data["certificate"].encode()))
        signed_nonce = base64.b64decode(data["sign_nonce"].encode())
        certificates = load_certificates("cc_certificates/")

        chain = []
        chain_completed = construct_certificate_chain(
            chain, cc_certificate, certificates)

        if not chain_completed:
            error_message = "Couldn't complete the certificate chain"
            logger.warning(error_message)
            message = {"type": "ERROR", "message": error_message}
            status = False
        else:
            valid_chain, error_messages = validate_certificate_chain(chain)
            if not valid_chain:
                logger.error(error_messages)
                message = {"type": "ERROR", "message": error_messages}
                status = False
            else:
                status = verify_signature(cc_certificate, signed_nonce,
                                          self.nonce)
                if status:
                    # oid of citizen card's CI (civil id)
                    oid = ObjectIdentifier("2.5.4.5")
                    self.user_id = \
                        cc_certificate.subject.get_attributes_for_oid(
                            oid)[0].value
                    logger.info("User logged in with success")
                    message = {"type": "OK"}
                # NOTE(review): when verify_signature fails, 'message' is
                # still the inbound client message and gets echoed back by
                # self._send below — confirm an ERROR reply isn't intended.

    # Access verification
    if status:
        access_result = self.check_access()
        if not access_result[0]:
            logger.warning(access_result[1])
            status = False
            message = {"type": "ERROR", "message": access_result[1]}
        else:
            message = {"type": "OK"}
            logger.info(access_result[1])

    self._send(message)
    return status