def generate_certificate(joining_ia, core_ia, core_sign_priv_key_file,
                         core_cert_file, trc_file):
    """
    Generate the certificate chain, keys and master secret for a joining AS,
    signed with the core AS's signing key.
    """
    core_ia_chain = CertificateChain.from_raw(read_file(core_cert_file))
    # AS cert is always expired one second before the expiration of the
    # Core AS cert.
    validity = core_ia_chain.core_as_cert.expiration_time - int(time.time()) - 1
    comment = "AS Certificate"
    core_ia_sig_priv_key = base64.b64decode(read_file(core_sign_priv_key_file))
    public_key_sign, private_key_sign = generate_sign_keypair()
    public_key_encr, private_key_encr = generate_enc_keypair()
    cert = Certificate.from_values(
        str(joining_ia), str(core_ia), INITIAL_TRC_VERSION,
        INITIAL_CERT_VERSION, comment, False, validity, public_key_encr,
        public_key_sign, core_ia_sig_priv_key)
    sig_priv_key = base64.b64encode(private_key_sign).decode()
    enc_priv_key = base64.b64encode(private_key_encr).decode()
    sig_priv_key_raw = base64.b64encode(
        SigningKey(private_key_sign)._signing_key).decode()
    joining_ia_chain = CertificateChain(
        [cert, core_ia_chain.core_as_cert]).to_json()
    trc = read_file(trc_file)
    master_as_key = base64.b64encode(Random.new().read(16)).decode('utf-8')
    key_dict = {
        'enc_key': enc_priv_key,
        'sig_key': sig_priv_key,
        'sig_key_raw': sig_priv_key_raw,
        'master_as_key': master_as_key,
    }
    return ASCredential(joining_ia_chain, trc, key_dict)
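# Usage sketch for generate_certificate (the ISD-AS values and file paths are
# hypothetical; assumes the surrounding SCION helpers such as ISD_AS are
# importable):
#
#   as_credentials = generate_certificate(
#       ISD_AS("1-11"), ISD_AS("1-1"),
#       "gen/ISD1/AS1/keys/as-sig.seed",        # core signing key (base64)
#       "gen/ISD1/AS1/certs/ISD1-AS1-V0.crt",   # core AS certificate chain
#       "gen/ISD1/AS1/certs/ISD1-V0.trc")       # TRC file of the ISD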
def _load_credentials(as_path, isd_as):
    print("Updating AS%s" % isd_as)
    # The element to get the credentials from.
    # We assume that the beacon server exists in every AS configuration.
    key_dict = {}
    core_key_dict = {}
    as_path = os.path.join(
        PROJECT_ROOT, GEN_PATH,
        'ISD%s/AS%s' % (isd_as.isd_str(), isd_as.as_file_fmt()))
    instance_id = "bs%s-%s-1" % (isd_as.isd_str(), isd_as.as_file_fmt())
    instance_path = os.path.join(as_path, instance_id)
    topo_path = os.path.join(instance_path, TOPO_FILE)
    # Credential files for all ASes
    as_key_path = {
        'cert_path': get_cert_chain_file_path(instance_path, isd_as,
                                              INITIAL_CERT_VERSION),
        'trc_path': get_trc_file_path(instance_path, isd_as[0],
                                      INITIAL_TRC_VERSION),
        'enc_key_path': get_enc_key_file_path(instance_path),
        'sig_key_path': get_sig_key_file_path(instance_path),
        'sig_key_raw_path': get_sig_key_raw_file_path(instance_path),
        'as_config_path': os.path.join(instance_path, AS_CONF_FILE),
    }
    # Credential files for core ASes
    core_key_path = {
        'core_sig_key_path': get_core_sig_key_file_path(instance_path),
        'core_sig_key_raw_path': get_core_sig_key_raw_file_path(instance_path),
        'online_key_path': get_online_key_file_path(instance_path),
        'online_key_raw_path': get_online_key_raw_file_path(instance_path),
        'offline_key_path': get_offline_key_file_path(instance_path),
        'offline_key_raw_path': get_offline_key_raw_file_path(instance_path),
    }
    for key, path in as_key_path.items():
        try:
            if key.startswith('cert'):
                cert = _json_file_to_str(path)
            elif key.startswith('trc'):
                trc = _json_file_to_str(path)
            elif key.startswith('as'):
                as_config_dict = _yaml_file_to_dict(path)
                key_dict['master_as_key'] = as_config_dict['MasterASKey']
            else:
                # Strip the '_path' suffix from the key and the trailing
                # newline from the key file.
                key_name = key[:-5]
                key_dict[key_name] = read_file(path)[:-1]
        except IOError as err:
            print("IOError({0}): {1}".format(err, path))
            exit(1)
    tp = Topology.from_file(topo_path)
    if tp.is_core_as:
        for key, path in core_key_path.items():
            try:
                key_name = key[:-5]
                core_key_dict[key_name] = read_file(path)[:-1]
            except IOError as err:
                print("IOError({0}): {1}".format(err, path))
                exit(1)
    return ASCredential(cert, trc, key_dict, core_key_dict)
def _create_update_as(as_path, isd_id, as_id):
    """
    Copy the new credentials into the relevant DB tables of scion-web.
    If the AS does not already exist, a new entry is created in the AD table.
    :param as_path: Directory containing the new AS credentials.
    :type as_path: string
    :param isd_id: ISD the AS belongs to.
    :type isd_id: string
    :param as_id: AS ID.
    :type as_id: string
    """
    print("Updating AS %s, %s" % (isd_id, as_id))
    # The element to get the credentials from.
    # We assume that the beacon server exists in every AS configuration.
    elem_id = "bs%s-%s-1" % (isd_id, as_id)
    # TODO(ercanucan): use the built-in defines
    cert_file = "ISD%s-AS%s-V0.crt" % (isd_id, as_id)
    trc_file = "ISD%s-V0.trc" % isd_id
    cert_path = os.path.join(as_path, elem_id, CERT_DIR, cert_file)
    trc_path = os.path.join(as_path, elem_id, CERT_DIR, trc_file)
    # Fall back to the old signing key file name if the new one is absent.
    if os.path.exists(os.path.join(as_path, elem_id, KEYS_DIR, SIG_PRIV_KEY)):
        sig_priv_key_path = os.path.join(as_path, elem_id, KEYS_DIR,
                                         SIG_PRIV_KEY)
    else:
        sig_priv_key_path = os.path.join(as_path, elem_id, KEYS_DIR,
                                         SIG_PRIV_KEY_OLD)
    enc_priv_key_path = os.path.join(as_path, elem_id, KEYS_DIR, ENC_PRIV_KEY)
    as_config_path = os.path.join(as_path, elem_id, AS_CONF_FILE)
    cert = _json_file_to_str(cert_path)
    trc = _json_file_to_str(trc_path)
    sig_priv_key = read_file(sig_priv_key_path)
    enc_priv_key = read_file(enc_priv_key_path)
    as_config_dict = _yaml_file_to_dict(as_config_path)
    master_as_key = as_config_dict['MasterASKey']
    print("Calling update or create for AS %s, %s" % (isd_id, as_id))
    try:
        as_obj = AD.objects.get(as_id=as_id, isd_id=isd_id)
    except AD.DoesNotExist:
        print(as_id, "does not exist, creating it...")
        as_obj = AD.objects.create(as_id=as_id, isd_id=isd_id,
                                   original_topology={})
    print("Setting credentials for AS %s, %s" % (isd_id, as_id))
    as_obj.certificate = cert
    as_obj.trc = trc
    as_obj.sig_priv_key = sig_priv_key
    as_obj.enc_priv_key = enc_priv_key
    as_obj.master_as_key = master_as_key
    as_obj.save()
def get_core_sig_key(conf_dir):
    """
    Return the raw core signing key.
    :rtype: bytes
    """
    return base64.b64decode(read_file(get_core_sig_key_file_path(conf_dir)))
def _dispatcher_conf(self):
    entry = {
        'image': 'scion_dispatcher',
        'container_name': 'dispatcher',
        'restart': 'always',
        'network_mode': 'host',
        'environment': {
            'SU_EXEC_USERSPEC': self.user_spec,
        },
        'volumes': [
            '/etc/passwd:/etc/passwd:ro',
            '/etc/group:/etc/group:ro',
            '/run/shm/dispatcher:/run/shm/dispatcher:rw',
            self.output_base + '/gen/dispatcher:/share/conf:rw',
            self.output_base + '/logs:/share/logs:rw'
        ]
    }
    self.dc_conf['services']['dispatcher'] = entry
    # Create dispatcher config
    tmpl = Template(read_file("topology/zlog.tmpl"))
    cfg = self.out_dir + "/dispatcher/dispatcher.zlog.conf"
    write_file(cfg, tmpl.substitute(name="dispatcher", elem="dispatcher"))
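# For reference, the entry above serializes to roughly the following
# docker-compose service (illustrative; volume paths depend on output_base):
#
#   dispatcher:
#     image: scion_dispatcher
#     container_name: dispatcher
#     restart: always
#     network_mode: host
#     environment:
#       SU_EXEC_USERSPEC: <user spec>
#     volumes:
#       - /etc/passwd:/etc/passwd:ro
#       - ...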
def __get_word_count__(self) -> dict:
    '''
    Return a dictionary mapping each word to its occurrence count.
    If a word is already present in the dictionary, its counter is
    incremented by 1 (+= 1); otherwise the new key is initialized to 1 (= 1).
    '''
    # Read the file and get its content.
    content = read_file(self.file)
    # Get all words as a list.
    words = self.__get_words__(content)
    # For each word, set or increment its occurrence count.
    word_count_dic = {}
    for word in words:
        if word in word_count_dic:
            word_count_dic[word] += 1
        else:
            word_count_dic[word] = 1
    return word_count_dic
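# Design note: the loop above is the hand-rolled form of collections.Counter.
# A minimal equivalent sketch (assumes the same read_file helper and a plain
# whitespace tokenizer, which may differ from the real __get_words__):
#
#   from collections import Counter
#
#   def word_count(path) -> dict:
#       return dict(Counter(read_file(path).split()))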
def __init__(self, server_id, conf_dir):
    """
    :param str server_id: server identifier.
    :param str conf_dir: configuration directory.
    """
    super().__init__(server_id, conf_dir)
    self.sendq = Queue()
    sig_key_file = get_sig_key_file_path(self.conf_dir)
    self.signing_key = base64.b64decode(read_file(sig_key_file))
    self.segments = PathSegmentDB(max_res_no=1)
    # Maps of {ISD-AS: {steady path id: steady path}} for all incoming
    # (srcs) and outgoing (dests) steady paths:
    self.srcs = {}
    self.dests = {}
    # Map of SibraState objects by interface ID
    self.link_states = {}
    # Map of link types by interface ID
    self.link_types = {}
    self.lock = threading.Lock()
    self.CTRL_PLD_CLASS_MAP = {
        PayloadClass.PATH: {
            PMT.REG: self.handle_path_reg,
        },
        PayloadClass.SIBRA: {
            SIBRAPayloadType.EMPTY: self.handle_sibra_pkt
        },
    }
    self._find_links()
    name_addrs = "\0".join(
        [self.id, str(SCION_UDP_PORT), str(self.addr.host)])
    self.zk = Zookeeper(self.addr.isd_as, SIBRA_SERVICE, name_addrs,
                        self.topology.zookeepers)
    self.zk.retry("Joining party", self.zk.party_setup)
def create_new_ad_files(parent_ad_topo, isd_id, ad_id, out_dir):
    assert isinstance(parent_ad_topo, dict), 'Invalid topology dict'
    isd_ad_id = '{}-{}'.format(isd_id, ad_id)
    ad_dict = {
        "default_zookeepers": {"1": {"manage": False, "addr": "localhost"}},
        isd_ad_id: {'level': 'LEAF'},
    }
    gen = ConfigGenerator(out_dir=out_dir)
    path_policy_file = DEFAULT_PATH_POLICY_FILE
    zk_config = DEFAULT_ZK_CONFIG
    # Write basic config files for the new AD
    with tempfile.NamedTemporaryFile('w') as temp_fh:
        json.dump(ad_dict, temp_fh)
        temp_fh.flush()
        gen.generate_all(temp_fh.name, path_policy_file, zk_config)
    # Copy TRC file
    trc_path = get_some_trc_path(isd_id)
    if trc_path:
        dst_path = get_trc_file_path(isd_id, ad_id, isd_id, 0,
                                     isd_dir=out_dir)
        shutil.copyfile(trc_path, dst_path)
    new_topo_path = gen.path_dict(isd_id, ad_id)['topo_file_abs']
    new_topo_file = read_file(new_topo_path)
    new_topo = json.loads(new_topo_file)
    existing_topo, new_topo = link_topologies(parent_ad_topo, new_topo,
                                              'PARENT_CHILD')
    # Update the config files for the new AD
    write_file(new_topo_path, json.dumps(new_topo, sort_keys=True, indent=4))
    gen.write_derivatives(new_topo)
    return new_topo, existing_topo
def get_enc_key(conf_dir):
    """
    Return the private encryption key.
    :rtype: PrivateKey
    """
    return PrivateKey(
        base64.b64decode(read_file(get_enc_key_file_path(conf_dir))))
def __init__(self, server_id, conf_dir):
    """
    :param str server_id: server identifier.
    :param str conf_dir: configuration directory.
    """
    super().__init__(server_id, conf_dir)
    # TODO: add 2 policies
    self.path_policy = PathPolicy.from_file(
        os.path.join(conf_dir, PATH_POLICY_FILE))
    sig_key_file = get_sig_key_file_path(self.conf_dir)
    self.signing_key = base64.b64decode(read_file(sig_key_file))
    self.of_gen_key = kdf(self.config.master_as_key, b"Derive OF Key")
    self.hashtree_gen_key = kdf(self.config.master_as_key,
                                b"Derive hashtree Key")
    logging.info(self.config.__dict__)
    self._hash_tree = None
    self._hash_tree_lock = Lock()
    self._next_tree = None
    self._init_hash_tree()
    self.ifid_state = {}
    for ifid in self.ifid2br:
        self.ifid_state[ifid] = InterfaceState()
    self.ifid_state_lock = RLock()
    self.CTRL_PLD_CLASS_MAP = {
        PayloadClass.PCB: {
            None: self.handle_pcb
        },
        PayloadClass.IFID: {
            None: self.handle_ifid_packet
        },
        PayloadClass.CERT: {
            CertMgmtType.CERT_CHAIN_REQ: self.process_cert_chain_request,
            CertMgmtType.CERT_CHAIN_REPLY: self.process_cert_chain_reply,
            CertMgmtType.TRC_REPLY: self.process_trc_reply,
            CertMgmtType.TRC_REQ: self.process_trc_request,
        },
        PayloadClass.PATH: {
            PMT.IFSTATE_REQ: self._handle_ifstate_request,
            PMT.REVOCATION: self._handle_revocation,
        },
    }
    self.SCMP_PLD_CLASS_MAP = {
        SCMPClass.PATH: {
            SCMPPathClass.REVOKED_IF: self._handle_scmp_revocation,
        },
    }
    zkid = ZkID.from_values(self.addr.isd_as, self.id,
                            [(self.addr.host, self._port)]).pack()
    self.zk = Zookeeper(self.addr.isd_as, BEACON_SERVICE, zkid,
                        self.topology.zookeepers)
    self.zk.retry("Joining party", self.zk.party_setup)
    self.pcb_cache = ZkSharedCache(self.zk, self.ZK_PCB_CACHE_PATH,
                                   self._handle_pcbs_from_zk)
    self.revobjs_cache = ZkSharedCache(self.zk, self.ZK_REVOCATIONS_PATH,
                                       self.process_rev_objects)
    self.local_rev_cache = ExpiringDict(
        1000, HASHTREE_EPOCH_TIME + HASHTREE_EPOCH_TOLERANCE)
    self._rev_seg_lock = RLock()
def get_sig_key(conf_dir):
    """
    Return the signing key.
    :rtype: SigningKey
    """
    return SigningKey(
        base64.b64decode(read_file(get_sig_key_file_path(conf_dir))))
def _init_trcs(self):  # pragma: no cover
    trcfiles = list(glob.glob("%s/*.trc" % self._dir))
    trcfiles.extend(
        glob.glob("%s/%s-*.trc" % (self._cachedir, self._ename)))
    for path in trcfiles:
        trc_raw = read_file(path)
        self.add_trc(TRC.from_raw(trc_raw), write=False)
        logging.debug("Loaded: %s" % path)
def _init_certs(self):  # pragma: no cover
    certfiles = list(glob.glob("%s/*.crt" % self._dir))
    certfiles.extend(
        glob.glob("%s/%s-*.crt" % (self._cachedir, self._ename)))
    for path in certfiles:
        cert_raw = read_file(path)
        self.add_cert(CertificateChain.from_raw(cert_raw), write=False)
        logging.debug("Loaded: %s" % path)
def get_master_key(conf_dir, master_key):
    """
    Return the raw master key.
    :rtype: bytes
    """
    return base64.b64decode(
        read_file(get_master_key_file_path(conf_dir, master_key)))
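# get_master_key and the other key getters above (get_core_sig_key,
# get_enc_key, get_sig_key) all read base64-encoded key files, so a writer
# is the mirror image. A sketch (write_master_key is a hypothetical helper,
# shown only to illustrate the on-disk format):
#
#   def write_master_key(conf_dir, master_key, raw: bytes):
#       write_file(get_master_key_file_path(conf_dir, master_key),
#                  base64.b64encode(raw).decode())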
def test_read_file(self):
    filepath = "/some/file/path/file_name.txt"
    file_content = "file content"
    mock_open = mock.mock_open(read_data=file_content)
    # Patch the Python 2 built-in open(); under Python 3 the patch target
    # would be 'builtins.open'.
    with mock.patch('__builtin__.open', mock_open):
        actual = util.read_file(filepath)
    mock_open.assert_called_once_with(filepath)
    eq_(file_content, actual)
def write_zlog_file(service_type, instance_name, instance_path):
    """
    Creates and writes the zlog configuration file for the given element.
    :param str service_type: the type of the service (e.g. beacon_server).
    :param str instance_name: the instance of the service (e.g. br1-8-1).
    :param str instance_path: the directory of the instance.
    """
    tmpl = Template(read_file(os.path.join(PROJECT_ROOT,
                                           "topology/zlog.tmpl")))
    cfg = os.path.join(instance_path, "%s.zlog.conf" % instance_name)
    write_file(cfg, tmpl.substitute(name=service_type, elem=instance_name))
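# Usage sketch (instance names follow the "<type><isd>-<as>-<n>" convention
# from the docstring above; the path is illustrative):
#
#   write_zlog_file("beacon_server", "bs1-8-1", "gen/ISD1/AS8/bs1-8-1")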
def _write_elem_mininet_conf(self, elem, elem_dir):
    tmpl = Template(read_file("python/mininet/supervisord.conf"))
    mn_conf_path = os.path.join(self.args.output_dir, "mininet",
                                "%s.conf" % elem)
    rel_conf_path = os.path.relpath(
        os.path.join(elem_dir, SUPERVISOR_CONF),
        os.path.join(self.args.output_dir, "mininet")
    )
    write_file(mn_conf_path,
               tmpl.substitute(elem=elem, conf_path=rel_conf_path,
                               user=getpass.getuser()))
def _write_elem_mininet_conf(self, elem, conf_path):
    tmpl = Template(read_file("topology/mininet/supervisord.conf"))
    mn_conf_path = os.path.join(self.out_dir, "mininet", "%s.conf" % elem)
    rel_conf_path = os.path.relpath(
        conf_path, os.path.join(self.out_dir, "mininet"))
    write_file(
        mn_conf_path,
        tmpl.substitute(elem=elem, conf_path=rel_conf_path,
                        user=getpass.getuser()))
def __get_body_from_file(self):
    if not util.file_exists(self.absolute_filepath):
        return ""
    file_content = util.read_file(self.absolute_filepath)
    sections = file_content.split(SEPARATOR)
    if len(sections) > 1:
        return sections[1]
    return ""
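# The file layout this parser assumes: the body is the second
# SEPARATOR-delimited section. Illustrative only; SEPARATOR's actual value
# is defined elsewhere:
#
#   <front matter>
#   <SEPARATOR>
#   <body returned by __get_body_from_file>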
def _infra_dispatcher(self, entry, topo_id):
    # Create dispatcher for Infra
    net = self.elem_networks["disp" + topo_id.file_fmt()][0]
    ip = str(net['ipv4'])
    entry['networks'][self.bridges[net['net']]] = {'ipv4_address': ip}
    entry['container_name'] = '%sdisp_%s' % (self.prefix,
                                             topo_id.file_fmt())
    entry['volumes'].append(self._disp_vol(topo_id))
    self.dc_conf['services']['scion_disp_%s' % topo_id.file_fmt()] = entry
    # Write log config file
    cfg = "%s/dispatcher/%s.zlog.conf" % (
        topo_id.base_dir(self.args.output_dir), "dispatcher")
    tmpl = Template(read_file("topology/zlog.tmpl"))
    write_file(cfg, tmpl.substitute(name="dispatcher",
                                    elem="disp_%s" % topo_id.file_fmt()))
def __init__(self, server_id, conf_dir):
    """
    :param str server_id: server identifier.
    :param str conf_dir: configuration directory.
    """
    super().__init__(server_id, conf_dir)
    # TODO: add 2 policies
    self.path_policy = PathPolicy.from_file(
        os.path.join(conf_dir, PATH_POLICY_FILE))
    self.unverified_beacons = deque()
    self.trc_requests = {}
    self.trcs = {}
    sig_key_file = get_sig_key_file_path(self.conf_dir)
    self.signing_key = base64.b64decode(read_file(sig_key_file))
    self.of_gen_key = PBKDF2(self.config.master_as_key, b"Derive OF Key")
    logging.info(self.config.__dict__)
    self.if2rev_tokens = {}
    self._if_rev_token_lock = threading.Lock()
    self.revs_to_downstream = ExpiringDict(max_len=1000, max_age_seconds=60)
    self.ifid_state = {}
    for ifid in self.ifid2er:
        self.ifid_state[ifid] = InterfaceState()
    self.CTRL_PLD_CLASS_MAP = {
        PayloadClass.PCB: {
            PCBType.SEGMENT: self.handle_pcb
        },
        PayloadClass.IFID: {
            IFIDType.PAYLOAD: self.handle_ifid_packet
        },
        PayloadClass.CERT: {
            CertMgmtType.CERT_CHAIN_REPLY: self.process_cert_chain_rep,
            CertMgmtType.TRC_REPLY: self.process_trc_rep,
        },
        PayloadClass.PATH: {
            PMT.IFSTATE_REQ: self._handle_ifstate_request
        },
    }
    # Add more IPs here if we support dual-stack
    name_addrs = "\0".join(
        [self.id, str(SCION_UDP_PORT), str(self.addr.host)])
    self.zk = Zookeeper(self.addr.isd_as, BEACON_SERVICE, name_addrs,
                        self.topology.zookeepers)
    self.zk.retry("Joining party", self.zk.party_setup)
    self.incoming_pcbs = deque()
    self.pcb_cache = ZkSharedCache(self.zk, self.ZK_PCB_CACHE_PATH,
                                   self.process_pcbs)
    self.revobjs_cache = ZkSharedCache(self.zk, self.ZK_REVOCATIONS_PATH,
                                       self.process_rev_objects)
def _create_ad_marking(self):
    """
    Create an AD Marking with the given ingress and egress interfaces.
    """
    hof = HopOpaqueField.from_values(1, 111, 222)
    rev_token = HashChain(Random.new().read(32)).next_element()
    pcbm = PCBMarking.from_values(1, 10, hof)
    peer_markings = []
    signing_key = base64.b64decode(read_file(get_sig_key_file_path(1, 10)))
    data_to_sign = b'11' + pcbm.hof.pack()
    signature = sign(data_to_sign, signing_key)
    return ADMarking.from_values(pcbm, peer_markings, rev_token, signature)
def generate_certificate(joining_ia, core_ia, core_sign_priv_key_file,
                         core_cert_file, trc_file):
    """
    Generate the certificate chain and keys for a joining AS, using the
    default AS validity period and signing with the core AS's key.
    """
    validity = Certificate.AS_VALIDITY_PERIOD
    comment = "AS Certificate"
    core_ia_sig_priv_key = base64.b64decode(read_file(core_sign_priv_key_file))
    public_key_sign, private_key_sign = generate_sign_keypair()
    public_key_encr, private_key_encr = generate_enc_keypair()
    cert = Certificate.from_values(
        str(joining_ia), str(core_ia), INITIAL_TRC_VERSION,
        INITIAL_CERT_VERSION, comment, False, validity, public_key_encr,
        public_key_sign, core_ia_sig_priv_key)
    core_ia_chain = CertificateChain.from_raw(read_file(core_cert_file))
    sig_priv_key = base64.b64encode(private_key_sign).decode()
    enc_priv_key = base64.b64encode(private_key_encr).decode()
    joining_ia_chain = CertificateChain(
        [cert, core_ia_chain.core_as_cert]).to_json()
    trc = read_file(trc_file)
    master_as_key = base64.b64encode(Random.new().read(16)).decode('utf-8')
    return ASCredential(sig_priv_key, enc_priv_key, joining_ia_chain, trc,
                        master_as_key)
def _br_dispatcher(self, entry, topo_id, topo):
    # Create dispatcher for BR Ctrl Port
    for k in topo.get("BorderRouters", {}):
        ctrl_net = self.elem_networks[k + "_ctrl"][0]
        ctrl_ip = str(ctrl_net['ipv4'])
        entry['networks'][self.bridges[ctrl_net['net']]] = {
            'ipv4_address': ctrl_ip
        }
    entry['container_name'] = '%sdisp_br_%s' % (self.prefix,
                                                topo_id.file_fmt())
    vol = 'vol_%sdisp_br_%s:/run/shm/dispatcher:rw' % (self.prefix,
                                                       topo_id.file_fmt())
    entry['volumes'].append(vol)
    entry['environment']['ZLOG_CFG'] = "/share/conf/disp_br.zlog.conf"
    self.dc_conf['services']['scion_disp_br_%s' % topo_id.file_fmt()] = entry
    # Write log config file
    cfg = "%s/dispatcher/%s.zlog.conf" % (
        topo_id.base_dir(self.args.output_dir), "disp_br")
    tmpl = Template(read_file("topology/zlog.tmpl"))
    write_file(cfg, tmpl.substitute(name="dispatcher",
                                    elem="disp_br_%s" % topo_id.file_fmt()))
def run(conf):
    parames = get_parames(conf)
    user_file = parames.get("user").get("address", "")
    work_num = parames.get("master").get("work_num", "2")
    if not user_file:
        raise Exception("Please give a user file")
    user_list = read_file(user_file)
    if not user_list:
        raise Exception("User file has no data")
    pool = multiprocessing.Pool(int(work_num))
    for one in user_list:
        # Each line is a JSON object; parse it once and pull out the fields.
        response = json.loads(one).get("response")
        username = response.get("username")
        password = response.get("password")
        pool.apply_async(target, args=(username, password, parames))
    pool.close()
    pool.join()
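# Expected user-file format: one JSON object per line, matching the parsing
# above (illustrative values):
#
#   {"response": {"username": "alice", "password": "hunter2"}}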
def run(conf):
    parames = get_parames(conf)
    user_file = parames.get("user").get("buy", "")
    work_num = parames.get("master").get("work_num", "2")
    if not user_file:
        raise Exception("Please give a user file")
    user_list = read_file(user_file)
    if not user_list:
        raise Exception("User file has no data")
    queue = multiprocessing.Queue()
    record_list = multiprocessing.Manager().list()
    process_pool = []
    # Start the worker processes, then feed them users via the queue.
    for i in range(int(work_num)):
        p = QueueProcess(record_queue=queue, record_list=record_list,
                         parames=parames)
        p.start()
        process_pool.append(p)
    for one in user_list:
        queue.put(one)
    for j in process_pool:
        j.join()
def main(in_path, out_path):
    cwd = os.getcwd()

    def relpath(path):
        return os.path.relpath(path, cwd)

    with util.TemporaryDirectory() as temp_dir:
        temp_deps_path = os.path.join(temp_dir, 'deps')
        temp_mk_path = os.path.join(temp_dir, 'mk')
        temp_files_path = os.path.join(temp_dir, 'files')
        _, out_ext = os.path.splitext(out_path)
        # OpenSCAD requires the output file name to end in .stl or .dxf.
        temp_out_path = os.path.join(temp_dir, 'out' + out_ext)
        _openscad(in_path, temp_out_path, temp_deps_path)
        mk_content = '%:; echo "$@" >> {}'.format(
            util.bash_escape_string(temp_files_path))
        # Use make to parse the dependency makefile written by OpenSCAD.
        util.write_file(temp_mk_path, mk_content.encode())
        util.command(
            ['make', '-s', '-B', '-f', temp_mk_path, '-f', temp_deps_path],
            remove_env=['MAKELEVEL', 'MAKEFLAGS'])
        # All dependencies as paths relative to the project root.
        deps = set(
            map(relpath,
                util.read_file(temp_files_path).decode().splitlines()))
        # Relative paths to all files that should not appear in the
        # dependency makefile.
        ignored_files = set(
            map(relpath,
                [in_path, temp_deps_path, temp_mk_path, temp_out_path]))
        # Write output files.
        make.write_dependencies(out_path + '.d', out_path,
                                deps - ignored_files)
        util.rename_atomic(temp_out_path, out_path)
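# How the make trick above works: temp_mk_path contains a single
# match-anything pattern rule,
#
#   %:; echo "$@" >> <temp_files_path>
#
# and 'make -B' treats every target named in the OpenSCAD-generated deps
# file as out of date, so make "rebuilds" each one by appending its name to
# the files list, which is then read back as the dependency set.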
def _dispatcher_conf(self, topo_id, base):
    # Create dispatcher config
    entry = {
        'image': 'scion_dispatcher',
        'container_name': 'scion_%sdisp_sig_%s' % (self.prefix,
                                                   topo_id.file_fmt()),
        'environment': {
            'SU_EXEC_USERSPEC': self.user_spec,
            'ZLOG_CFG': '/share/conf/disp_sig.zlog.conf'
        },
        'networks': {},
        'volumes': [
            *DOCKER_USR_VOL,
            self._disp_vol(topo_id),
            '%s:/share/conf:rw' % os.path.join(base, 'dispatcher'),
            self._logs_vol()
        ]
    }
    net = self.args.networks['sig_%s' % topo_id.file_fmt()][0]
    entry['networks'][self.args.bridges[net['net']]] = {
        'ipv4_address': str(net['ipv4'])
    }
    self.dc_conf['services']['scion_disp_sig_%s' % topo_id.file_fmt()] = entry
    vol_name = 'vol_scion_%sdisp_sig_%s' % (self.prefix, topo_id.file_fmt())
    self.dc_conf['volumes'][vol_name] = None
    # Write log config file
    cfg = "%s/dispatcher/%s.zlog.conf" % (
        topo_id.base_dir(self.args.output_dir), "disp_sig")
    tmpl = Template(read_file("topology/zlog.tmpl"))
    write_file(
        cfg,
        tmpl.substitute(name="dispatcher",
                        elem="disp_sig_%s" % topo_id.file_fmt()))
def get_overlay():
    file_path = os.path.join(GEN_PATH, OVERLAY_FILE)
    return read_file(file_path).strip()
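# The overlay file holds a single overlay-type string, e.g. "UDP/IPv4"
# (the same value used for SIMPLE_CONF_OVERLAY_TYPE further below).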
def _write_zlog_cfg(self, name, elem, elem_dir):
    tmpl = Template(read_file("topology/zlog.tmpl"))
    cfg = os.path.join(elem_dir, "%s.zlog.conf" % elem)
    write_file(cfg, tmpl.substitute(name=name, elem=elem))
def main(options):
    valid_emails_file = str(
        Path(__file__).absolute().parent / 'log' / 'valid_emails.txt')
    tried_logins_file = str(
        Path(__file__).absolute().parent / 'log' / 'tried_logins.txt')
    valid_logins_file = str(
        Path(__file__).absolute().parent / 'log' / 'valid_logins.txt')

    load_balancer = SSHLoadBalancer(hosts=options.ssh, key=options.key,
                                    key_pass=options.key_pass,
                                    base_port=options.base_port,
                                    current_ip=(not options.no_current_ip))

    for password in options.passwords:
        sprayer = MSOLSpray(emails=options.emails, password=password,
                            url=options.url, force=options.force,
                            load_balancer=load_balancer,
                            verbose=options.verbose,
                            skip_logins=util.read_file(tried_logins_file))
        try:
            load_balancer.start()
            for proxy in load_balancer.proxies:
                log.debug(f'Proxy: {proxy}')
            log.info(f'Spraying {len(options.emails):,} users against '
                     f'{options.url} at {time.ctime()}')
            log.info(f'Command: {" ".join(sys.argv)}')
            for i, result in enumerate(sprayer.spray()):
                print(f' Sprayed {i+1:,} accounts\r', end='', flush=True)
                if options.verbose and options.delay > 0:
                    log.debug(f'Sleeping for {options.delay:,} seconds')
                sleep(options.delay)
            log.info(f'Finished spraying {len(options.emails):,} users '
                     f'against {options.url} at {time.ctime()}')
            for success in sprayer.valid_logins:
                log.critical(success)
        finally:
            load_balancer.stop()
            # write valid emails
            util.update_file(valid_emails_file, sprayer.valid_emails)
            log.debug(f'{len(sprayer.valid_emails):,} valid emails written '
                      f'to {valid_emails_file}')
            # write attempted logins
            util.update_file(tried_logins_file, sprayer.tried_logins)
            # write valid logins
            util.update_file(valid_logins_file, sprayer.valid_logins)
            log.debug(f'{len(sprayer.valid_logins):,} valid user/pass combos '
                      f'written to {valid_logins_file}')
def main(options):
    if options.recon:
        for domain in options.recon:
            discovery = DomainDiscovery(domain)
            discovery.recon()
            consider = 'You can also try:\n'
            for suggestion in discovery.suggest():
                consider += f'    - {suggestion}\n'
            log.info(consider)

    if options.delay and options.ssh:
        # Spread the configured delay across all source IPs.
        num_ips = len(options.ssh) + (0 if options.no_current_ip else 1)
        new_delay = options.delay / num_ips
        log.debug(f'Adjusting delay for {num_ips:,} IPs: '
                  f'{options.delay:.2f}s --> {new_delay:.2f}s per IP')
        options.delay = new_delay

    if options.passwords and options.emails:
        valid_emails_file = str(trevorspray_dir / 'valid_emails.txt')
        tried_logins_file = str(trevorspray_dir / 'tried_logins.txt')
        valid_logins_file = str(trevorspray_dir / 'valid_logins.txt')

        load_balancer = SSHLoadBalancer(hosts=options.ssh, key=options.key,
                                        key_pass=options.key_pass,
                                        base_port=options.base_port,
                                        current_ip=(not options.no_current_ip))

        for password in options.passwords:
            sprayer = MSOLSpray(emails=options.emails, password=password,
                                url=options.url, force=options.force,
                                load_balancer=load_balancer,
                                verbose=options.verbose,
                                skip_logins=util.read_file(tried_logins_file))
            try:
                load_balancer.start()
                for proxy in load_balancer.proxies:
                    log.debug(f'Proxy: {proxy}')
                log.info(f'Spraying {len(options.emails):,} users against '
                         f'{options.url} at {time.ctime()}')
                log.info(f'Command: {" ".join(sys.argv)}')
                for i, result in enumerate(sprayer.spray()):
                    print(f' Sprayed {i+1:,} accounts\r', end='', flush=True)
                    if options.delay or options.jitter:
                        delay = float(options.delay)
                        jitter = random.random() * options.jitter
                        delay += jitter
                        if options.verbose and delay > 0:
                            # Log the actual sleep time, including jitter.
                            log.debug(f'Sleeping for {delay:.2f} seconds '
                                      f'({options.delay:.2f}s delay + '
                                      f'{jitter:.2f}s jitter)')
                        sleep(delay)
                log.info(f'Finished spraying {len(options.emails):,} users '
                         f'against {options.url} at {time.ctime()}')
                for success in sprayer.valid_logins:
                    log.critical(success)
            finally:
                load_balancer.stop()
                # write valid emails
                util.update_file(valid_emails_file, sprayer.valid_emails)
                log.debug(f'{len(sprayer.valid_emails):,} valid emails '
                          f'written to {valid_emails_file}')
                # write attempted logins
                util.update_file(tried_logins_file, sprayer.tried_logins)
                # write valid logins
                util.update_file(valid_logins_file, sprayer.valid_logins)
                log.debug(f'{len(sprayer.valid_logins):,} valid user/pass '
                          f'combos written to {valid_logins_file}')
# Stdlib
import os
from string import Template

# External packages
from django.shortcuts import get_object_or_404

# SCION
from lib.util import read_file

# SCION-WEB
from ad_manager.models import (
    AD,
    ConnectionRequest,
)

SIMPLE_CONF_OVERLAY_TYPE = 'UDP/IPv4'
SIMPLE_CONF_DIR = os.path.dirname(os.path.abspath(__file__))
SimpleConfTemplate = Template(read_file(
    os.path.join(SIMPLE_CONF_DIR, "simple_config_topo.tmpl")))


def prep_simple_conf_con_req(as_obj, topo_dict, user):
    """
    Creates the connection request object based on the simple topo values
    and saves it into the database.
    :param AD as_obj: The database object of the AS.
    :param topo_dict: Topology as a dictionary object.
    :param User user: Django user.
    :returns: Connection request object.
    :rtype: ConnectionRequest
    """
    router_name = 'br%s-%s-1' % (as_obj.isd_id, as_obj.as_id)
    router = topo_dict['BorderRouters'][router_name]
    interface = router['Interfaces']['1']