Esempio n. 1
0
async def get_log(ctx, logfile):
    """Download *logfile* from the remote web-log directory if it exists.

    Checks the remote listing over ssh first; on a hit, fetches the file
    via sftp into the current working directory and reports its size.
    """
    listing_cmd = 'ssh %s@%s ls HomeAlone/code/logs/web/' % (HOSTN, SERVE)
    remote_files = utils.cmd(listing_cmd, False)
    if logfile not in remote_files:
        return
    await ctx.send('Found %s. Downloading it now.' % logfile)
    fetch_cmd = f"sftp {HOSTN}@{SERVE}:/home/{HOSTN}/HomeAlone/code/logs/web/ <<< $'get {logfile}'"
    utils.arr2str(utils.cmd(fetch_cmd, False))
    size = os.path.getsize(os.getcwd() + '/%s' % logfile)
    await ctx.send('Finished downloading**[%d bytes]**' % size)
Esempio n. 2
0
 def query_shares(self, csock, caddr, api_req):
     """Reply on *csock* with the list of files under PoolData/Shares."""
     shares_dir = os.getcwd() + '/PoolData/Shares'
     header = 'Shared Files on %s@%s:\n' % (self.info['host'],
                                            self.info['internal'])
     reply = header + utils.arr2str(os.listdir(shares_dir))
     csock.send(reply)
     return csock
    def test(self, loader: DataLoader, logdir: Path):
        """Run the model over *loader* in eval mode and log per-sample measures.

        Writes one summary entry per batch (item 0 only) via ``CustomWriter``
        and prints the running and final average measures.

        :param loader: DataLoader over the test set.
        :param logdir: directory for the summary writer; its name prefix
            (before ``_``) is used as the summary group.
        """
        group = logdir.name.split('_')[0]

        self.writer = CustomWriter(str(logdir), group=group)

        # Running sum of per-iteration measures; averaged over the dataset below.
        avg_measure = None
        self.model.eval()

        pbar = tqdm(loader, desc=group, dynamic_ncols=True)
        for i_iter, data in enumerate(pbar):
            # get data
            x, y = self._pre(data, loader.dataset)  # B, C, T
            T_ys = data['T_ys']

            # forward
            output = self.model(x)  # [..., :y.shape[-1]]

            # write summary (only item 0 of each batch is evaluated)
            one_sample = MulchWavDataset.decollate_padded(data, 0)  # F, T, C

            out_one = self._post_one(output, T_ys, 0, loader.dataset.norm_out)

            measure = self.writer.write_one(i_iter, **out_one, **one_sample)
            if avg_measure is None:
                avg_measure = measure
            else:
                avg_measure += measure

            # print running measure in the progress bar
            str_measure = arr2str(measure).replace('\n', '; ')
            pbar.write(str_measure)

        self.model.train()

        # NOTE(review): raises TypeError if the loader was empty
        # (avg_measure stays None) — confirm loaders always yield a batch.
        avg_measure /= len(loader.dataset)

        self.writer.add_text(f'{group}/Average Measure/Proposed',
                             str(avg_measure[0]))
        self.writer.add_text(f'{group}/Average Measure/Reverberant',
                             str(avg_measure[1]))
        self.writer.close()  # Explicitly close

        print()
        str_avg_measure = arr2str(avg_measure).replace('\n', '; ')
        print(f'Average: {str_avg_measure}')
Esempio n. 4
0
async def list_log_files(ctx):
    """Send the remote web-log directory listing to the Discord channel.

    Falls back to printing the listing locally if sending fails
    (e.g. the message exceeds Discord's length limit).
    """
    await ctx.send('*Getting list of log files*')
    c = 'ssh %s@%s ls -la HomeAlone/code/logs/web/' % (HOSTN, SERVE)
    result = '```' + (utils.arr2str(utils.cmd(c, False))) + '```'
    try:
        await ctx.send(result)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; keep the best-effort local fallback.
        print(result)
Esempio n. 5
0
async def check_alarm(ctx, filename, n):
    """Poll the remote alarm file every 35 s while ARMED; alert the channel.

    :param ctx: Discord command context used for sending alert messages.
    :param filename: name of the watched log file.
    :param n: last-seen entry count (overwritten from global N on each hit).
    """
    print('[-] Checking Alarm File')
    while ARMED:
        await asyncio.sleep(35)
        try:
            # Fetch the remote "alarm" file into the CWD via sftp.
            c = f"sftp {HOSTN}@{SERVE}:/home/{HOSTN}/TripWire/tripwire/.alerts/alarm/ <<< $'get alarm'"
            utils.arr2str(utils.cmd(c, False))
            # NOTE(review): utils.swap's semantics are not visible here —
            # presumably it returns the downloaded alarm contents; confirm.
            if filename in utils.swap(filename, True):
                # NOTE(review): `N` is not defined in this snippet — confirm
                # it is a module-level constant.
                n = N
                m = '{0.author.mention} **New Connection <a:siren:833794872204722248> **'.format(
                    ctx.message)
                m += '```' + utils.arr2str(
                    utils.cmd(f"tail -n 3 (unknown) ", False)) + '```'
                await ctx.send(m)

        except IndexError:
            # Command output can be empty before the first alarm exists.
            print('[!] Unable to read log file')
            pass
    def validate(self, loader: DataLoader, logdir: Path, epoch: int):
        """ Evaluate the performance of the model.

        Runs one pass over *loader* in eval mode, accumulates the
        per-criterion loss, logs it to the summary writer, and writes one
        sample summary for the first batch.

        :param loader: DataLoader to use.
        :param logdir: path of the result files.
        :param epoch: current epoch index, used as the summary step.
        :return: per-criterion average loss (tensor on ``self.out_device``).
        """

        self.model.eval()

        avg_loss = torch.zeros(len(self.criterions), device=self.out_device)

        pbar = tqdm(loader,
                    desc='validate ',
                    postfix='[0]',
                    dynamic_ncols=True)
        for i_iter, data in enumerate(pbar):
            # get data
            x, y = self._pre(data, loader.dataset)  # B, C, F, T
            T_ys = data['T_ys']

            # forward
            output = self.model(x)  # [..., :y.shape[-1]]

            # loss
            loss = self._calc_loss(y, output, T_ys)
            avg_loss += loss

            # print per-batch mean loss in the progress bar
            loss_np = loss.cpu().numpy() / len(T_ys)
            pbar.set_postfix_str(arr2str(loss_np, ndigits=1))

            # write summary for the first sample of the first batch only
            if i_iter == 0:
                out_one = self._post_one(output, T_ys, 0,
                                         loader.dataset.norm_out)

                # presumably the writer keeps the reference sample after the
                # first write, so decollating again is unnecessary — confirm
                if not self.writer.reused_sample:
                    one_sample = MulchWavDataset.decollate_padded(data, 0)
                else:
                    one_sample = dict()
                self.writer.write_one(epoch, **out_one, **one_sample)

        avg_loss /= len(loader.dataset)
        tag = 'loss/valid'
        self.writer.add_scalar(tag, avg_loss.sum().item(), epoch)
        # per-criterion breakdown when training with multiple losses
        if len(self.criterions) > 1:
            for idx, (n, ll) in enumerate(zip(hp.criterion_names, avg_loss)):
                self.writer.add_scalar(f'{tag}/{idx + 1}. {n}', ll.item(),
                                       epoch)

        self.model.train()

        return avg_loss
Esempio n. 7
0
async def set_alarm(ctx, filename):
    """Arm connection alerting on *filename* and spawn the polling task.

    Downloads the log, registers its remote path in filelist.txt, counts
    its current 'Connection at ' entries, and starts check_alarm().  On
    any failure, replies with the list of valid log files instead.
    """
    global ARMED  # the original assigned a local, leaving the module flag unset
    try:
        # Build the remote path before using it: the original referenced
        # PATH in the f-string before assigning it (NameError on every call),
        # used the undefined name SERVER instead of SERVE, and invoked an
        # undefined command variable c2.
        PATH = f'/home/{HOSTN}/HomeAlone/code/logs/web/{filename}'
        c = f"sftp {HOSTN}@{SERVE}:/home/{HOSTN}/HomeAlone/code/logs/web/ <<< $'get {filename}'\n"
        c += f"ssh {HOSTN}@{SERVE} echo '{PATH}' >> /home/{HOSTN}/HomeAlone/code/filelist.txt\n"
        utils.arr2str(utils.cmd(c, False))
        n = int(
            utils.cmd("cat %s| grep 'Connection at ' | wc -l" % filename,
                      False).pop())
        await ctx.send(
            ':ok_hand: *Setting Alarm on %s*, which currently has **%d** entries.'
            % (filename, n))
        ARMED = True
        bot.loop.create_task(check_alarm(ctx, filename, n))
    except Exception:
        # Tell the caller what went wrong: the original built `result`
        # but never sent it, silently dropping the hint message.
        c = 'ssh %s@%s ls -la HomeAlone/code/logs/web/' % (HOSTN, SERVE)
        result = 'Something went wrong... Select one of these to set alarm on:\n'
        result += '```' + (utils.arr2str(utils.cmd(c, False))) + '```'
        await ctx.send(result)
Esempio n. 8
0
async def show_connection(ctx):
    """Report active network connections on the remote host via netstat.

    If sending the full output fails (e.g. Discord length limit), print it
    locally and send only the portion after the connection-table header.
    """
    # Dropped a no-op .format(ctx.message): the string has no placeholders.
    msg = 'Aw Geez, lets see who is connected <:morty:833787213766066176>'
    await ctx.send(msg)
    c = 'ssh %s@%s netstat -antup' % (HOSTN, SERVE)
    result = '```' + (utils.arr2str(utils.cmd(c, False))) + '```'
    try:
        await ctx.send(result)
    except Exception:
        print(result)
        # The original concatenated str + list (the [1:] slice of split()),
        # raising TypeError inside the handler; join the tail instead.
        await ctx.send('```' + ''.join(result.split('established)\n')[1:]))
Esempio n. 9
0
async def read_log(ctx, filename):
    """Stream the remote log *filename* to the channel in 1 KiB chunks.

    The file is fetched via sftp into the CWD, sent piecewise with the
    server address redacted, and deleted afterwards.
    """
    c = f'ssh {HOSTN}@{SERVE} ls HomeAlone/code/logs/web/'
    if filename in utils.cmd(c, False):
        await ctx.send('**This will take a minute...**')
        await ctx.send('<a:rickspin:834261749846507520>')
        c = f"sftp {HOSTN}@{SERVE}:/home/{HOSTN}/HomeAlone/code/logs/web/ <<< $'get {filename}'"
        utils.arr2str(utils.cmd(c, False))
        # Context manager guarantees the handle is closed even on error.
        with open(filename, 'r') as f:
            while True:
                piece = f.read(1024)
                if not piece:
                    break
                try:
                    await ctx.send('```\n' + piece.replace(SERVE, '<removed>') +
                                   '\n```')
                    # Throttle without blocking the event loop — the original
                    # time.sleep(3) stalled the whole bot for 3 s per chunk.
                    await asyncio.sleep(3)
                except Exception:
                    # The original printed the undefined name `result`,
                    # which raised NameError inside the handler.
                    print(piece)
        os.remove(filename)
    else:
        await ctx.send("I can't find %s" % filename)
Esempio n. 10
0
 def check_messaging_nodes(self):
     """Read the self-channel messaging config and report master/brokers.

     Also mirrors the config's 'listening' flag into self.running and
     announces the shutdown when the backend has cleared it.
     """
     fn = 'PoolData/Config/Channels/Self/messaging.json'
     if not os.path.isfile(fn):
         return
     with open('PoolData/Config/Channels/Self/messaging.json',
               'r') as f:
         messaging = json.load(f)
         if 'master' in messaging.keys():
             print('[*] Master node is: %s' % messaging['master'])
         if 'brokers' in messaging.keys():
             print('[*] Brokers are:\n%s' %
                   utils.arr2str('-'.join(messaging['brokers'])))
         # check if backend was killed
         if 'listening' in messaging.keys():
             self.running = messaging['listening']
             if not self.running:
                 print('[!!] Triggering Shutdown from Backend')
Esempio n. 11
0
def main():
    """CLI entry point: optional --setup bootstrap, then remote command exec.

    ``--setup`` initializes local folders and peers; otherwise the known
    nodes are probed.  ``--cmd <host> <cmd...>`` runs the command on the
    named node over ssh and prints the output.
    """
    nodes = {}  # default so the --cmd branch can't hit an unbound name
    if '--setup' in sys.argv:
        utils.initialize_folders()
        utils.add_local_peers_pv2()
    else:
        nodes = test_connections(False)

    if '--cmd' in sys.argv and len(sys.argv) > 2:
        # Fixed: argv lives in `sys`, not `utils`.
        hname = sys.argv[2]
        cmd = utils.arr2chr(sys.argv[3:])
        if hname in nodes.keys():
            i = nodes[hname]['ip']
            h = nodes[hname]['hname']
            p = nodes[hname]['pword']
            result = utils.arr2str(utils.ssh_exec(cmd, i, h, p, False))
            # Fixed: Python 2 `print result` statement is a SyntaxError
            # under Python 3.
            print(result)
    def test(self, loader: DataLoader, logdir: Path):
        """Evaluate on *loader*; save per-sample results and measures.

        Decollates item 0 of each batch, writes its dirspec result file and
        summary measure, and reports the dataset-wide average at the end.

        :param loader: DataLoader over the test set.
        :param logdir: result directory; its name prefix (before ``_``)
            is used as the summary group.
        """
        group = logdir.name.split('_')[0]

        self.writer = CustomWriter(str(logdir), group=group)

        avg_measure = None
        self.model.eval()

        pbar = tqdm(loader, desc=group, dynamic_ncols=True)
        for i_iter, data in enumerate(pbar):
            # get data
            x, y = self.preprocess(data)  # B, C, F, T
            T_ys = data['T_ys']

            # forward
            output = self.model(x)  # [..., :y.shape[-1]]

            # write summary (only item 0 of each batch is evaluated)
            one_sample = DirSpecDataset.decollate_padded(data, 0)  # F, T, C

            out_one = self.postprocess(output, T_ys, 0, loader.dataset)

            DirSpecDataset.save_dirspec(
                logdir / hp.form_result.format(i_iter),
                **one_sample, **out_one
            )

            measure = self.writer.write_one(i_iter, **out_one, **one_sample)
            if avg_measure is None:
                avg_measure = AverageMeter(init_value=measure, init_count=len(T_ys))
            else:
                avg_measure.update(measure)
            # print
            # str_measure = arr2str(measure).replace('\n', '; ')
            # pbar.write(str_measure)

        self.model.train()

        # NOTE(review): raises AttributeError if the loader was empty
        # (avg_measure stays None) — confirm loaders always yield a batch.
        avg_measure = avg_measure.get_average()

        self.writer.add_text(f'{group}/Average Measure/Proposed', str(avg_measure[0]))
        self.writer.add_text(f'{group}/Average Measure/Reverberant', str(avg_measure[1]))
        self.writer.close()  # Explicitly close

        print()
        str_avg_measure = arr2str(avg_measure).replace('\n', '; ')
        print(f'Average: {str_avg_measure}')
Esempio n. 13
0
async def scan_host(ctx, ip):
    """Run an nmap service scan against *ip* and post the output."""
    # SECURITY: `ip` is interpolated into a shell command unescaped; a
    # crafted argument could inject extra shell syntax — validate upstream.
    await ctx.send('<a:radar:794818374420529162> *Scanning* **%s**' % ip)
    scan_cmd = 'nmap -sV %s' % ip
    scan_output = utils.cmd(scan_cmd, False)
    await ctx.send('```' + (utils.arr2str(scan_output)) + '```')
Esempio n. 14
0
 def list_commands(self, csock, caddr, api_req):
     """Send the names of all registered API actions back over *csock*."""
     reply = 'Commands:\n%s' % utils.arr2str(self.actions.keys())
     csock.send(reply)
     return csock
    def train(self,
              loader_train: DataLoader,
              loader_valid: DataLoader,
              logdir: Path,
              first_epoch=0):
        """Train the model on time-truncated segments with periodic validation.

        Each batch is split along the time axis into ``hp.l_target``-frame
        segments and one optimizer step is taken per segment.  After every
        epoch the train loss is logged, validation runs, and model/optimizer
        state is checkpointed every ``hp.period_save_state`` epochs.

        :param loader_train: DataLoader over the training set.
        :param loader_valid: DataLoader over the validation set.
        :param logdir: directory for summaries and checkpoints.
        :param first_epoch: epoch to resume from (0 for a fresh run).
        """

        n_train_data = len(loader_train.dataset)
        # Learning Rates Scheduler
        scheduler = optim.lr_scheduler.CosineAnnealingWarmRestarts(
            self.optimizer, last_epoch=first_epoch - 1, **hp.scheduler)

        self.writer = CustomWriter(str(logdir),
                                   group='valid',
                                   purge_step=first_epoch)

        # Start Training
        for epoch in range(first_epoch, hp.n_epochs):
            # Fixed: the accumulator was created once outside the epoch loop
            # and never reset, so each epoch's logged train loss carried the
            # (already-averaged) residue of every previous epoch.
            avg_loss = torch.zeros(len(self.criterions),
                                   device=self.out_device)

            print()
            pbar = tqdm(loader_train,
                        desc=f'epoch {epoch:3d}',
                        postfix='[]',
                        dynamic_ncols=True)

            for i_iter, data in enumerate(pbar):
                # get data
                x, y = self._pre(data, loader_train.dataset)  # B, C, T
                T_ys = data['T_ys']

                i_first = 0
                loss_t = None
                # truncated training: step over the time axis in chunks
                for t in range(0, y.shape[-1], hp.l_target):
                    self.optimizer.zero_grad()
                    # drop items that end before time t
                    # (assumes T_ys is sorted ascending — TODO confirm)
                    while i_first < y.shape[0] and t >= T_ys[i_first]:
                        i_first += 1
                    seg_y = y[i_first:, :, t:t + hp.l_target]
                    if seg_y.shape[-1] < 5:
                        break
                    seg_x = x[i_first:, :, t:t + hp.l_input]
                    seg_T_ys = np.clip(T_ys[i_first:] - t,
                                       a_min=None,
                                       a_max=hp.l_target)

                    # forward
                    output = self.model(seg_x)[..., :
                                               seg_y.shape[-1]]  # B, C, T

                    loss_t = self._calc_loss(seg_y, output, seg_T_ys)
                    loss_t_sum = loss_t.sum()

                    # backward
                    loss_t_sum.backward()
                    self.optimizer.step()

                    # accumulate for the epoch average
                    avg_loss += loss_t.detach_()
                    del loss_t_sum, seg_x, seg_y, output

                # fractional-epoch step for cosine warm restarts
                scheduler.step(epoch + i_iter * hp.batch_size / n_train_data)

                # NOTE(review): loss_t stays None if the very first segment
                # is shorter than 5 frames — would raise AttributeError here.
                loss_t_last_np = loss_t.cpu().numpy() / len(T_ys)
                pbar.set_postfix_str(arr2str(loss_t_last_np, ndigits=1))

            avg_loss /= n_train_data
            tag = 'loss/train'
            self.writer.add_scalar(tag, avg_loss.sum().item(), epoch)
            if len(self.criterions) > 1:
                for i, (n, ll) in enumerate(zip(hp.criterion_names, avg_loss)):
                    self.writer.add_scalar(f'{tag}/{i + 1}_{n}', ll.item(),
                                           epoch)

            # Validation
            self.validate(loader_valid, logdir, epoch)

            # save loss & model (unwrap DataParallel for a portable state dict)
            if epoch % hp.period_save_state == hp.period_save_state - 1:
                torch.save((
                    self.model.module.state_dict() if isinstance(
                        self.model, nn.DataParallel) else
                    self.model.state_dict(),
                    self.optimizer.state_dict(),
                ), logdir / f'{hp.model_name}_{epoch}.pt')
        self.writer.close()
    def test(self, loader: DataLoader, logdir: Path):
        """Evaluate on *loader*; optionally dump every block's output.

        When ``hp.n_save_block_outs`` is non-zero, forward hooks save each
        sub-module's output as figures and .mat files for the first few
        iterations, and evaluation stops early after that many batches.

        :param loader: DataLoader over the test set.
        :param logdir: result directory; its name prefix (before ``_``)
            is used as the summary group.
        """
        def save_forward(module: nn.Module, in_: Tensor, out: Tensor):
            """ save forward propagation data

            Forward hook: dumps the module's output as per-channel figures
            and a .mat file named after the iteration and module.
            """
            module_name = str(module).split('(')[0]
            dict_to_save = dict()
            # dict_to_save['in'] = in_.detach().cpu().numpy().squeeze()
            dict_to_save['out'] = out.detach().cpu().numpy().squeeze()

            i_module = module_counts[module_name]
            for i, o in enumerate(dict_to_save['out']):
                save_forward.writer.add_figure(
                    f'{group}/blockout_{i_iter}/{module_name}{i_module}',
                    draw_spectrogram(o, to_db=False),
                    i,
                )
            scio.savemat(
                str(logdir / f'blockout_{i_iter}_{module_name}{i_module}.mat'),
                dict_to_save,
            )
            module_counts[module_name] += 1

        group = logdir.name.split('_')[0]

        self.writer = CustomWriter(str(logdir), group=group)

        avg_measure = None
        self.model.eval()

        # register hook to save output of each block
        module_counts = None
        if hp.n_save_block_outs:
            module_counts = defaultdict(int)
            save_forward.writer = self.writer
            # unwrap DataParallel so hooks attach to the real sub-modules
            if isinstance(self.model, nn.DataParallel):
                module = self.model.module
            else:
                module = self.model
            for sub in module.children():
                if isinstance(sub, nn.ModuleList):
                    for m in sub:
                        m.register_forward_hook(save_forward)
                elif isinstance(sub, nn.ModuleDict):
                    for m in sub.values():
                        m.register_forward_hook(save_forward)
                else:
                    sub.register_forward_hook(save_forward)

        pbar = tqdm(loader, desc=group, dynamic_ncols=True)
        for i_iter, data in enumerate(pbar):
            # get data
            x, y = self.preprocess(data)  # B, C, F, T
            T_ys = data['T_ys']

            # forward (reset per-iteration hook counters first)
            if module_counts is not None:
                module_counts = defaultdict(int)

            # stop once the requested number of block dumps is reached
            if 0 < hp.n_save_block_outs == i_iter:
                break
            output = self.model(x)  # [..., :y.shape[-1]]

            # write summary (only item 0 of each batch is evaluated)
            one_sample = DirSpecDataset.decollate_padded(data, 0)  # F, T, C
            out_one = self.postprocess(output, T_ys, 0, loader.dataset)

            # DirSpecDataset.save_dirspec(
            #     logdir / hp.form_result.format(i_iter),
            #     **one_sample, **out_one
            # )

            measure = self.writer.write_one(
                i_iter,
                eval_with_y_ph=hp.eval_with_y_ph,
                **out_one,
                **one_sample,
            )
            if avg_measure is None:
                avg_measure = AverageMeter(init_value=measure,
                                           init_count=len(T_ys))
            else:
                avg_measure.update(measure)

        self.model.train()

        # NOTE(review): avg_measure is None if the loader was empty —
        # confirm loaders always yield at least one batch.
        avg_measure = avg_measure.get_average()

        self.writer.add_text('Average Measure/Proposed', str(avg_measure[0]))
        self.writer.add_text('Average Measure/Reverberant',
                             str(avg_measure[1]))
        self.writer.close()  # Explicitly close

        print()
        str_avg_measure = arr2str(avg_measure).replace('\n', '; ')
        print(f'Average: {str_avg_measure}')
Esempio n. 17
0
async def pull_pcap(ctx):
    """Fetch honey.pcap from the remote home directory and echo the output."""
    fetch_cmd = f"sftp {HOSTN}@{SERVE}:/home/{HOSTN}/ <<< $'get honey.pcap'"
    await ctx.send("'''%s'''" % utils.arr2str(utils.cmd(fetch_cmd, False)))
Esempio n. 18
0
async def start_tcpdump(ctx):
    """Launch the remote capture script (listen.sh) in the background."""
    # Earlier approach, kept for reference: resolve the default interface
    # and run tcpdump directly on inbound port-8080 traffic.
    # c = 'iface=$(ip route get 1.1.1.1 | awk {print $5; exit});'
    # c+= f'tcpdump -ne -i $iface -Q in host {SERVE} and port 8080 -w honey.pcap'
    launch_cmd = f"ssh {HOSTN}@{SERVE} bash listen.sh &"
    await ctx.send("'''%s'''" % utils.arr2str(utils.cmd(launch_cmd, False)))
Esempio n. 19
0
 def query_peerlist(self, csock, caddr, api_req):
     """Reply on *csock* with the node's current peer list."""
     reply = 'Peer List:\n' + utils.arr2str(self.peers)
     csock.send(reply)
     return csock
Esempio n. 20
0
async def kill_tcpdump(ctx):
    """Force-kill the remote tcpdump process and report the command output."""
    kill_cmd = f"ssh {HOSTN}@{SERVE} kill -9 $(pidof tcpdump)"
    await ctx.send("'''%s'''" % utils.arr2str(utils.cmd(kill_cmd, False)))
Esempio n. 21
0
    def test(self, loader: DataLoader, logdir: Path):
        """Evaluate on *loader*, saving every sample's result and measure,
        and optionally each model block's output for the first iterations.

        :param loader: DataLoader over the test set.
        :param logdir: result directory; its name prefix (before ``_``)
            is used as the summary group.
        """
        def save_forward(module: nn.Module, in_: Tensor, out: Tensor):
            """Forward hook: dump a block's output as figures and a .mat file."""
            module_name = str(module).split('(')[0]
            dict_to_save = dict()
            # dict_to_save['in'] = in_.detach().cpu().numpy().squeeze()
            dict_to_save['out'] = out.detach().cpu().numpy().squeeze()

            i_module = module_counts[module_name]
            for i, o in enumerate(dict_to_save['out']):
                save_forward.writer.add_figure(
                    f'{group}/blockout_{i_iter}/{module_name}{i_module}',
                    draw_spectrogram(o, to_db=False),
                    i,
                )
            scio.savemat(
                str(logdir / f'blockout_{i_iter}_{module_name}{i_module}.mat'),
                dict_to_save,
            )
            module_counts[module_name] += 1

        group = logdir.name.split('_')[0]

        self.writer = CustomWriter(str(logdir), group=group)

        avg_measure = None
        self.model.eval()

        # register hook to save output of each block
        module_counts = None
        if hp.n_save_block_outs:
            module_counts = defaultdict(int)
            save_forward.writer = self.writer
            # Fixed: the attribute is `self.model` (`self.module` does not
            # exist and raised AttributeError); unwrap nn.DataParallel the
            # same way the sibling test() variants do.
            if isinstance(self.model, nn.DataParallel):
                module = self.model.module
            else:
                module = self.model
            for sub in module.children():
                if isinstance(sub, nn.ModuleList):
                    for m in sub:
                        m.register_forward_hook(save_forward)
                elif isinstance(sub, nn.ModuleDict):
                    for m in sub.values():
                        m.register_forward_hook(save_forward)
                else:
                    sub.register_forward_hook(save_forward)

        pbar = tqdm(loader, desc=group, dynamic_ncols=True)
        cnt_sample = 0
        for i_iter, data in enumerate(pbar):
            # get data
            x, mag, max_length, y = self.preprocess(data)  # B, C, F, T
            T_ys = data['T_ys']

            # forward (reset per-iteration hook counters first)
            if module_counts is not None:
                module_counts = defaultdict(int)

            # stop once the requested number of block dumps is reached
            if 0 < hp.n_save_block_outs == i_iter:
                break
            _, output, residual = self.model(x,
                                             mag,
                                             max_length,
                                             repeat=hp.repeat_test)

            # write summary for every item in the batch
            for i_b in range(len(T_ys)):
                i_sample = cnt_sample + i_b
                one_sample = ComplexSpecDataset.decollate_padded(
                    data, i_b)  # F, T, C

                out_one = self.postprocess(output, residual, T_ys, i_b,
                                           loader.dataset)

                ComplexSpecDataset.save_dirspec(
                    logdir / hp.form_result.format(i_sample), **one_sample,
                    **out_one)

                measure = self.writer.write_one(i_sample,
                                                **out_one,
                                                **one_sample,
                                                suffix=f'_{hp.repeat_test}')
                if avg_measure is None:
                    avg_measure = AverageMeter(init_value=measure)
                else:
                    avg_measure.update(measure)
            cnt_sample += len(T_ys)

        self.model.train()

        avg_measure = avg_measure.get_average()

        self.writer.add_text(f'{group}/Average Measure/Proposed',
                             str(avg_measure[0]))
        self.writer.add_text(f'{group}/Average Measure/Reverberant',
                             str(avg_measure[1]))
        self.writer.close()  # Explicitly close

        print()
        str_avg_measure = arr2str(avg_measure).replace('\n', '; ')
        print(f'Average: {str_avg_measure}')
Esempio n. 22
0
 def show_methods(self, cs, ca, req):
     """Send a '-'-joined list of action names to *cs*, then close it."""
     cs.send(utils.arr2str('-'.join(self.actions.keys())))
     cs.close()