def slack_oauth_login(request):
    """Start the Slack OAuth flow.

    Stores a random ``state`` token in the session (checked later by the
    callback to prevent CSRF) and redirects the user to Slack's authorize
    endpoint with the full scope list this app needs.
    """
    state = random_string(10)
    request.session['state'] = state
    url = ''.join([
        'https://slack.com/oauth/authorize?client_id=',
        settings.SLACK_CLIENT_ID,
        '&state=', state,
        '&scope=identify%20channels:history%20channels:read%20users:read%20usergroups:read',
        '&redirect_uri=', settings.DOMAIN, '/slack-oauth/callback',
    ])
    return HttpResponseRedirect(url)
def slack_oauth_login(request):
    """Start the Slack OAuth flow (legacy ``read`` scope variant).

    Stores a random ``state`` token in the session for CSRF verification
    and redirects the user to Slack's authorize endpoint.
    """
    state = random_string(10)
    request.session['state'] = state
    url = ''.join([
        'https://slack.com/oauth/authorize?client_id=',
        settings.SLACK_CLIENT_ID,
        '&state=', state,
        '&scope=read',
        '&redirect_uri=', settings.DOMAIN, '/slack-oauth/callback',
    ])
    return HttpResponseRedirect(url)
class User(Model):
    """Peewee user model with salted SHA-256 password storage.

    Passwords are never stored; only ``sha256("<password> - <salt>")``
    hex digests are, with a fresh 8-char salt per user.
    """
    username = models.CharField(max_length=20, default='')
    password_hash = models.CharField(max_length=64, default='')
    # Callable default so every new row gets its own salt.
    password_salt = models.CharField(max_length=8, default=lambda: random_string(8))
    nickname = models.CharField(max_length=20, default='')
    last_login_at = models.DateTimeField(null=True, default=None)
    last_login_ip = models.CharField(max_length=15, default='')
    created_at = models.DateTimeField(default=datetime.datetime.now)

    class Meta:
        db_table = 'user'

    @classmethod
    def authenticate(cls, username, password):
        """Return the matching user, or ``None`` when the username is
        unknown or the password does not verify."""
        try:
            user = cls.get(cls.username == username)
            if user.encrypt_password(password) != user.password_hash:
                user = None
        except cls.DoesNotExist:
            user = None
        return user

    @classmethod
    def create_user(cls, username, password):
        """Create, hash-and-salt, save, and return a new user.

        Fix: the original returned ``None``; returning the instance is
        backward-compatible and lets callers use the new row directly.
        """
        user = cls.create(username=username)
        user.set_password(password)
        user.save()
        return user

    def encrypt_password(self, password, salt=None):
        """Hex SHA-256 digest of ``"<password> - <salt>"``.

        Fix: ``hashlib.sha256`` requires bytes on Python 3, so the input
        is UTF-8 encoded first (digests for ASCII inputs are unchanged).
        """
        if not salt:
            salt = self.password_salt or self.regenerate_salt()
        material = '{password} - {salt}'.format(password=password, salt=salt)
        return hashlib.sha256(material.encode('utf-8')).hexdigest()

    def regenerate_salt(self):
        """Replace the instance's salt with a fresh one and return it."""
        self.password_salt = random_string(8)
        return self.password_salt

    def set_password(self, password):
        """Store the salted hash of ``password`` on the instance."""
        self.password_hash = self.encrypt_password(password)

    def verify_password(self, password):
        """True when ``password`` hashes to the stored ``password_hash``."""
        return self.encrypt_password(password) == self.password_hash
def main():
    """Sweep (run, kappa, batch size, optimizer, lr) for logistic
    regression on synthetic data and pickle each configuration's
    errors/losses under a random file name.

    Fixes: removed the dead local ``name = opt.split('.')[-1]`` (the dump
    path uses ``random_string(5)``, not ``name``) and corrected the
    misleading inline comments (4 optimizers, batch sizes 300/150/50).
    """
    kappas = [1, 10**2, 10**4, 10**8]
    nrepeat = 5
    d = 50
    n = 300
    batchsizes = [int(n), int(n / 2), int(n / 6)]  # 300, 150, 50
    # NOTE: these names are passed to eval() below; the list is
    # hard-coded, so eval is safe here — never feed it user input.
    optimizations = [
        'Avrng', 'MomentumCurvature', 'torch.optim.Adam', 'torch.optim.SGD'
    ]
    lrs = [10, 1, 0.1, 0.01, 0.001]
    for i in range(nrepeat):  # 5 repeats
        for k in kappas:  # 4 condition numbers
            A, y, x_star = generate_data(k, d, n)
            for bs in batchsizes:  # 3 batch sizes
                data = TensorDataset(
                    torch.from_numpy(A).float(),
                    torch.from_numpy(y).float())
                loader = DataLoader(data, batch_size=bs)
                full_loader = DataLoader(data, batch_size=n)
                for opt in optimizations:  # 4 optimizers
                    for lr in lrs:  # 5 learning rates
                        res = {
                            'run': i,
                            'kappa': k,
                            'batch_size': bs,
                            'n': n,
                            'd': d,
                            'opt': opt,
                            'lr': lr
                        }
                        print(res)
                        net = nn.Linear(d, 2, bias=True)
                        optimizer = eval(opt)(net.parameters(), lr=lr)
                        errors, losses = train(net, loader, full_loader,
                                               optimizer, x_star=x_star)
                        res['errors'] = errors
                        res['losses'] = losses
                        joblib.dump(
                            res, "{}/{}.pkl".format(
                                'synthetic_data_results/LogisticRegression',
                                random_string(5)))
class User(Base):
    """SQLAlchemy user model with salted SHA-256 password storage."""
    __tablename__ = "bloguser"

    id = Column(Integer, primary_key=True, autoincrement=True)
    username = Column(String(80))
    password_hash = Column(String(64))
    # Callable default so every new row gets its own salt.
    password_salt = Column(String(8), default=lambda: random_string(8))

    @classmethod
    def has_user(cls, connection, username):
        """True when a row with this ``username`` exists."""
        return bool(
            connection.query(User).filter_by(username=username).scalar())

    @classmethod
    def check_password(cls, connection, username, password):
        """Return the user when the credentials match, else ``False``.

        Fixes: the original evaluated ``.scalar()`` three separate times
        (three round trips) and raised AttributeError on an unknown
        username; now the row is fetched once and a missing user fails
        cleanly with ``False``.
        """
        user = connection.query(User).filter_by(username=username).scalar()
        if user is None:
            return False
        if user.encrypt_password(password) != user.password_hash:
            return False
        return user

    def encrypt_password(self, password, salt=None):
        """Hex SHA-256 digest of ``"<password> - <salt>"``.

        Fix: ``hashlib.sha256`` requires bytes on Python 3, so the input
        is UTF-8 encoded first (digests for ASCII inputs are unchanged).
        """
        if not salt:
            salt = self.password_salt or self.regenerate_salt()
        material = '%s - %s' % (password, salt)
        return hashlib.sha256(material.encode('utf-8')).hexdigest()

    def regenerate_salt(self):
        """Replace the instance's salt with a fresh one and return it."""
        self.password_salt = random_string(8)
        return self.password_salt

    def set_password(self, password):
        """Store the salted hash of ``password`` on the instance."""
        self.password_hash = self.encrypt_password(password)

    def verify_password(self, password):
        """True when ``password`` hashes to the stored ``password_hash``."""
        return self.encrypt_password(password) == self.password_hash
def regenerate_salt(self):
    """Draw a fresh 8-character salt, store it on the instance, and return it."""
    fresh_salt = random_string(8)
    self.password_salt = fresh_salt
    return fresh_salt
def generate_csrf_token():
    """Return the current session's CSRF token, minting and caching one on first use."""
    store = sessions.get_store()
    session = store.get_session()
    if '_csrf_token' not in session:
        # First request in this session: create the token once and reuse it.
        session['_csrf_token'] = utils.random_string()
    return session['_csrf_token']
def replace_variables(self, code, payload, key):
    """Fill a shellcode-loader source template.

    Substitutes the encrypted payload and key, then replaces every
    identifier placeholder with a fresh random name (each call site gets
    its own independent random string, as in the original chain of
    ``replace`` calls).

    Fix: removed the dead local ``namespace = utils.random_string()``,
    which was assigned and never used.
    """
    code = code.replace('$Payload$', payload)
    code = code.replace('$Key$', key)
    placeholders = (
        # C template identifiers
        '$VirtualProtectDec$', '$VirtualAllocDec$', '$RtlMoveMemoryDec$',
        '$VirtualProtectVar$', '$VirtualAllocVar$', '$RtlMoveMemoryVar$',
        '$EncryptedShellcodeVar$', '$ShellcodeVar$',
        # C# template identifiers
        '$Namespace$', '$EncryptedBase64Var$', '$DecryptedShellcode$',
        '$DecryptFunc$', '$RunShellcodeFunc$', '$PtrVar$', '$DelegateDec$',
    )
    for placeholder in placeholders:
        code = code.replace(placeholder, utils.random_string())
    return code
def func_http_get(url):
    """Build a PowerShell snippet that GETs *url* via Invoke-WebRequest.

    Returns ``(snippet, var_name)`` where ``$var_name`` holds the response
    in the generated script.
    """
    var_name = utils.random_string(10)
    snippet = "${0}=Invoke-WebRequest -Uri {1};".format(var_name, url)
    return snippet, var_name
def func_http_post(url, body):
    """Build a PowerShell snippet that POSTs *body* to *url* (form-encoded).

    Returns ``(snippet, var_name)`` where ``$var_name`` holds the response
    in the generated script.
    """
    var_name = utils.random_string(10)
    template = ("$%s=Invoke-WebRequest -Method Post "
                "-ContentType 'application/x-www-form-urlencoded' "
                "-Uri %s -Body %s;")
    snippet = template % (var_name, url, body)
    return snippet, var_name
def func_b64decode(string):
    """Build a PowerShell snippet that base64-decodes *string* to ASCII text.

    Returns ``(snippet, var_name)`` where ``$var_name`` holds the decoded
    text in the generated script.
    """
    var_name = utils.random_string(10)
    snippet = ("$%s=[System.Text.Encoding]::ASCII.GetString("
               "[System.Convert]::FromBase64String(%s));") % (var_name, string)
    return snippet, var_name
def func_b64encode(string):
    """Build a PowerShell snippet that UTF-8-encodes *string* and base64s it.

    Returns ``(snippet, var_name)`` where ``$var_name`` holds the base64
    result in the generated script.

    Fix: the scratch variable was the hard-coded ``$tmp``, unlike every
    other generated identifier in this module; two such snippets
    concatenated into one script would clobber each other's ``$tmp``.
    It is now randomized like the result variable.
    """
    tmp_var = utils.random_string(10)
    var_name = utils.random_string(10)
    output = "$%s=[System.Text.Encoding]::UTF8.GetBytes(%s);" % (tmp_var, string)
    output += "$%s=[System.Convert]::ToBase64String($%s);" % (var_name, tmp_var)
    return output, var_name
# Training loop fragment: builds the optimizer named by args.o (eval'd from a
# CLI flag — presumably a trusted, hard-coded choice; TODO confirm), steps it
# num_epochs times with a closure over curve `s` and parameter `p`, injecting
# sign-skewed gradient noise (delta is -1 with prob 3/4, +3 with prob 1/4),
# and finally dumps train_losses and the OptRecorder tracker under a
# randomly-suffixed name in args.s.
# NOTE(review): this line was collapsed from multi-line source; the original
# indentation of opt_recorder.record()/train_losses.append() relative to the
# `if epoch % ...` logging guard cannot be recovered from here — confirm
# against version control before reformatting.
optimizer = eval(args.o)([p], lr=learning_rate) opt_recorder = OptRecorder(optimizer) for epoch in range(num_epochs): # sample input delta = np.random.choice([-1, -1, -1, 3]) def closure(): optimizer.zero_grad() l = s.forward(p) # p is parameter, s is the curve l.backward() p.grad.data.add_(grad_noise * delta) return l l = optimizer.step(closure) if epoch % max(int(num_epochs / 5000), 1) == 0: print('Epoch [{}/{}], Loss: {:.4f}'.format(epoch + 1, num_epochs, l.item())) opt_recorder.record() train_losses.append(l.item()) os.system('mkdir -p {}'.format(args.s)) name = "{}/{}-{}^{:.2f}^{}".format(args.s, args.o.split('.')[-1], args.lr, l.item(), random_string(5)) joblib.dump(train_losses, name + ".train_losses") joblib.dump(opt_recorder.tracker, name + ".opt_track")
# Script setup fragment (starts mid-way through a parser.add_argument call
# whose opening is outside this view): finishes argparse setup (-lr flag),
# parses args, initializes result lists, pins torch to one thread, selects
# CUDA when available, creates the save directory args.s, and draws a random
# run_id. Then defines eval_loader(model, loader), which runs the model over
# a loader in eval mode under torch.no_grad(), accumulating loss (criterion
# plus an l2loss.loss() regularization term — both defined elsewhere) into
# AverageMeter trackers.
# NOTE(review): this line was collapsed from multi-line source; the body of
# eval_loader past loss_meter.update(...) appears truncated here — confirm
# against version control before reformatting.
help='save directory', default='train_loss') parser.add_argument('-lr', type=float, help='learning rate', default=1e-3) args = parser.parse_args() print(args) train_losses = [] train_errors = [] val_errors = [] test_errors = [] torch.set_num_threads(1) # Device configuration device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') os.system('mkdir -p {}'.format(args.s)) run_id = random_string() def eval_loader(model, loader): model.eval() loss_meter = AverageMeter() error_meter = AverageMeter() with torch.no_grad(): for i, (images, labels) in enumerate(tqdm.tqdm(loader)): # Move tensors to the configured device images = images.to(device) labels = labels.to(device) bs = labels.size(0) outputs = model(images) loss = criterion(outputs, labels) + l2loss.loss() loss_meter.update(loss.item(), bs)
def main():
    """Sweep (run, kappa, batch size, optimizer, lr) on synthetic
    least-squares data and pickle each configuration's metadata, losses,
    and optimizer trace under a shared random file stem."""
    kappas = [1, 10**2, 10**4]
    nrepeat = 3
    d = 50
    n = 300
    batchsizes = [int(n), int(n / 2), int(n / 6)]  # 300, 150, 50
    optimizations = [
        'Diff',
        'torch.optim.Adam',
        # test converting SGD
        # 'AdaBound',
        # 'CrossBound',
        # 'CrossAdaBound',
        # 'Swats',
        # test dominance
        # 'AdamC1(1,1)',
        # 'AdamC2(1,1)',
        # 'AlphaDiff(1,1)',  # same as MC
        # 'AlphaAdam(1,1)',  # same as Adam
        # 'AlphaSGD(1,1)',
        # 'AlphaDiff(1,0)',  # no var(dg)
        # 'AlphaAdam(1,0)',  # no var(g)
        # 'torch.optim.SGD',  # same as AlphaSGD(1,0) with 0 momentum
        # 'AlphaDiff(0,1)',  # only var(dg)
        # 'AlphaAdam(0,1)',  # only var(g), same as AlphaSGD(0,1)
    ]
    lrs = [100, 10, 1, 0.1, 0.01, 0.001, 0.0001]
    for run in range(nrepeat):
        for kappa in kappas:
            A, y, x_star = get_data(kappa, d, n, run)
            for bs in batchsizes:
                data = TensorDataset(
                    torch.from_numpy(A).float(),
                    torch.from_numpy(y).float())
                loader = DataLoader(data, batch_size=bs)
                full_loader = DataLoader(data, batch_size=n)
                for opt in optimizations:
                    for lr in lrs:
                        res = {
                            'run': run,
                            'kappa': kappa,
                            'batch_size': bs,
                            'n': n,
                            'd': d,
                            'opt': opt.split('.')[-1],
                            'lr': lr,
                        }
                        print(res)
                        net = nn.Linear(d, 1, bias=True)
                        # Entries shaped 'Name(a,b)' carry alpha
                        # coefficients in their suffix; eval the tuple
                        # and the class name separately.
                        if '(' in opt:
                            paren = opt.find('(')
                            alphas = eval(opt[paren:])
                            optimizer = eval(opt[:paren])(
                                net.parameters(), lr=lr, alphas=alphas)
                        else:
                            optimizer = eval(opt)(net.parameters(), lr=lr)
                        losses, opt_tracker = train(net, loader,
                                                    full_loader, optimizer)
                        name = "{}/{}".format('synthetic_data_results/LSR',
                                              random_string(5))
                        joblib.dump(res, "{}.ind".format(name))
                        joblib.dump(losses, "{}.loss".format(name))
                        joblib.dump(opt_tracker, "{}.track".format(name))
def get_key_hash(self):
    """Return the hex MD5 digest of a fresh 60-character random string."""
    random_material = utils.random_string(60).encode('utf-8')
    return hashlib.md5(random_material).hexdigest()