Example #1
async def run(client, message, args, prefix, db):
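    # Developer-only announcement command: DMs the message body to every
    # non-bot member of the guild and reports any failed deliveries.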
    if utils.is_dev(message.author):
        mesaj = message.content[len(prefix) + len("duyuru"):]
        basarisiz = []  # members whose DM could not be delivered

        for i, u in enumerate(message.guild.members):
            if not u.bot:
                try:
                    # skip specific users by ID
                    if u.id in (466306304063832065, 302515395552739339, 284697871331360778):
                        continue
                    dm = await u.create_dm()
                    await dm.send(mesaj.format(u.name))

                except Exception as e:
                    print(message.author.display_name + " : " +
                          message.content + " >>")
                    print(e)
                    basarisiz.append(u)

            print(f"Duyuru {i + 1}/{len(message.guild.members)}")

        if basarisiz:
            basarisiz_string = "\n".join(str(u) for u in basarisiz)
            await message.channel.send("Duyuruyu almayan kişiler:\n```" +
                                       basarisiz_string + "```")

            for b in basarisiz:
                print(str(b), b.name, b.id)
    else:
        raise Exception(
            "Bu komutu kullanmak için yeterli yetkiye sahip değilsin.")
Example #2
def get_quote(provider, data):
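    # Requests a translation quote from the Supertext API, switching to the
    # dev endpoint when is_dev() reports a development environment.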
    if provider == 'supertext':
        if is_dev():
            url = 'https://dev.supertext.ch/api/v1/translation/quote'
        else:
            url = 'https://www.supertext.ch/api/v1/translation/quote'
        headers = {'Content-type': 'application/json; charset=UTF-8', 'Accept': '*'}
        r = requests.post(url, data=json.dumps(data, ensure_ascii=False).encode('ascii', 'xmlcharrefreplace'),
                          headers=headers, verify=verify_ssl())
        return r.content

    else:
        raise NotImplementedError()
Example #3
def get_order(provider, data):
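    # Places a translation order with the Supertext API using HTTP basic
    # auth credentials; the endpoint again depends on is_dev().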
    if provider == 'supertext':
        user, api_key = get_creds('SUPERTEXT', ['USER', 'API_KEY'])
        if is_dev():
            url = 'https://dev.supertext.ch/api/v1/translation/order'
        else:
            url = 'https://www.supertext.ch/api/v1/translation/order'
        headers = {'Content-type': 'application/json; charset=UTF-8', 'Accept': '*'}
        r = requests.post(url, data=json.dumps(data, ensure_ascii=False).encode('ascii', 'xmlcharrefreplace'),
                          headers=headers, auth=(user, api_key), verify=verify_ssl())
        return r.content

    else:
        raise NotImplementedError()
Example #4
async def run(client, message, args, prefix, db):
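    # "durum" reports database status; "kur" is restricted to developers.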
    if args[0] == "durum":
        await message.channel.send(f"__Veritabanı Durumu__ 🧪\n"
                                   f"Heroku Redis - AmazonAWS EU-West\n\n"
                                   f"```Bağlantı:   {('Sağlıklı', 'Ölü')[int(db.redis.connection_pool is None)]}\n"
                                   f"Giriş:      {int(db.elapsed*1000)}ms\n"
                                   f"Ort. Query: {int(db.query_elapsed*1000)}ms```")

    elif args[0] == "kur":
        if is_dev(message.author):
            pass
        else:
            raise Exception(
                "Bu komutu kullanmak için yeterli yetkiye sahip değilsin.")
Example #5
    def get_collections_for_bbox(self, bbox=None):
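        # Returns the collections whose bounding boxes intersect the given
        # bbox; with no bbox, returns them all (only the first one when
        # is_dev() is true).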
        if self.header is None:
            self.read_header()

        if bbox is None:
            if is_dev():
                return self.header["collections"][:1]
            else:
                return self.header["collections"]

        filter_bbox = box(*[float(i) for i in bbox.split(",")])
        filtered_collections = []
        for collection in self.header["collections"]:
            collection_bbox = box(*collection["bbox"])
            if filter_bbox.intersects(collection_bbox):
                filtered_collections.append(collection)
        return filtered_collections
Example #6
async def run(client, message, args, prefix, db):
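    # Developer-only command: adjusts a user's stored value by +N or -N,
    # or sets it to an absolute value.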
    if is_dev(message.author):
        user = get_user(message.guild, args[0])
        if not user:
            raise Exception(f"'{args[0]}' bulunamadı")

        if args[1].startswith("+"):
            db[user] = db[user] + int(args[1][1:])

        elif args[1].startswith("-"):
            db[user] = db[user] - int(args[1][1:])

        else:
            db[user] = int(args[1])

    else:
        raise Exception("Bu komutu kullanabilmek için yeterli yetkiye sahip değilsiniz")
Example #7
    async def on_message(self, message):
        """
        don't touch this! keep it abstract
        """

        # we don't want interaction in any other channels
        if message.channel.id != self.secret_channel.id:
            return

        # quick content reference
        content = message.content

        # we also want to skip anything that doesn't start with the prefix
        if not content.startswith("!"):
            return

        # strip prefix
        content = content[1:]

        # get base command
        base_command = content.split(" ")

        # the function to call
        cmd_funct = None

        # parse base commands (not mapped)
        if content == 'commands':
            await self.commands(message)
        elif content == 'rules':
            await self.rules(message)
        elif content == 'devme':
            await self.devme(message)
        elif base_command[0] in commands_map:
            # user commands
            cmd_funct = self._get_command_function(commands_map, base_command)
        elif base_command[0] in dev_commands_map and utils.is_dev(message.author):
            # dev commands
            cmd_funct = self._get_command_function(dev_commands_map, base_command)
        elif base_command[0] in admin_commands_map and utils.is_admin(message.author):
            # admin commands
            cmd_funct = self._get_command_function(admin_commands_map, base_command)

        if cmd_funct is not None:
            await cmd_funct(message)
Example #8
def get_quote_view(request, pk):
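    # GET: fetches a quote for the translation request and renders it,
    # passing the is_dev() flag to the template.
    # POST: stores the selected option and advances to the order step.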
    t = TranslationRequest.objects.get(pk=pk)
    check_stage(t.status, 'selected_content')

    if request.method == 'POST':
        if request.POST.get('opt'):
            t.order_choice = request.POST.get('opt')  # TODO: possible security issue?
            t.status = 'selected_quote'
            t.save()
            return HttpResponseRedirect(reverse('admin:order', kwargs={'pk': pk}))

    else:
        data = prepare_data(t, t.from_lang, t.to_lang)
        quote = get_quote(t.provider, data=data)
        if log_to_file_enabled():
            log_to_file(data)
        if t.provider == 'supertext':
            res = json.loads(quote)
            return render_to_response(
                'aldryn_translator/quote.html', {'res': res, 'dev': is_dev()},
                context_instance=RequestContext(request))

        else:
            raise NotImplementedError()
Example #9
    def check(self, ctx):
        return is_dev(ctx)
Example #10
#!/usr/bin/env python
import os
import sys
from {{ project_name }} import utils

if __name__ == "__main__":
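    # Choose the dev or prod settings module before Django initializes.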
    if utils.is_dev():
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings.dev")
    else:
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings.prod")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
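None of these examples defines is_dev() itself; each project ships its own helper. For reference, a minimal sketch of such a helper, assuming it reads an environment flag (the variable name DEV and its accepted values are assumptions, not taken from any project above):

import os

def is_dev():
    # Assumed convention: treat the process as a development instance
    # when the DEV environment variable is set to a truthy value.
    return os.environ.get("DEV", "").lower() in ("1", "true", "yes")

Example #10 applies the same idea one level up, selecting the dev or prod Django settings module based on the flag.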
Example #11
def decode(model_file, target_file, gpu=-1, save_to=None):
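    # Restores a trained tagger/parser model from model_file and runs the
    # evaluator over target_file; when utils.is_dev() is true, the test
    # dataset is truncated to 16 samples.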
    context = utils.load_context(model_file)
    if context.seed is not None:
        utils.set_random_seed(context.seed, gpu)
        Log.i("random seed: {}".format(context.seed))
    framework_utils.set_debug(App.debug)

    loader = context.loader
    Log.i('load test dataset from {}'.format(target_file))
    test_dataset = loader.load(target_file,
                               train=False,
                               size=16 if utils.is_dev() else None)
    Log.i('#samples {}'.format(len(test_dataset)))

    Log.v('')
    Log.v("initialize ...")
    Log.v('--------------------------------')
    Log.i('# gpu: {}'.format(gpu))
    Log.i('# tagset size: {}'.format(len(loader.tag_map)))
    Log.i('# model layers: {}'.format(context.models))
    Log.i('# context: {}'.format(context))
    Log.v('--------------------------------')
    Log.v('')

    models.USE_ORTHONORMAL = False
    # Set up a neural network model
    layers = [
        models.Input(
            word_embeddings=loader.get_embeddings('word'),
            char_embeddings=loader.get_embeddings('char'),
            char_feature_size=50,
            dropout=0.5,
        ),
        models.Recurrent(n_layers=2,
                         in_size=loader.get_embeddings('word').shape[1] + 50,
                         out_size=400,
                         dropout=0.5),
        models.Tagger(in_size=400 * 2,
                      out_size=len(loader.tag_map),
                      units=100,
                      dropout=0.5) if context.models[2] is models.Tagger else
        models.GoldTagger(out_size=len(loader.tag_map)),
    ]
    if models.Parser in context.models:
        layers.extend([
            models.Connection(in_size=400 * 2,
                              out_size=800,
                              tagset_size=len(loader.tag_map),
                              tag_embed_size=50,
                              dropout=0.5),
            models.Parser(in_size=850,
                          n_deprels=len(loader.rel_map),
                          n_blstm_layers=1,
                          lstm_hidden_size=400,
                          parser_mlp_units=800,
                          dropout=0.50),
        ])
    model = models.MTL(*layers)
    chainer.serializers.load_npz(model_file, model)
    if gpu >= 0:
        framework_utils.set_model_to_device(model, device_id=gpu)
    # Setup an evaluator
    evaluator = models.Evaluator(loader, target_file, save_to)
    evaluator.add_target(model)

    # Start decoding
    framework_utils.chainer_train_off()
    evaluator.on_epoch_validate_begin({'epoch': 0})
    for batch_index, batch in enumerate(
            test_dataset.batch(context.batch_size, colwise=True,
                               shuffle=False)):
        xs, ts = batch[:-1], batch[-1]
        evaluator.on_batch_begin({'train': False, 'xs': xs, 'ts': ts})
        model(*xs)
        evaluator.on_batch_end({'train': False, 'xs': xs, 'ts': ts})
    evaluator.on_epoch_validate_end({'epoch': 0})
Example #12
def train(
        train_file,
        test_file=None,
        embed_file=None,
        embed_size=100,
        n_epoch=20,
        batch_size=32,
        lr=0.001,
        l2_lambda=0.0,
        grad_clip=5.0,
        tasks='tp',
        gpu=-1,
        save_to=None,
        seed=None):
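    # Trains the multi-task tagging/parsing model; when utils.is_dev() is
    # true, the datasets are truncated to 120 train / 16 test samples.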
    if seed is not None:
        utils.set_random_seed(seed, gpu)
        Log.i("random seed: {}".format(seed))
    framework_utils.set_debug(App.debug)

    # Select Task
    with_tagging_task = False
    with_parsing_task = False
    for char in tasks:
        if char == 't':
            with_tagging_task = True
        elif char == 'p':
            with_parsing_task = True
        else:
            raise ValueError("Invalid task specified: {}".format(char))
    if not any([with_tagging_task, with_parsing_task]):
        raise RuntimeError("No valid task specified")
    Log.i('Task: tagging={}, parsing={}'
          .format(with_tagging_task, with_parsing_task))

    # Transition System
    transition_system = transition.ArcStandard
    if with_parsing_task:
        Log.i('Transition System: {}'.format(transition_system))

    # Load files
    Log.i('initialize DataLoader with embed_file={} and embed_size={}'
          .format(embed_file, embed_size))
    loader = dataset.DataLoader(word_embed_file=embed_file,
                                word_embed_size=embed_size,
                                char_embed_size=10,
                                transition_system=transition_system)
    Log.i('load train dataset from {}'.format(train_file))
    train_dataset = loader.load(train_file, train=True,
                                size=120 if utils.is_dev() else None)
    if test_file:
        Log.i('load test dataset from {}'.format(test_file))
        test_dataset = loader.load(test_file, train=False,
                                   size=16 if utils.is_dev() else None)
    else:
        test_dataset = None

    Log.v('')
    Log.v("initialize ...")
    Log.v('--------------------------------')
    Log.i('# Minibatch-size: {}'.format(batch_size))
    Log.i('# epoch: {}'.format(n_epoch))
    Log.i('# gpu: {}'.format(gpu))
    Log.i('# tagset size: {}'.format(len(loader.tag_map)))
    Log.v('--------------------------------')
    Log.v('')

    # Set up a neural network model
    layers = [
        models.Input(
            word_embeddings=loader.get_embeddings('word'),
            char_embeddings=loader.get_embeddings('char'),
            char_feature_size=50,
            dropout=0.5,
        ),
        models.Recurrent(
            n_layers=2,
            in_size=loader.get_embeddings('word').shape[1] + 50,
            out_size=400,
            dropout=0.5),
        models.Tagger(
            in_size=400 * 2,
            out_size=len(loader.tag_map),
            units=100,
            dropout=0.5) if with_tagging_task else
        models.GoldTagger(out_size=len(loader.tag_map)),
    ]
    if with_parsing_task:
        layers.extend([
            models.Connection(
                tagset_size=len(loader.tag_map),
                tag_embed_size=50,
                dropout=0.5),
            models.Parser(
                in_size=850,
                n_deprels=len(loader.rel_map),
                n_blstm_layers=2,
                lstm_hidden_size=400,
                parser_mlp_units=800,
                dropout=0.50,
                transition_system=transition_system),
        ])
    model = models.MTL(*layers)
    if gpu >= 0:
        framework_utils.set_model_to_device(model, device_id=gpu)

    # Setup an optimizer
    optimizer = chainer.optimizers.Adam(
        alpha=lr, beta1=0.9, beta2=0.999, eps=1e-08)
    optimizer.setup(model)
    if l2_lambda > 0.0:
        optimizer.add_hook(chainer.optimizer.WeightDecay(l2_lambda))
    else:
        l2_lambda = False
    if grad_clip > 0.0:
        optimizer.add_hook(chainer.optimizer.GradientClipping(grad_clip))
    else:
        grad_clip = False
    # optimizer.add_hook(
    #     framework_utils.optimizers.ExponentialDecayAnnealing(
    #         initial_lr=lr, decay_rate=0.75, decay_step=5000, lr_key='alpha'))
    Log.i('optimizer: Adam(alpha={}, beta1=0.9, '
          'beta2=0.999, eps=1e-08), grad_clip={}, '
          'regularization: WeightDecay(lambda={})'
          .format(lr, grad_clip, l2_lambda))

    # Setup a trainer
    trainer = Trainer(optimizer, model,
                      loss_func=model.compute_loss,
                      accuracy_func=model.compute_accuracy)
    trainer.configure(framework_utils.config)
    if test_dataset:
        evaluator = models.Evaluator(loader, test_file, save_to)
        evaluator.add_target(model)
        trainer.attach_callback(evaluator)

    if save_to is not None:
        accessid = Log.getLogger().accessid
        date = Log.getLogger().accesstime.strftime('%Y%m%d')
        trainer.attach_callback(
            framework_utils.callbacks.Saver(
                model,
                basename="{}-{}".format(date, accessid),
                directory=save_to,
                context=dict(App.context,
                             models=[type(layer) for layer in layers],
                             loader=loader)))

    # Start training
    trainer.fit(train_dataset, None,
                batch_size=batch_size,
                epochs=n_epoch,
                validation_data=test_dataset,
                verbose=App.verbose)