def load(instance, logger, dirname):
    modulelist = load_modules(dirname)
    logger.print_info("Loading {} modules".format(len(modulelist)))
    for i, module in enumerate(modulelist):
        try:
            modulelist[i] = module.Module(instance)
        except BaseException:
            # Skip modules whose Module class fails to instantiate.
            continue
    for module in modulelist:
        if hasattr(module, "post_init"):
            module.post_init(modulelist)
        for method in dir(module):
            if method[:3] == "on_":
                try:
                    instance.connection.add_global_handler(
                        method[3:], getattr(module, method))
                except Exception as e:
                    logger.print_error(
                        "Error {} occurred while hooking module {}.".format(
                            e, module.info["name"]))
    logger.print_info("Checking dependencies...")
    for module in modulelist:
        mods = [x.info["name"] for x in modulelist]
        for depend in module.info["depends"]:
            if depend not in mods:
                logger.print_error(
                    "{0} depends on {1} but {1} does not exist! Download {1} or remove {0}."
                    .format(module.info["name"], depend))
    logger.print_info("All dependencies are included.")
    def convert(self, annot):
        name = annot['filename']
        if self.verbose:
            print_info('Converting VIA annotations for {}'.format(name))
        if not (self.input_dir / name).exists():
            print_error('Original image {} not found'.format(name))
            return None

        size = Image.open(self.input_dir / name).size
        img = Image.new(self.mode, size, color=self.background_color)
        draw = ImageDraw.Draw(img)

        for region in annot['regions']:
            shape = region['shape_attributes']
            if shape['name'] == 'circle':
                cx, cy, r = shape['cx'], shape['cy'], shape['r']
                bbox = [(cx - r, cy - r), (cx + r, cy + r)]
                draw.ellipse(bbox, fill=self.color)
            elif shape['name'] == 'ellipse':
                cx, cy, rx, ry = shape['cx'], shape['cy'], shape['rx'], shape['ry']
                bbox = [(cx - rx, cy - ry), (cx + rx, cy + ry)]
                draw.ellipse(bbox, fill=self.color)
            elif shape['name'] == 'polygon':
                polygon = list(zip(shape['all_points_x'], shape['all_points_y']))
                draw.polygon(polygon, fill=self.color)
            else:
                raise NotImplementedError('shape "{}" not implemented'.format(shape['name']))

        return img
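
For context, convert() above rasterises one VIA annotation record into a PIL mask image. A minimal driver might look like the sketch below; the converter object, the VIA-export layout (a dict whose values carry 'filename' and 'regions', as in VIA's JSON annotation export), and the output naming are assumptions rather than code from the source.

# Sketch only: drive convert() over a whole VIA JSON export.
# "converter" is assumed to expose the attributes used above
# (input_dir, mode, background_color, color, verbose).
import json
from pathlib import Path

def convert_all(converter, via_json_path, output_dir):
    output_dir = Path(output_dir)
    output_dir.mkdir(parents=True, exist_ok=True)
    with open(via_json_path) as f:
        annotations = json.load(f)
    for annot in annotations.values():
        mask = converter.convert(annot)  # PIL.Image, or None if the source image is missing
        if mask is not None:
            mask.save(output_dir / (Path(annot['filename']).stem + '_mask.png'))
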
def get_resources_with_targets(manifest):
    # Collect (image resource URL, target canvas URI) pairs from a IIIF
    # Presentation manifest (sequences -> canvases -> images).
    try:
        canvases = manifest['sequences'][0]['canvases']
        images_list = [canvas['images'] for canvas in canvases]
        return [(image['resource']['@id'], image['on']) for images in images_list for image in images]
    except KeyError as e:
        print_error(e)
        return []

def get_json(url):
    # Fetch a URL and return the decoded JSON body, or None on any request error.
    try:
        response = requests.get(url)
        if response.ok:
            return response.json()
        else:
            response.raise_for_status()
    except requests.exceptions.RequestException as e:
        print_error(e)
        return None
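
Together these two helpers can pull the image list out of a remote IIIF manifest. A short usage sketch, with a placeholder manifest URL that is not taken from the source:

# Sketch only: the manifest URL is a placeholder.
manifest_url = "https://example.org/iiif/manifest.json"

manifest = get_json(manifest_url)
if manifest is not None:
    for image_url, canvas_uri in get_resources_with_targets(manifest):
        print("{} is painted onto {}".format(image_url, canvas_uri))
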
def load_modules(dirname):
    module_list = []
    plugins_folder = [os.path.join(os.getcwd(), dirname)]
    plugins = set(glob.glob(os.path.join(dirname, "*.py")))
    for plugin in plugins:
        if plugin.find("__") != -1:
            continue
        _plugin = os.path.join(os.getcwd(), plugin)
        try:
            moduleinfo = imp.find_module(
                plugin.split(os.sep)[1].split(".")[0], plugins_folder)
            module_object = imp.load_source(plugin, moduleinfo[1])
        except ImportError as e:
            if str(e).startswith('No module named'):
                logger.print_error(
                    'Failed to load plugin {}: the plugin could not be found!'.
                    format(plugin))
            else:
                logger.print_error(
                    'Failed to load plugin {}: import error {}'.format(
                        plugin, str(e)))
        except BaseException as e:
            logger.print_error(
                "The following error occured while importing module {}: \"{}\". Please fix the issue or contact the module author. "
                .format(plugin, str(e)))
            sys.exit(1)
        else:
            module_list.append(module_object)
    return module_list
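
load() and load_modules() together define the plugin contract: each *.py file in the plugins directory must expose a Module class whose instances carry an info dict (with "name" and "depends"), and may define post_init plus on_<event> handlers. A hypothetical plugin following that contract could look like this; the file name, event name, and handler signature are assumptions based on the irc-style add_global_handler call above.

# Hypothetical plugins/hello.py -- a sketch of what load() above expects,
# not an actual TextSiri module.
class Module:
    def __init__(self, instance):
        self.instance = instance
        self.info = {
            "name": "hello",  # used by the dependency check in load()
            "depends": [],    # names of other modules this one requires
        }

    def post_init(self, modulelist):
        # Optional: called once every module has been instantiated.
        pass

    def on_pubmsg(self, connection, event):
        # Hooked as a "pubmsg" global handler because of the "on_" prefix.
        if event.arguments and event.arguments[0] == "!hello":
            connection.privmsg(event.target, "Hello!")
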
    def message_loop(self, c):
        # Drain self.message_buffer, throttling sends once the recent-message
        # counter climbs above five.
        while True:
            time.sleep(0.05)
            sent_message = False
            if len(self.message_buffer) > 0:
                if self.message_counter > 5:
                    self.message_slowmode_lock = 1
            if self.message_slowmode_lock:
                time.sleep(0.5)
            if len(self.message_buffer) > 0:
                command = self.message_buffer.pop(0)
                command_name = command.pop(0)
                try:
                    # e.g. ["privmsg", target, text] calls c.privmsg(target, text)
                    getattr(c, command_name)(command.pop(0), *command)
                    sent_message = True
                except Exception as e:
                    logger.print_error(str(e))
            if sent_message:
                self.message_counter += 1
            else:
                self.message_counter -= 1
                if self.message_counter <= 0:
                    self.message_counter = 0
                    self.message_slowmode_lock = 0
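
The buffer entries are plain lists whose first element names a method on the connection object and whose remaining elements are its arguments. A hedged sketch of the producing side (the thread wiring and the privmsg command are assumptions, not code from the source):

import threading

def start_message_thread(bot, c):
    # Hypothetical wiring: run message_loop() on a background thread and
    # queue one message for it.  "privmsg" is assumed to be a method on the
    # connection object c; the real bot may queue different commands.
    threading.Thread(target=bot.message_loop, args=(c,), daemon=True).start()
    bot.message_buffer.append(["privmsg", "#channel", "Hello from the queue!"])
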
Example #12
    def print_and_log_error(self, string):
        # Always log the error; echo it to the console only in verbose mode.
        self.logger.error(string)
        if self.verbose:
            print_error(string)
Example #13
        for i in range(1, len(local_version)):
            if int(remote_version[i]) > int(local_version[i]):
                logger.print_info(
                    "{color_green}New bot version found! Pass --upgrade to upgrade TextSiri at startup."
                )

    def update_bot():  # we won't call this function from inside the bot
        # Assuming you are inside the TextSiri git repository (you should be).
        try:
            subprocess.check_output(["git", "fetch"])
            subprocess.check_output(["git", "pull"])
        except FileNotFoundError:
            logger.print_error("You must have git installed!")
            sys.exit(1)
        logger.print_info("Updated TextSiri, restarting.")
        python = sys.executable
        sys.argv.remove("--upgrade")
        os.execl(python, python, *sys.argv)


if __name__ == "__main__":
    if "--upgrade" in sys.argv:
        TextSiri.update_bot()
    else:
        Instance = TextSiri()
        try:
            Instance.start()
        except KeyboardInterrupt:
            Instance.die("Ctrl+C from terminal.")
            sys.exit(0)
        except Exception as e:
            logger.print_error(type(e).__name__ + ": " + str(e))
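
The version-check loop at the top of this example assumes local_version and remote_version have already been split into lists of comparable components. A sketch of that preparation, with a hypothetical helper and placeholder version strings (the excerpt does not show where the real values come from):

# Hypothetical helper, not part of the excerpt: turn "1.4.2"-style strings
# into lists of components for the element-wise comparison above.
def split_version(version_string):
    return version_string.strip().split(".")

local_version = split_version("1.4.2")   # placeholder values
remote_version = split_version("1.5.0")
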
Example #14
def main():
    parser = argparse.ArgumentParser(description="socr")
    parser.add_argument('--name', type=str, default="dhSegment")
    parser.add_argument('--lr',
                        type=float,
                        default=0.0001,
                        help="Learning rate")
    parser.add_argument('--overlr',
                        action='store_const',
                        const=True,
                        default=False,
                        help="Override the learning rate")
    parser.add_argument('--bs', type=int, default=16, help="The batch size")
    parser.add_argument('--losstype',
                        type=str,
                        default='bce',
                        help="The loss type, e.g. mse, bce, norm")
    parser.add_argument('--thicknesses',
                        type=int,
                        default=2,
                        help="Line thicknesses in the document")
    parser.add_argument('--hystmin',
                        type=float,
                        default=0.5,
                        help="Hysteresis thresholding minimum")
    parser.add_argument('--hystmax',
                        type=float,
                        default=0.5,
                        help="Hysteresis thresholding maximum")
    parser.add_argument('--expdecay',
                        type=float,
                        default=0.98,
                        help="Exponential decay")
    parser.add_argument(
        '--heightimportance',
        type=float,
        default=0.001,
        help="Height prediction importance during the training")
    parser.add_argument('--weightdecay',
                        type=float,
                        default=0.000001,
                        help="Weight decay")
    parser.add_argument('--epochlimit',
                        type=int,
                        default=None,
                        help="Limit the number of epochs")
    parser.add_argument('--bnmomentum',
                        type=float,
                        default=0.1,
                        help="BatchNorm Momentum")
    parser.add_argument('--disablecuda',
                        action='store_const',
                        const=True,
                        default=False,
                        help="Disable cuda")
    parser.add_argument('--icdartrain',
                        type=str,
                        help="Path to the ICDAR Training set")
    parser.add_argument('--icdartest',
                        type=str,
                        default=None,
                        help="Path to the ICDAR Testing set")
    parser.add_argument('--generated',
                        action='store_const',
                        const=True,
                        default=False,
                        help="Enable generated data")
    args = parser.parse_args()

    model = dhSegment(args.losstype, args.hystmin, args.hystmax,
                      args.thicknesses, args.heightimportance, args.bnmomentum)
    loss = model.create_loss()

    if not args.disablecuda:
        model = torch.nn.DataParallel(model.cuda())
        loss = loss.cuda()
    else:
        model = CPUParallel(model.cpu())
        loss = loss.cpu()

    optimizer = torch.optim.Adam(model.parameters(),
                                 lr=args.lr,
                                 weight_decay=args.weightdecay)
    adaptative_optimizer = torch.optim.lr_scheduler.ExponentialLR(
        optimizer, args.expdecay)

    os.makedirs('checkpoints', exist_ok=True)
    checkpoint_name = "checkpoints/" + args.name + ".pth.tar"

    epoch = 0

    if os.path.exists(checkpoint_name):
        print_normal("Restoring the weights...")
        checkpoint = torch.load(checkpoint_name)
        epoch = checkpoint['epoch']
        model.load_state_dict(checkpoint['state_dict'])
        optimizer.load_state_dict(checkpoint['optimizer'])
        adaptative_optimizer.load_state_dict(
            checkpoint['adaptative_optimizer'])
    else:
        print_warning("Can't find '" + checkpoint_name + "'")

    if args.overlr:
        print_normal("Overwriting the lr to " + str(args.lr))
        for param_group in optimizer.param_groups:
            param_group['lr'] = args.lr

    train_databases = [ICDARDocumentSet(args.icdartrain, loss, True)]

    if args.generated:
        sys.path.append(
            os.path.join(os.path.dirname(os.path.abspath(__file__)),
                         "submodules/scribbler"))
        from scribbler.generator import DocumentGenerator
        train_databases.append(DocumentGenerator(loss))

    train_database = torch.utils.data.ConcatDataset(train_databases)

    test_database_path = None
    if args.icdartest is not None:
        test_database_path = args.icdartest

    moving_average = MovingAverage(max(len(train_database) // args.bs, 1024))

    try:
        while True:
            if args.epochlimit is not None and epoch > args.epochlimit:
                print_normal("Epoch " + str(args.epochlimit) + "reached !")
                break

            model.train()

            loader = torch.utils.data.DataLoader(train_database,
                                                 batch_size=args.bs,
                                                 shuffle=True,
                                                 num_workers=4,
                                                 collate_fn=collate)
            for i, data in enumerate(loader, 0):

                inputs, labels = data

                optimizer.zero_grad()

                variable = torch.autograd.Variable(inputs).float()
                labels = torch.autograd.Variable(labels).float()

                if not args.disablecuda:
                    variable = variable.cuda()
                    labels = labels.cuda()
                else:
                    variable = variable.cpu()
                    labels = labels.cpu()

                outputs = model(variable)
                loss_value = loss.forward(outputs, labels)
                loss_value.backward()

                loss_value_cpu = float(loss_value.data.cpu().numpy())

                optimizer.step()

                moving_average.addn(loss_value_cpu)

                if (i * args.bs) % 8 == 0:
                    sys.stdout.write(TerminalColors.BOLD + '[%d, %5d] ' %
                                     (epoch + 1,
                                      (i * args.bs) + 1) + TerminalColors.ENDC)
                    sys.stdout.write(
                        'lr: %.8f; loss: %.4f; curr: %.4f\r' %
                        (optimizer.state_dict()['param_groups'][0]['lr'],
                         moving_average.moving_average(), loss_value_cpu))
                    sys.stdout.flush()

            epoch = epoch + 1
            adaptative_optimizer.step()

            sys.stdout.write("\n")

            try:
                if args.icdartest is not None:
                    callback(model, loss, test_database_path)
            except Exception as e:
                print_error("Can't test: " + str(e))

    except KeyboardInterrupt:
        pass

    print_normal("Done training ! Saving...")
    torch.save(
        {
            'epoch': epoch,
            'state_dict': model.state_dict(),
            'optimizer': optimizer.state_dict(),
            'adaptative_optimizer': adaptative_optimizer.state_dict(),
        }, checkpoint_name)
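
MovingAverage here comes from the repository's utilities and is not shown in the excerpt; a minimal stand-in exposing the two methods the training loop uses (addn and moving_average), written as an assumption about its behaviour, could be:

from collections import deque

class MovingAverage:
    # Minimal stand-in (assumption) for the project's MovingAverage utility:
    # keep the last `window_size` loss values and report their mean.
    def __init__(self, window_size):
        self.values = deque(maxlen=window_size)

    def addn(self, value):
        self.values.append(value)

    def moving_average(self):
        return sum(self.values) / len(self.values) if self.values else 0.0
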