Example #1
def _family_tree_configurations_schema(family_trees_configuration_dict: Any) -> List[FamilyTreeConfiguration]:
    schema = Schema({
        'file': All(str, IsFile(), Path()),
    })
    family_trees_configuration = []
    for family_tree_configuration_dict in family_trees_configuration_dict:
        schema(family_tree_configuration_dict)
        family_trees_configuration.append(FamilyTreeConfiguration(family_tree_configuration_dict['file']))
    return family_trees_configuration
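
A minimal usage sketch of the function above (the calls and the failure case are illustrative; __file__ is used only because it is guaranteed to exist):

from voluptuous import MultipleInvalid

configurations = _family_tree_configurations_schema([
    {'file': __file__},                       # passes: 'file' points at an existing file
])
try:
    _family_tree_configurations_schema([{'file': '/no/such/file.gramps'}])
except MultipleInvalid:
    pass                                      # IsFile() rejects paths that do not exist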
Example #2
class ShellProvisioner(Provisioner):
    """ Allows to perform provisioning shell operations on the host/guest sides. """

    name = 'shell'
    schema = {
        Exclusive('inline', 'shelltype'): str,
        Exclusive('script', 'shelltype'): IsFile(),
        'side': Any('guest', 'host'),
    }

    def provision_single(self, guest):
        """ Executes the shell commands in the guest container or in the host. """
        if 'script' in self.options and self._is_for_guest:
            # First case: we have to run the script inside the container. So the first step is
            # to copy the content of the script to a temporary file in the container, ensure
            # that the script is executable and then run the script.
            guest_scriptpath = os.path.join(
                '/tmp/', os.path.basename(self.options['script']))
            with open(self.homedir_expanded_path(
                    self.options['script'])) as fd:
                guest.lxd_container.files.put(guest_scriptpath, fd.read())
            guest.run(['chmod', '+x', guest_scriptpath])
            guest.run([
                guest_scriptpath,
            ], quiet=False)
        elif 'script' in self.options and self._is_for_host:
            # Second case: the script is executed on the host side.
            self.host.run([
                self.homedir_expanded_path(self.options['script']),
            ])
        elif 'inline' in self.options:
            # Final case: we run a command directly inside the container or outside.
            host_or_guest = self.host if self._side == 'host' else guest
            host_or_guest.run(['sh', '-c', self.options['inline']],
                              quiet=False)

    def setup(self):
        # nothing to set up, avoid spurious messages with this override.
        pass

    ##################################
    # PRIVATE METHODS AND PROPERTIES #
    ##################################

    @property
    def _is_for_guest(self):
        """ Returns True if this provisioner should run on the guest side. """
        return self._side == 'guest'

    @property
    def _is_for_host(self):
        """ Returns True if this provisioner should run on the host side. """
        return self._side == 'host'

    @property
    def _side(self):
        return self.options.get('side', 'guest')
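
A small stand-alone sketch of how the Exclusive() markers in the schema above behave; the schema dict is repeated here wrapped in a Schema, and the option values are made up:

from voluptuous import Any, Exclusive, IsFile, MultipleInvalid, Schema

shell_schema = Schema({
    Exclusive('inline', 'shelltype'): str,
    Exclusive('script', 'shelltype'): IsFile(),
    'side': Any('guest', 'host'),
})

shell_schema({'inline': 'echo hello', 'side': 'guest'})   # valid: only one key of the group
try:
    # 'inline' and 'script' belong to the same 'shelltype' exclusion group,
    # so providing both is rejected.
    shell_schema({'inline': 'echo hello', 'script': __file__})
except MultipleInvalid:
    pass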
Example #3
def validate_paths(options):
    # Refs Vagrant code:
    # https://github.com/mitchellh/vagrant/blob/9c299a2a357fcf87f356bb9d56e18a037a53d138/
    #         plugins/provisioners/puppet/config/puppet.rb#L112
    if options.get('manifests_path') is not None:
        host_manifest_file = str(
            Path(options['manifests_path']) / options['manifest_file'])
        IsFile(msg="File {} does not exist".format(host_manifest_file))(host_manifest_file)
    elif options.get('environment_path') is not None:
        host_selected_environment_path = str(
            Path(options['environment_path']) / options['environment'])
        IsDir(msg="Directory {} does not exist".format(host_selected_environment_path))(
            host_selected_environment_path)
    return options
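
For reference, validate_paths() calls the IsFile/IsDir validators directly instead of going through a Schema; a minimal sketch of that pattern with a hypothetical path:

from voluptuous import Invalid, IsFile

missing = '/no/such/manifests/site.pp'
try:
    IsFile(msg="File {} does not exist".format(missing))(missing)
except Invalid as exc:
    print(exc)   # -> File /no/such/manifests/site.pp does not exist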
Example #4
class Gramps(Plugin, Parser):
    configuration_schema: Schema = Schema({
        'file': All(str, IsFile(), Path()),
    })

    def __init__(self, site: Site, gramps_file_path: str):
        self._site = site
        self._gramps_file_path = gramps_file_path

    @classmethod
    def for_site(cls, site: Site, configuration: Any = NO_CONFIGURATION):
        return cls(site, configuration['file'])

    async def parse(self) -> None:
        parse_xml(self._site, self._gramps_file_path)
Example #5
class Gramps(Plugin):
    configuration_schema: Schema = Schema({
        'file': All(str, IsFile(), Path()),
    })

    def __init__(self, site: Site, gramps_file_path: str):
        self._site = site
        self._gramps_file_path = gramps_file_path

    @classmethod
    def for_site(cls, site: Site, configuration: Dict):
        return cls(site, configuration['file'])

    def subscribes_to(self) -> List[Tuple[Type[DispatchedEvent], Callable]]:
        return [
            (ParseEvent, self._parse),
        ]

    async def _parse(self, event: ParseEvent) -> None:
        parse_xml(self._site, self._gramps_file_path)
Example #6
class AnsibleProvisioner(Provisioner):
    """ Allows to perform provisioning operations using Ansible. """

    name = 'ansible'

    guest_required_packages_alpine = [
        'python',
    ]
    guest_required_packages_arch = [
        'python',
    ]
    guest_required_packages_centos = [
        'python',
    ]
    guest_required_packages_debian = [
        'apt-utils',
        'aptitude',
        'python',
    ]
    guest_required_packages_fedora = [
        'python2',
    ]
    guest_required_packages_gentoo = [
        'dev-lang/python',
    ]
    guest_required_packages_opensuse = [
        'python3-base',
    ]
    guest_required_packages_ol = [
        'python',
    ]
    guest_required_packages_ubuntu = [
        'apt-utils',
        'aptitude',
        'python',
    ]

    schema = {
        Required('playbook'): IsFile(),
        'ask_vault_pass': bool,
        'vault_password_file': IsFile(),
        'groups': {
            Extra: [
                str,
            ]
        },
        'lxd_transport': bool,
    }

    def get_inventory(self):
        def line(guest):
            if self.options.get('lxd_transport'):
                ip = guest.container.lxd_name
            else:
                ip = get_ip(guest.lxd_container)
            return '{} ansible_host={} ansible_user=root'.format(
                guest.container.name, ip)

        def fmtgroup(name, hosts):
            hosts = [host for host in hosts if host in guestnames]
            return '[{}]\n{}'.format(name, '\n'.join(hosts))

        all_hosts_lines = '\n'.join(line(guest) for guest in self.guests)
        groups = self.options.get('groups', {})
        guestnames = {guest.container.name for guest in self.guests}
        groups_lines = '\n\n'.join(
            fmtgroup(key, val) for key, val in groups.items())
        return '\n\n'.join([all_hosts_lines, groups_lines])

    def provision(self):
        """ Performs the provisioning operations using ansible-playbook. """
        self.setup()
        with tempfile.NamedTemporaryFile() as tmpinv:
            tmpinv.write(self.get_inventory().encode('ascii'))
            tmpinv.flush()
            self.host.run(
                self._build_ansible_playbook_command_args(tmpinv.name))

    def setup_single(self, guest):
        super().setup_single(guest)

        if self.options.get('lxd_transport'):
            # we don't need ssh
            return

        ssh_pkg_name = {
            'alpine': 'openssh',
            'arch': 'openssh',
            'gentoo': 'net-misc/openssh',
            'opensuse': 'openSSH',
        }.get(guest.name, 'openssh-server')
        guest.install_packages([ssh_pkg_name])

        # Make sure that sshd is started
        if guest.name == 'alpine':
            guest.run(['rc-update', 'add', 'sshd'])
            guest.run(['/etc/init.d/sshd', 'start'])
        elif guest.name in {'arch', 'centos', 'fedora'}:
            guest.run(['systemctl', 'enable', 'sshd'])
            guest.run(['systemctl', 'start', 'sshd'])
        elif guest.name == 'ol':
            guest.run(['/sbin/service', 'sshd', 'start'])

        # Add the current user's SSH pubkey to the container's root SSH config.
        ssh_pubkey = self.host.get_ssh_pubkey()
        if ssh_pubkey is not None:
            guest.add_ssh_pubkey_to_root_authorized_keys(ssh_pubkey)
        else:
            logger.warning(
                'SSH pubkey was not found. Provisioning tools may not work correctly...'
            )

    ##################################
    # PRIVATE METHODS AND PROPERTIES #
    ##################################

    def _build_ansible_playbook_command_args(self, inventory_filename):
        cmd_args = [
            'ANSIBLE_HOST_KEY_CHECKING=False',
            'ansible-playbook',
        ]
        cmd_args.extend([
            '--inventory-file',
            inventory_filename,
        ])
        # Append the --ask-vault-pass option if applicable.
        if self.options.get('ask_vault_pass'):
            cmd_args.append('--ask-vault-pass')

        # Append the --vault-password-file option if applicable.
        vault_password_file = self.options.get('vault_password_file')
        if vault_password_file is not None:
            cmd_args.extend([
                '--vault-password-file',
                self.homedir_expanded_path(vault_password_file)
            ])

        if self.options.get('lxd_transport'):
            cmd_args.extend([
                '-c',
                'lxd',
            ])

        # Append the playbook filepath and return the final command.
        cmd_args.append(self.homedir_expanded_path(self.options['playbook']))
        return cmd_args
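
The 'groups' entry in the schema above uses voluptuous's Extra marker as a wildcard key, so any group name is accepted as long as its value is a list of strings; a short sketch with made-up group names:

from voluptuous import Extra, Schema

groups_schema = Schema({Extra: [str]})
groups_schema({'web': ['node1', 'node2'], 'db': ['node3']})   # any group name validates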
Example #7
        Exclusive('id', 'parent.selector'): Any('null', All(int, Range(min=0))),
    },

    # The order of all projects under parent.
    # I don't know if there's actually a limit... but it would be weird if negative
    #   numbers worked here
    ##
    # Default to NONE to indicate that the user did not specify
    Optional('child_order', default=None): Any(None, All(int, Range(min=0))),

    # Should the project be made a favorite?
    Optional('favorite', default=False): Boolean(),

    # Do we 'seed' the project we created?
    Optional('template'): {
        Required('file'): IsFile()
    }

}

# Creating a project requires at LEAST one create_object
_high_level_schema = {
    # Filters must be a list w/ at least 1 item
    Required('projects'): All([_create_obj], Length(min=1))
}

# If the date: value is present, either the match: or absent: filter can be present
_project_create_task_filter_obj_schema = {
    # Search for regex matches against task-title
    Exclusive('content', 'task:'): {
        # If task.title is specified, MUST be a string w/ at least 1 char
Example #8
class AnsibleProvisioner(Provisioner):
    """ Allows to perform provisioning operations using Ansible. """

    name = 'ansible'

    guest_required_packages_alpine = [
        'openssh',
        'python',
    ]
    guest_required_packages_archlinux = [
        'openssh',
        'python2',
    ]
    guest_required_packages_centos = [
        'openssh-server',
        'python',
    ]
    guest_required_packages_debian = [
        'apt-utils',
        'aptitude',
        'openssh-server',
        'python',
    ]
    guest_required_packages_fedora = [
        'openssh-server',
        'python2',
    ]
    guest_required_packages_gentoo = [
        'net-misc/openssh',
        'dev-lang/python',
    ]
    guest_required_packages_opensuse = [
        'openSSH',
        'python3-base',
    ]
    guest_required_packages_oracle = [
        'openssh-server',
        'python',
    ]
    guest_required_packages_ubuntu = [
        'apt-utils',
        'aptitude',
        'openssh-server',
        'python',
    ]

    schema = {
        Required('playbook'): IsFile(),
        'ask_vault_pass': bool,
        'vault_password_file': IsFile(),
    }

    def provision(self):
        """ Performs the provisioning operations using ansible-playbook. """
        ip = get_ipv4_ip(self.guest.lxd_container)
        with tempfile.NamedTemporaryFile() as tmpinv:
            tmpinv.write('{} ansible_user=root'.format(ip).encode('ascii'))
            tmpinv.flush()
            self.host.run(
                self._build_ansible_playbook_command_args(tmpinv.name))

    def setup_guest_alpine(self):
        # On Alpine guests we have to ensure that sshd is started!
        self.guest.run(['rc-update', 'add', 'sshd'])
        self.guest.run(['/etc/init.d/sshd', 'start'])

    ##################################
    # PRIVATE METHODS AND PROPERTIES #
    ##################################

    def _build_ansible_playbook_command_args(self, inventory_filename):
        cmd_args = [
            'ANSIBLE_HOST_KEY_CHECKING=False',
            'ansible-playbook',
        ]
        cmd_args.extend([
            '--inventory-file',
            inventory_filename,
        ])

        # Append the --ask-vault-pass option if applicable.
        if self.options.get('ask_vault_pass'):
            cmd_args.append('--ask-vault-pass')

        # Append the --vault-password-file option if applicable.
        vault_password_file = self.options.get('vault_password_file')
        if vault_password_file is not None:
            cmd_args.extend([
                '--vault-password-file',
                self.homedir_expanded_path(vault_password_file)
            ])

        # Append the playbook filepath and return the final command.
        cmd_args.append(self.homedir_expanded_path(self.options['playbook']))
        return cmd_args
Example #9
                  element: Element):
    file_handles = _xpath(element, './ns:objref/@hlink')
    for file_handle in file_handles:
        owner.files.add(ancestry.files[file_handle])


def _parse_urls(owner: HasLinks, element: Element):
    url_elements = _xpath(element, './ns:url')
    for url_element in url_elements:
        uri = str(_xpath1(url_element, './@href'))
        label = str(_xpath1(url_element, './@description'))
        owner.links.add(Link(uri, label))


GrampsConfigurationSchema = Schema({
    'file': IsFile(),
})


class Gramps(Plugin):
    def __init__(self, gramps_file_path: str, cache_directory_path: str):
        self._gramps_file_path = gramps_file_path
        self._cache_directory_path = cache_directory_path

    @classmethod
    def from_configuration_dict(cls, site: Site, configuration: Dict):
        assert_configuration(GrampsConfigurationSchema, configuration)
        return cls(configuration['file'],
                   join(site.configuration.cache_directory_path, 'gramps'))

    def subscribes_to(self) -> List[Tuple[str, Callable]]:
Example #10
def validate(d):

    Vector3d = All(
        Any([Number(), Number(), Number()], (Number(), Number(), Number())),
        Length(min=3, max=3))

    base_schema = Schema(
        {
            'units':
            'SI',
            'scale':
            Vector3d,
            Required('reference'):
            All(str, Length(min=1)),
            'partitioner':
            Any('metis', 'scotch', 'high order load balancing'),
            Required('safe', default=False):
            Boolean(),
            'initial':
            Any(str, {
                Required('name'): str,
                Required('func'): iscallable
            }),
            Required('restart', default=False):
            Boolean(),
            'restart casename':
            All(str, Length(min=1)),
            'restart ignore history':
            Boolean(),
            'preconditioner': {
                Required('factor'): Number()
            },
            Required('equations'):
            Any('euler', 'RANS', 'viscous', 'LES', 'DGviscous', 'DGRANS',
                'DGeuler'),  # , 'DGLES'),
            'report': {
                Required('frequency'): All(Coerce(int), Range(min=1)),
                'monitor': dict,
                'forces': dict,
                Required('Scale residuals by volume', default=False):
                Boolean()
            },
            'time marching':
            dict,
            'cell order':
            list,
            'Nodal Locations': {
                'Line': dict,
                'Tetrahedron': dict,
                'Tri': dict
            },
            Required('material', default='air'):
            All(str, Length(min=1)),
            'write output': {
                Required('frequency'):
                All(Coerce(int), Range(min=1)),
                Required('format'):
                Any('none', 'vtk', 'ensight', 'native'),
                Required('no volume vtk', default=False):
                Boolean(),
                'surface variables':
                list,
                'volume variables':
                list,
                'surface interpolate':
                list,
                'volume interpolate':
                list,
                'start output real time cycle':
                All(Coerce(int), Range(min=0)),
                'output real time cycle frequency':
                All(Coerce(int), Range(min=1)),
                'variable_name_alias':
                dict,
                'unsteady restart file output frequency':
                All(Coerce(int), Range(min=1))
            },
        },
        extra=ALLOW_EXTRA)

    d = base_schema(d)

    material_key = d['material']
    reference_key = d['reference']
    equations_key = d['equations']
    ic_keys = [key for key in d.keys() if key.startswith('IC_')]
    bc_keys = [key for key in d.keys() if key.startswith('BC_')]
    fz_keys = [key for key in d.keys() if key.startswith('FZ_')]

    material_schema = Schema({
        Required('gamma', default=1.4):
        Number(),
        Required('gas constant', default=287.0):
        Number(),
        Required('Sutherlands const', default=110.4):
        Number(),
        Required('Prandtl No', default=0.72):
        Number(),
        Required('Turbulent Prandtl No', default=0.9):
        Number(),
        'gravity':
        Vector3d,
        'latitude':
        Number()
    })

    ic_schema = Schema(
        {
            'pressure': Number(),
            'temperature': Number(),
            'V': {
                'vector': Vector3d,
                'Mach': Number(),
            },
            'Reference Length': Number(),
            'Reynolds No': Number(),
            'turbulence intensity': Number(),
            'eddy viscosity ratio': Number(),
            'ambient turbulence intensity': Number(),
            'ambient eddy viscosity ratio': Number(),
            'location': Vector3d,
            'profile': {
                'ABL': {
                    'roughness length': Number(),
                    'friction velocity': Number(),
                    'surface layer height': Number(),
                    'Monin-Obukhov length': Number(),
                    'TKE': Number(),
                    'z0': Number(),
                },
                'field': All(str, IsFile()),
                'local profile': Boolean()
            },
            'static pressure ratio': Number(),
            'total pressure ratio': Number(),
            'total temperature ratio': Number(),
            'reference': str,
            'viscosity': Number()
        },
        extra=ALLOW_EXTRA)

    timemarching_schema = Schema({
        'unsteady': {
            'total time': Number(),
            'time step': Number(),
            'order': Any('first', 'second', 1, 2),
            'start': Coerce(int)
        },
        Required('scheme'): {
            'name': Any('euler', 'runge kutta', 'lu-sgs'),
            'stage': Any(1, 'rk third order tvd', 4, 5),
            'class': Object,
            'kind': Any('local timestepping', 'global timestepping'),
            'linear gradients': Boolean()
        },
        Required('lu-sgs', default={}): {
            Required('Include Backward Sweep', default=True):
            Boolean(),
            Required('Number Of SGS Cycles', default=8):
            All(Coerce(int), Range(min=1)),
            Required('Jacobian Epsilon', default=1.0e-8):
            Number(),
            Required('Include Relaxation', default=True):
            Boolean(),
            Required('Jacobian Update Frequency', default=1):
            All(Coerce(int), Range(min=1)),
            Required('Finite Difference Jacobian', default=False):
            Boolean(),
            Required('Use Rusanov Flux For Jacobian', default=True):
            Boolean()
        },
        Required('cfl'):
        Number(),
        'cfl transport':
        Number(),
        'cfl coarse':
        Number(),
        'cfl ramp factor': {
            Required('growth'): Number(),
            Required('initial'): Number()
        },
        'cfl transport for pmg levels':
        list,
        'cfl for pmg levels':
        list,
        'ramp func':
        iscallable,
        Required('cycles'):
        All(Coerce(int), Range(min=1)),
        'multigrid':
        All(Coerce(int), Range(min=1)),
        'multigrid cycles':
        All(Coerce(int), Range(min=1)),
        'multigrid ramp':
        Number(),
        'prolong factor':
        Number(),
        'prolong transport factor':
        Number(),
        Required('multipoly', default=False):
        Boolean(),
        'multipoly cycle pattern':
        list,
        'multipoly convect only':
        Boolean(),
        'multipoly relaxation':
        Number(),
        'High Order Filter Frequency':
        Coerce(int),
        'number of time step smoothing iterations':
        Coerce(int),
        Required('cfl viscous factor', default=1.0):
        Number()
    })

    fv_euler_schema = Schema({
        Required('order'):
        Any('first', 'second', 'euler_second'),
        Required('limiter', default='vanalbada'):
        'vanalbada',
        Required('precondition', default=False):
        Boolean()
    })

    viscous_schema = fv_euler_schema.extend({
        Required('turbulence', default={}): {
            Required('les', default='none'): Any('none', 'WALE'),
        }
    })

    rans_schema = fv_euler_schema.extend({
        Required('turbulence', default={}): {
            Required('model'):
            Any('sst', 'sas', 'sa-neg'),
            Required('les', default='none'):
            Any('none', 'DES', 'DDES', 'IDDES', 'SAS'),
            Required('betastar', default=0.09):
            Number(),
            'limit mut':
            Boolean(),
            'CDES_kw':
            Number(),
            'CDES_keps':
            Number(),
            'production':
            Coerce(int),
            'rotation correction':
            Boolean(),
            'CDES':
            Number()
        }
    })

    dg_euler_schema = Schema({
        Required('order'):
        Any(0, 1, 2, 3, 4),
        Required('precondition', default=False):
        Boolean(),
        Required('c11 stability parameter', default=0.0):
        Number(),
        Required('c11 stability parameter transport', default=0.0):
        Number(),
        Required('LDG upwind parameter', default=0.5):
        Number(),
        'LDG upwind parameter aux':
        Number(),
        Required('Use MUSCL Reconstruction', default=False):
        Boolean(),
        'Approximate curved boundaries':
        Boolean(),
        'Filtering Cut-on Order':
        Coerce(int),
        'Filtering Epsilon':
        Coerce(int),
        'Filtering Strength':
        Coerce(int),
        'Inviscid Flux Scheme':
        Any('HLLC', 'Rusanov')
    })

    dg_viscous_schema = dg_euler_schema.extend({
        Required('BR2 Diffusive Flux Scheme', default=False):
        Boolean(),
        'Shock Sensing':
        Boolean(),
        'Shock Sensing k':
        Number(),
        'Shock Sensing Viscosity Scale':
        Number(),
        'Shock Sensing Variable':
        Any('density', 'temperature', 'mach', 'turbulence')
    })

    dg_rans_schema = dg_euler_schema.extend({
        Required('turbulence', default={}): {
            Required('model'):
            Any('sst', 'sas', 'sa-neg'),
            Required('les', default='none'):
            Any('none', 'DES', 'DDES', 'IDDES', 'SAS'),
            Required('betastar', default=0.09):
            Number(),
            'limit mut':
            Boolean(),
            'CDES_kw':
            Number(),
            'CDES_keps':
            Number(),
            'production':
            Coerce(int),
            'rotation correction':
            Boolean(),
            'CDES':
            Number()
        },
        Required('BR2 Diffusive Flux Scheme', default=False):
        Boolean(),
        Required('Use Rusanov for turbulence equations', default=False):
        Boolean(),
        'Shock Sensing':
        Boolean(),
        'Shock Sensing k':
        Number(),
        'Shock Sensing Viscosity Scale':
        Number(),
        'Shock Sensing Variable':
        Any('density', 'temperature', 'mach', 'turbulence')
    })

    equations_to_schema = {
        'euler': fv_euler_schema,
        'RANS': rans_schema,
        'viscous': viscous_schema,
        'LES': viscous_schema,
        'DGviscous': dg_viscous_schema,
        'DGRANS': dg_rans_schema,
        'DGeuler': dg_euler_schema,
        #        'DGLES': dg_rans_schema,
    }

    d[material_key] = material_schema(d.get(material_key, {}))
    d['time marching'] = timemarching_schema(d['time marching'])
    d[equations_key] = equations_to_schema[equations_key](d[equations_key])

    for k in ic_keys:
        d[k] = ic_schema(d[k])

    for k in bc_keys:
        pass

    for k in fz_keys:
        pass

    return d
Example #11
    def __init__(self):
        self.global_config_schema = Schema({
            Optional("sumo_http_url"):
            Url(),
            Required("run_interval_seconds", default=60):
            All(int, Range(min=1)),
            Required("target_threads", default=10):
            All(int, Range(min=1, max=50)),
            Required("batch_size", default=1000):
            All(int, Range(min=1)),
            Required("retries", default=5):
            All(int, Range(min=1, max=20)),
            Required("backoff_factor", default=0.2):
            All(float, Range(min=0)),
            "source_category":
            str,
            "source_host":
            str,
            "source_name":
            str,
            "dimensions":
            str,
            "metadata":
            str,
        })

        self.target_source_config = Schema(
            Or(
                {Required("url"): Url()},
                {
                    Required("service"): str,
                    Required("namespace"): str
                },
            ))

        url_schema = Schema(
            Or(
                Required("url"),
                Url(),
                {
                    Required("service"): str,
                    Required("namespace"): str,
                    Required("path", default="/metrics"): str,
                    Required("protocol", default="http"): str,
                },
            ))

        self.target_config_schema = self.global_config_schema.extend({
            Required("url", default={}):
            url_schema,
            Required("name"):
            str,
            Required("exclude_metrics", default=[]):
            list([str]),
            Required("include_metrics", default=[]):
            list([str]),
            Required("exclude_labels", default={}):
            Schema({}, extra=ALLOW_EXTRA),
            Required("include_labels", default={}):
            Schema({}, extra=ALLOW_EXTRA),
            Required("strip_labels", default=[]):
            list([str]),
            Required("should_callback", default=True):
            bool,
            "token_file_path":
            IsFile(),
            "verify":
            Any(Boolean(), str),
            # repeat keys from global to remove default values
            "sumo_http_url":
            Url(),
            "run_interval_seconds":
            All(int, Range(min=1)),
            "target_threads":
            All(int, Range(min=1, max=50)),
            "batch_size":
            All(int, Range(min=1)),
            "retries":
            All(int, Range(min=1, max=20)),
            "backoff_factor":
            All(float, Range(min=0)),
        })

        self.config_schema = Schema(
            All(
                {
                    Required("global", default={}):
                    self.global_config_schema,
                    Required("targets"):
                    All(Length(min=1, max=256), [self.target_config_schema]),
                },
                self.check_url,
            ))
Example #12
class AnsibleLocalProvisioner(Provisioner):
    """ Allows to perform provisioning operations using Ansible on the guest. """

    PROVISIONING_DIR = "/provisioning"
    PLAYBOOOK_PATH = "/provisioning/playbook.yml"

    name = "ansible_local"

    schema = {
        Exclusive("playbook", "playbook"): IsFile(),
        Exclusive("dir", "playbook"): IsDir(),
        "ansible_version": str,
    }

    # guest_required_packages_alpine = ['python', ]
    # guest_required_packages_arch = ['python', ]
    # guest_required_packages_centos = ['python', ]
    # guest_required_packages_fedora = ['python2', ]
    # guest_required_packages_gentoo = ['dev-lang/python', ]
    # guest_required_packages_opensuse = ['python3-base', ]
    # guest_required_packages_ol = ['python', ]

    guest_required_packages_debian = [
        'apt-utils',
        'aptitude',
        'python',
        'python3-pip',
        'libssl-dev',
    ]
    guest_required_packages_ubuntu = [
        'apt-utils',
        'aptitude',
        'python',
        'python3-pip',
        'libssl-dev',
    ]

    def setup_single(self, guest):
        super().setup_single(guest)

        ansible_version = self.options.get("ansible_version")
        if ansible_version:
            ansible = "ansible=={}".format()
        else:
            ansible = "ansible"

        guest.run(["/usr/bin/pip3", "install", ansible])

    def provision_single(self, guest):
        super().provision_single(guest)

        guest.run(["rm", "-rf", self.PROVISIONING_DIR])
        guest.run(["mkdir", self.PROVISIONING_DIR])

        if self.options.get("playbook"):
            with open(self.homedir_expanded_path(
                    self.options["playbook"])) as fd:
                guest.lxd_container.files.put(self.PLAYBOOOK_PATH, fd.read())

        if self.options.get("dir"):
            guest.lxd_container.files.recursive_put(
                self.homedir_expanded_path(self.options["dir"]),
                self.PROVISIONING_DIR,
            )

        command = [
            "ansible-playbook",
            "--connection=local",
            "--inventory=127.0.0.1,",
            self.PLAYBOOOK_PATH,
        ]

        guest.run(command, quiet=False)
Example #13
    list([str]),
    Required("include_metrics", default=[]):
    list([str]),
    # repeat keys from global to remove default values
    "run_interval_seconds":
    All(int, Range(min=1)),
    "target_threads":
    All(int, Range(min=1, max=50)),
    "batch_size":
    All(int, Range(min=1)),
    "retries":
    All(int, Range(min=1, max=20)),
    "backoff_factor":
    All(float, Range(min=0)),
    "token_file_path":
    IsFile(),
})

config_schema = Schema({
    Required("sumo_http_url"):
    Url(),
    Required("global", default={}):
    global_config_schema,
    Required("targets"):
    All(Length(min=1, max=256), [target_config_schema]),
})


def validate_config_file(ctx, param, value):
    try:
        return config_schema(json.load(value))
Example #14
 'not_unique_entity_error_dict' : dict,         # = DictField()          # List of resources that aren't unique in seqscape: {field_name : [field_val,...]}
 'last_updates_source' : dict,             # = DictField()                # keeps name of the field - source that last modified this field           
 'irods_jobs_dict' : dict,
 
 # BAM FILE SPECIFIC FIELDS:
 'bam_type' : str,
 'seq_centers' : list,          # List of sequencing centers where the data has been sequenced
 'lane_list' : list,
 'tag_list' : list,
 'run_list' : list,
 'platform_list' : list,
 'seq_date_list' : list,
 #'header_associations' : list,   # List of maps, as they are extracted from the header: [{}, {}, {}]
 'library_well_list' : list,
 'file_reference_genome_id' : str,
 IsFile('reference_genome') : str,
 'data_type' : str,
 'multiplex_lib_list' : list,
 
 'file_update_jobs_dict' : dict,
 'missing_mandatory_fields_dict' : dict,
 
 'index_file_path_irods' : str,
 'index_file_path_client' : str,
 'index_file_md5' : str,
 'index_file_upload_job_status' : str,
 
 'calc_file_md5_job_status' : str,
 'calc_index_file_md5_job_status' : str,
 
 #'tasks_dict' : dict, 
Example #15
def test_IsFile():
    schema = Schema(IsFile())
    assert_raises(MultipleInvalid, schema, 3)
    schema(os.path.abspath(__file__))
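
A complementary case in the same style (assuming the same imports as the test above): a string that looks like a path but does not exist fails validation just like a non-string value does.

def test_IsFile_missing_path():
    schema = Schema(IsFile())
    assert_raises(MultipleInvalid, schema, '/path/that/does/not/exist')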
Example #16
class ElasticsearchBucket(Bucket):
    """
    Elasticsearch bucket
    """

    SCHEMA = Bucket.SCHEMA.extend({
        Required('addr'):
        str,
        Required('index'):
        str,
        Optional('doc_type', default='doc'):
        str,
        'routing':
        str,
        Optional('dbuser'):
        All(schemas.key, Length(max=256)),
        Optional('dbuser_password'):
        str,
        Optional('ca_certs'):
        IsFile(),
        Optional('client_cert'):
        IsFile(),
        Optional('client_key'):
        IsFile(),
        Optional('use_ssl', default=False):
        Boolean(),
        Optional('verify_ssl', default=False):
        Boolean(),
        Optional('number_of_shards', default=1):
        All(int, Range(min=1)),
        Optional('number_of_replicas', default=0):
        All(int, Range(min=0)),
    })

    def __init__(self, cfg):
        cfg['type'] = 'elasticsearch'
        super().__init__(cfg)
        self._es = None
        self._touched_indices = []

    @property
    def number_of_shards(self):
        return int(self.cfg.get('number_of_shards') or 1)

    @property
    def number_of_replicas(self):
        return int(self.cfg.get('number_of_replicas') or 0)

    @property
    def addr(self):
        return self.cfg['addr']

    @property
    def index(self):
        return self.cfg['index']

    @property
    def doc_type(self):
        return self.cfg['doc_type']

    @property
    def timeout(self):
        return self.cfg.get('timeout', 30)

    @property
    def dbuser(self):
        return self.cfg.get('dbuser')

    @property
    def dbuser_password(self):
        return self.cfg.get('dbuser_password')

    @property
    def use_ssl(self):
        return self.cfg.get('use_ssl') or False

    @property
    def verify_ssl(self):
        return self.cfg.get('verify_ssl') or False

    @property
    def ca_certs(self):
        return self.cfg.get('ca_certs')

    @property
    def client_cert(self):
        return self.cfg.get('client_cert')

    @property
    def client_key(self):
        return self.cfg.get('client_key')

    @property
    def es(self):
        if self._es is None:
            addr = parse_addr(self.addr, default_port=9200)
            logging.info('connecting to elasticsearch on %s:%d', addr['host'],
                         addr['port'])
            self._es = Elasticsearch(
                [addr],
                timeout=self.timeout,
                http_auth=(self.dbuser,
                           self.dbuser_password) if self.dbuser else None,
                use_ssl=self.use_ssl,
                verify_certs=self.verify_ssl,
                ca_certs=self.ca_certs,
                client_cert=self.client_cert,
                client_key=self.client_key,
            )

        # urllib3 & elasticsearch modules log exceptions, even if they are
        # caught! Disable this.
        urllib_logger = logging.getLogger('urllib3')
        urllib_logger.setLevel(logging.CRITICAL)
        es_logger = logging.getLogger('elasticsearch')
        es_logger.setLevel(logging.CRITICAL)

        return self._es

    def init(self, data_schema=None, *args, **kwargs):
        """
        Create index and write mapping
        """
        if data_schema and self.timestamp_field:
            data_schema[self.timestamp_field] = {
                "type": "date",
                "format": "epoch_millis",
            }
        if data_schema:
            info = self.es.info()
            mapping = {"properties": data_schema}
            if not self.es.indices.exists(index=self.index, ):
                params = {}
                if version(info['version']['number']) >= version('7.0.0'):
                    params['include_type_name'] = 'true'
                mappings = {
                    'mappings': {
                        self.doc_type: {
                            "properties": data_schema
                        }
                    },
                    'settings': {
                        "number_of_shards": self.number_of_shards,
                        "number_of_replicas": self.number_of_replicas,
                        "codec": "best_compression",
                    }
                }
                self.es.indices.create(
                    index=self.index,
                    body=mappings,
                    params=params,
                )
            params = {
                'allow_no_indices': 'true',
                'ignore_unavailable': 'true',
            }
            if version(info['version']['number']) >= version('7.0.0'):
                params['include_type_name'] = 'true'

            self.es.indices.put_mapping(
                doc_type=self.doc_type,
                body=mapping,
                index=self.index,
                params=params,
            )

    def drop(self, index=None):
        """
        Delete index
        """
        if index is None:
            index = self.index
        self.es.indices.delete(index, ignore=404)

    def send_bulk(self, requests):
        """
        Send data to Elasticsearch
        """
        logging.info("commit %d change(s) to elasticsearch", len(requests))

        try:
            helpers.bulk(
                self.es,
                requests,
                chunk_size=5000,
                timeout="30s",
            )
        except (
                urllib3.exceptions.HTTPError,
                elasticsearch.exceptions.TransportError,
        ) as exn:
            raise errors.TransportError(str(exn))

    def refresh(self, index=None):
        """
        Explicitly refresh index
        """

        if index is None:
            indices = self._touched_indices
            self._touched_indices = []
        else:
            indices = [index]

        for i in indices:
            self.es.indices.refresh(i)

    def get_index_name(self, index=None, timestamp=None):
        """
        Build index name
        """

        if index is None:
            index = self.index

        if '*' in index:
            if timestamp is None:
                dt = datetime.datetime.now()
            else:
                dt = datetime.datetime.fromtimestamp(timestamp)

            index = index.replace('*', dt.strftime("%Y.%m.%d"))

        return index

    def insert_data(
        self,
        data,
        index=None,
        doc_type=None,
        doc_id=None,
        timestamp=None,
    ):
        """
        Insert entry into the index
        """

        index = self.get_index_name(index, timestamp)

        req = {
            '_index': index,
            '_type': doc_type or self.doc_type,
            '_source': data,
        }

        if doc_id is not None:
            req['_id'] = doc_id

        self.enqueue(req)
        self._touched_indices.append(index)

    def insert_times_data(self,
                          ts,
                          data,
                          tags=None,
                          index=None,
                          doc_type=None,
                          doc_id=None,
                          *args,
                          **kwargs):
        """
        Insert time-indexed entry
        """
        ts = make_ts(ts)

        data[self.timestamp_field] = ts_to_ms(ts)

        if tags is not None:
            for tag, tag_val in tags.items():
                data[tag] = tag_val

        self.insert_data(
            data,
            index=index or self.index,
            doc_type=doc_type or self.doc_type,
            doc_id=doc_id,
            timestamp=int(ts),
        )

    def search(self, body, index=None, routing=None, doc_type=None, size=0):
        """
        Send search query to Elasticsearch
        """

        if index is None:
            index = self.index

        params = {}
        if routing is not None:
            params['routing'] = routing

        try:
            return self.es.search(
                index=index,
                doc_type=doc_type or self.doc_type,
                size=size,
                body=body,
                params=params,
            )
        except elasticsearch.exceptions.TransportError as exn:
            raise errors.TransportError(str(exn))
        except urllib3.exceptions.HTTPError as exn:
            raise errors.BucketError(self.name, str(exn))

    @staticmethod
    def _build_aggs(features):
        """
        Build Elasticsearch aggregations
        """

        aggs = {}

        for feature in features:
            if feature.metric in ['mean', 'average']:
                feature.metric = 'avg'
            if feature.metric in ['std_deviation', 'variance']:
                sub_agg = 'extended_stats'
            else:
                sub_agg = 'stats'

            if feature.script:
                agg = {
                    sub_agg: {
                        "script": {
                            "lang": "painless",
                            "inline": feature.script,
                        }
                    }
                }
            elif feature.field:
                agg = {
                    sub_agg: {
                        "field": feature.field,
                    }
                }

            aggs[feature.name] = agg

        return aggs

    @classmethod
    def _build_times_query(
        cls,
        bucket_interval,
        features,
        from_ms,
        to_ms,
        timestamp_field,
    ):
        """
        Build Elasticsearch query for time-series
        """

        body = {
            "size": 0,
            "aggs": {
                "histogram": {
                    "date_histogram": {
                        "field":
                        timestamp_field,
                        "extended_bounds":
                        _build_extended_bounds(from_ms,
                                               to_ms - 1000 * bucket_interval),
                        "interval":
                        "%ds" % bucket_interval,
                        "min_doc_count":
                        0,
                        "time_zone":
                        "UTC",
                        "format":
                        "yyyy-MM-dd'T'HH:mm:ss'Z'",  # key_as_string
                        "order": {
                            "_key": "asc"
                        }
                    },
                    "aggs": {},
                }
            }
        }

        must = []

        date_range = _build_date_range(timestamp_field, from_ms, to_ms)
        if date_range is not None:
            must.append(date_range)

        for feature in features:
            match_all = _build_match_all(feature.match_all)
            for condition in match_all:
                must.append(condition)

        if len(must) > 0:
            body['query'] = {
                'bool': {
                    'must': must,
                }
            }

        aggs = cls._build_aggs(features)

        for x in sorted(aggs):
            body['aggs']['histogram']['aggs'][x] = aggs[x]

        return body

    @staticmethod
    def _get_agg_val(bucket, feature):
        """
        Get aggregation value for the bucket returned by Elasticsearch
        """
        agg_val = bucket[feature.name].get(feature.metric)

        if agg_val is None:
            logging.info(
                "missing data: field '%s', metric: '%s', bucket: %s",
                feature.field,
                feature.metric,
                bucket['key'],
            )

        return agg_val

    def get_times_data(
        self,
        bucket_interval,
        features,
        from_date=None,
        to_date=None,
    ):
        from_ms, to_ms = _date_range_to_ms(from_date, to_date)

        body = self._build_times_query(
            bucket_interval,
            features,
            from_ms=from_ms,
            to_ms=to_ms,
            timestamp_field=self.timestamp_field,
        )

        es_res = self.search(
            body,
            routing=None,
        )

        hits = es_res['hits']['total']
        if hits == 0:
            return

        # TODO: last bucket may contain incomplete data when to_date == now
        """
        now = datetime.datetime.now().timestamp()
        epoch_ms = 1000 * int(now)
        min_bound_ms = 1000 * int(now / bucket_interval) * bucket_interval
        """

        t0 = None

        for bucket in es_res['aggregations']['histogram']['buckets']:
            X = np.full(len(features), np.nan, dtype=float)
            timestamp = int(bucket['key'])
            timeval = bucket['key_as_string']

            for i, feature in enumerate(features):
                X[i] = self._get_agg_val(bucket, feature)

            # TODO: last bucket may contain incomplete data when to_date == now
            """
            try:
                # The last interval contains partial data
                if timestamp == min_bound_ms:
                    R = float(epoch_ms - min_bound_ms
                       ) / (1000 * bucket_interval)
                    X = R * X + (1-R) * X_prev
            except NameError:
                # X_prev not defined. No interleaving required.
                pass

            X_prev = X
            """

            if t0 is None:
                t0 = timestamp

            yield (timestamp - t0) / 1000, X, timeval
Example #17
class PuppetProvisioner(Provisioner):
    """ Allows to perform provisioning operations using Puppet. """

    name = 'puppet'

    guest_required_packages_arch = ['puppet']
    guest_required_packages_debian = ['puppet']
    guest_required_packages_fedora = ['puppet']
    guest_required_packages_ubuntu = ['puppet']

    # Refs Vagrant docs:
    # https://www.vagrantup.com/docs/provisioning/puppet_apply.html#options
    schema = All(
        {
            'binary_path': str,
            'facter': dict,
            'hiera_config_path': IsFile(),
            'manifest_file': str,
            'manifests_path': IsDir(),
            'module_path': IsDir(),
            'environment': str,
            'environment_path': IsDir(),
            'environment_variables': dict,
            'options': str,
        }, finalize_options, validate_paths)

    _guest_manifests_path = '/.lxdock.d/puppet/manifests'
    _guest_module_path = '/.lxdock.d/puppet/modules'
    _guest_default_module_path = '/etc/puppet/modules'
    _guest_environment_path = '/.lxdock.d/puppet/environments'
    _guest_hiera_file = '/.lxdock.d/puppet/hiera.yaml'

    def provision_single(self, guest):
        """ Performs the provisioning operations using puppet. """
        # Verify if `puppet` has been installed.
        binary_path = self.options.get('binary_path')
        if binary_path is not None:
            puppet_bin = str(PurePosixPath(binary_path) / 'puppet')
            retcode = guest.run(['test', '-x', puppet_bin])
            fail_msg = (
                "puppet executable is not found in the specified path {} in the "
                "guest container. ".format(binary_path))
        else:
            retcode = guest.run(['which', 'puppet'])
            fail_msg = (
                "puppet was not automatically installed for this guest. "
                "Please specify the command to install it in LXDock file using "
                "a shell provisioner and try `lxdock provision` again. You may "
                "also specify `binary_path` that contains the puppet executable "
                "in LXDock file.")
        if retcode != 0:
            raise ProvisionFailed(fail_msg)

        # Copy manifests dir
        manifests_path = self.options.get('manifests_path')
        if manifests_path is not None:
            guest.copy_directory(Path(manifests_path),
                                 PurePosixPath(self._guest_manifests_path))

        # Copy module dir
        module_path = self.options.get('module_path')
        if module_path is not None:
            guest.copy_directory(Path(module_path),
                                 PurePosixPath(self._guest_module_path))

        # Copy environment dir
        environment_path = self.options.get('environment_path')
        if environment_path is not None:
            guest.copy_directory(Path(environment_path),
                                 PurePosixPath(self._guest_environment_path))

        # Copy hiera file
        hiera_file = self.options.get('hiera_config_path')
        if hiera_file is not None:
            guest.copy_file(Path(hiera_file),
                            PurePosixPath(self._guest_hiera_file))

        # Run puppet.
        command = self._build_puppet_command()

        if environment_path:
            logger.info("Running Puppet with environment {}...".format(
                self.options['environment']))
        else:
            logger.info("Running Puppet with {}...".format(
                self.options['manifest_file']))

        guest.run(['sh', '-c', ' '.join(command)])

    ##################################
    # PRIVATE METHODS AND PROPERTIES #
    ##################################

    def _build_puppet_command(self):
        """
        Refs:
        https://github.com/mitchellh/vagrant/blob/9c299a2a357fcf87f356bb9d56e18a037a53d138/
                plugins/provisioners/puppet/provisioner/puppet.rb#L173
        """

        options = self.options.get('options', '')
        options = list(map(shlex.quote, shlex.split(options)))

        module_path = self.options.get('module_path')
        if module_path is not None:
            options += [
                '--modulepath', '{}:{}'.format(self._guest_module_path,
                                               self._guest_default_module_path)
            ]

        hiera_path = self.options.get('hiera_config_path')
        if hiera_path is not None:
            options += ['--hiera_config={}'.format(self._guest_hiera_file)]

        # TODO: we are not detecting console color support now, but please contribute if you need!

        options += ['--detailed-exitcodes']

        environment_path = self.options.get('environment_path')
        if environment_path is not None:
            options += [
                '--environmentpath',
                str(self._guest_environment_path), '--environment',
                self.options['environment']
            ]
        else:
            options += ['--manifestdir', str(self._guest_manifests_path)]

        manifest_file = self.options.get('manifest_file')
        if manifest_file is not None:
            options += [
                str(PurePosixPath(self._guest_manifests_path) / manifest_file)
            ]

        # Build up the custom facts if we have any
        facter = []
        facter_dict = self.options.get('facter')
        if facter_dict is not None:
            for key, value in sorted(facter_dict.items()):
                facter.append("FACTER_{}={}".format(key, shlex.quote(value)))

        binary_path = self.options.get('binary_path')
        if binary_path is not None:
            puppet_bin = str(PurePosixPath(binary_path) / 'puppet')
        else:
            puppet_bin = 'puppet'

        # TODO: working_directory for hiera. Please contribute!

        env = []
        env_variables = self.options.get('environment_variables')
        if env_variables is not None:
            for key, value in sorted(env_variables.items()):
                env.append("{}={}".format(key, shlex.quote(value)))

        command = env + facter + [puppet_bin, 'apply'] + options

        return command
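
The provisioner's schema above is built with All(), which validates the option dict first and then feeds the result through finalize_options and validate_paths; a toy sketch of the same chaining pattern, with a hypothetical key and helper:

from voluptuous import All, IsDir, Schema

def check_paths(options):
    # Cross-field checks run after the dict validation; the callable must
    # return the (possibly adjusted) options dict.
    return options

toy_schema = Schema(All({'manifests_path': IsDir()}, check_paths))
toy_schema({'manifests_path': '/tmp'})   # passes on systems where /tmp exists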
Example #18
class ElasticsearchDataSource(DataSource):
    """
    Elasticsearch datasource
    """

    SCHEMA = DataSource.SCHEMA.extend({
        Required('addr'):
        str,
        Required('index'):
        str,
        Optional('doc_type', default='doc'):
        str,
        'routing':
        str,
        Optional('dbuser'):
        All(schemas.key, Length(max=256)),
        Optional('dbuser_password'):
        str,
        Optional('ca_certs'):
        IsFile(),
        Optional('client_cert'):
        IsFile(),
        Optional('client_key'):
        IsFile(),
        Optional('use_ssl', default=False):
        Boolean(),
        Optional('verify_ssl', default=False):
        Boolean(),
    })

    def __init__(self, cfg):
        cfg['type'] = 'elasticsearch'
        super().__init__(cfg)
        self._es = None
        self._touched_indices = []

    @property
    def addr(self):
        return self.cfg['addr']

    @property
    def index(self):
        return self.cfg['index']

    @property
    def doc_type(self):
        return self.cfg['doc_type']

    @property
    def timeout(self):
        return self.cfg.get('timeout', 30)

    @property
    def dbuser(self):
        return self.cfg.get('dbuser')

    @property
    def dbuser_password(self):
        return self.cfg.get('dbuser_password')

    @property
    def use_ssl(self):
        return self.cfg.get('use_ssl') or False

    @property
    def verify_ssl(self):
        return self.cfg.get('verify_ssl') or False

    @property
    def ca_certs(self):
        return self.cfg.get('ca_certs')

    @property
    def client_cert(self):
        return self.cfg.get('client_cert')

    @property
    def client_key(self):
        return self.cfg.get('client_key')

    @property
    def es(self):
        if self._es is None:
            addr = parse_addr(self.addr, default_port=9200)
            logging.info('connecting to elasticsearch on %s:%d', addr['host'],
                         addr['port'])
            self._es = Elasticsearch(
                [addr],
                timeout=self.timeout,
                http_auth=(self.dbuser,
                           self.dbuser_password) if self.dbuser else None,
                use_ssl=self.use_ssl,
                verify_certs=self.verify_ssl,
                ca_certs=self.ca_certs,
                client_cert=self.client_cert,
                client_key=self.client_key,
            )

        # urllib3 & elasticsearch modules log exceptions, even if they are
        # caught! Disable this.
        urllib_logger = logging.getLogger('urllib3')
        urllib_logger.setLevel(logging.CRITICAL)
        es_logger = logging.getLogger('elasticsearch')
        es_logger.setLevel(logging.CRITICAL)

        return self._es

    def init(self, template_name=None, template=None, *args, **kwargs):
        """
        Create index and put template
        """

        if template is not None:
            self.es.indices.put_template(
                name=template_name,
                body=template,
                ignore=400,
            )

    def drop(self, index=None):
        """
        Delete index
        """
        if index is None:
            index = self.index
        self.es.indices.delete(index, ignore=404)

    def send_bulk(self, requests):
        """
        Send data to Elasticsearch
        """
        logging.info("commit %d change(s) to elasticsearch", len(requests))

        try:
            helpers.bulk(
                self.es,
                requests,
                chunk_size=5000,
                timeout="30s",
            )
        except (
                urllib3.exceptions.HTTPError,
                elasticsearch.exceptions.TransportError,
        ) as exn:
            raise errors.TransportError(str(exn))

    def refresh(self, index=None):
        """
        Explicitly refresh index
        """

        if index is None:
            indices = self._touched_indices
            self._touched_indices = []
        else:
            indices = [index]

        for i in indices:
            self.es.indices.refresh(i)

    def get_index_name(self, index=None, timestamp=None):
        """
        Build index name
        """

        if index is None:
            index = self.index

        if '*' in index:
            if timestamp is None:
                dt = datetime.datetime.now()
            else:
                dt = datetime.datetime.fromtimestamp(timestamp)

            index = index.replace('*', dt.strftime("%Y.%m.%d"))

        return index

    def insert_data(
        self,
        data,
        index=None,
        doc_type=None,
        doc_id=None,
        timestamp=None,
    ):
        """
        Insert entry into the index
        """

        index = self.get_index_name(index, timestamp)

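        # Build a bulk "index" action; enqueue() buffers it so that the
        # documents can later be sent in batches (see send_bulk).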
        req = {
            '_index': index,
            '_type': doc_type or self.doc_type,
            '_source': data,
        }

        if doc_id is not None:
            req['_id'] = doc_id

        self.enqueue(req)
        self._touched_indices.append(index)

    def insert_times_data(self,
                          ts,
                          data,
                          tags=None,
                          index=None,
                          doc_type=None,
                          doc_id=None,
                          timestamp_field='timestamp',
                          *args,
                          **kwargs):
        """
        Insert time-indexed entry
        """
        ts = make_ts(ts)

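        # Elasticsearch 'date' fields are stored as epoch milliseconds here,
        # hence the conversion from seconds below.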
        data[timestamp_field] = ts_to_ms(ts)

        if tags is not None:
            for tag, tag_val in tags.items():
                data[tag] = tag_val

        self.insert_data(
            data,
            index=index,
            doc_type=doc_type or self.doc_type,
            doc_id=doc_id,
            timestamp=int(ts),
        )

    def search(self, body, index=None, routing=None, doc_type=None, size=0):
        """
        Send search query to Elasticsearch
        """

        if index is None:
            index = self.index

        params = {}
        if routing is not None:
            params['routing'] = routing

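        # size defaults to 0 because callers are mostly interested in the
        # aggregations, not in the matching documents themselves.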
        try:
            return self.es.search(
                index=index,
                doc_type=doc_type or self.doc_type,
                size=size,
                body=body,
                params=params,
            )
        except elasticsearch.exceptions.TransportError as exn:
            raise errors.TransportError(str(exn))
        except urllib3.exceptions.HTTPError as exn:
            raise errors.DataSourceError(self.name, str(exn))

    @staticmethod
    def _build_aggs(model):
        """
        Build Elasticsearch aggregations
        """

        aggs = {}

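        # Map each feature metric to an Elasticsearch aggregation: 'avg' is
        # the ES name for mean/average, and std_deviation/variance are only
        # provided by the 'extended_stats' aggregation; every other metric is
        # covered by the plain 'stats' aggregation.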
        for feature in model.features:
            if feature.metric in ['mean', 'average']:
                feature.metric = 'avg'
            if feature.metric in ['std_deviation', 'variance']:
                sub_agg = 'extended_stats'
            else:
                sub_agg = 'stats'

            if feature.script:
                agg = {
                    sub_agg: {
                        "script": {
                            "lang": "painless",
                            "inline": feature.script,
                        }
                    }
                }
            elif feature.field:
                agg = {
                    sub_agg: {
                        "field": feature.field,
                    }
                }

            aggs[feature.name] = agg

        return aggs

    def get_field_cardinality(
        self,
        model,
        from_ms=None,
        to_ms=None,
    ):
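        # Count the number of distinct values of the model key over the
        # requested time range; the cardinality aggregation is approximate
        # for high cardinalities.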
        body = {
            "size": 0,
            "aggs": {
                "count": {
                    "cardinality": {
                        "field": model.key,
                    }
                }
            }
        }

        must = []
        date_range = _build_date_range(model.timestamp_field, from_ms, to_ms)
        if date_range is not None:
            must.append(date_range)

        if len(must) > 0:
            body['query'] = {
                'bool': {
                    'must': must,
                }
            }

        es_res = self.search(
            body,
            routing=model.routing,
        )

        return int(es_res['aggregations']['count']['value'])

    @staticmethod
    def build_quadrant_aggs(model, agg):
        res = {}
        fields = [feature.field for feature in agg.features]
        for field in set(fields):
            res.update({
                build_agg_name(agg.measurement, field): {
                    "extended_stats": {
                        "field": field
                    }
                }
            })
        return res

    @staticmethod
    def read_quadrant_aggs(key, time_buckets):
        return key, time_buckets

    @classmethod
    def _build_quadrant_query(
        cls,
        model,
        aggregation,
        from_ms=None,
        to_ms=None,
        key=None,
        partition=0,
        num_partition=1,
    ):
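        # The terms aggregation is split into partitions so that models with
        # many distinct keys can be fetched over several requests (see
        # get_quadrant_data); each request only returns the keys belonging to
        # the requested partition.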
        body = {
            "size": 0,
            "aggs": {
                "key": {
                    "terms": {
                        "field": model.key,
                        "size": model.max_keys,
                        "collect_mode": "breadth_first",
                        "include": {
                            "partition": partition,
                            "num_partitions": num_partition,
                        },
                    },
                    "aggs": {
                        "quadrant_data": {
                            "date_histogram": {
                                "field":
                                model.timestamp_field,
                                "interval":
                                "%ds" % (model.bucket_interval),
                                "min_doc_count":
                                0,
                                "time_zone":
                                "UTC",
                                "format":
                                "yyyy-MM-dd'T'HH:mm:ss'Z'",  # key_as_string format
                                "extended_bounds":
                                _build_extended_bounds(from_ms, to_ms - 1),
                            },
                            "aggs":
                            cls.build_quadrant_aggs(model, aggregation),
                        }
                    }
                }
            }
        }

        must = []

        date_range = _build_date_range(model.timestamp_field, from_ms, to_ms)
        if date_range is not None:
            must.append(date_range)

        if key is not None:
            must.append({"match": {model.key: key}})

        match_all = _build_match_all(aggregation.match_all)
        for condition in match_all:
            must.append(condition)

        if len(must) > 0:
            body['query'] = {"bool": {"must": must}}

        return body

    def get_quadrant_data(
        self,
        model,
        aggregation,
        from_date=None,
        to_date=None,
        key=None,
    ):
        from_ms, to_ms = _date_range_to_ms(from_date, to_date)

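        # When no specific key is requested, the key cardinality determines
        # how many partitioned requests are needed, each one limited to
        # max_series_per_request series.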
        if key is None:
            num_series = self.get_field_cardinality(model, from_ms, to_ms)
            num_partition = math.ceil(num_series / self.max_series_per_request)
        else:
            num_partition = 1

        for partition in range(0, num_partition):
            logging.info(
                "running aggregations for model '%s', partition %d/%d",
                model.name, partition, num_partition)

            body = self._build_quadrant_query(
                model,
                aggregation,
                from_ms=from_ms,
                to_ms=to_ms,
                key=key,
                partition=partition,
                num_partition=num_partition,
            )

            es_res = self.search(
                body,
                routing=model.routing,
            )

            for bucket in es_res['aggregations']['key']['buckets']:
                yield self.read_quadrant_aggs(
                    bucket['key'],
                    bucket['quadrant_data']['buckets'],
                )

    @classmethod
    def _build_times_query(
        cls,
        model,
        from_ms,
        to_ms,
    ):
        """
        Build Elasticsearch query for time-series
        """

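        # One date_histogram bucket per bucket_interval; min_doc_count=0 and
        # extended_bounds ensure that empty intervals are returned as well
        # instead of being skipped.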
        body = {
            "size": 0,
            "aggs": {
                "histogram": {
                    "date_histogram": {
                        "field":
                        model.timestamp_field,
                        "extended_bounds":
                        _build_extended_bounds(
                            from_ms, to_ms - 1000 * model.bucket_interval),
                        "interval":
                        "%ds" % model.bucket_interval,
                        "min_doc_count":
                        0,
                        "time_zone":
                        "UTC",
                        "format":
                        "yyyy-MM-dd'T'HH:mm:ss'Z'",  # key_as_string format
                        "order": {
                            "_key": "asc"
                        }
                    },
                    "aggs": {},
                }
            }
        }

        must = []

        date_range = _build_date_range(model.timestamp_field, from_ms, to_ms)
        if date_range is not None:
            must.append(date_range)

        for feature in model.features:
            match_all = _build_match_all(feature.match_all)
            for condition in match_all:
                must.append(condition)

        if len(must) > 0:
            body['query'] = {
                'bool': {
                    'must': must,
                }
            }

        aggs = cls._build_aggs(model)

        for x in sorted(aggs):
            body['aggs']['histogram']['aggs'][x] = aggs[x]

        return body

    @staticmethod
    def _get_agg_val(bucket, feature):
        """
        Get aggregation value for the bucket returned by Elasticsearch
        """
        agg_val = bucket[feature.name].get(feature.metric)

        if agg_val is None:
            logging.info(
                "missing data: field '%s', metric: '%s', bucket: %s",
                feature.field,
                feature.metric,
                bucket['key'],
            )

        return agg_val

    def get_times_data(
        self,
        model,
        from_date=None,
        to_date=None,
    ):
        features = model.features

        from_ms, to_ms = _date_range_to_ms(from_date, to_date)

        body = self._build_times_query(
            model,
            from_ms=from_ms,
            to_ms=to_ms,
        )

        es_res = self.search(
            body,
            routing=model.routing,
        )

        hits = es_res['hits']['total']
        if hits == 0:
            logging.info("Aggregations for model %s: Missing data", model.name)
            return

        # TODO: last bucket may contain incomplete data when to_date == now
        """
        now = datetime.datetime.now().timestamp()
        epoch_ms = 1000 * int(now)
        min_bound_ms = 1000 * int(now / model.bucket_interval) * model.bucket_interval
        """

        t0 = None

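        # Yield one entry per histogram bucket: the timestamp relative to the
        # first bucket (in seconds), the feature vector, and the bucket's
        # key_as_string value.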
        for bucket in es_res['aggregations']['histogram']['buckets']:
            X = np.full(model.nb_features, np.nan, dtype=float)
            timestamp = int(bucket['key'])
            timeval = bucket['key_as_string']

            for i, feature in enumerate(features):
                X[i] = self._get_agg_val(bucket, feature)

            # TODO: last bucket may contain incomplete data when to_date == now
            """
            try:
                # The last interval contains partial data
                if timestamp == min_bound_ms:
                    R = float(epoch_ms - min_bound_ms) / (1000 * model.bucket_interval)
                    X = R * X + (1-R) * X_prev
            except NameError:
                # X_prev not defined. No interleaving required.
                pass

            X_prev = X
            """

            if t0 is None:
                t0 = timestamp

            yield (timestamp - t0) / 1000, X, timeval

    def gen_template(
        self,
        model,
        prediction,
    ):
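        # Build an index template so that prediction indices are created with
        # the expected mappings: dates as epoch_millis, scores and predicted
        # fields as floats, tags as keywords.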
        template = {
            "template": self.index,
            "mappings": {
                self.doc_type: {
                    "properties": {
                        "timestamp": {
                            "type": "date",
                            "format": "epoch_millis"
                        },
                        "score": {
                            "type": "float"
                        },
                        "is_anomaly": {
                            "type": "boolean"
                        },
                    }
                }
            }
        }
        properties = {}
        for tag in model.get_tags():
            properties[tag] = {"type": "keyword"}

        for field in prediction.get_field_names():
            properties[field] = {"type": "float"}

        if model.timestamp_field is not None:
            properties[model.timestamp_field] = {
                "type": "date",
                "format": "epoch_millis",
            }
        template['mappings'][self.doc_type]['properties'].update(properties)
        return template

    def save_timeseries_prediction(
        self,
        prediction,
        model,
        index=None,
    ):
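        # Install the index template first so the prediction index gets the
        # expected mappings, then buffer one document per predicted bucket
        # and commit the whole batch at the end.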
        template = self.gen_template(model, prediction)
        self.init(template_name=self.index, template=template)

        for bucket in prediction.format_buckets():
            data = bucket['predicted']
            tags = model.get_tags()
            stats = bucket.get('stats', None)
            if stats is not None:
                data['score'] = float(stats.get('score'))
                tags['is_anomaly'] = stats.get('anomaly', False)

            self.insert_times_data(
                # Prefer the explicitly requested index over the configured one
                index=index or self.index,
                ts=bucket['timestamp'],
                tags=tags,
                data=data,
                timestamp_field=model.timestamp_field,
            )
        self.commit()