Example #1
    def list_networks(self, persist=True):
        """Return list of networks for cloud

        A list of networks is fetched from libcloud, data is processed, stored
        on network models, and a list of network models is returned.

        Subclasses SHOULD NOT override or extend this method.

        This method wraps `_list_networks` which contains the core
        implementation.

        """
        task_key = 'cloud:list_networks:%s' % self.cloud.id
        task = PeriodicTaskInfo.get_or_add(task_key)
        first_run = not task.last_success

        async def _list_subnets_async(networks):
            loop = asyncio.get_event_loop()
            subnets = [
                loop.run_in_executor(None, network.ctl.list_subnets)
                for network in networks
            ]
            return await asyncio.gather(*subnets)

        with task.task_runner(persist=persist):
            # Get cached networks as dict
            cached_networks = {
                '%s-%s' % (n.id, n.network_id): n.as_dict()
                for n in self.list_cached_networks()
            }
            networks = self._list_networks()
            loop = asyncio.get_event_loop()
            loop.run_until_complete(_list_subnets_async(networks))

        # Build the freshly fetched networks as a dict to diff against the cache.
        new_networks = {
            '%s-%s' % (n.id, n.network_id): n.as_dict()
            for n in networks
        }
        if cached_networks or new_networks:
            # Publish patches to rabbitmq.
            patch = jsonpatch.JsonPatch.from_diff(cached_networks,
                                                  new_networks).patch
            if patch:
                if not first_run and self.cloud.observation_logs_enabled:
                    from mist.api.logs.methods import log_observations
                    log_observations(self.cloud.owner.id, self.cloud.id,
                                     'network', patch, cached_networks,
                                     new_networks)
                if amqp_owner_listening(self.cloud.owner.id):
                    amqp_publish_user(self.cloud.owner.id,
                                      routing_key='patch_networks',
                                      data={
                                          'cloud_id': self.cloud.id,
                                          'patch': patch
                                      })
        return networks
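
The `_list_subnets_async` helper above fans blocking `network.ctl.list_subnets` calls out to the default thread-pool executor and waits on all of them at once. A minimal sketch of that pattern on its own, assuming nothing from the cloud controller (the `fetch_subnets` function and the network names are made up for illustration):

import asyncio

def fetch_subnets(name):
    # Stand-in for a blocking call such as network.ctl.list_subnets().
    return 'subnets-of-%s' % name

async def fetch_all(names):
    loop = asyncio.get_running_loop()
    # Submit each blocking call to the default thread-pool executor,
    # then wait for all of them to complete.
    futures = [loop.run_in_executor(None, fetch_subnets, name)
               for name in names]
    return await asyncio.gather(*futures)

print(asyncio.run(fetch_all(['net-1', 'net-2'])))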
Example #2
    def ping_probe(self, persist=True):
        if not self.machine.cloud.enabled:
            return False
        from mist.api.methods import ping
        from mist.api.machines.models import PingProbe

        def _get_probe_dict():
            data = {}
            if self.machine.ping_probe is not None:
                data = self.machine.ping_probe.as_dict()
            return {
                '%s-%s' % (self.machine.id, self.machine.machine_id): {
                    'probe': {
                        'ping': data
                    }
                }
            }

        try:
            host = self.machine.ctl.get_host()
            if host in ['localhost', '127.0.0.1']:
                return
        except RuntimeError:
            return

        old_probe_data = _get_probe_dict()

        task_key = 'machine:ping_probe:%s' % self.machine.id
        task = PeriodicTaskInfo.get_or_add(task_key)
        with task.task_runner(persist=persist):
            try:
                data = ping(self.machine.cloud.owner, self.get_host())
            except:
                probe = self.machine.ping_probe
                if probe is not None:
                    probe.unreachable_since = datetime.datetime.now()
                raise
            else:
                probe = PingProbe()
                probe.update_from_dict(data)
            finally:
                self.machine.ping_probe = probe
                self.machine.save()
                new_probe_data = _get_probe_dict()
                patch = jsonpatch.JsonPatch.from_diff(old_probe_data,
                                                      new_probe_data).patch
                if patch:
                    amqp_publish_user(self.machine.cloud.owner.id,
                                      routing_key='patch_machines',
                                      data={
                                          'cloud_id': self.machine.cloud.id,
                                          'patch': patch
                                      })
        probe_result = self.machine.ping_probe
        return probe_result and probe_result.as_dict()
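
The try/except/else/finally layout above is what guarantees that the probe document gets saved and a patch gets published whether `ping` succeeds or raises. A standalone sketch of that control flow, with illustrative names that are not part of the API above:

def record(fetch):
    result = {}
    try:
        data = fetch()
    except Exception:
        # Runs only on failure: note the failure, then re-raise.
        result['failed_at'] = 'now'
        raise
    else:
        # Runs only on success.
        result['data'] = data
    finally:
        # Runs in both cases, before any exception propagates.
        print('saving', result)
    return result

record(lambda: 'pong')   # prints: saving {'data': 'pong'}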
Example #3
    def list_networks(self, persist=True):
        """Return list of networks for cloud

        A list of networks is fetched from libcloud, data is processed, stored
        on network models, and a list of network models is returned.

        Subclasses SHOULD NOT override or extend this method.

        This method wraps `_list_networks` which contains the core
        implementation.

        """
        task_key = 'cloud:list_networks:%s' % self.cloud.id
        task = PeriodicTaskInfo.get_or_add(task_key)
        with task.task_runner(persist=persist):
            cached_networks = {
                '%s' % n.id: n.as_dict()
                for n in self.list_cached_networks()
            }

            networks = self._list_networks()

        # Initialize AMQP connection to reuse for multiple messages.
        amqp_conn = Connection(config.AMQP_URI)
        if amqp_owner_listening(self.cloud.owner.id):
            networks_dict = [n.as_dict() for n in networks]
            if cached_networks and networks_dict:
                # Publish patches to rabbitmq.
                new_networks = {'%s' % n['id']: n for n in networks_dict}
                patch = jsonpatch.JsonPatch.from_diff(cached_networks,
                                                      new_networks).patch
                if patch:
                    amqp_publish_user(self.cloud.owner.id,
                                      routing_key='patch_networks',
                                      connection=amqp_conn,
                                      data={
                                          'cloud_id': self.cloud.id,
                                          'patch': patch
                                      })
            else:
                # TODO: remove this block, once patches
                # are implemented in the UI
                amqp_publish_user(self.cloud.owner.id,
                                  routing_key='list_networks',
                                  connection=amqp_conn,
                                  data={
                                      'cloud_id': self.cloud.id,
                                      'networks': networks_dict
                                  })
        return networks
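
This variant opens a single AMQP connection up front and hands it to every `amqp_publish_user` call, so consecutive messages share one connection instead of paying a handshake each. A rough sketch of the same idea with kombu directly, assuming a local RabbitMQ broker; the URI, exchange name, and payloads are placeholders, not values from the example above:

from kombu import Connection, Exchange

exchange = Exchange('example_updates', type='fanout')

# One connection for all messages; closed automatically on exit.
with Connection('amqp://guest:guest@localhost//') as conn:
    producer = conn.Producer(serializer='json')
    for payload in ({'cloud_id': 'c1', 'patch': []},
                    {'cloud_id': 'c2', 'patch': []}):
        producer.publish(payload,
                         exchange=exchange,
                         routing_key='',
                         declare=[exchange])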
Example #4
    def ssh_probe(self, persist=True):
        from mist.api.methods import probe_ssh_only
        from mist.api.machines.models import SSHProbe

        def _get_probe_dict():
            data = {}
            if self.machine.ssh_probe is not None:
                data = self.machine.ssh_probe.as_dict()
            return {
                '%s-%s' % (self.machine.id, self.machine.machine_id): {
                    'probe': {
                        'ssh': data
                    }
                }
            }

        old_probe_data = _get_probe_dict()

        task_key = 'machine:ssh_probe:%s' % self.machine.id
        task = PeriodicTaskInfo.get_or_add(task_key)
        with task.task_runner(persist=persist):
            try:
                data = probe_ssh_only(
                    self.machine.cloud.owner,
                    self.machine.cloud.id,
                    self.machine.machine_id,
                    self.get_host(),
                )
            except:
                probe = self.machine.ssh_probe
                if probe is not None:
                    probe.unreachable_since = datetime.datetime.now()
                raise
            else:
                probe = SSHProbe()
                probe.update_from_dict(data)
            finally:
                self.machine.ssh_probe = probe
                self.machine.save()
                new_probe_data = _get_probe_dict()
                patch = jsonpatch.JsonPatch.from_diff(old_probe_data,
                                                      new_probe_data).patch
                if patch:
                    amqp_publish_user(self.machine.cloud.owner.id,
                                      routing_key='patch_machines',
                                      data={
                                          'cloud_id': self.machine.cloud.id,
                                          'patch': patch
                                      })
        return self.machine.ssh_probe.as_dict()
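
On the receiving end, a client that still holds the previous copy of the machine document can apply the published patch with the same jsonpatch library to reach the new state. A small sketch with made-up probe data:

import jsonpatch

old = {'m1-abc': {'probe': {'ssh': {}}}}
new = {'m1-abc': {'probe': {'ssh': {'uptime': '5 days'}}}}

patch = jsonpatch.JsonPatch.from_diff(old, new).patch
# A plain list of RFC 6902 operations, here an 'add' under
# /m1-abc/probe/ssh.
print(patch)

# Applying the patch to the old document reproduces the new one.
assert jsonpatch.apply_patch(old, patch) == new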
Example #5
    def list_zones(self, persist=True):
        """Return list of zones for cloud

        A list of zones is fetched from libcloud, data is processed, stored
        on zone models, and a list of zone models is returned.

        Subclasses SHOULD NOT override or extend this method.

        This method wraps `_list_zones` which contains the core
        implementation.

        """
        task_key = 'cloud:list_zones:%s' % self.cloud.id
        task = PeriodicTaskInfo.get_or_add(task_key)
        first_run = not task.last_success
        with task.task_runner(persist=persist):
            cached_zones = {
                '%s-%s' % (z.id, z.zone_id): z.as_dict()
                for z in self.list_cached_zones()
            }

            zones = self._list_zones()
            for zone in zones:
                self.list_records(zone)

        # Publish updates only if the owner is currently listening.
        if amqp_owner_listening(self.cloud.owner.id):
            zones_dict = [z.as_dict() for z in zones]
            if cached_zones or zones_dict:
                # Publish patches to rabbitmq.
                new_zones = {
                    '%s-%s' % (z['id'], z['zone_id']): z
                    for z in zones_dict
                }
                patch = jsonpatch.JsonPatch.from_diff(cached_zones,
                                                      new_zones).patch
                if patch:
                    if not first_run and self.cloud.observation_logs_enabled:
                        from mist.api.logs.methods import log_observations
                        log_observations(self.cloud.owner.id, self.cloud.id,
                                         'zone', patch, cached_zones,
                                         new_zones)
                    amqp_publish_user(self.cloud.owner.id,
                                      routing_key='patch_zones',
                                      data={
                                          'cloud_id': self.cloud.id,
                                          'patch': patch
                                      })
        return zones
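
When a poll finds nothing changed, `from_diff` yields an empty patch and the `if patch:` guard keeps anything from being published. A quick sketch with throwaway zone data:

import jsonpatch

cached = {'z1-example.com': {'domain': 'example.com', 'ttl': 3600}}
fresh = {'z1-example.com': {'domain': 'example.com', 'ttl': 3600}}

patch = jsonpatch.JsonPatch.from_diff(cached, fresh).patch
assert patch == []      # identical documents diff to an empty patch

if patch:               # so this branch is skipped and nothing is sent
    print('would publish', patch)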
Example #6
    def list_volumes(self, persist=True):
        """Return list of volumes for cloud

        A list of volumes is fetched from libcloud, data is processed, stored
        on volume models, and a list of volume models is returned.

        Subclasses SHOULD NOT override or extend this method.

        This method wraps `_list_volumes` which contains the core
        implementation.

        """
        task_key = 'cloud:list_volumes:%s' % self.cloud.id
        task = PeriodicTaskInfo.get_or_add(task_key)
        first_run = not task.last_success
        with task.task_runner(persist=persist):
            cached_volumes = {
                '%s-%s' % (v.id, v.external_id): v.as_dict()
                for v in self.list_cached_volumes()
            }

            volumes = self._list_volumes()

        volumes_dict = [v.as_dict() for v in volumes]
        if cached_volumes or volumes:
            # Publish patches to rabbitmq.
            new_volumes = {
                '%s-%s' % (v['id'], v['external_id']): v
                for v in volumes_dict
            }
            patch = jsonpatch.JsonPatch.from_diff(cached_volumes,
                                                  new_volumes).patch
            if patch:
                if not first_run and self.cloud.observation_logs_enabled:
                    from mist.api.logs.methods import log_observations
                    log_observations(self.cloud.owner.id, self.cloud.id,
                                     'volume', patch, cached_volumes,
                                     new_volumes)
                if amqp_owner_listening(self.cloud.owner.id):
                    amqp_publish_user(self.cloud.owner.id,
                                      routing_key='patch_volumes',
                                      data={
                                          'cloud_id': self.cloud.id,
                                          'patch': patch
                                      })
        return volumes
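
The cache keys here combine the local model id with the provider-side id, and the resulting patches describe field-level changes under those keys. A sketch of what a size change would look like, with invented volume ids:

import jsonpatch

cached = {'v1-vol-0a1b': {'name': 'data', 'size': 10}}
fresh = {'v1-vol-0a1b': {'name': 'data', 'size': 20}}

patch = jsonpatch.JsonPatch.from_diff(cached, fresh).patch
# Expected to be a single 'replace' operation:
#   [{'op': 'replace', 'path': '/v1-vol-0a1b/size', 'value': 20}]
print(patch)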
Example #7
    def ping_probe(self, persist=True):

        from mist.api.methods import ping
        from mist.api.machines.models import PingProbe

        def _get_probe_dict():
            data = {}
            if self.machine.ping_probe is not None:
                data = self.machine.ping_probe.as_dict()
            return {
                '%s-%s' % (self.machine.id, self.machine.machine_id): {
                    'probe': {
                        'ping': data
                    }
                }
            }

        old_probe_data = _get_probe_dict()

        task_key = 'machine:ping_probe:%s' % self.machine.id
        task = PeriodicTaskInfo.get_or_add(task_key)
        with task.task_runner(persist=persist):
            data = ping(self.machine.cloud.owner, self.get_host())

        probe = PingProbe()
        probe.update_from_dict(data)
        self.machine.ping_probe = probe
        self.machine.save()
        new_probe_data = _get_probe_dict()
        patch = jsonpatch.JsonPatch.from_diff(old_probe_data,
                                              new_probe_data).patch
        if patch:
            amqp_publish_user(self.machine.cloud.owner.id,
                              routing_key='patch_machines',
                              data={
                                  'cloud_id': self.machine.cloud.id,
                                  'patch': patch
                              })
        return self.machine.ping_probe.as_dict()
Example #8
    def list_machines(self, persist=True):
        """Return list of machines for cloud

        A list of nodes is fetched from libcloud, the data is processed, stored
        on machine models, and a list of machine models is returned.

        Subclasses SHOULD NOT override or extend this method.

        This method wraps `_list_machines` which contains the core
        implementation.

        """

        task_key = 'cloud:list_machines:%s' % self.cloud.id
        task = PeriodicTaskInfo.get_or_add(task_key)
        try:
            with task.task_runner(persist=persist):
                old_machines = {
                    '%s-%s' % (m.id, m.machine_id): m.as_dict()
                    for m in self.list_cached_machines()
                }
                machines = self._list_machines()
        except PeriodicTaskThresholdExceeded:
            self.cloud.disable()
            raise

        # Initialize AMQP connection to reuse for multiple messages.
        amqp_conn = Connection(config.AMQP_URI)

        if amqp_owner_listening(self.cloud.owner.id):
            if not config.MACHINE_PATCHES:
                amqp_publish_user(
                    self.cloud.owner.id,
                    routing_key='list_machines',
                    connection=amqp_conn,
                    data={
                        'cloud_id': self.cloud.id,
                        'machines':
                        [machine.as_dict() for machine in machines]
                    })
            else:
                # Publish patches to rabbitmq.
                new_machines = {
                    '%s-%s' % (m.id, m.machine_id): m.as_dict()
                    for m in machines
                }
                # Exclude last seen and probe fields from patch.
                for md in old_machines, new_machines:
                    for m in md.values():
                        m.pop('last_seen')
                        m.pop('probe')
                patch = jsonpatch.JsonPatch.from_diff(old_machines,
                                                      new_machines).patch
                if patch:
                    amqp_publish_user(self.cloud.owner.id,
                                      routing_key='patch_machines',
                                      connection=amqp_conn,
                                      data={
                                          'cloud_id': self.cloud.id,
                                          'patch': patch
                                      })

        # Push historic information for inventory and cost reporting.
        for machine in machines:
            data = {
                'owner_id': self.cloud.owner.id,
                'machine_id': machine.id,
                'cost_per_month': machine.cost.monthly
            }
            amqp_publish(exchange='machines_inventory',
                         routing_key='',
                         auto_delete=False,
                         data=data,
                         connection=amqp_conn)

        return machines
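
`last_seen` and `probe` change on nearly every poll, so both the cached and the fresh copies drop them before diffing; otherwise every listing would emit a patch even when nothing meaningful changed. A short sketch of that exclusion with made-up machine entries:

import jsonpatch

old = {'m1-i-123': {'name': 'web', 'last_seen': '12:00', 'probe': {}}}
new = {'m1-i-123': {'name': 'web', 'last_seen': '12:05', 'probe': {}}}

# Drop the volatile fields from both sides before diffing.
for md in (old, new):
    for m in md.values():
        m.pop('last_seen', None)
        m.pop('probe', None)

assert jsonpatch.JsonPatch.from_diff(old, new).patch == []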