Code example #1
def _publish_message(host, amqp_settings, routing_key, data):
    """Publish an AMQP message.

    Returns:
        bool: True if message was sent successfully.
    """
    if host == "stdout":
        print("Published to %s: %s" % (routing_key, data))
        return True

    set_pika_log_level()

    conn_kwargs = dict()

    # name the connection like 'rez.publish.<local hostname>'
    conn_kwargs["client_properties"] = {
        "connection_name": "rez.publish.%s" % socket.gethostname()
    }

    host, port = parse_host_and_port(url=host)
    conn_kwargs["host"] = host
    if port is not None:
        conn_kwargs["port"] = port

    if amqp_settings.get("userid"):
        conn_kwargs["credentials"] = PlainCredentials(
            username=amqp_settings.get("userid"),
            password=amqp_settings.get("password"))

    params = ConnectionParameters(
        socket_timeout=amqp_settings.get("connect_timeout"), **conn_kwargs)

    props = BasicProperties(
        content_type="application/json",
        content_encoding="utf-8",
        delivery_mode=amqp_settings.get("message_delivery_mode"))

    try:
        conn = BlockingConnection(params)
    except socket.error as e:
        print_error("Cannot connect to the message broker: %s" % e)
        return False

    try:
        channel = conn.channel()

        channel.basic_publish(exchange=amqp_settings["exchange_name"],
                              routing_key=routing_key,
                              body=json.dumps(data),
                              properties=props)
    except Exception as e:
        print_error("Failed to publish message: %s" % (e))
        return False
    finally:
        conn.close()

    return True
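
For reference, a hypothetical call to _publish_message might look like the following. The amqp_settings keys shown are exactly the ones the function above reads; the broker host, exchange name, routing key and credential values are placeholders, not taken from any real configuration.

# Hypothetical settings; keys mirror those used by _publish_message above,
# values are placeholders only.
amqp_settings = {
    "userid": "guest",
    "password": "guest",
    "connect_timeout": 10,
    "exchange_name": "rez.events",
    "message_delivery_mode": 2,  # AMQP convention: 1 = transient, 2 = persistent
}

ok = _publish_message(
    host="broker.example.com:5672",  # or "stdout" to just print the payload
    amqp_settings=amqp_settings,
    routing_key="package.release",
    data={"package": "foo-1.0.0"},
)
if not ok:
    print("publish failed")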
Code example #2
File: amqp.py Project: sdot-b/rez
def _publish_message(host, amqp_settings, routing_key, data):
    """Publish an AMQP message.

    Returns:
        bool: True if message was sent successfully.
    """
    if host == "stdout":
        print("Published to %s: %s" % (routing_key, data))
        return True

    creds = PlainCredentials(
        username=amqp_settings.get("userid"),
        password=amqp_settings.get("password")
    )

    params = ConnectionParameters(
        host=host,
        credentials=creds,
        socket_timeout=amqp_settings.get("connect_timeout")
    )

    props = BasicProperties(
        content_type="application/json",
        content_encoding="utf-8",
        delivery_mode=amqp_settings.get("message_delivery_mode")
    )

    try:
        conn = BlockingConnection(params)
    except socket.error as e:
        print_error("Cannot connect to the message broker: %s" % e)
        return False

    try:
        channel = conn.channel()

        channel.basic_publish(
            exchange=amqp_settings["exchange_name"],
            routing_key=routing_key,
            body=json.dumps(data),
            properties=props
        )
    except Exception as e:
        print_error("Failed to publish message: %s" % (e))
        return False
    finally:
        conn.close()

    return True
Code example #3
File: amqp.py Project: zclongpop123/rez
def _publish_message(host, amqp_settings, routing_key, data):
    """Publish an AMQP message.

    Returns:
        bool: True if message was sent successfully.
    """
    if host == "stdout":
        print("Published to %s: %s" % (routing_key, data))
        return True

    try:
        conn = Connection(**remove_nones(
            host=host,
            userid=amqp_settings.get("userid"),
            password=amqp_settings.get("password"),
            connect_timeout=amqp_settings.get("connect_timeout")
        ))
    except socket.error as e:
        print_error("Cannot connect to the message broker: %s" % (e))
        return False

    channel = conn.channel()

    # build the message
    msg = basic_message.Message(**remove_nones(
        body=json.dumps(data),
        delivery_mode=amqp_settings.get("message_delivery_mode"),
        content_type="application/json",
        content_encoding="utf-8"
    ))

    # publish the message
    try:
        channel.basic_publish(
            msg,
            amqp_settings["exchange_name"],
            routing_key
        )
    except Exception as e:
        print_error("Failed to publish message: %s" % (e))
        return False
    finally:
        conn.close()

    return True
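
The remove_nones helper used here is not part of this excerpt. Judging from how it is called, it presumably drops keyword arguments whose value is None so that the underlying library defaults apply; a minimal sketch under that assumption (not the project's actual implementation):

def remove_nones(**kwargs):
    # Assumed behaviour: drop None-valued entries so that calls like
    # Connection(**remove_nones(...)) fall back to the library defaults.
    return dict((k, v) for k, v in kwargs.items() if v is not None)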
Code example #4
File: package_cache.py Project: zclongpop123/rez
    def add_variants_async(self, variants):
        """Update the package cache by adding some or all of the given variants.

        This method is called when a context is created or sourced. Variants
        are then added to the cache in a separate process.
        """

        # A prod install is necessary because add_variants_async works by
        # starting a rez-pkg-cache proc, and this can only be done reliably in
        # a prod install. On non-windows we could fork instead, but there would
        # remain no good solution on windows.
        #
        if not system.is_production_rez_install:
            raise PackageCacheError(
                "PackageCache.add_variants_async is only supported in a "
                "production rez installation.")

        variants_ = []

        # trim down to those variants that are cachable, and not already cached
        for variant in variants:
            if not variant.parent.is_cachable:
                continue

            status, _ = self._get_cached_root(variant)
            if status == self.VARIANT_NOT_FOUND:
                variants_.append(variant)

        # if there are no variants to add, and no potential cleanup to do, then exit
        if not variants_ and config.package_cache_clean_limit < 0:
            return

        # Write each variant out to a file in the 'pending' dir in the cache. A
        # separate proc reads these files and then performs the actual variant
        # copy. Note that these files are unique, in case two rez procs attempt
        # to write out the same pending variant file at the same time.
        #
        pending_filenames = os.listdir(self._pending_dir)

        for variant in variants_:
            prefix = variant.parent.qualified_name + '-'
            handle_dict = variant.handle.to_dict()
            already_pending = False

            # check if this variant is already pending
            for filename in pending_filenames:
                if filename.startswith(prefix):
                    filepath = os.path.join(self._pending_dir, filename)
                    try:
                        with open(filepath) as f:
                            data = json.loads(f.read())
                    except Exception:
                        continue  # maybe file was just deleted

                    if data == handle_dict:
                        already_pending = True
                        break

            if already_pending:
                continue

            filename = prefix + uuid4().hex + ".json"
            filepath = os.path.join(self._pending_dir, filename)
            with open(filepath, 'w') as f:
                f.write(json.dumps(handle_dict))

        # configure executable
        if platform.system() == "Windows":
            kwargs = {"creationflags": subprocess.CREATE_NEW_PROCESS_GROUP}
        else:
            kwargs = {"preexec_fn": os.setsid}

        exe = os.path.join(system.rez_bin_path, "rez-pkg-cache")
        if not os.path.exists(exe):
            # this should not happen
            raise RuntimeError("Did not find rez-pkg-cache executable")

        # start caching subproc
        args = [exe, "--daemon", self.path]

        try:
            with open(os.devnull, 'w') as devnull:

                # don't suppress output if selftest running, easier to debug
                if system.selftest_is_running:
                    out_target = None
                else:
                    out_target = devnull

                _ = subprocess.Popen(args,
                                     stdout=out_target,
                                     stderr=out_target,
                                     **kwargs)
        except Exception as e:
            print_warning(
                "Failed to start package caching daemon (command: %s): %s",
                ' '.join(args), e)
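
Each pending file written above is a plain JSON dump of one variant handle dict, saved under a unique name of the form '<qualified package name>-<uuid hex>.json'. A consumer such as the rez-pkg-cache daemon could, in principle, scan the pending directory along the lines of the sketch below; this is an illustrative assumption about the reader side, not the daemon's actual implementation.

import json
import os

def _read_pending_handles(pending_dir):
    # Illustrative sketch only: collect the variant handle dicts that
    # add_variants_async wrote out as individual JSON files.
    handles = []
    for filename in os.listdir(pending_dir):
        if not filename.endswith(".json"):
            continue
        filepath = os.path.join(pending_dir, filename)
        try:
            with open(filepath) as f:
                handles.append(json.load(f))
        except (OSError, ValueError):
            continue  # file may have been deleted, or is still being written
    # a real consumer would also remove or rename each file once processed
    return handles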
Code example #5
File: package_cache.py Project: zclongpop123/rez
    def add_variant(self, variant, force=False):
        """Copy a variant's payload into the cache.

        The following steps are taken to ensure multi-thread/proc safety, and to
        guarantee that a partially-copied variant payload is never able to be
        used:

        1. The hash dir (eg '/<cache_dir>/foo/1.0.0/af8d') is created;
        2. A file lock mutex ('/<cache_dir>/.lock') is acquired;
        3. The file '/<cache_dir>/foo/1.0.0/af8d/.copying-a' (or -b, -c etc) is
           created. This tells rez that this variant is being copied and cannot
           be used yet;
        4. The file '/<cache_dir>/foo/1.0.0/af8d/a.json' is created. Now
           another proc/thread can't create the same local variant;
        5. The file lock is released;
        6. The variant payload is copied to '/<cache_dir>/foo/1.0.0/af8d/a';
        7. The '.copying-a' file is removed.

        Note that the variant will not be cached in the following circumstances,
        unless `force` is True:

        - The variant is not cachable as determined by `Variant.is_cachable`;
        - The variant is from a local package, and 'config.package_cache_local'
          is False;
        - The variant is stored on the same disk device as this cache, and
          'config.package_cache_same_device' is False.

        Args:
            variant (`Variant`): The variant to copy into this cache
            force (bool): Copy the variant regardless. Use at your own risk (there
                is no guarantee the resulting variant payload will be functional).

        Returns:
            2-tuple:
            - str: Path to cached payload
            - int: One of:
              - VARIANT_FOUND
              - VARIANT_CREATED
              - VARIANT_COPYING
              - VARIANT_COPY_STALLED
        """
        from rez.utils.base26 import get_next_base26
        from rez.utils.filesystem import safe_makedirs

        # do some sanity checking on variant to cache
        package = variant.parent
        variant_root = getattr(variant, "root", None)

        if not variant_root:
            raise PackageCacheError(
                "Not cached - variant is a type that does not have a root: %s"
                % variant.uri)

        if not os.path.isdir(variant_root):
            raise PackageCacheError(
                "Not cached - variant %s root does not appear on disk: %s" %
                (variant.uri, variant_root))

        if not force:
            # package is configured to not be cachable
            if not package.is_cachable:
                raise PackageCacheError(
                    "Not cached - package is not cachable: %s" % package.uri)

            # package is local
            if not config.package_cache_local and variant.is_local:
                raise PackageCacheError("Not cached - package is local: %s" %
                                        package.uri)

            # Package is already on same disk device as package cache. Note that
            # this check is skipped on Windows + Py<3.4, as os.stat does not
            # support device identification.
            #
            dev_stat_not_supported = (platform.system() == "Windows"
                                      and sys.version_info[:2] < (3, 4))

            if not config.package_cache_same_device and not dev_stat_not_supported:
                st_pkgcache = os.stat(self.path)
                st_variant = os.stat(variant_root)
                if st_pkgcache.st_dev == st_variant.st_dev:
                    raise PackageCacheError(
                        "Not cached - variant %s is on same device as cache: %s"
                        % (variant.uri, variant_root))

            # Package belongs to a temp repo (this occurs when a package is
            # tested on pre_build/pre_release - see
            # https://github.com/nerdvegas/rez/wiki/Package-Definition-Guide#tests)
            #
            if package.repository.name() == "filesystem" and \
                    package.repository.location.startswith(config.tmpdir + os.sep):
                raise PackageCacheError(
                    "Not cached - package is in temp repository %s" %
                    package.repository)

        no_op_statuses = (self.VARIANT_FOUND, self.VARIANT_COPYING,
                          self.VARIANT_COPY_STALLED)

        # variant already exists, or is being copied to cache by another thread/proc
        status, rootpath = self._get_cached_root(variant)
        if status in no_op_statuses:
            return (rootpath, status)

        # 1.
        path = self._get_hash_path(variant)
        safe_makedirs(path)

        # construct data to store to json file
        data = {"handle": variant.handle.to_dict()}

        if variant.index is not None:
            # just added for debugging purposes
            data["data"] = package.data["variants"][variant.index]

        # 2. + 5.
        with self._lock():
            # Check if variant exists again, another proc could have created it
            # just before lock acquire
            #
            status, rootpath = self._get_cached_root(variant)
            if status in no_op_statuses:
                return (rootpath, status)

            # determine next increment name ('a', 'b' etc)
            names = os.listdir(path)
            names = [x for x in names if x.endswith(".json")]

            if names:
                prev = os.path.splitext(max(names))[0]
            else:
                prev = None

            incname = get_next_base26(prev)

            # 3.
            copying_filepath = os.path.join(path, ".copying-" + incname)
            with open(copying_filepath, 'w'):
                pass

            # 4.
            json_filepath = os.path.join(path, incname + ".json")
            with open(json_filepath, 'w') as f:
                f.write(json.dumps(data))

        # 6.
        #
        # Here we continually update mtime on the .copying file, to indicate
        # that the copy is active. This allows us to detect stalled/errored
        # copies, and report them as VARIANT_COPY_STALLED status.
        #
        still_copying = True

        def _while_copying():
            while still_copying:
                time.sleep(self._COPYING_TIME_INC)
                try:
                    os.utime(copying_filepath, None)
                except OSError:
                    pass

        rootpath = os.path.join(path, incname)
        th = threading.Thread(target=_while_copying)
        th.daemon = True
        th.start()

        try:
            shutil.copytree(variant_root, rootpath)
        finally:
            still_copying = False

        # 7.
        th.join()
        os.remove(copying_filepath)

        return (rootpath, self.VARIANT_CREATED)
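
A hypothetical caller might use add_variant as follows, assuming a PackageCache constructed on a cache directory path. The cache path is a placeholder and obtaining the variant object is elided; the (rootpath, status) return pair and the status constants are those documented and returned by the method above.

# Hypothetical usage; the cache path is a placeholder and 'variant' would
# come from a resolved context or an iterated package.
cache = PackageCache("/path/to/package_cache")

rootpath, status = cache.add_variant(variant)

if status == cache.VARIANT_CREATED:
    print("payload copied to %s" % rootpath)
elif status == cache.VARIANT_FOUND:
    print("already cached at %s" % rootpath)
else:  # VARIANT_COPYING or VARIANT_COPY_STALLED
    print("another process is (or was) copying this variant")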