Example #1
0
    def main(self):
        """Resolve the requested product(s) from the feed and publish
        version, display name and compatibility info into the environment.

        Raises ProcessorError when no product matches the requested
        SAP code / base version / version combination, when a RIBS-style
        package is found, or when more than one product was requested.
        """
        ccpinfo = self.env['ccpinfo']
        # str.split() replaces string.split(), which was removed in
        # Python 3; both inputs are comma-separated strings.
        channels = self.env.get('channels').split(',')
        platforms = self.env.get('platforms').split(',')

        data = self.fetch(channels, platforms)
        self.validate_input()

        # Map channel name -> CDN info for only the requested channels.
        channel_cdn = {}
        for channel in data['channel']:
            if channel['name'] in channels:
                channel_cdn[channel['name']] = channel.get('cdn')

        # Resolve actual build versions from the feed
        products = []
        for product_info in ccpinfo['Products']:
            sapcode = product_info['sapCode']
            baseversion = product_info.get('baseVersion', '')
            version = product_info.get('version', 'latest')

            product = self.filter_product(data, sapcode, baseversion, version)
            if product is None:
                raise ProcessorError(
                    'No package matched the SAP code ({0}), base version ({1}), '
                    'and version ({2}) combination you specified.'.format(sapcode, baseversion, version)
                )

            self.output('Found matching product {}, version: {}'.format(product.get('displayName'),
                                                                        product.get('version')))

            # Record the concrete resolved version back onto the request.
            product_info['version'] = product['version']
            products.append(product)
            self.cache_product_info(product_info, product)

        if len(products) == 1:  # Single product, will use normal version, notes, icon
            product = products[0]

            # Pick the first platform entry matching a requested platform;
            # stays {} when nothing matches.
            first_platform = {}
            for platform in product['platforms']['platform']:
                if platform['id'] in platforms:
                    first_platform = platform
                    break

            if first_platform.get('packageType') == 'RIBS':
                raise ProcessorError('This process does not support RIBS style packages.')

            if len(first_platform['systemCompatibility']['operatingSystem']['range']) > 0:
                compatibility_range = first_platform['systemCompatibility']['operatingSystem']['range'][0]
                # systemCompatibility currently has values like:
                # 10.x.0-
                # 10.10- (no minor version specified)
                # (empty array)
                self.env['minimum_os_version'] = compatibility_range.split('-')[0]
            else:
                # hacky workaround to avoid packager bailing when there is no minimum os version
                self.env['minimum_os_version'] = ''

            # output variable naming has been kept as close to pkginfo names as possible in order to feed munkiimport
            self.env['product_info_url'] = product.get('productInfoPage')
            self.env['version'] = product.get('version')
            self.env['display_name'] = product.get('displayName')

            extended_info = self.fetch_extended_product_info(product, first_platform, channel_cdn)
            for k, v in extended_info.items():
                self.env[k] = v

        else:  # more than one product: indeterminate version, concatenated notes, no icon
            raise ProcessorError('Multi product packages not yet supported')
Example #2
0
    def curl(self, method, url, auth, data="", additional_headers=""):
        """
        Build and run a curl command based on method (GET, PUT, POST, DELETE).
        If the URL contains 'uapi' then token should be passed to the auth variable,
        otherwise the enc_creds variable should be passed to the auth variable.

        Returns an 'r' object with attributes:
            headers     - list of stripped response-header lines
            status_code - int parsed from the last non-Continue HTTP status line
            output      - parsed JSON (uapi) or raw bytes (classic API)
        """
        # All intermediate curl artefacts live in the processor's temp dir.
        tmp_dir = self.make_tmp_dir()
        headers_file = os.path.join(tmp_dir,
                                    "curl_headers_from_jamf_upload.txt")
        output_file = os.path.join(tmp_dir, "curl_output_from_jamf_upload.txt")
        cookie_jar = os.path.join(tmp_dir, "curl_cookies_from_jamf_upload.txt")

        # build the curl command: dump response headers to headers_file
        # (-D), body to output_file
        curl_cmd = [
            "/usr/bin/curl",
            "-X",
            method,
            "-D",
            headers_file,
            "--output",
            output_file,
            url,
        ]

        # the authorisation is Basic unless we are using the uapi and already have a token
        if "uapi" in url and "tokens" not in url:
            curl_cmd.extend(["--header", f"authorization: Bearer {auth}"])
        else:
            curl_cmd.extend(["--header", f"authorization: Basic {auth}"])

        # set either Accept or Content-Type depending on method
        if method == "GET" or method == "DELETE":
            curl_cmd.extend(["--header", "Accept: application/json"])
        # icon upload requires special method (multipart form, 'data' is a
        # file path referenced with @)
        elif method == "POST" and "fileuploads" in url:
            curl_cmd.extend(["--header", "Content-type: multipart/form-data"])
            curl_cmd.extend(["--form", f"name=@{data}"])
        elif method == "POST" or method == "PUT":
            if data:
                curl_cmd.extend(["--upload-file", data])
            # uapi sends json, classic API must send xml
            if "uapi" in url:
                curl_cmd.extend(["--header", "Content-type: application/json"])
            else:
                curl_cmd.extend(["--header", "Content-type: application/xml"])
        else:
            self.output(f"WARNING: HTTP method {method} not supported")

        # write session: persist any APBALANCEID load-balancer cookie seen in
        # the previous response's headers into the cookie jar
        try:
            with open(headers_file, "r") as file:
                headers = file.readlines()
            existing_headers = [x.strip() for x in headers]
            for header in existing_headers:
                if "APBALANCEID" in header:
                    with open(cookie_jar, "w") as fp:
                        fp.write(header)
        except IOError:
            # no headers file yet - first request of this session
            pass

        # look for existing session cookie and reuse it so requests stick to
        # the same load-balancer node
        try:
            with open(cookie_jar, "r") as file:
                headers = file.readlines()
            existing_headers = [x.strip() for x in headers]
            for header in existing_headers:
                if "APBALANCEID" in header:
                    cookie = header.split()[1].rstrip(";")
                    self.output(f"Existing cookie found: {cookie}",
                                verbose_level=2)
                    curl_cmd.extend(["--cookie", cookie])
        except IOError:
            self.output("No existing cookie found - starting new session",
                        verbose_level=2)

        # additional headers for advanced requests
        if additional_headers:
            curl_cmd.extend(additional_headers)

        self.output(f"curl command: {' '.join(curl_cmd)}", verbose_level=3)

        # now subprocess the curl command and build the r tuple which contains the
        # headers, status code and outputted data
        subprocess.check_output(curl_cmd)

        # NOTE(review): the attributes below are assigned onto the namedtuple
        # *class*, shadowing its field properties; r() then returns an
        # instance whose attribute lookups resolve to these class attributes.
        r = namedtuple("r", ["headers", "status_code", "output"],
                       defaults=(None, None, None))
        try:
            with open(headers_file, "r") as file:
                headers = file.readlines()
            r.headers = [x.strip() for x in headers]
            for header in r.headers:  # pylint: disable=not-an-iterable
                # take the status from the last HTTP status line, skipping
                # interim "100 Continue" responses
                if re.match(r"HTTP/(1.1|2)",
                            header) and "Continue" not in header:
                    r.status_code = int(header.split()[1])
        except IOError:
            raise ProcessorError(f"WARNING: {headers_file} not found")
        if os.path.exists(output_file) and os.path.getsize(output_file) > 0:
            with open(output_file, "rb") as file:
                if "uapi" in url:
                    r.output = json.load(file)
                else:
                    r.output = file.read()
        else:
            self.output(
                f"No output from request ({output_file} not found or empty)")
        return r()
    def get_installer_info(self):
        """Gets info about an installer from MS metadata.

        Downloads the MAU feed plist for the selected product, picks the
        Combo/Delta/Standalone item, and populates env with url, version,
        description and additional_pkginfo.

        NOTE(review): this is Python 2 code (urllib2,
        plistlib.readPlistFromString, ur'' literal) - it will not run
        unmodified on Python 3.
        """
        base_url = BASE_URL % (CULTURE_CODE + PROD_DICT[self.env["product"]])
        # Get metadata URL
        req = urllib2.Request(base_url)
        # Add the MAU User-Agent, since MAU feed server seems to explicitly
        # block a User-Agent of 'Python-urllib/2.7' - even a blank User-Agent
        # string passes.
        req.add_header(
            "User-Agent",
            "Microsoft%20AutoUpdate/3.4 CFNetwork/760.2.6 Darwin/15.4.0 (x86_64)"
        )

        try:
            fdesc = urllib2.urlopen(req)
            data = fdesc.read()
            fdesc.close()
        except BaseException as err:
            raise ProcessorError("Can't download %s: %s" % (base_url, err))

        self.output(self.env["type"])
        metadata = plistlib.readPlistFromString(data)
        item = {}
        # Update feeds for a given 'channel' will have either combo or delta
        # pkg urls, with delta's additionally having a 'FullUpdaterLocation' key.
        # We populate the item dict with the appropriate section of the metadata
        # output, and do string replacement on the pattern of the URL in the
        # case of a Standalone app request.
        if self.env["type"] == "Combo" or self.env["type"] == "Standalone":
            item = [u for u in metadata if not u.get("FullUpdaterLocation")]
        elif self.env["type"] == "Delta":
            item = [u for u in metadata if u.get("FullUpdaterLocation")]
        if not item:
            raise ProcessorError("Could not find an applicable update in "
                                 "update metadata.")
        item = item[0]

        if self.env["type"] == "Standalone":
            # Rewrite the updater URL into the matching full-installer URL,
            # e.g. ..._Word_Updater.pkg -> ..._Word_2016_Installer.pkg
            p = re.compile(ur'(^[a-zA-Z0-9:/.-]*_[a-zA-Z]*_)(.*)Updater.pkg')
            url = item["Location"]
            (firstGroup, secondGroup) = re.search(p, url).group(1, 2)
            item[
                "Location"] = firstGroup + "2016_" + secondGroup + "Installer.pkg"

        self.env["url"] = item["Location"]
        self.output("Found URL %s" % self.env["url"])

        if self.env["type"] == "Combo" or self.env["type"] == "Delta":
            self.output("Got update: '%s'" % item["Title"])

        # now extract useful info from the rest of the metadata that could
        # be used in a pkginfo
        pkginfo = {}
        # Get a copy of the description in our locale_id
        all_localizations = item.get("Localized")
        lcid = self.env["locale_id"]
        if lcid not in all_localizations:
            raise ProcessorError(
                "Locale ID %s not found in manifest metadata. Available IDs: "
                "%s. See %s for more details." % (lcid, ", ".join(
                    all_localizations.keys()), LOCALE_ID_INFO_URL))
        manifest_description = all_localizations[lcid]['Short Description']
        # Store the description in a separate output variable and in our pkginfo
        # directly.
        pkginfo["description"] = "<html>%s</html>" % manifest_description
        self.env["description"] = manifest_description

        # "0.0.0" appears to mean "no bound" in the feed; omit it from pkginfo
        max_os = self.value_to_os_version_string(item['Max OS'])
        min_os = self.value_to_os_version_string(item['Min OS'])
        if max_os != "0.0.0":
            pkginfo["maximum_os_version"] = max_os
        if min_os != "0.0.0":
            pkginfo["minimum_os_version"] = min_os
        installs_items = self.get_installs_items(item)
        if installs_items:
            pkginfo["installs"] = installs_items

        # Extra work to do if this is a delta updater
        if self.env["type"] == "Delta":
            try:
                rel_versions = item["Triggers"]["Registered File"][
                    "VersionsRelative"]
            except KeyError:
                raise ProcessorError("Can't find expected VersionsRelative"
                                     "keys for determining minimum update "
                                     "required for delta update.")
            # The first '>= x' expression is the minimum installed version
            # this delta can be applied to.
            for expression in rel_versions:
                operator, ver_eval = expression.split()
                if operator == ">=":
                    self.min_delta_version = ver_eval
                    break
            if not self.min_delta_version:
                raise ProcessorError("Not able to determine minimum required "
                                     "version for delta update.")
            # Put minimum_update_version into installs item
            self.output("Adding minimum required version: %s" %
                        self.min_delta_version)
            pkginfo["installs"][0]["minimum_update_version"] = \
                self.min_delta_version
            required_update_name = self.env["NAME"]
            if self.env["munki_required_update_name"]:
                required_update_name = self.env["munki_required_update_name"]
            # Add 'requires' array
            pkginfo["requires"] = [
                "%s-%s" % (required_update_name, self.min_delta_version)
            ]

        self.env["version"] = self.get_version(item)
        self.env["minimum_os_version"] = min_os
        # NOTE(review): assumes self.min_delta_version has a default (e.g.
        # None) set elsewhere for non-Delta types - TODO confirm, otherwise
        # this line raises AttributeError for Combo/Standalone runs.
        self.env["minimum_version_for_delta"] = self.min_delta_version
        self.env["additional_pkginfo"] = pkginfo
        self.env["url"] = item["Location"]
        self.output("Additional pkginfo: %s" % self.env["additional_pkginfo"])
Example #4
0
    def main(self):
        '''Find the download URL for the newest product whose name matches
        the configured regex, and set version, url and description.

        Raises ProcessorError when no product matches, when expected
        metadata keys are missing, or when required registration info
        is absent.
        '''
        metadata = self.get_downloads_metadata()

        # build our own list of matching products, filtering on criteria:
        # - relatedProductOverride element must contain our 'product_name'
        #   (this name gets POSTed back to request the download URL)
        # - name element must match our 'product_name_pattern' regex

        self.output("Filtering json 'name' attributes with regex %s" %
                    self.env["product_name_pattern"])
        prods = []
        for m_prod in metadata["downloads"]:
            match = re.match(self.env["product_name_pattern"], m_prod["name"])
            if not match:
                continue
            if not match.group("version"):
                self.output("WARNING: Regex matched but no "
                            "named group 'version' matched!")
            p = m_prod.copy()
            # recording the version extracted by our named group in
            # 'product_name_pattern'
            p["version"] = match.group("version")
            prods.append(p)

        # Guard against an empty match list, which would otherwise surface
        # as an opaque IndexError on the sort below.
        # (Also removed an unused nested 'compare_version' helper that
        # relied on the Python-2-only cmp() builtin.)
        if not prods:
            raise ProcessorError(
                "No downloads matched the 'product_name_pattern' regex.")

        # sort by version and grab the highest one
        latest_prod = sorted(prods,
                             key=lambda v: LooseVersion(v['version']))[-1]

        # ensure our product contains info we need
        try:
            download_id = latest_prod["urls"]["Mac OS X"][0]["downloadId"]
            desc = latest_prod["desc"]
        except KeyError:
            raise ProcessorError("Metadata for product is missing at the "
                                 "expected location in feed.")

        # now build a request JSON to finally ask for the download URL
        req_data = {
            "country": "us",
            "platform": "Mac OS X",
            "product": {
                "name": self.env["product_name"]
            }
        }

        # if this download needs registration, or we want to register
        # otherwise, ensure we've set everything
        if latest_prod["requiresRegistration"]:
            errormsg = ("This product requires registration. Please set all "
                        "registration information in this processor using "
                        "the 'registration_info' input variable.")
            if not self.env.get("registration_info"):
                raise ProcessorError(errormsg)
            # every required registration key must be present and non-empty
            for key in REQUIRED_REG_KEYS:
                if key not in self.env["registration_info"] or \
                   not self.env["registration_info"][key]:
                    raise ProcessorError(errormsg)
            # then add the registration info to req_data
            for k in self.env["registration_info"]:
                req_data[k] = self.env["registration_info"][k]
        req_data = json.dumps(req_data)

        url = "https://www.blackmagicdesign.com/api/register/us/download/"
        url += str(download_id)

        headers = {
            "Content-Type": "application/json;charset=UTF-8",
            "User-Agent": "Mozilla/5.0"
        }

        # POST the request JSON; the response body is the download URL
        curl_cmd = self.prepare_curl_cmd()
        self.add_curl_headers(curl_cmd, headers)
        curl_cmd.append('--data')
        curl_cmd.append(req_data)
        curl_cmd.append(url)
        download_url = self.download_with_curl(curl_cmd)

        self.env["version"] = latest_prod["version"]
        self.env["url"] = download_url
        self.env["description"] = desc
        self.output(
            "Found download URL for %s, version %s: %s" %
            (self.env["product_name"], self.env["version"], self.env["url"]))
Example #5
0
    def upload_computergroup(
        self,
        jamf_url,
        enc_creds,
        computergroup_name,
        computergroup_template,
        obj_id=None,
    ):
        """Upload a computer group to Jamf Pro.

        Args:
            jamf_url: base URL of the Jamf Pro server.
            enc_creds: encoded credentials passed through to self.curl.
            computergroup_name: group name used for status reporting.
            computergroup_template: path to the XML template file.
            obj_id: existing object ID; PUT (update) when set, otherwise
                POST (create) against id/0.

        Raises:
            ProcessorError: if the template file is missing, or the
                upload still fails after 5 attempts.
        """

        # import template from file and replace any keys in the template
        if os.path.exists(computergroup_template):
            with open(computergroup_template, "r") as file:
                template_contents = file.read()
        else:
            raise ProcessorError("Template does not exist!")

        # if JSS_INVENTORY_NAME is not given, make it equivalent to %NAME%.app
        # (this is to allow use of legacy JSSImporter group templates)
        try:
            self.env["JSS_INVENTORY_NAME"]
        except KeyError:
            try:
                self.env["JSS_INVENTORY_NAME"] = self.env["NAME"] + ".app"
            except KeyError:
                pass

        # substitute user-assignable keys
        template_contents = self.substitute_assignable_keys(template_contents)

        # if we find an object ID we put, if not, we post
        if obj_id:
            url = f"{jamf_url}/JSSResource/computergroups/id/{obj_id}"
        else:
            url = f"{jamf_url}/JSSResource/computergroups/id/0"

        self.output("Computer Group data:", verbose_level=2)
        self.output(template_contents, verbose_level=2)

        self.output("Uploading Computer Group...")
        # write the template to temp file
        template_xml = self.write_temp_file(template_contents)

        # method is loop-invariant, so decide it once up front
        method = "PUT" if obj_id else "POST"
        count = 0
        while True:
            count += 1
            self.output(f"Computer Group upload attempt {count}",
                        verbose_level=2)
            r = self.curl(method, url, enc_creds, template_xml)
            # check HTTP response
            if self.status_check(r, "Computer Group",
                                 computergroup_name) == "break":
                break
            # give up after 5 attempts (the previous '> 5' check allowed a
            # sixth attempt, contradicting the warning text)
            if count >= 5:
                self.output(
                    "WARNING: Computer Group upload did not succeed after 5 attempts"
                )
                # report the method actually used (was hardcoded to POST)
                self.output(f"\nHTTP {method} Response Code: {r.status_code}")
                raise ProcessorError("ERROR: Computer Group upload failed")
            sleep(30)

        # clean up temp files
        self.clear_tmp_dir()
Example #6
0
    def package_app(self, app_path):
        """Build a packaging request, send it to the autopkgserver and get the
        constructed package.

        Reads version/bundleid from the app's Info.plist when not already
        set in env, skips the build when a matching flat pkg already exists,
        otherwise copies the app into a payload root and requests a flat
        package. Sets env: version, bundleid, pkg_path, new_package_request
        and app_pkg_creator_summary_result.
        """

        # clear any pre-exising summary result
        if "app_pkg_creator_summary_result" in self.env:
            del self.env["app_pkg_creator_summary_result"]

        # get version and bundleid
        infoplist = self.read_info_plist(app_path)
        if not self.env.get("version"):
            # Default to CFBundleShortVersionString if version_key is unset.
            version_key = self.env.get("version_key",
                                       "CFBundleShortVersionString")
            try:
                self.env["version"] = infoplist[version_key]
                self.output(f"Version: {self.env['version']}")
            # Specific error if the key does not exist.
            except KeyError:
                raise ProcessorError(
                    f"The key '{version_key}' does not exist in the App "
                    f"Bundle's Info.plist! ({app_path}/Contents/Info.plist) "
                    f"Please check the recipe and try again.")
            # Trap all other errors.
            # NOTE(review): BaseException also swallows KeyboardInterrupt and
            # SystemExit - 'except Exception' would be safer.
            except BaseException as err:
                raise ProcessorError(err)
        if not self.env.get("bundleid"):
            try:
                self.env["bundleid"] = infoplist["CFBundleIdentifier"]
                self.output(f"BundleID: {self.env['bundleid']}")
            except BaseException as err:
                raise ProcessorError(err)

        # get pkgdir and pkgname: either from a caller-supplied pkg_path,
        # or default to <RECIPE_CACHE_DIR>/<AppName>-<version>.pkg
        if self.env.get("pkg_path"):
            pkg_path = self.env["pkg_path"]
            pkgdir = os.path.dirname(pkg_path)
            pkgname = os.path.splitext(os.path.basename(pkg_path))[0]
        else:
            pkgdir = self.env["RECIPE_CACHE_DIR"]
            pkgname = (f"{os.path.splitext(os.path.basename(app_path))[0]}-"
                       f"{self.env['version']}")
            pkg_path = os.path.join(pkgdir, pkgname + ".pkg")

        # Check for an existing flat package in the output dir and compare
        # its identifier and version to the one we're going to build.
        if self.pkg_already_exists(pkg_path, self.env["bundleid"],
                                   self.env["version"]):
            self.output(
                "Existing package matches version and identifier, not building."
            )
            self.env["pkg_path"] = pkg_path
            self.env["new_package_request"] = False
            return

        # create pkgroot and copy application into it
        pkgroot = os.path.join(self.env["RECIPE_CACHE_DIR"], "payload")
        if os.path.exists(pkgroot):
            # remove it if it already exists
            try:
                if os.path.isdir(pkgroot) and not os.path.islink(pkgroot):
                    shutil.rmtree(pkgroot)
                else:
                    # a plain file or a symlink: unlink rather than rmtree
                    os.unlink(pkgroot)
            except OSError as err:
                raise ProcessorError(f"Can't remove {pkgroot}: {err.strerror}")
        try:
            os.makedirs(os.path.join(pkgroot, "Applications"), 0o775)
        except OSError as err:
            raise ProcessorError(f"Could not create pkgroot: {err.strerror}")

        app_name = os.path.basename(app_path)
        source_item = app_path
        dest_item = os.path.join(pkgroot, "Applications", app_name)
        try:
            if os.path.isdir(source_item):
                # app bundles are directories; keep symlinks intact
                shutil.copytree(source_item, dest_item, symlinks=True)
            elif not os.path.isdir(dest_item):
                shutil.copyfile(source_item, dest_item)
            else:
                # source is a file but dest is a directory: copy into it
                shutil.copy(source_item, dest_item)
            self.output(f"Copied {source_item} to {dest_item}")
        except OSError as err:
            raise ProcessorError(
                f"Can't copy {source_item} to {dest_item}: {err.strerror}")

        # build a package request for the autopkgserver
        request = {
            "pkgroot": pkgroot,
            "pkgdir": pkgdir,
            "pkgname": pkgname,
            "pkgtype": "flat",
            "id": self.env["bundleid"],
            "version": self.env["version"],
            "infofile": "",
            "resources": "",
            # NOTE(review): "******" looks like a redacted placeholder -
            # verify the intended owner (commonly "root") before relying
            # on this value.
            "chown": [{
                "path": "Applications",
                "user": "******",
                "group": "admin"
            }],
            "scripts": "",
        }

        # Send packaging request.
        try:
            self.output("Connecting")
            self.connect()
            self.output("Sending packaging request")
            self.env["new_package_request"] = True
            pkg_path = self.send_request(request)
        finally:
            # always disconnect, even when the request raised
            self.output("Disconnecting")
            self.disconnect()

        # Tidy up, deleting the payload dir we created earlier.
        shutil.rmtree(pkgroot)

        # Return path to pkg.
        self.env["pkg_path"] = pkg_path
        self.env["app_pkg_creator_summary_result"] = {
            "summary_text": "The following packages were built:",
            "report_fields": ["identifier", "version", "pkg_path"],
            "data": {
                "identifier": request["id"],
                "version": request["version"],
                "pkg_path": pkg_path,
            },
        }
    def download(self,
                 url,
                 curl_opts,
                 output,
                 request_headers,
                 allow_failure=False):
        """Run a download with curl, parsing response headers as they stream.

        Args:
            url: the URL to download.
            curl_opts: list of extra curl arguments, appended verbatim.
            output: file path curl writes the downloaded body to.
            request_headers: dict of extra request headers to send.
            allow_failure: if True, a non-zero curl exit code is ignored.

        Raises:
            ProcessorError: when curl exits non-zero and allow_failure
                is False.
        """
        # construct curl command.
        curl_cmd = [
            self.env["CURL_PATH"],
            "--silent",
            "--show-error",
            "--no-buffer",
            "--fail",
            "--dump-header",
            "-",  # dump response headers to stdout so we can parse them
            "--speed-time",
            "30",  # abort if the transfer stalls for 30 seconds
            "--location",  # follow redirects
            "--url",
            url,
            "--output",
            output,
        ]

        if request_headers:
            for header, value in request_headers.items():
                curl_cmd.extend(["--header", "%s: %s" % (header, value)])

        if curl_opts:
            # append caller-supplied options verbatim
            curl_cmd.extend(curl_opts)

        # Open URL. Headers arrive on stdout (--dump-header -); the body
        # goes straight to 'output'.
        proc = subprocess.Popen(
            curl_cmd,
            shell=False,
            bufsize=1,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )

        donewithheaders = False
        maxheaders = 15
        header = {}
        header["http_result_code"] = "000"
        header["http_result_description"] = ""
        while True:
            if not donewithheaders:
                info = proc.stdout.readline().decode().strip("\r\n")
                if info.startswith("HTTP/"):
                    # status line, e.g. "HTTP/1.1 200 OK"
                    try:
                        header["http_result_code"] = info.split(None, 2)[1]
                        header["http_result_description"] = info.split(
                            None, 2)[2]
                    except IndexError:
                        pass
                elif ": " in info:
                    # got a header line
                    part = info.split(None, 1)
                    fieldname = part[0].rstrip(":").lower()
                    try:
                        header[fieldname] = part[1]
                    except IndexError:
                        header[fieldname] = ""
                elif info == "":
                    # we got an empty line; end of headers (or curl exited)
                    if header.get("http_result_code") in [
                            "301",
                            "302",
                            "303",
                            "307",
                            "308",
                    ]:
                        # redirect, so more headers are coming.
                        # Throw away the headers we've received so far
                        header = {}
                        header["http_result_code"] = "000"
                        header["http_result_description"] = ""
                    else:
                        donewithheaders = True
            else:
                # headers are done; just poll until curl finishes the body
                time.sleep(0.1)

            if proc.poll() is not None:
                # For small download files curl may exit before all headers
                # have been parsed, don't immediately exit.
                maxheaders -= 1
                if donewithheaders or maxheaders <= 0:
                    break

        retcode = proc.poll()
        if (retcode and not allow_failure
            ):  # Non-zero exit code from curl => problem with download
            curlerr = ""
            try:
                # stderr is a bytes pipe: decode before string operations
                # (bytes.rstrip("\n") raised TypeError on Python 3)
                curlerr = proc.stderr.read().decode().rstrip("\n")
                curlerr = curlerr.split(None, 2)[2]
            except IndexError:
                pass

            raise ProcessorError("Curl failure: %s (exit code %s)" %
                                 (curlerr, retcode))
Example #8
0
class VMwareFusion11URLProvider(Processor):
    # AutoPkg processor: scrapes the VMware software-update feed for the
    # newest Fusion release matching a given major version.
    description = "Provides URL to the latest VMware Fusion update release."
    input_variables = {
        "product_name": {
            "required": False,
            "description": "Default is '%s'." % FUSION,
        },
        "base_url": {
            "required": False,
            # NOTE(review): missing closing quote before the period — should
            # presumably read "Default is '%s'." like product_name above.
            "description": "Default is '%s." % VMWARE_BASE_URL,
        },
    }
    output_variables = {
        "url": {
            "description": "URL to the latest VMware Fusion update release.",
        },
        "version": {
            "description":
            "Version to the latest VMware Fusion update release.",
        },
    }

    __doc__ = description

    def core_metadata(self, base_url, product_name, major_version):
        """Fetch the update-feed index, pick the newest version matching
        major_version, store it in self.latest, and open the per-version
        metadata document.

        NOTE(review): Python 2-only syntax (`except URLError, e`, print
        statements); this method will not parse under Python 3.
        """
        request = urllib2.Request(base_url + product_name)

        foundUrls = {}
        urls = []
        xmlVers = []

        try:
            vsus = urllib2.urlopen(request)
        except URLError, e:
            # NOTE(review): only prints the reason; `vsus` stays unbound and
            # the read() below then raises NameError — consider re-raising.
            print e.reason

        data = vsus.read()

        try:
            metaList = ElementTree.fromstring(data)
        except ExpatData:
            # NOTE(review): `ExpatData` is not a defined name — presumably
            # xml.parsers.expat.ExpatError was intended; confirm.
            print "Unable to parse XML data from string"

        # Collect every <url> entry from the feed index.
        for metadata in metaList:
            url = metadata.find("url")
            urls.append(url.text)

        # Keep only entries whose second path component (the version string)
        # starts with the requested major version.
        for someUrl in urls:
            if someUrl.split('/')[1].startswith(major_version):
                foundUrls[someUrl.split('/')[1]] = someUrl

        for foundVer in foundUrls.keys():
            xmlVers.append(foundVer)

        if len(xmlVers) == 0:
            raise ProcessorError("Could not find any versions for the \
                                  major_version '%s'." % major_version)

        # Sort semantically so the last element is the newest release.
        xmlVers.sort(key=LooseVersion)
        self.output(xmlVers[-1])
        self.latest = xmlVers[-1]
        core = foundUrls[xmlVers[-1]]

        vsus.close()

        # Open the per-version metadata document for the chosen release.
        # NOTE(review): vLatest is opened but never read or returned here —
        # the method appears truncated; confirm against upstream.
        request = urllib2.Request(base_url + core)

        try:
            vLatest = urllib2.urlopen(request)
        except URLError, e:
            print e.reason
    def main(self):
        """Post a Slack webhook notification about a Jamf Pro upload."""
        jss_url = self.env.get("JSS_URL")
        policy_category = self.env.get("POLICY_CATEGORY")
        category = self.env.get("PKG_CATEGORY")
        policy_name = self.env.get("policy_name")
        name = self.env.get("NAME")
        version = self.env.get("version")
        pkg_name = self.env.get("pkg_name")

        slack_username = self.env.get("slack_username")
        slack_icon_url = self.env.get("slack_icon_url") or ""
        slack_webhook_url = self.env.get("slack_webhook_url")
        slack_channel = self.env.get("slack_channel") or ""
        slack_icon_emoji = self.env.get("slack_icon_emoji") or ""

        selfservice_policy_name = name
        # Echo what we are about to announce
        for label, value in (
                ("JSS address", jss_url),
                ("Title", selfservice_policy_name),
                ("Policy", policy_name),
                ("Version", version),
                ("Package", pkg_name),
                ("Package Category", category),
                ("Policy Category", policy_category),
        ):
            self.output(f"{label}: {value}")

        # Assemble the Slack message body line by line
        message_lines = [
            "*New Item uploaded to Jamf Pro:*",
            f"URL: {jss_url}",
            f"Title: *{selfservice_policy_name}*",
            f"Version: *{version}*",
            f"Category: *{category}*",
            f"Policy Name: *{policy_name}*",
        ]
        if pkg_name:
            message_lines.append(f"Package: *{pkg_name}*")
        else:
            message_lines.append("No new package uploaded")
        slack_text = "\n".join(message_lines)

        slack_data = {
            "text": slack_text,
            "username": slack_username,
        }
        # Optional fields are only sent when configured
        for field, value in (
                ("icon_url", slack_icon_url),
                ("channel", slack_channel),
                ("icon_emoji", slack_icon_emoji),
        ):
            if value:
                slack_data[field] = value

        slack_json = json.dumps(slack_data)

        # Retry the webhook POST up to 5 extra times before giving up
        attempt = 0
        while True:
            attempt += 1
            self.output(
                "Slack webhook post attempt {}".format(attempt),
                verbose_level=2,
            )
            response = self.curl(slack_webhook_url, slack_json)
            # check HTTP response
            if self.status_check(response) == "break":
                break
            if attempt > 5:
                self.output(
                    "Slack webhook send did not succeed after 5 attempts")
                self.output("\nHTTP POST Response Code: {}".format(
                    response.status_code))
                raise ProcessorError("ERROR: Slack webhook failed to send")
            sleep(10)

        # clean up temp files
        self.clear_tmp_dir()
Пример #10
0
    def main(self):
        """Resolve the Oxygen XML product's download URL, version and build id."""
        # Validate the requested product against the known product table
        product = self.env.get("product_name")
        if product not in URLS:
            raise ProcessorError(
                "product_name %s is invalid; it must be one of: %s"
                % (product, ", ".join(URLS))
            )
        page_url = URLS[product]
        platform = self.env.get("platform_name")
        if platform not in PLATS:
            raise ProcessorError(
                "platform_name %s is invalid; it must be one of: %s"
                % (platform, PLATS)
            )

        # Fetch the product page; the regexes below scrape it for metadata
        self.env["object"] = self.download(page_url).decode('utf-8')

        version_pattern = r"<li>Version: ([\d\.]+)"
        buildid_pattern = r"Build id: <a href=\"(/\S+)?/build_history\.html\?build_id\=[0-9]{10}\">([0-9]{10})</a>"
        version_match = re.search(version_pattern, self.env["object"])
        buildid_match = re.search(buildid_pattern, self.env["object"])

        if not version_match:
            raise ProcessorError("No matching version found on URL: %s" % page_url)
        if not buildid_match:
            raise ProcessorError("No matching build ID found on URL: %s" % page_url)

        self.env["version"] = version_match.group(1)
        self.env["buildid"] = buildid_match.group(2)

        if platform == "Eclipse":
            # Eclipse plugin zip carries both version and build id in its name
            download_url = (
                "https://www.oxygenxml.com/InstData/%s/%s/com.oxygenxml.%s_%s.0.v%s.zip"
                % (product, platform, product.lower(), self.env["version"], self.env["buildid"])
            )
        elif product == "web-author":
            download_url = "http://mirror.oxygenxml.com/InstData/WebAuthor/All/oxygenxml-web-author-all-platforms.zip"
        elif product == "WebHelp":
            download_url = (
                "https://www.oxygenxml.com/InstData/Editor/Webhelp/oxygen-webhelp.zip"
            )
        else:
            # Installer URL templates keyed by platform; the trailing %s slot
            # carries the product name for every product except plain Editor.
            url_templates = {
                "Windows64": "http://mirror.oxygenxml.com/InstData/%s/%s/VM/oxygen%s-64bit.exe",
                "Linux64": "https://www.oxygenxml.com/InstData/%s/%s/VM/oxygen%s-64bit.sh",
                "MacOSX": "https://www.oxygenxml.com/InstData/%s/%s/VM/oxygen%s.dmg",
            }
            fallback_template = "http://mirror.oxygenxml.com/InstData/%s/%s/VM/oxygen%s.tar.gz"
            name_suffix = "" if product == "Editor" else product
            chosen_template = url_templates.get(platform, fallback_template)
            download_url = chosen_template % (product, platform, name_suffix)

        self.env["url"] = download_url
        self.env["filename"] = re.search(r"/([0-9a-zA-Z-_.]*$)", download_url).group(1)
        self.output("Found Version %s" % self.env["version"])
        self.output("Found Build id %s" % self.env["buildid"])
        self.output("Use URL %s" % self.env["url"])
        self.output("Use filename %s" % self.env["filename"])
    def upload_category(self,
                        jamf_url,
                        category_name,
                        priority,
                        token,
                        obj_id=0):
        """Create or update a category in Jamf Pro via the UAPI.

        Args:
            jamf_url: base Jamf Pro server URL.
            category_name: name of the category to write.
            priority: priority value for the category.
            token: UAPI bearer token.
            obj_id: id of an existing category to update; 0 (the default)
                creates a new category instead.

        Returns:
            The requests.Response of the final create/update call.

        Raises:
            ProcessorError: on a 409 conflict, or after 5 failed attempts.
        """

        # build the object
        category_data = {"priority": priority, "name": category_name}
        headers = {
            "authorization": "Bearer {}".format(token),
            "content-type": "application/json",
            "accept": "application/json",
        }
        if obj_id:
            url = "{}/uapi/v1/categories/{}".format(jamf_url, obj_id)
        else:
            url = "{}/uapi/v1/categories".format(jamf_url)

        http = requests.Session()

        self.output("Uploading category..")

        category_json = json.dumps(category_data)

        # we cannot PUT a category of the same name due to a bug in Jamf Pro
        # (PI-008157), so we have to do a first pass with a temporary
        # different name, then change it back below...
        if obj_id:
            category_data_temp = {
                "priority": priority,
                "name": category_name + "_TEMP"
            }
            category_json_temp = json.dumps(category_data_temp)
            count = 0
            while True:
                count += 1
                self.output(
                    "Category upload attempt {}".format(count),
                    verbose_level=2,
                )
                r = http.put(url,
                             headers=headers,
                             data=category_json_temp,
                             timeout=60)
                if r.status_code == 200:
                    self.output(
                        "Temporary category update successful. Waiting before updating again..."
                    )
                    sleep(2)
                    break
                if r.status_code == 409:
                    raise ProcessorError(
                        "ERROR: Temporary category update failed due to a conflict"
                    )
                if count > 5:
                    self.output(
                        "ERROR: Temporary category update did not succeed after 5 attempts"
                    )
                    self.output("\nHTTP POST Response Code: {}".format(
                        r.status_code))
                    raise ProcessorError("ERROR: Category upload failed ")
                sleep(10)

        # write the category. If updating an existing category, this reverts
        # the name to its original.
        # Reset the attempt counter here: previously the temp-rename loop's
        # count carried over, silently shrinking this loop's 5-attempt budget.
        count = 0
        while True:
            count += 1
            self.output(
                "Category upload attempt {}".format(count),
                verbose_level=2,
            )
            if obj_id:
                r = http.put(url,
                             headers=headers,
                             data=category_json,
                             timeout=60)
            else:
                r = http.post(url,
                              headers=headers,
                              data=category_json,
                              timeout=60)
            if r.status_code == 201:
                self.output("Category created successfully")
                break
            if r.status_code == 200:
                self.output("Category update successful")
                break
            if r.status_code == 409:
                raise ProcessorError(
                    "ERROR: Category creation failed due to a conflict")
            if count > 5:
                self.output(
                    "ERROR: Category creation did not succeed after 5 attempts"
                )
                self.output("\nHTTP POST Response Code: {}".format(
                    r.status_code))
                raise ProcessorError("ERROR: Category upload failed ")
            sleep(10)
        return r
    def process_matched_json(self, load_json):
        '''
            Extract title metadata from load_json, guided by the entry
            previously matched into self.env['aacp_matched_json'].
        '''
        matched = self.env['aacp_matched_json']
        is_acrobat = self.env['aacp_application_sap_code'] == 'APRO'

        # Applications version, if not APRO
        if not is_acrobat:
            self.env['version'] = load_json[matched['app_json_version_key']]
            self.output(f"version: {self.env['version']}")

        # Bail out when the resolved version is explicitly unsupported
        if 'unsupported_versions_dict' in matched:
            self.output(f"unsupported_versions_dict: "
                        f"{matched['unsupported_versions_dict']}")
            for bad_version, reason in matched['unsupported_versions_dict'].items():
                if bad_version == self.env['version']:
                    raise ProcessorError(f"{reason}")

        # Applications bundle id
        self.env['aacp_application_bundle_id'] = matched['app_bundle_id']
        self.output(f"aacp_application_bundle_id: {self.env['aacp_application_bundle_id']}")

        # Minimum OS: APRO carries it directly in the matched json; other
        # titles have it scraped out of the compatibility text via regex
        if is_acrobat:
            self.env['aacp_application_minimum_os'] = matched['minos_version']
        else:
            compat_text = (load_json['SystemRequirement']
                           ['CheckCompatibility']
                           ['Content'])
            self.env['aacp_application_minimum_os'] = (
                re.search(matched['minos_regex'], compat_text)[1])
            self.output(f"aacp_application_minimum_os: {self.env['aacp_application_minimum_os']}")

        # Applications version comparison key
        self.env['aacp_version_compare_key'] = matched['version_comparison_key']
        self.output(f"aacp_version_compare_key: {self.env['aacp_version_compare_key']}")

        # Applications display name
        self.env['aacp_application_display_name'] = matched['display_name']
        self.output(f"aacp_application_display_name: {self.env['aacp_application_display_name']}")

        # Full path to the application bundle on disk, as per Terminal etc, not Finder
        self.env['aacp_application_full_path'] = matched['app_path']

        # Fall back to the matched json's description when none was supplied
        if 'aacp_application_description' not in self.env:
            self.env['aacp_application_description'] = matched['app_description']
            self.output(f"aacp_application_description: description missing, set from "
                        f"aacp_matched_json: "
                        f"{self.env['aacp_application_description']}")
        # Ensure the description ends with a full stop
        if not self.env['aacp_application_description'].endswith('.'):
            self.env['aacp_application_description'] += '.'

        # Merge in any additional blocking applications, deduped and sorted
        if 'additional_blocking_applications' in matched:
            for extra_app in matched['additional_blocking_applications']:
                self.env['aacp_blocking_applications'].append(extra_app)
            self.env['aacp_blocking_applications'] = (
                sorted(set(self.env['aacp_blocking_applications'])))
            self.output(f"aacp_blocking_applications updated: "
                        f"{self.env['aacp_blocking_applications']}")

        # Now we have the deets, let's use them
        self.create_pkginfo()
    def process_optionxml_xml(self):
        '''
            Parse the title's optionXML.xml and populate the aacp_* env vars
            (install language, SAP code, target folder, major version and
            processor architecture), then dispatch to the APRO or HD handler.
        '''

        # Nothing matched yet; also used below to trigger the RIBS fallback
        self.env['aacp_application_install_lang'] = None

        # Locate optionXML.xml inside the installer pkg bundle
        option_xml_path = os.path.join(self.env['aacp_install_pkg_path'],
                                       'Contents', 'Resources', 'optionXML.xml')
        self.env['aacp_option_xml_path'] = option_xml_path
        if not os.path.exists(option_xml_path):
            raise ProcessorError(f"ERROR: Cannot find {self.env['aacp_option_xml_path']}... "
                                  "exiting...")
        self.output(f"aacp_option_xml_path: {self.env['aacp_option_xml_path']}")

        # Progress notification
        self.output(f"Processing: {self.env['aacp_option_xml_path']}...")

        # Surface malformed XML as a ProcessorError
        try:
            option_xml = ElementTree.parse(option_xml_path)
        except xml.etree.ElementTree.ParseError as err_msg:
            raise ProcessorError from err_msg

        def _record_media(media_node, version_tag):
            # Copy the shared media fields from this XML node into self.env
            self.env['aacp_application_install_lang'] = media_node.findtext('installLang')
            self.env['aacp_application_sap_code'] = media_node.findtext('SAPCode')
            self.env['aacp_target_folder'] = media_node.findtext('TargetFolderName')
            self.env['aacp_application_major_version'] = media_node.findtext(version_tag)

        # Prefer HDMedia entries of type Product for the media details
        for hd_media in option_xml.findall('.//HDMedias/HDMedia'):
            if hd_media.findtext('MediaType') == 'Product':
                _record_media(hd_media, 'baseVersion')

        # No HDMedia matched: fall back to legacy RIBS Media entries
        if not self.env['aacp_application_install_lang']:
            for ribs_media in option_xml.findall('.//Medias/Media'):
                _record_media(ribs_media, 'prodVersion')

        # Validate and normalise the processor architecture
        arch_type = option_xml.findtext('ProcessorArchitecture').lower()
        self.env['aacp_application_architecture_type'] = arch_type
        if arch_type not in ['arm64', 'macuniversal', 'x64']:
            raise ProcessorError(f"architecture_type: "
                                 f"{self.env['aacp_application_architecture_type']},"
                                 f" is neither arm64, macuniversal nor x64... exiting...")
        if arch_type == 'x64':
            self.env['aacp_application_architecture_type'] = 'x86_64'

        # Display progress
        for env_key in ('aacp_application_sap_code',
                        'aacp_target_folder',
                        'aacp_application_architecture_type',
                        'aacp_application_install_lang',
                        'aacp_application_major_version'):
            self.output(f"{env_key}: {self.env[env_key]}")

        # Acrobat (APRO) ships differently from the HD-style titles
        if self.env['aacp_application_sap_code'] == 'APRO':
            self.process_apro_installer()
        else:
            # Other titles keep their metadata in Application.json under HD
            self.env['aacp_application_json_path'] = os.path.join(
                self.env['aacp_install_pkg_path'],
                'Contents/Resources/HD',
                self.env['aacp_target_folder'],
                'Application.json')
            # Process HD installer
            self.process_hd_installer()
Пример #14
0
    def main(self):
        """Derive OS_MINIMUM, OS_MAXIMUM and an OS_REQUIREMENTS list of
        10.x.x versions from the LSMinimumSystemVersion of the plist at
        input_plist_path (which may live inside a dmg)."""
        # Check if we're trying to read something inside a dmg.
        (dmg_path, dmg, dmg_source_path) = (self.parsePathForDMG(
            self.env['input_plist_path']))
        try:
            if dmg:
                # Mount dmg and copy path inside.
                mount_point = self.mount(dmg_path)
                input_plist_path = os.path.join(mount_point, dmg_source_path)
            else:
                # just use the given path
                input_plist_path = self.env['input_plist_path']
            if not os.path.exists(input_plist_path):
                raise ProcessorError(
                    "File '%s' does not exist or could not be read." %
                    input_plist_path)
            try:
                plist = FoundationPlist.readPlist(input_plist_path)
                # NOTE(review): if the key is absent, the "No_Minimum"
                # placeholder makes the split()[1] below raise IndexError —
                # presumably processed apps always declare this key; confirm.
                minimum_os_version = plist.get("LSMinimumSystemVersion",
                                               "No_Minimum")

                # Resolve HOST_OS to this machine's major.minor OS version
                maximum_os_version = str(self.env['maximum_os_version'])
                if maximum_os_version == 'HOST_OS':
                    # https://stackoverflow.com/a/1777365
                    v, _, _ = platform.mac_ver()
                    maximum_os_version = str('.'.join(v.split('.')[:2]))

                # Only build the version range when the app's minimum differs
                # from the configured baseline minimum.
                # (Was `not int(...) is int(...)` — an identity check that only
                # worked via CPython's small-int cache; use real equality.)
                if int(minimum_os_version.split('.')[1]) != int(
                        self.env['minimum_os_version'].split('.')[1]):
                    # Create the list of all the OS's between the min and max.
                    # It's ok if minimum_os_version contains 3 numbers, since
                    # we only use the second one to create our range.
                    os_min = int(minimum_os_version.split('.')[1])
                    # range() excludes its end point, so add one so the
                    # maximum version itself is included
                    os_max = int(maximum_os_version.split('.')[1]) + 1
                    os_requirement = ''
                    for os_version in range(os_min, os_max):
                        if os_requirement == '':
                            os_requirement = '10.' + str(os_version) + '.x'
                        else:
                            os_requirement = os_requirement + ',10.' + str(
                                os_version) + '.x'

                    self.env['OS_REQUIREMENTS'] = os_requirement
                else:
                    self.env['OS_REQUIREMENTS'] = ''

                # Return minimum and maximum regardless, so they can be used
                # as variables for say, dynamic smart group names
                self.env["OS_MINIMUM"] = str(minimum_os_version)
                self.env["OS_MAXIMUM"] = str(maximum_os_version)
                self.output("OS Version requirements %s in file %s" %
                            (self.env['OS_REQUIREMENTS'], input_plist_path))
                self.output("Minimum OS Version %s in file %s" %
                            (self.env['OS_MINIMUM'], input_plist_path))
                self.output("Maximum OS Version requirements %s in file %s" %
                            (self.env['OS_MAXIMUM'], input_plist_path))

            except FoundationPlist.FoundationPlistException as err:
                raise ProcessorError(err)

        finally:
            # Always unmount the dmg we mounted, even on error
            if dmg:
                self.unmount(dmg_path)
Пример #15
0
    def get_installer_info(self):
        """Gets info about an installer from MS metadata.

        Reads self.env: channel, product, version, locale_id, NAME,
        munki_required_update_name. Writes self.env: url, version,
        description, minimum_os_version, minimum_version_for_delta,
        additional_pkginfo.

        NOTE(review): Python 2-era code (urllib2, plistlib.readPlistFromString
        and a `ur''` literal below); will not run unmodified on Python 3.
        """
        # Get the channel UUID, matching against a custom UUID if one is given
        channel_input = self.env.get("channel", DEFAULT_CHANNEL)
        rex = r"^([0-9a-fA-F]{8}-([0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})$"
        match_uuid = re.match(rex, channel_input)
        if not match_uuid and channel_input not in CHANNELS.keys():
            raise ProcessorError(
                "'channel' input variable must be one of: %s or a custom "
                "uuid" % (", ".join(CHANNELS.keys())))
        if match_uuid:
            channel = match_uuid.groups()[0]
        else:
            channel = CHANNELS[channel_input]
        base_url = BASE_URL % (channel,
                               CULTURE_CODE + PROD_DICT[self.env["product"]]['id'])

        # Get metadata URL
        self.output("Requesting xml: %s" % base_url)
        req = urllib2.Request(base_url)
        # Add the MAU User-Agent, since MAU feed server seems to explicitly
        # block a User-Agent of 'Python-urllib/2.7' - even a blank User-Agent
        # string passes.
        req.add_header(
            "User-Agent",
            "Microsoft%20AutoUpdate/3.6.16080300 CFNetwork/760.6.3 Darwin/15.6.0 (x86_64)")

        try:
            fdesc = urllib2.urlopen(req)
            data = fdesc.read()
            fdesc.close()
        except BaseException as err:
            raise ProcessorError("Can't download %s: %s" % (base_url, err))

        # NOTE(review): readPlistFromString is the Python 2 plistlib API
        # (removed in Python 3.9); plistlib.loads is the modern equivalent.
        metadata = plistlib.readPlistFromString(data)
        item = {}
        # Update feeds for a given 'channel' will have either combo or delta
        # pkg urls, with delta's additionally having a 'FullUpdaterLocation' key.
        # We populate the item dict with the appropriate section of the metadata
        # output, and do string replacement on the pattern of the URL in the
        # case of a Standalone app request.
        if self.env["version"] == "latest" or self.env["version"] == "latest-standalone":
            item = [u for u in metadata if not u.get("FullUpdaterLocation")]
        elif self.env["version"] == "latest-delta":
            item = [u for u in metadata if u.get("FullUpdaterLocation")]
        if not item:
            raise ProcessorError("Could not find an applicable update in "
                                 "update metadata.")
        # Keep only the first applicable update entry
        item = item[0]

        if self.env["version"] == "latest-standalone":
            # Rewrite the updater pkg URL into the standalone installer URL.
            # NOTE(review): the `ur''` prefix is Python 2-only syntax and is a
            # SyntaxError under Python 3.
            p = re.compile(ur'(^[a-zA-Z0-9:/.-]*_[a-zA-Z]*_)(.*)Updater.pkg')
            url = item["Location"]
            (firstGroup, secondGroup) = re.search(p, url).group(1, 2)
            item["Location"] = firstGroup + secondGroup + "Installer.pkg"

        self.env["url"] = item["Location"]
        self.output("Found URL %s" % self.env["url"])
        self.output("Got update: '%s'" % item["Title"])
        # now extract useful info from the rest of the metadata that could
        # be used in a pkginfo
        pkginfo = {}
        # Get a copy of the description in our locale_id
        all_localizations = item.get("Localized")
        lcid = self.env["locale_id"]
        if lcid not in all_localizations:
            raise ProcessorError(
                "Locale ID %s not found in manifest metadata. Available IDs: "
                "%s. See %s for more details." % (
                    lcid,
                    ", ".join(all_localizations.keys()),
                    LOCALE_ID_INFO_URL))
        manifest_description = all_localizations[lcid]['Short Description']
        # Store the description in a separate output variable and in our pkginfo
        # directly.
        pkginfo["description"] = "<html>%s</html>" % manifest_description
        self.env["description"] = manifest_description

        # Minimum OS version key should exist always, but default to the current
        # minimum as of 16/11/03
        pkginfo["minimum_os_version"] = item.get('Minimum OS', '10.10.5')
        installs_items = self.get_installs_items(item)
        if installs_items:
            pkginfo["installs"] = installs_items

        # Extra work to do if this is a delta updater
        if self.env["version"] == "latest-delta":
            try:
                rel_versions = item["Triggers"]["Registered File"]["VersionsRelative"]
            except KeyError:
                # NOTE(review): the message is missing a space between
                # "VersionsRelative" and "keys" due to adjacent-literal joining.
                raise ProcessorError("Can't find expected VersionsRelative"
                                     "keys for determining minimum update "
                                     "required for delta update.")
            # Find the ">=" expression, which carries the minimum version a
            # client must already have for the delta to apply.
            for expression in rel_versions:
                operator, ver_eval = expression.split()
                if operator == ">=":
                    self.min_delta_version = ver_eval
                    break
            if not self.min_delta_version:
                raise ProcessorError("Not able to determine minimum required "
                                     "version for delta update.")
            # Put minimum_update_version into installs item
            self.output("Adding minimum required version: %s" %
                        self.min_delta_version)
            pkginfo["installs"][0]["minimum_update_version"] = \
                self.min_delta_version
            required_update_name = self.env["NAME"]
            if self.env["munki_required_update_name"]:
                required_update_name = self.env["munki_required_update_name"]
            # Add 'requires' array
            pkginfo["requires"] = ["%s-%s" % (required_update_name,
                                              self.min_delta_version)]

        self.env["version"] = self.get_version(item)
        self.env["minimum_os_version"] = pkginfo["minimum_os_version"]
        # NOTE(review): self.min_delta_version is presumably initialised
        # elsewhere (e.g. in __init__/main) — otherwise this line raises
        # AttributeError for non-delta requests; confirm.
        self.env["minimum_version_for_delta"] = self.min_delta_version
        self.env["additional_pkginfo"] = pkginfo
        self.env["url"] = item["Location"]
        self.output("Additional pkginfo: %s" % self.env["additional_pkginfo"])
    def getInstallerinfo(self):
        """Gets info about an installer from MS metadata.

        Downloads the Microsoft AutoUpdate (MAU) plist feed for the
        requested product/culture, picks either the most recent update or
        a specific version (matched against the update Title), and stores
        url, pkg_name and additional_pkginfo in the environment.
        """
        culture_code = self.env.get("culture_code", CULTURE_CODE)
        product = self.env.get("product")
        prod_code = PROD_DICT.get(product)
        if prod_code is None:
            # Fail early with a clear message instead of a TypeError when
            # concatenating None into the URL below.
            raise ProcessorError(
                "Unknown product '%s'. Supported products: %s"
                % (product, ", ".join(PROD_DICT)))
        base_url = BASE_URL % (culture_code + prod_code)
        version_str = self.env.get("version")
        if not version_str:
            version_str = "latest"
        # Get metadata URL
        req = urllib2.Request(base_url)
        # Add the MAU User-Agent, since MAU feed server seems to explicitly block
        # a User-Agent of 'Python-urllib/2.7' - even a blank User-Agent string
        # passes.
        req.add_header("User-Agent",
            "Microsoft%20AutoUpdate/3.0.6 CFNetwork/720.2.4 Darwin/14.4.0 (x86_64)")
        try:
            f = urllib2.urlopen(req)
            data = f.read()
            f.close()
        except Exception as err:
            # Narrowed from BaseException so KeyboardInterrupt/SystemExit
            # are not swallowed.
            raise ProcessorError("Can't download %s: %s" % (base_url, err))

        metadata = plistlib.readPlistFromString(data)
        if version_str == "latest":
            # Outlook 'update' metadata is a list of dicts; sort by date
            # and choose the last item, which should be most recent.
            sorted_metadata = sorted(metadata, key=itemgetter('Date'))
            item = sorted_metadata[-1]
        else:
            # we've been told to find a specific version. Unfortunately, the
            # Outlook updates metadata items don't have a version attibute.
            # The version is only in text in the update's Title. So we look for
            # that...although as of January 2015 it's not included...
            # Titles would normally be in the format "Outlook x.y.z Update"
            padded_version_str = " " + version_str + " "
            matched_items = [item for item in metadata
                            if padded_version_str in item["Title"]]
            if len(matched_items) != 1:
                raise ProcessorError(
                    "Could not find version %s in update metadata. "
                    "Updates that are available: %s"
                    % (version_str, ", ".join(["'%s'" % item["Title"]
                                               for item in metadata])))
            item = matched_items[0]

        self.env["url"] = item["Location"]
        self.env["pkg_name"] = item["Payload"]
        self.output("Found URL %s" % self.env["url"])
        self.output("Got update: '%s'" % item["Title"])
        # now extract useful info from the rest of the metadata that could
        # be used in a pkginfo
        pkginfo = {}
        # currently ignoring latest dict and cherry-picking en-US, may revisit
        all_localizations = metadata[0].get("Localized")
        # NOTE(review): assumes the '1033' (en-US) localization always
        # exists in the feed -- will KeyError otherwise; confirm.
        pkginfo["description"] = "<html>%s</html>" % all_localizations['1033']['Short Description']
        max_os = self.valueToOSVersionString(item['Max OS'])
        min_os = self.valueToOSVersionString(item['Min OS'])
        # "0.0.0" in the feed means "no OS bound"; omit the key in that case.
        if max_os != "0.0.0":
            pkginfo["maximum_os_version"] = max_os
        if min_os != "0.0.0":
            pkginfo["minimum_os_version"] = min_os
        installs_items = self.getInstallsItems(item)
        if installs_items:
            pkginfo["installs"] = installs_items
        if not self.env.get("munki_update_name"):
            pkginfo['name'] = ("%s_2016_Installer" % self.env.get("product"))
        else:
            pkginfo['name'] = self.env["munki_update_name"]
        self.env["additional_pkginfo"] = pkginfo
        # re-setting so we can substitute in %20's for spaces
        self.env["url"] = item["Location"].replace(' ', '%20')
        self.output("Additional pkginfo: %s" % self.env["additional_pkginfo"])
Пример #17
0
 def main(self):
     """Validate the requested version, then fetch installer info."""
     requested = self.env["version"]
     if requested not in SUPPORTED_VERSIONS:
         supported = "', '".join(SUPPORTED_VERSIONS)
         raise ProcessorError(
             "Invalid 'version': supported values are '%s'" % supported)
     self.get_installer_info()
Пример #18
0
    def make_catalog_db(self):
        """Reads the 'all' catalog and returns a dict we can use like a
        database.

        Builds lookup tables mapping installer-item hashes, package
        receipts, installed applications, installer item filenames, file
        checksums and file paths back to indexes into the catalog item
        list (returned under the 'items' key).

        Raises:
            ProcessorError: if the 'all' catalog exists but cannot be read.
        """
        all_items_path = os.path.join(self.munki_repo, "catalogs", "all")
        if not os.path.exists(all_items_path):
            # might be an error, or might be a brand-new empty repo
            catalogitems = []
        else:
            try:
                with open(all_items_path, "rb") as f:
                    catalogitems = plistlib.load(f)
            except OSError as err:
                raise ProcessorError(
                    f"Error reading 'all' catalog from Munki repo: {err}")

        pkgid_table = {}
        app_table = {}
        installer_item_table = {}
        hash_table = {}
        checksum_table = {}
        files_table = {}

        for itemindex, item in enumerate(catalogitems):
            name = item.get("name", "NO NAME")
            vers = item.get("version", "NO VERSION")

            if name == "NO NAME" or vers == "NO VERSION":
                # skip items that lack both a name and a version
                continue

            # add to hash table
            if "installer_item_hash" in item:
                hash_table.setdefault(
                    item["installer_item_hash"], []).append(itemindex)

            # add to installer item table
            if "installer_item_location" in item:
                installer_item_name = os.path.basename(
                    item["installer_item_location"])
                installer_item_table.setdefault(
                    installer_item_name, {}).setdefault(
                        vers, []).append(itemindex)

            # add to table of receipts
            for receipt in item.get("receipts", []):
                try:
                    if "packageid" in receipt and "version" in receipt:
                        pkgid_table.setdefault(
                            receipt["packageid"], {}).setdefault(
                                receipt["version"], []).append(itemindex)
                except TypeError:
                    # skip malformed (non-dict) receipts
                    continue

            # add to table of installed applications
            for install in item.get("installs", []):
                try:
                    if install.get("type") in ("application", "bundle"):
                        if "path" in install:
                            if "version_comparison_key" in install:
                                app_version = install[
                                    install["version_comparison_key"]]
                            else:
                                app_version = install[
                                    "CFBundleShortVersionString"]
                            # BUG FIX: membership was previously tested with
                            # `vers` (the catalog item version) while the
                            # table is keyed by `app_version`, which reset
                            # the index list on nearly every iteration and
                            # lost earlier indexes for the same path/version.
                            app_table.setdefault(
                                install["path"], {}).setdefault(
                                    app_version, []).append(itemindex)
                    if install.get("type") == "file":
                        if "path" in install:
                            entry = {
                                "path": install["path"],
                                "index": itemindex,
                            }
                            if "md5checksum" in install:
                                checksum_table.setdefault(
                                    install["md5checksum"], []).append(entry)
                            else:
                                files_table.setdefault(
                                    install["path"], []).append(entry)

                except (TypeError, KeyError):
                    # skip malformed installs entries
                    continue

        return {
            "hashes": hash_table,
            "receipts": pkgid_table,
            "applications": app_table,
            "installer_items": installer_item_table,
            "checksums": checksum_table,
            "files": files_table,
            "items": catalogitems,
        }
    def main(self):
        """Authenticate against Apple's developer portal and fetch the
        gzipped JSON list of available downloads.

        Side effects: writes login/download cookie jars plus
        listDownloads.gz (unzipped to listDownloads) into the recipe
        cache downloads dir, and sets self.env["download_cookies"].

        Raises:
            ProcessorError: on directory-creation failure, bad
                credentials (JSON error payload instead of gzip), or
                gunzip failure.
        """
        download_dir = os.path.join(self.env["RECIPE_CACHE_DIR"], "downloads")
        login_cookies = os.path.join(download_dir, "login_cookies")
        download_cookies = os.path.join(download_dir, "download_cookies")
        # create download_dir if needed
        if not os.path.exists(download_dir):
            try:
                os.makedirs(download_dir)
            except OSError as err:
                raise ProcessorError("Can't create %s: %s" %
                                     (download_dir, err.strerror))

        self.output("Getting login cookie")
        # We need to POST a request to the auth page to get the
        # 'myacinfo' cookie
        login_curl_opts = [
            "--request",
            "POST",
            "--data",
            "@{}".format(self.env["login_data"]),
            "--cookie-jar",
            login_cookies,
        ]
        self.download(
            url="https://idmsa.apple.com/IDMSWebAuth/authenticate",
            curl_opts=login_curl_opts,
            output="-",
            request_headers=None,
            allow_failure=True,
        )
        self.output("Getting download cookie")
        # Now we need to get the download cookie
        dl_curl_opts = [
            "--request",
            "POST",
            "--cookie",
            login_cookies,
            "--cookie-jar",
            download_cookies,
        ]
        headers = {"Content-length": "0"}
        output = os.path.join(download_dir, "listDownloads.gz")
        if os.path.exists(output):
            # Delete it first
            os.unlink(output)
        self.download(
            url=
            "https://developer.apple.com/services-account/QH65B2/downloadws/listDownloads.action",
            curl_opts=dl_curl_opts,
            output=output,
            request_headers=headers,
            allow_failure=True,
        )
        self.env["download_cookies"] = download_cookies
        try:
            with open(output) as f:
                json.load(f)
                # If we successfully load this as JSON, then we failed
                # to download the gzip list (the server returned an
                # error payload instead)
                raise ProcessorError(
                    "Unable to list downloads. Check your Apple credentials.")
        except IOError:
            raise ProcessorError("Unable to load listDownloads.gz file.")
        except ValueError:
            # Not JSON: this is the expected gzip payload.
            pass
        # While we're at it, let's unzip the download list
        # It's actually a gunzip, so Unarchiver doesn't work
        # The result is a JSON blob
        self.output("Unzipping download list")
        os.chdir(download_dir)
        # BUG FIX: str.rstrip(".gz") strips a *character set* (".", "g",
        # "z"), not a suffix; use splitext to derive the unzipped name.
        unzipped = os.path.splitext(output)[0]
        if os.path.exists(unzipped):
            # Delete the file if it's already here
            os.unlink(unzipped)
        gunzip_cmd = ["/usr/bin/gunzip", output]
        proc = subprocess.Popen(
            gunzip_cmd,
            shell=False,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        (_, stderr) = proc.communicate()
        if proc.returncode:
            # BUG FIX: communicate() returns bytes on Python 3; decode
            # before string handling.
            if isinstance(stderr, bytes):
                stderr = stderr.decode("utf-8", "replace")
            gzerr = stderr.rstrip("\n")
            raise ProcessorError("Gunzip failure: %s (exit code %s)" %
                                 (gzerr, proc.returncode))
Пример #20
0
    def main(self):
        """Build an NSPredicate-style `installable_condition` that shards
        a deployment across ten percentage groups over `shard_days`,
        optionally restricted to working hours (Mon-Fri, 09:00-18:00).

        Reads condition/delay_hours/shard_days/working_hours from the env
        and writes the predicate string to env["installable_condition"].
        """
        try:
            condition = self.env.get("condition", DEFAULT_CONDITION)
            delay_hours = int(self.env.get("delay_hours", DEFAULT_DELAY_HOURS))
            shard_days = int(self.env.get("shard_days", DEFAULT_SHARD_DAYS))
            working_hours = bool(
                self.env.get("working_hours", DEFAULT_WORKING_HOURS))

            now = datetime.datetime.now()
            target_date = now + datetime.timedelta(days=shard_days)
            start_date = now + datetime.timedelta(hours=delay_hours)

            date_format = "%Y-%m-%dT%H:%M:%SZ"

            # We also only deploy monday to friday
            if working_hours:
                # If working hours, we only have 9 hours a day.

                # We only start deploying at 9am (or later if
                # delay_hours > 24)
                if delay_hours > 24:
                    start_date = now + datetime.timedelta(hours=delay_hours)
                    # BUG FIX: this previously used `and`, which can never
                    # be true; outside working hours is "> 18 OR < 9".
                    if start_date.hour > 18 or start_date.hour < 9:
                        start_date = start_date.replace(
                            hour=9, minute=0) + datetime.timedelta(days=1)
                    else:
                        start_date = start_date.replace(hour=9, minute=0)

                # make sure it's a working day
                start_date = self.next_working_day(start_date)
                # how many working hours between now and end of shard_days
                increment = datetime.timedelta(
                    minutes=(9 * shard_days * 60) / 10)
                current_deploy_date = start_date
                clauses = []
                for group in range(10, 101, 10):
                    deploy_time = self.next_working_day(current_deploy_date +
                                                        increment)
                    print(("group: {} deploy_time: {}".format(
                        group, deploy_time)))
                    clauses.append(
                        '({} <= {} AND date > CAST("{}", "NSDate"))'.format(
                            condition, group,
                            deploy_time.strftime(date_format)))
                    current_deploy_date = deploy_time
                output_string = "({})".format(" OR ".join(clauses))
            else:
                # Spread the ten groups evenly between start_date and
                # target_date.
                time_increment = (target_date - start_date) / 10

                # Group <= 10 deploys at start_date; group N*10 deploys at
                # start_date + N increments (matching the original table).
                deploy_times = [start_date] + [
                    start_date + (time_increment * step)
                    for step in range(2, 11)
                ]
                # BUG FIX: the original template only opened a parenthesis
                # on the first clause, producing an unbalanced predicate
                # for groups 20-100.
                clauses = [
                    '({} <= {} AND date > CAST("{}", "NSDate"))'.format(
                        condition, group, when.strftime(date_format))
                    for group, when in zip(range(10, 101, 10), deploy_times)
                ]
                output_string = "({})".format(" OR ".join(clauses))

            self.env["installable_condition"] = output_string
        except Exception as err:
            # Narrowed from BaseException so KeyboardInterrupt/SystemExit
            # propagate; report the failing line number for debugging.
            exc_tb = sys.exc_info()[2]
            raise ProcessorError(
                "error: {}, line: {}".format(err, exc_tb.tb_lineno))
    def get_torbrowser_release(self, language, include_prereleases):
        """Return the version and URL of the latest Tor Browser release.

        Scrapes the dist.torproject.org directory index, collects stable
        and (optionally) pre-release version directory names, and returns
        (latest_version, download_url) for the requested language.

        Raises:
            ProcessorError: if the index can't be fetched or no suitable
                versions are found.
        """
        # URL of directory index that lists Tor browser versions.
        # Include trailing slash.
        releases_url = "https://dist.torproject.org/torbrowser/"

        # The string that indicates a release listing in the HTML.
        marker = '<img src="/icons/folder.gif" alt="[DIR]">'

        # Prepare lists for storage of the parsed versions.
        all_versions = []
        stable_versions = []

        # Download the HTML of the releases page.
        try:
            html_string = urllib2.urlopen(releases_url).read()
        except Exception as err:
            raise ProcessorError("Couldn't load %s (%s)" % (releases_url, err))

        # Look for lines containing a directory listing.
        for line in html_string.split("\n"):
            if marker not in line:
                continue
            # Strip the version number out of the HTML.
            # (The previous line.lstrip(marker) call strips a character
            # *set*, not a prefix; it was redundant here because find()
            # anchors the slice, so it has been removed.)
            start = line.find('/">') + 3
            stop = line.find('/</a>')
            this_version = line[start:stop]

            # Stable versions go in stable_versions and all_versions.
            if re.match(r"^(?:(\d+\.(?:\d+\.)*\d+))$", this_version):
                stable_versions.append(this_version)
                all_versions.append(this_version)
            # Non-stable versions go in all_versions only.
            elif re.match(r"^(?:(\d+\.(?:\d+\.)*.+))$", this_version):
                all_versions.append(this_version)

        # If we didn't find any versions at all, bail out.
        if not all_versions:
            raise ProcessorError(
                "No Tor Browser versions of any kind were found at %s." %
                releases_url)

        # BUG FIX: this guard previously fired when prereleases *were*
        # requested; stable versions only matter when they are not.
        if not include_prereleases and not stable_versions:
            raise ProcessorError(
                "No stable Tor Browser versions were found at %s." %
                releases_url)

        def version_key(version):
            """Sort key comparing dotted versions numerically, so that
            e.g. 10.0 sorts after 9.5 (a plain string sort gets this
            wrong). Alpha/beta tags sort before numeric components."""
            parts = []
            for component in re.split(r"[.\-]", version):
                if component.isdigit():
                    parts.append((1, int(component), ""))
                else:
                    parts.append((0, 0, component))
            return parts

        # BUG FIX: replaces a lexicographic sort that mis-ordered
        # multi-digit components.
        if include_prereleases:
            latest_version = max(all_versions, key=version_key)
        else:
            latest_version = max(stable_versions, key=version_key)

        # Construct the URL using the version and language information.
        latest_url = "%s%s/TorBrowser-%s-osx64_%s.dmg" % (
            releases_url, latest_version, latest_version, language)

        # Put a stamp on it and drop it in the mailbox.
        return latest_version, latest_url
    def main(self):
        """Extract archive entries matching `file_to_extract` from a xar
        archive into `extract_file_path` and report the extracted path.

        Raises:
            ProcessorError: if listing or extraction fails, the
                destination can't be created, or no matching file exists
                after extraction.
        """
        # Define variables
        archive_path = os.path.join(self.env["archive_path"])
        extract_file_path = self.env.get(
            "extract_file_path",
            os.path.join(self.env.get("RECIPE_CACHE_DIR"), "extractedfile"))
        file_to_extract = self.env["file_to_extract"]
        # BUG FIX: initialize so a zero-match run raises ProcessorError
        # below instead of NameError.
        extracted_file = None

        # Get a list of files in the archive
        cmd_list_files = '/usr/bin/xar -tf "{}"'.format(archive_path)
        results_list_files = self.runUtility(cmd_list_files)

        if not results_list_files['success']:
            raise ProcessorError(
                "Failed to list the files in the archive:  {archive_path} -- due to error:  {error}"
                .format(archive_path=archive_path,
                        error=results_list_files['stderr']))

        # Split the file names
        list_of_files = (
            results_list_files['stdout'].decode("utf-8")).split('\n')

        # Create destination directory if it doesn't exist
        if not os.path.exists(extract_file_path):
            try:
                os.mkdir(extract_file_path)
            except OSError as err:
                raise ProcessorError("Can't create {}:  {}".format(
                    extract_file_path, err.strerror))

        # Extract every archive entry that matches the requested pattern
        for filename in [
                item for item in list_of_files
                if fnmatch(item, file_to_extract)
        ]:
            # BUG FIX: the command previously contained a literal
            # placeholder instead of "{filename}" (the filename= format
            # kwarg was silently ignored), so xar never extracted the
            # matched entry.
            cmd_extract = '/usr/bin/xar -xf "{archive_path}" "{filename}" -C "{extract_file_path}"'.format(
                archive_path=archive_path,
                filename=filename,
                extract_file_path=extract_file_path)
            results_extract = self.runUtility(cmd_extract)

            if not results_extract['success']:
                raise ProcessorError(
                    "Failed to extract the archive:  {archive_path} -- due to error:  {error}"
                    .format(archive_path=archive_path,
                            error=results_extract['stderr']))

            # Path to the extracted file
            extracted_file = os.path.join(extract_file_path, filename)

        # Verify a matching file actually exists on disk
        found_match = any(
            fnmatch(filename, file_to_extract)
            for filename in os.listdir(extract_file_path))

        if found_match and extracted_file:
            self.output('extracted_file:  {}'.format(extracted_file))
            self.env["extracted_file"] = extracted_file
        else:
            raise ProcessorError(
                "Cannot find the file:  {}".format(file_to_extract))
Пример #23
0
            if url_handle.info().get("etag"):
                self.env["etag"] = url_handle.info().get("etag")
                xattr.setxattr(
                    pathname, XATTR_ETAG, url_handle.info().get("etag"))
                self.output("Storing new ETag header: %s"
                            % url_handle.info().get("etag"))

            self.output("Downloaded %s" % pathname)
            self.env['url_downloader_summary_result'] = {
                'summary_text': 'The following new items were downloaded:',
                'data': {
                    'download_path': pathname,
                }
            }

            self.report["items"] = {
                "Path": self.env["pathname"]
                }

        except BaseException as err:
            raise ProcessorError(
                "Couldn't download %s (%s)" % (self.env["url"], err))
        finally:
            if url_handle is not None:
                url_handle.close()


if __name__ == "__main__":
    # Run the processor standalone from the command line.
    URLDownloader().execute_shell()
    def get_installer_info(self):
        """Gets info about an installer from MS metadata.

        Resolves the update channel (named channel or custom UUID),
        downloads the MAU plist feed for the product, selects a
        combo/delta/standalone item according to the 'version' input, and
        populates url, version, minimum_os_version,
        minimum_version_for_delta and additional_pkginfo in the env.
        """
        # Get the channel UUID, matching against a custom UUID if one is given
        channel_input = self.env.get("channel", DEFAULT_CHANNEL)
        rex = r"^([0-9a-fA-F]{8}-([0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12})$"
        match_uuid = re.match(rex, channel_input)
        if not match_uuid and channel_input not in CHANNELS:
            raise ProcessorError(
                "'channel' input variable must be one of: %s or a custom "
                "uuid" % (", ".join(CHANNELS)))
        if match_uuid:
            channel = match_uuid.groups()[0]
        else:
            channel = CHANNELS[channel_input]
        base_url = BASE_URL % (
            channel,
            CULTURE_CODE + PROD_DICT[self.env["product"]]["id"],
        )

        # Get metadata URL
        self.output("Requesting xml: %s" % base_url)
        # Add the MAU User-Agent, since MAU feed server seems to explicitly
        # block a User-Agent of 'Python-urllib/2.7' - even a blank User-Agent
        # string passes.
        headers = {
            "User-Agent": ("Microsoft%20AutoUpdate/3.6.16080300 CFNetwork/"
                           "760.6.3 Darwin/15.6.0 (x86_64)")
        }
        data = self.download(base_url, headers)

        metadata = readPlistFromString(data)
        # Update feeds for a given 'channel' will have either combo or delta
        # pkg urls, with delta's additionally having a 'FullUpdaterLocation'
        # key. Select the matching subset of feed items.
        if (self.env["version"] == "latest"
                or self.env["version"] == "latest-standalone"):
            candidates = [u for u in metadata
                          if not u.get("FullUpdaterLocation")]
        elif self.env["version"] == "latest-delta":
            candidates = [u for u in metadata if u.get("FullUpdaterLocation")]
        else:
            candidates = []
        if not candidates:
            raise ProcessorError("Could not find an applicable update in "
                                 "update metadata.")

        # this just returns the first item; in the case of delta updates this
        # is not guaranteed to be the "latest" delta. Does anybody actually
        # use this?
        item = candidates[0]

        if self.env["version"] == "latest-standalone":
            # do string replacement on the pattern of the URL in the
            # case of a Standalone app request.
            url = item["Location"]
            updater_suffix = "_Updater.pkg"
            if url.endswith(updater_suffix):
                item["Location"] = url[:-len(updater_suffix)] + "_Installer.pkg"
            else:
                raise ProcessorError(
                    "Updater URL in unexpected format; cannot "
                    "determine standalone URL.")

        self.env["url"] = item["Location"]
        self.output("Found URL %s" % self.env["url"])
        self.output("Got update: '%s'" % item["Title"])
        # now extract useful info from the rest of the metadata that could
        # be used in a pkginfo
        pkginfo = {}

        # Minimum OS version key should exist!
        pkginfo["minimum_os_version"] = (
            item.get("Minimum OS")
            or PROD_DICT[self.env["product"]].get("minimum_os") or "10.10.5")

        installs_items = self.get_installs_items(item)
        if installs_items:
            pkginfo["installs"] = installs_items

        # Extra work to do if this is a delta updater
        if self.env["version"] == "latest-delta":
            try:
                rel_versions = item["Triggers"]["Registered File"][
                    "VersionsRelative"]
            except KeyError:
                # BUG FIX: message previously read "VersionsRelativekeys"
                # (missing space in the implicit string concatenation).
                raise ProcessorError("Can't find expected VersionsRelative "
                                     "keys for determining minimum update "
                                     "required for delta update.")
            for expression in rel_versions:
                operator, ver_eval = expression.split()
                if operator == ">=":
                    self.min_delta_version = ver_eval
                    break
            if not self.min_delta_version:
                raise ProcessorError("Not able to determine minimum required "
                                     "version for delta update.")
            # Put minimum_update_version into installs item
            self.output("Adding minimum required version: %s" %
                        self.min_delta_version)
            pkginfo["installs"][0][
                "minimum_update_version"] = self.min_delta_version
            required_update_name = self.env["NAME"]
            if self.env["munki_required_update_name"]:
                required_update_name = self.env["munki_required_update_name"]
            # Add 'requires' array
            pkginfo["requires"] = [
                "%s-%s" % (required_update_name, self.min_delta_version)
            ]
        elif PROD_DICT[self.env["product"]].get("minimum_update_version"):
            # Put minimum_update_version into installs item as it is
            # specified in PROD_DICT
            self.output(
                "Adding minimum required version: %s" %
                PROD_DICT[self.env["product"]].get("minimum_update_version"))
            pkginfo["installs"][0]["minimum_update_version"] = PROD_DICT[
                self.env["product"]].get("minimum_update_version")

        self.env["version"] = self.get_version(item)
        self.env["minimum_os_version"] = pkginfo["minimum_os_version"]
        # NOTE(review): assumes self.min_delta_version has a default set
        # elsewhere (e.g. __init__) for non-delta runs -- confirm.
        self.env["minimum_version_for_delta"] = self.min_delta_version
        self.env["additional_pkginfo"] = pkginfo
        self.env["url"] = item["Location"]
        self.output("Additional pkginfo: %s" % self.env["additional_pkginfo"])
Пример #25
0
    def main(self):
        """Upload (or replace) a computer group in Jamf Pro."""
        self.jamf_url = self.env.get("JSS_URL")
        self.jamf_user = self.env.get("API_USERNAME")
        self.jamf_password = self.env.get("API_PASSWORD")
        self.computergroup_name = self.env.get("computergroup_name")
        self.computergroup_template = self.env.get("computergroup_template")
        self.replace = self.env.get("replace_group")
        # an unset or "False" override means "do not replace"
        if not self.replace or self.replace == "False":
            self.replace = False

        # drop any summary result left over from a previous run
        self.env.pop("jamfcomputergroupuploader_summary_result", None)
        group_uploaded = False

        # build a basic-auth value from the API credentials
        credentials = f"{self.jamf_user}:{self.jamf_password}"
        enc_creds = str(b64encode(credentials.encode("utf-8")), "utf-8")

        # resolve a bare filename to a full template path
        if "/" not in self.computergroup_template:
            found_template = self.get_path_to_file(self.computergroup_template)
            if not found_template:
                raise ProcessorError(
                    f"ERROR: Computer Group file {self.computergroup_template} not found"
                )
            self.computergroup_template = found_template

        # look up any existing group with this name
        self.output(
            f"Checking for existing '{self.computergroup_name}' on {self.jamf_url}"
        )
        obj_id = self.check_api_obj_id_from_name(
            self.jamf_url, "computer_group", self.computergroup_name,
            enc_creds)

        if obj_id:
            self.output(
                f"Computer group '{self.computergroup_name}' already exists: ID {obj_id}"
            )
            if not self.replace:
                self.output(
                    "Not replacing existing Computer Group. Use replace_group='True' to enforce.",
                    verbose_level=1,
                )
                return
            self.output(
                "Replacing existing Computer Group as 'replace_group' is set "
                f"to {self.replace}",
                verbose_level=1,
            )
            self.upload_computergroup(
                self.jamf_url,
                enc_creds,
                self.computergroup_name,
                self.computergroup_template,
                obj_id,
            )
            group_uploaded = True
        else:
            # no existing group: create a new one
            self.upload_computergroup(
                self.jamf_url,
                enc_creds,
                self.computergroup_name,
                self.computergroup_template,
            )
            group_uploaded = True

        # record the outcome for the recipe summary
        self.env["group_uploaded"] = group_uploaded
        if group_uploaded:
            self.env["jamfcomputergroupuploader_summary_result"] = {
                "summary_text":
                ("The following computer groups were created or updated "
                 "in Jamf Pro:"),
                "report_fields": ["group", "template"],
                "data": {
                    "group": self.computergroup_name,
                    "template": self.computergroup_template,
                },
            }
Пример #26
0
    def main(self):
        """Locate (or produce by extraction) a DMG for the file created by a
        previous processor, storing its path in ``self.env["dmg_path"]``.

        Inputs (from ``self.env``):
            archive_path / pathname: path to the downloaded file (required).
            destination_path: where archives are unpacked; defaults to
                RECIPE_CACHE_DIR/NAME.
            purge_destination: if truthy, empty the destination first.

        Raises:
            ProcessorError: if no input path is set, the file does not
                exist, the archive format cannot be determined, or no DMG
                is found after processing.
        """
        # Handle some defaults for archive_path and destination_path
        pathname = self.env.get("archive_path", self.env.get("pathname"))
        if not pathname:
            raise ProcessorError(
                "Expected a 'pathname' input variable but none is set!")

        # Convert to pathlib.Path for further processing
        pathname = pathlib.Path(pathname)
        if not pathname.exists():
            raise ProcessorError(
                f"File from previous processor (pathname: {pathname}) does "
                f"not exist!")

        destination_path = pathlib.Path(
            self.env.get(
                "destination_path",
                pathlib.Path(self.env["RECIPE_CACHE_DIR"], self.env["NAME"]),
            ))
        # Make the destination path
        destination_path.mkdir(parents=True, exist_ok=True)
        if self.env.get("purge_destination"):
            for file_path in destination_path.glob("*"):
                try:
                    if file_path.is_dir() and not file_path.is_symlink():
                        # BUGFIX: was shutil.rmtree(path) — 'path' is an
                        # undefined name here; remove the directory we are
                        # actually iterating over.
                        shutil.rmtree(file_path)
                    else:
                        file_path.unlink()
                except OSError as err:
                    raise ProcessorError(
                        f"Can't remove {file_path}: {err.strerror}")

        try:
            # Strip any '?query' suffix left over from the download URL
            # so suffix detection below works.
            new_pathname = pathname.joinpath(pathname.parent,
                                             pathname.name.split("?")[0])
            # Rename file to new_pathname:
            pathname.rename(new_pathname)
            # Handle dmgs
            if new_pathname.suffix == ".dmg":
                self.env["dmg_path"] = str(new_pathname)
            # Handle other formats
            else:
                file_fmt = self.get_archive_format(str(new_pathname))
                if not file_fmt:
                    raise ProcessorError(
                        "Can't guess archive format for filename "
                        f"'{new_pathname.name}'")
                self.output(
                    f"Guessed archive format '{file_fmt}' from filename "
                    f"'{new_pathname.name}'")
                if file_fmt not in list(EXTNS.keys()):
                    err_msg = ", ".join(list(EXTNS.keys()))
                    raise ProcessorError(
                        f"'{file_fmt}' is not valid for the 'archive_format' "
                        f"variable. Must be one of {err_msg}.")
                # Extract archived file
                self._extract(file_fmt, str(new_pathname),
                              str(destination_path))
                self.output(
                    f"Unarchived '{new_pathname}' to '{destination_path}'")
                # Find a DMG whose name starts with the recipe NAME at
                # destination_path
                recipe_name = self.env["NAME"]
                file_regex = rf"{recipe_name}.*\.dmg"
                for file_path in destination_path.glob("*.dmg"):
                    if re.match(file_regex, file_path.name):
                        self.env["dmg_path"] = str(file_path)
                        break

            # Make sure we've found a DMG
            if not self.env.get("dmg_path"):
                raise ProcessorError(
                    "Unable to locate a DMG after processing!")

            self.output(f"Found DMG {self.env['dmg_path']} in file {pathname}")
        except ProcessorError:
            raise
        except Exception as ex:
            # Wrap unexpected failures so AutoPkg reports them uniformly.
            raise ProcessorError(ex)
Пример #27
0
    def get_feed_data(self, url):
        """Returns an array of dicts, one per update item, structured like:
        version: 1234
        human_version: 1.2.3.4 (optional)
        url: http://download/url.dmg
        minimum_os_version: 10.7 (optional)
        description_data: HTML description for update (optional)
        description_url: URL given by the sparkle:releaseNotesLink element
                         (optional)

        Note: Descriptions may be either given as a URL (usually the case) or as
        raw HTML. We store one or the other rather than doing many GETs for
        metadata we're never going to use. If it's a URL, this must be handled
        by whoever calls this function."""

        # handle custom xmlns and version attributes
        if "alternate_xmlns_url" in self.env:
            xmlns = self.env["alternate_xmlns_url"]
        else:
            xmlns = DEFAULT_XMLNS

        # query string: append/replace the URL's query with user-supplied pairs
        if "appcast_query_pairs" in self.env:
            queries = self.env["appcast_query_pairs"]
            new_query = urllib.urlencode([(k, v)
                                          for (k, v) in queries.items()])
            scheme, netloc, path, _, frag = urlparse.urlsplit(url)
            url = urlparse.urlunsplit((scheme, netloc, path, new_query, frag))

        request = urllib2.Request(url=url)

        # request header code borrowed from URLDownloader
        if "appcast_request_headers" in self.env:
            headers = self.env["appcast_request_headers"]
            for header, value in headers.items():
                request.add_header(header, value)

        try:
            url_handle = urllib2.urlopen(request)
        except Exception:
            raise ProcessorError("Can't open URL %s" % request.get_full_url())

        data = url_handle.read()
        try:
            xmldata = ElementTree.fromstring(data)
        except Exception:
            raise ProcessorError("Error parsing XML from appcast feed.")

        items = xmldata.findall("channel/item")

        versions = []
        for item_elem in items:
            enclosure = item_elem.find("enclosure")
            if enclosure is not None:
                item = {}
                # URL-quote the path component to handle spaces, etc.
                # (Panic apps do this)
                url_bits = urlparse.urlsplit(enclosure.get("url"))
                encoded_path = urllib.quote(url_bits.path)
                built_url = (url_bits.scheme + "://" + url_bits.netloc +
                             encoded_path)
                if url_bits.query:
                    built_url += "?" + url_bits.query
                item["url"] = built_url

                item["version"] = enclosure.get("{%s}version" % xmlns)
                if item["version"] is None:
                    # Sparkle tries to guess a version from the download URL for
                    # rare cases where there is no sparkle:version enclosure
                    # attribute, for the format: AppnameOrAnything_1.2.3.zip
                    # https://github.com/andymatuschak/Sparkle/blob/master/
                    #                                  SUAppcastItem.m#L153-L167
                    #
                    # We can even support OmniGroup's alternate appcast format
                    # by cheating and using the '-' as a delimiter to derive
                    # version info
                    filename = os.path.basename(
                        os.path.splitext(item["url"])[0])
                    for delimiter in ['_', '-']:
                        if delimiter in filename:
                            item["version"] = filename.split(delimiter)[-1]
                            break
                # if we still found nothing, fail
                if item["version"] is None:
                    raise ProcessorError(
                        "Can't extract version info from item in feed!")

                # BUGFIX: the tag was searched literally as
                # "{%s}shortVersionString" (missing the % xmlns substitution),
                # so it never matched; also the Element object itself was
                # stored rather than its text.
                human_version = item_elem.find("{%s}shortVersionString" %
                                               xmlns)
                if human_version is not None:
                    item["human_version"] = human_version.text
                min_version = item_elem.find("{%s}minimumSystemVersion" %
                                             xmlns)
                if min_version is not None:
                    item["minimum_os_version"] = min_version.text
                description_elem = item_elem.find("{%s}releaseNotesLink" %
                                                  xmlns)
                if description_elem is not None:
                    item["description_url"] = description_elem.text
                if item_elem.find("description") is not None:
                    item["description_data"] = (
                        item_elem.find("description").text)
                versions.append(item)

        return versions
Пример #28
0
    def package(self):
        '''Build a packaging request, send it to the autopkgserver and get the
        constructed package.

        Populates self.env["pkg_path"], self.env["new_package_request"] and,
        when a build actually happens, a pkg_creator_summary_result dict.
        If a matching flat package (same id and version) already exists in
        pkgdir and force_pkg_build is not set, the build is skipped.'''

        # clear any pre-existing summary result
        if 'pkg_creator_summary_result' in self.env:
            del self.env['pkg_creator_summary_result']

        request = self.env["pkg_request"]
        if 'pkgdir' not in request:
            request['pkgdir'] = self.env['RECIPE_CACHE_DIR']

        # Set variables, and check that all keys are in request.
        for key in ("pkgroot",
                    "pkgname",
                    "pkgtype",
                    "id",
                    "version",
                    "infofile",
                    "resources",
                    "options",
                    "scripts"):
            if key not in request:
                if key in self.env:
                    request[key] = self.env[key]
                elif key in ["infofile", "resources", "options", "scripts"]:
                    # these keys are optional, so empty string value is OK
                    request[key] = ""
                elif key == "pkgtype":
                    # we only support flat packages now
                    request[key] = "flat"
                else:
                    raise ProcessorError("Request key %s missing" % key)

        # Make sure chown dict is present.
        if "chown" not in request:
            request["chown"] = {}

        # Convert relative paths to absolute.
        for key, value in request.items():
            if key in ("pkgroot", "pkgdir", "infofile", "resources", "scripts"):
                if value and not value.startswith("/"):
                    # search for it
                    request[key] = self.find_path_for_relpath(value)

        # Check for an existing flat package in the output dir and compare its
        # identifier and version to the one we're going to build.
        pkg_path = os.path.join(request['pkgdir'], request['pkgname'] + '.pkg')
        if os.path.exists(pkg_path) and not self.env.get("force_pkg_build"):
            self.output("Package already exists at path %s." % pkg_path)
            self.xar_expand(pkg_path)
            packageinfo_file = os.path.join(self.env['RECIPE_CACHE_DIR'],
                                            'PackageInfo')
            if not os.path.exists(packageinfo_file):
                # BUGFIX: error message previously read "file count be found"
                raise ProcessorError(
                    "Failed to parse existing package, as no PackageInfo "
                    "file could be found in the extracted archive.")

            tree = ET.parse(packageinfo_file)
            root = tree.getroot()
            local_version = root.attrib['version']
            local_id = root.attrib['identifier']

            if (local_version == request['version']
                    and local_id == request['id']):
                self.output("Existing package matches version and identifier, "
                            "not building.")
                self.env["pkg_path"] = pkg_path
                self.env["new_package_request"] = False
                return

        # Send packaging request.
        try:
            self.output("Connecting")
            self.connect()
            self.output("Sending packaging request")
            self.env["new_package_request"] = True
            pkg_path = self.send_request(request)
        finally:
            # Always disconnect, even if the request failed.
            self.output("Disconnecting")
            self.disconnect()

        # Return path to pkg.
        self.env["pkg_path"] = pkg_path
        self.env["pkg_creator_summary_result"] = {
            'summary_text': 'The following packages were built:',
            'report_fields': ['identifier', 'version', 'pkg_path'],
            'data': {
                'identifier': request['id'],
                'version': request['version'],
                'pkg_path': pkg_path
            }
        }
 def main(self):
     """Validate the requested update type, then fetch installer info."""
     update_type = self.env["type"]
     if update_type not in SUPPORTED_TYPES:
         supported = "', '".join(SUPPORTED_TYPES)
         raise ProcessorError("Invalid type: supported values are '%s'" %
                              supported)
     self.get_installer_info()
Пример #30
0
    def fetch_extended_product_info(self, product, platform, cdn):
        """Fetch extended information about a product such as: manifest,
        proxy (if available), release notes, and icon.

        Returns a dict with keys 'icon_path', 'proxy_version',
        'release_notes', and 'manifest_url' (only when the product json
        provides one). Optional work is gated by the 'fetch_icon',
        'parse_proxy_xml' and 'fetch_release_notes' env flags."""
        extended_info = {}

        # Fetch Icon: choose the widest icon advertised by the feed.
        if 'productIcons' in product:
            largest_width = 0
            largest_icon_url = None
            for icon in product['productIcons'].get('icon', []):
                self.output('Considering icon: {}'.format(icon))
                # sizes come as 'WxH', so a single split suffices
                w, h = icon.get('size', '0x0').split('x', 1)
                if int(w) > largest_width:
                    largest_width = int(w)
                    largest_icon_url = icon.get('value')

            self.env['icon_url'] = largest_icon_url

        if 'icon_url' in self.env and self.env.get('fetch_icon', False):
            self.output('Fetching icon from {}'.format(self.env['icon_url']))
            req = urllib2.Request(self.env['icon_url'], headers=HEADERS)
            content = urllib2.urlopen(req).read()

            # BUGFIX: PNG data is binary; was opened with text mode 'w+',
            # which can corrupt the file.
            with open('{}/Icon.png'.format(self.env['RECIPE_CACHE_DIR']), 'wb') as fd:
                fd.write(content)

            extended_info['icon_path'] = '{}/Icon.png'.format(self.env['RECIPE_CACHE_DIR'])
        else:
            # BUGFIX: only warn when an icon fetch was actually requested;
            # previously this message was emitted even with fetch_icon False.
            if self.env.get('fetch_icon', False):
                self.output('An icon was requested but we were unable to download one.')
            extended_info['icon_path'] = ''

        # Fetch Manifest + Proxy
        if 'urls' in platform['languageSet'][0] and 'manifestURL' in platform['languageSet'][0]['urls']:
            extended_info['manifest_url'] = '{}{}'.format(
                cdn['ccm']['secure'],
                platform['languageSet'][0]['urls'].get('manifestURL')
            )

            if self.env.get('parse_proxy_xml', False):
                self.output('Processor will fetch manifest and proxy xml')
                manifest, proxy = self.fetch_manifest(extended_info['manifest_url'])
                product_version_el = proxy.find('InstallerProperties/Property[@name="ProductVersion"]')
                if product_version_el is None:
                    raise ProcessorError('Could not find ProductVersion in proxy data, aborting.')
                else:
                    self.output('Found version in proxy.xml: {}'.format(product_version_el.text))
                    extended_info['proxy_version'] = product_version_el.text
            else:
                extended_info['proxy_version'] = ''
        else:
            self.output('Did not find a manifest.xml in the product json data')

        # Fetch Release Notes
        if self.env.get('fetch_release_notes', False):
            self.output('Processor will fetch update release notes')
            desc = self.fetch_release_notes(product['id'], product['version'], 'osx10-64', 'en_US')
            rn_etree = ElementTree.fromstring(desc)
            release_notes_el = rn_etree.find('UpdateDescription')

            if release_notes_el is None:
                raise ProcessorError('Could not find UpdateDescription in release notes')

            extended_info['release_notes'] = release_notes_el.text
        else:
            extended_info['release_notes'] = ''

        return extended_info