def get_reader_updater_dmg_url(self, major_version):
        """Return (download_url, version) for the Adobe Reader Updater DMG.

        Downloads a small text resource containing the current version
        string for the given major_version, then builds the updater DMG
        URL from it.

        Args:
            major_version: Adobe Reader major release identifier.

        Returns:
            Tuple of (url, version_string).

        Raises:
            ProcessorError: if the version template URL cannot be fetched.
        """
        try:
            version_string = self.download(
                AR_UPDATER_BASE_URL + AR_URL_TEMPLATE % major_version, text=True
            )
        except Exception as err:
            raise ProcessorError("Can't open URL template: %s" % (err))
        # Substitute the major-revision placeholder, then strip surrounding
        # whitespace: the feed ends with a newline which previously leaked
        # into the returned version string (it was only removed from the
        # URL-safe version code below).
        version_string = version_string.replace(
            AR_MAJREV_IDENTIFIER, major_version
        ).strip()

        # The download URL embeds the version with the dots removed.
        versioncode = version_string.replace(".", "")

        url = AR_UPDATER_DOWNLOAD_URL % (
            major_version,
            versioncode,
            major_version,
            versioncode,
        )

        return (url, version_string)
Пример #2
0
        def __enter__(self):
            """Start the configured WebDriver engine (context-manager entry).

            Only the "Chrome" engine is handled here: a headless
            ChromeOptions instance is built and, if self.path is set, it is
            used as the explicit chromedriver executable path.
            (The original docstring, "Opens a connection to the database",
            was a copy-paste error.)

            Returns:
                The started webdriver instance (self.web_engine).

            Raises:
                ProcessorError: if the WebDriver engine fails to start.
            """
            try:
                if self.engine == "Chrome":
                    options = webdriver.ChromeOptions()
                    options.add_argument("headless")

                    if self.path:
                        self.web_engine = webdriver.Chrome(
                            executable_path=self.path, chrome_options=options
                        )
                    else:
                        self.web_engine = webdriver.Chrome(chrome_options=options)

            # A bare "except:" also swallowed SystemExit/KeyboardInterrupt;
            # catch Exception and chain the cause for easier debugging.
            except Exception as err:
                raise ProcessorError(
                    "Failed to load the specified WebDriver engine."
                ) from err

            return self.web_engine
Пример #3
0
    def main(self):
        """Expand a .xip archive into an output directory.

        Uses env "PKG" as the xip path; output goes to "output_path" if
        set, otherwise to RECIPE_CACHE_DIR/NAME_unpack (created if
        missing).

        Raises:
            ProcessorError: if /usr/bin/xip exits non-zero.
        """
        xip_path = self.env["PKG"]
        if self.env.get("output_path"):
            output = self.env["output_path"]
        else:
            output = os.path.join(
                self.env["RECIPE_CACHE_DIR"], self.env["NAME"] + "_unpack"
            )
        if not os.path.isdir(output):
            os.makedirs(output)

        self.output(
            "Extracting xip archive, please be patient, this could take a long time..."
        )
        # xip expands into the current working directory.
        os.chdir(output)
        cmd = ["/usr/bin/xip", "--expand", xip_path]
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (out, err) = proc.communicate()
        # Fail on the exit status (the authoritative signal) rather than on
        # the mere presence of stderr output, and decode the message
        # instead of raising raw bytes.
        if proc.returncode != 0:
            raise ProcessorError(err.decode("utf-8", errors="replace"))
        self.output("Finished xip unpack.")
Пример #4
0
    def main(self):
        """Copy a source path (possibly globbed, possibly inside a dmg) to
        the destination path."""
        source_path = self.env["source_path"]
        # Check if we're trying to copy something inside a dmg.
        (dmg_path, dmg, dmg_source_path) = self.parsePathForDMG(source_path)
        try:
            if dmg:
                # Mount dmg and copy path inside.
                mount_point = self.mount(dmg_path)
                source_path = os.path.join(mount_point, dmg_source_path)

            # Resolve the (possibly wildcarded) path with glob.
            matches = glob(source_path)
            if not matches:
                raise ProcessorError(
                    "Error processing path '%s' with glob. " % source_path
                )
            matched_source_path = matches[0]

            if len(matches) > 1:
                self.output(
                    "WARNING: Multiple paths match 'source_path' glob '%s':"
                    % source_path
                )
                for match in matches:
                    self.output("  - %s" % match)

            # Only mention the glob resolution when a wildcard was present.
            if any(c in source_path for c in "*?[]!"):
                self.output(
                    "Using path '%s' matched from globbed '%s'."
                    % (matched_source_path, source_path)
                )

            # do the copy
            self.copy(
                matched_source_path,
                self.env["destination_path"],
                overwrite=self.env.get("overwrite"),
            )
        finally:
            if dmg:
                self.unmount(dmg_path)
    def get_g2m_info(self, base_url):
        """Process the JSON data from the latest release.

        Downloads the release feed (plain or gzipped JSON), finds the
        build with the highest buildNumber and normalizes its Mac
        download URL to a stable filename.

        Args:
            base_url: URL of the GoToMeeting JSON metadata feed.

        Returns:
            Tuple of (g2m_url, g2m_build).

        Raises:
            ProcessorError: if the newest build has no macDownloadUrl.
        """
        # Prepare download file path.
        download_dir = os.path.join(self.env["RECIPE_CACHE_DIR"], "downloads")
        meta_path = os.path.join(download_dir, "meta")
        # exist_ok avoids the racy try/except dance around makedirs.
        os.makedirs(download_dir, exist_ok=True)

        # Download JSON feed (or gzipped JSON feed) to a file.
        meta_file = self.download_to_file(base_url, meta_path)

        # Sometimes the base URL is compressed as gzip, sometimes it's not.
        try:
            with open(meta_file, "rb") as f:
                jsondata = json.load(f)
                self.output("Encoding: json")
        except ValueError:
            with gzip.open(meta_file, "rb") as f:
                jsondata = json.load(f)
                self.output("Encoding: gzip")

        # Newest build = highest numeric buildNumber.
        max_build = max(jsondata["activeBuilds"], key=lambda x: int(x["buildNumber"]))
        g2m_url = max_build.get("macDownloadUrl")
        if not g2m_url:
            raise ProcessorError(
                "No download URL for the latest release "
                "found in the base_url JSON feed."
            )
        # Force a stable filename regardless of what the feed links to.
        url_parts = g2m_url.split("/")
        url_parts[-1] = "GoToMeeting.dmg"
        g2m_url = "/".join(url_parts)

        g2m_build = str(max_build["buildNumber"])

        return g2m_url, g2m_build
 def readPlist(self, filepath):
     """
     Read a .plist file from filepath.  Return the unpacked root object
     (which is usually a dictionary).

     Raises ProcessorError if the data cannot be deserialized.
     """
     # Load the raw plist bytes via Foundation, then deserialize with
     # mutable containers so callers can modify the returned structure.
     plistData = NSData.dataWithContentsOfFile_(filepath)
     (
         dataObject,
         dummy_plistFormat,
         error,
     ) = NSPropertyListSerialization.propertyListFromData_mutabilityOption_format_errorDescription_(
         plistData, NSPropertyListMutableContainers, None, None
     )
     if dataObject is None:
         # error is presumably a bridged NSString (or None) — coerce to
         # plain ASCII for the exception message.
         if error:
             error = error.encode("ascii", "ignore")
         else:
             error = "Unknown error"
         errmsg = "%s in file %s" % (error, filepath)
         raise ProcessorError(errmsg)
     else:
         return dataObject
Пример #7
0
    def check_delta(self):
        """Return True if the TEST patch policy is old enough to promote.

        Looks up the "<patch> Test" policy, reads the date embedded in its
        Self Service description (expected shape: "word version (YYYY-MM-DD)"),
        and compares its age in days against self.pkg.delta.

        Returns:
            True when the policy is enabled, has a well-formed description,
            and its date is at least self.pkg.delta days old; else False.

        Raises:
            ProcessorError: if the Test policy does not exist.
        """
        now = datetime.datetime.now()
        name = f"{self.pkg.patch} Test"
        self.logger.debug(f"About to policy_list, name: {name}")
        policies = self.policy_list()
        self.logger.debug("done policy_list")
        try:
            policy_id = policies[name]
        except KeyError as err:
            # chain the KeyError so the original lookup failure is visible
            raise ProcessorError("Test policy key missing: {}".format(name)) from err
        self.logger.debug(f"Got valid policy id: {policy_id}")
        policy = self.policy(str(policy_id))
        # idiomatic truthiness test instead of "== False"
        if not policy["general"]["enabled"]:
            self.logger.debug("TEST patch policy disabled")
            return False
        self.logger.debug(
            f"['general']['enabled'] :{policy['general']['enabled']}"
        )
        description = policy["user_interaction"][
            "self_service_description"
        ].split()
        # we may have found a patch policy with no proper description yet
        if len(description) != 3:
            return False
        # only the date portion is used; the title is ignored
        _title, datestr = description[1:]

        date = datetime.datetime.strptime(datestr, "(%Y-%m-%d)")
        delta = now - date
        self.logger.debug(f"    Description:{description}")
        self.logger.debug(f"    Datestr    :{datestr}")
        self.logger.debug(f"    Date       :{date}")
        self.logger.debug(f"    Delta      :{delta.days}")
        self.logger.debug(f"    PkgDelta   :{self.pkg.delta}")

        return delta.days >= self.pkg.delta
    def main(self):
        """Write self.env['properties'] to file_path as sorted key=value
        lines (Java-properties style), replacing the ConfigParser section
        header with an AutoPkg comment.

        NOTE(review): Python 2-era code — SafeConfigParser, iteritems()
        and writing str objects to a 'wb' file all assume Python 2.
        """
        cp = ConfigParser.SafeConfigParser()
        # preserve key case (the default optionxform lower-cases keys)
        cp.optionxform = str

        # sort properties by key for a deterministic output file
        sort = sorted(self.env['properties'].items(), key=lambda t: t[0])
        properties = OrderedDict(sort)

        for key, value in properties.iteritems():
            cp.set('', str(key), value)
        # Write the file out
        with open(self.env['file_path'], 'wb') as f:
            try:
                # EqualsSpaceRemover presumably rewrites "k = v" as "k=v"
                # while the parser writes — TODO confirm against its class.
                cp.write(EqualsSpaceRemover(f))
            except IOError as err:
                raise ProcessorError(err)
        # Now delete the first line, the section header
        with open(self.env['file_path'], 'rb') as old:
            lines = old.readlines()
        lines[0] = "# Generated by AutoPkg\n"
        with open(self.env['file_path'], 'wb') as new:
            for line in lines:
                new.write(line)
Пример #9
0
 def find_template(self):
     '''Locate the recipe template.

     Returns template_path as-is when it already exists; otherwise, for a
     relative path, searches the recipe directory and every directory that
     contains a parent recipe. Raises ProcessorError when nothing
     matches.'''
     template_path = self.env['template_path']
     if os.path.exists(template_path):
         return template_path
     if not template_path.startswith("/"):
         # Candidate directories: the recipe dir plus each parent-recipe
         # directory (de-duplicated).
         search_dirs = [self.env.get('RECIPE_DIR')]
         parents = self.env.get("PARENT_RECIPES")
         if parents:
             search_dirs.extend(
                 {os.path.dirname(item) for item in parents}
             )
         for directory in search_dirs:
             candidate = os.path.join(directory, template_path)
             if os.path.exists(candidate):
                 return candidate
     raise ProcessorError("Can't find %s" % template_path)
Пример #10
0
    def send_request(self, request):
        '''Send an install request to autopkginstalld and stream its reply.

        Returns the text following "OK:" on success; raises ProcessorError
        with the collected error text otherwise.'''
        self.socket.send(FoundationPlist.writePlistToString(request))
        with os.fdopen(self.socket.fileno()) as fileref:
            while True:
                data = fileref.readline()
                if not data:
                    # connection closed with no verdict
                    break
                if data.startswith("OK:"):
                    return data.replace("OK:", "").rstrip()
                if data.startswith("ERROR:"):
                    break
                # informational/progress line from the daemon
                self.output(data.rstrip())

        errors = data.rstrip().split("\n")
        if not errors:
            errors = ["ERROR:No reply from autopkginstalld (crash?), "
                      "check system logs"]
        raise ProcessorError(
            ", ".join([s.replace("ERROR:", "") for s in errors]))
Пример #11
0
    def main(self):
        """Query the SentinelOne console for the newest agent package and
        publish url/filename/version/sha1 into the environment."""
        # Silence the SDK's module logging unless verbose is exactly '3'.
        if self.env.get('verbose') != '3':
            for logger_name in (None, 'MgmtSdk'):
                logging.getLogger(logger_name).setLevel(logging.WARNING)
        s1_management = Management(
            hostname=self.env.get('S1_CONSOLE_HOSTNAME'),
            api_token=self.env.get('S1_API_TOKEN'))
        updates_json = self.get_s1_updates(s1_management,
                                           self.env.get('S1_PACKAGE_STATUS'),
                                           self.env.get('S1_PACKAGE_VERSION'))
        try:
            s1_package = updates_json["data"][0]
        except IndexError:
            raise ProcessorError("No packages were found.")

        # Publish the package metadata for downstream processors.
        self.env['url'] = s1_package['link']
        self.env['filename'] = s1_package['fileName']
        self.env['version'] = s1_package['version']
        self.env['sha1'] = s1_package['sha1']
Пример #12
0
    def get_kyngchaos_dmg_url(self, base_url, product_name, version):
        """Build (and sanity-check) the Kyngchaos DMG download URL for a
        product, resolving 'latest' to the newest available version."""
        # Map the short product name to (directory, full product name),
        # e.g. gdal -> frameworks/GDAL_Complete.
        known_products = {
            'qgis': ('qgis', 'QGIS'),
            'gdal': ('frameworks', 'GDAL_Complete'),
            'numpy': ('python', 'NumPy'),
            'matplotlib': ('python', 'matplotlib'),
        }
        try:
            product_dir, product_name = known_products[product_name]
        except KeyError:
            raise ValueError('product name not recognized')

        if version == 'latest':
            version = self.get_latest_version(product_name, product_dir)

        filename = product_name + '-' + version + '.dmg'

        # Construct download URL.
        dmg_url = "/".join((base_url, product_dir, filename))

        # Try to open download link.
        try:
            f = urllib2.urlopen(dmg_url)
            f.close()
        except BaseException as e:
            raise ProcessorError("Can't download %s: %s" % (dmg_url, e))

        # Return URL.
        return dmg_url
Пример #13
0
    def policy_list(self):
        """Fetch all patch policies from Jamf Pro.

        Hits the server front page first to capture the load-balancer
        cookie (APBALANCEID normally, AWSALB for premium Jamf Cloud) so
        subsequent requests stick to the same server.

        Returns:
            Dict mapping policy name -> policy id.

        Raises:
            ProcessorError: if the policy listing request fails.
        """
        # the front page will give us the cookies
        r = requests.get(self.base)
        cookie_value = r.cookies.get("APBALANCEID")
        if cookie_value:
            # we are NOT premium Jamf Cloud
            self.cookies = dict(APBALANCEID=cookie_value)
            # BUG FIX: the trailing comma previously built a tuple instead
            # of %-formatting the string.
            c_cookie = "APBALANCEID=%s" % cookie_value
            self.logger.debug("APBALANCEID found")
        else:
            cookie_value = r.cookies["AWSALB"]
            self.cookies = dict(AWSALB=cookie_value)
            c_cookie = "AWSALB=%s" % cookie_value
            self.logger.debug("APBALANCEID not found")

        url = self.base + "/patchpolicies"
        ret = requests.get(
            url, auth=self.auth, headers=self.hdrs, cookies=self.cookies
        )
        self.logger.debug(
            "GET policy list url: %s status: %s" % (url, ret.status_code)
        )
        if ret.status_code != 200:
            raise ProcessorError(
                "GET failed URL: %s Err: %s" % (url, ret.status_code)
            )
        # turn the list into a dictionary keyed on the policy name
        d = {}
        for p in ret.json()["patch_policies"]:
            d[p["name"]] = p["id"]
        return d
    def get_url(self, version):
        '''Get the URL of the TrueCrypt DMG

        The TrueCrypt website has an HTML form that, when POSTed, returns
        a 302 redirect to the actual DMG download. Handle all of that, as
        ugly as it is, using urllib2.

        Raises ProcessorError if the site cannot be reached or no
        redirect is returned.
        '''

        # no easy way to *not* follow redirects with urllib2, so do this
        class NoRedirectHandler(urllib2.HTTPRedirectHandler):
            def redirect_request(self, req, fp, code, msg, hdrs, newurl):
                # Returning None makes urllib2 raise HTTPError for the
                # 302, which we catch below to read the Location header.
                pass

        submit_form = {
            'DownloadVersion': version,
            'MacOSXDownload': 'Download',
        }

        url = None
        try:
            req = urllib2.Request(DLS_URL, urlencode(submit_form))

            opener = urllib2.build_opener(NoRedirectHandler)

            f = opener.open(req)
            f.read()
            f.close()
        except Exception as e:
            if isinstance(e, urllib2.HTTPError) and e.code == 302:
                url = e.headers['Location']
            else:
                raise ProcessorError('Could not retrieve URL: %s' % DLS_URL)

        # BUG FIX: previously a non-redirect (e.g. plain 200) response fell
        # through to use an undefined 'url' and crashed with NameError.
        if url is None:
            raise ProcessorError(
                'Could not retrieve URL: %s' % DLS_URL)

        # hack to re-assemble URL with urlencoded filename part
        url_split = url.split('/')
        new_url = '/'.join(url_split[0:3]) + '/'
        new_url += pathname2url('/'.join(url_split[3:]))

        return new_url
Пример #15
0
class PkgExtractor(DmgMounter):
    # NOTE(review): Python 2 syntax below (`except X, err`) — this class
    # will not compile under Python 3. The extract_payload method also
    # appears truncated: `info` is read but never used afterwards.
    description = (
        "Extracts the contents of a bundle-style pkg (possibly on a disk image) to pkgroot."
    )
    input_variables = {
        "pkg_path": {
            "required": True,
            "description": "Path to a package.",
        },
        "extract_root": {
            "required": True,
            "description":
            "Path to where the new package root will be created.",
        },
    }
    output_variables = {}
    # Keep the runtime docstring in sync with `description` (AutoPkg
    # processor convention).
    __doc__ = description

    def extract_payload(self, pkg_path, extract_root):
        '''Extract package contents to extract_root, preserving intended
         directory structure'''
        # Bundle-style pkgs keep metadata and the payload under Contents/.
        info_plist = os.path.join(pkg_path, "Contents/Info.plist")
        archive_path = os.path.join(pkg_path, "Contents/Archive.pax.gz")
        if not os.path.exists(info_plist):
            raise ProcessorError("Info.plist not found in pkg")
        if not os.path.exists(archive_path):
            raise ProcessorError("Archive.pax.gz not found in pkg")

        # Start from a clean extract root.
        if os.path.exists(extract_root):
            try:
                shutil.rmtree(extract_root)
            except (OSError, IOError), err:
                raise ProcessorError("Failed to remove extract_root: %s" % err)

        try:
            info = FoundationPlist.readPlist(info_plist)
        except FoundationPlist.FoundationPlistException, err:
            raise ProcessorError("Failed to read Info.plist: %s" % err)
Пример #16
0
    def main(self):
        '''Resolve the Adobe Acrobat/Reader update URL and build pkginfo.

        Validates major_version, injects OS/major-version values into the
        URL template variables, fetches update metadata and publishes url,
        version and additional_pkginfo into the environment.

        Raises:
            ProcessorError: if major_version is unsupported.
        '''
        target_os = self.env.get("target_os", TARGET_DEFAULT)
        major_version = self.env["major_version"]
        get_version = self.env.get("version", VERSION_DEFAULT)
        if major_version not in SUPPORTED_VERS:
            raise ProcessorError(
                "major_version %s not one of those supported: %s"
                % (major_version, ", ".join(SUPPORTED_VERS)))

        # Adobe require a target OS X version to be passed to the URL on more recent updates
        target_os_parsed = self.process_target_os(target_os)
        # a `global _URL_VARS` statement is not needed to modify key/value
        # pairs of an existing module-level dict
        _URL_VARS["MAJREV"] = major_version
        _URL_VARS["OS_VER_MAJ"] = target_os_parsed[0]
        _URL_VARS["OS_VER_MIN"] = target_os_parsed[1]

        munki_update_name = self.env.get("munki_update_name", "")
        if not munki_update_name:
            munki_update_name = self.process_url_vars(MUNKI_UPDATE_NAME_DEFAULT)
        (url, version, prev_version) = self.get_acrobat_metadata(get_version)

        # Call kept for any side effects; the parsed value was never used
        # (the original bound it to an unused local).
        self.process_version(version)

        new_pkginfo = {}

        # if our required version is something other than a base version
        # should match a version ending in '.0.0', '.00.0', '.00.00', etc.
        if not re.search(r"\.[0]+\.[0]+", prev_version):
            new_pkginfo["requires"] = ["%s-%s"
                                       % (munki_update_name, prev_version)]
            self.output("Update requires previous version: %s" % prev_version)
        new_pkginfo["minimum_os_version"] = "%s.0" % target_os
        new_pkginfo["version"] = version
        self.env["additional_pkginfo"] = new_pkginfo
        self.env["url"] = url
        self.env["version"] = version
        self.output("Found URL %s" % self.env["url"])
    def main(self):
        """Determine the FileMaker Pro installer URL, version and filename.

        If do_full_installer is 1, rewrites the URL to the full installer
        DMG; otherwise uses the updater URL as found.

        Raises:
            ProcessorError: wrapping any unexpected error.
        """
        try:
            update = self.getLatestFilemakerInstaller()
            # extract the version from the URL string - this is a weird setup...
            # (hoisted: both branches previously did this identically)
            update["version"] = self.version_matcher(update["url"])
            if self.env.get("do_full_installer") == 1:
                url = ("http://fmdl.filemaker.com/maint/107-85rel/fmp_%s.dmg" %
                       update["version"])
            else:
                url = update["url"]

            self.output("URL found '%s'" % url, verbose_level=2)
            self.env["version"] = update["version"]
            self.env["url"] = url
            self.env["package_name"] = update["name"]
            self.env["package_file"] = os.path.basename(urlsplit(url).path)
        except Exception as err:
            # chain so the original traceback survives the wrapping
            raise ProcessorError(err) from err
    def main(self):
        '''Find the last version number and URL'''
        # Recipe input can override the defaults configured on the class.
        if 'source_url' in self.env:
            self.source_url = self.env['source_url']
        if 'url_pattern' in self.env:
            self.url_pattern = self.env['url_pattern']

        try:
            urls_by_version = self.get_all_downlaod_URLs_per_version()
            last_version = self.get_highest_version(urls_by_version.keys())
            last_version_url = urls_by_version[last_version]
        except Exception as e:
            raise ProcessorError("Could not get a download URL: %s" % e)

        self.env["version"] = last_version
        # download links may be relative to the page they were scraped from
        self.env["url"] = urlparse.urljoin(self.source_url, last_version_url)
        self.output("Found download URL for version %s: %s" %
                    (self.env["version"], self.env["url"]))
    def get_latest_search_url(self):
        """Scrape the download page for the latest thank-you/search URL."""
        # Default the curl binary if the recipe didn't provide one.
        if 'CURL_PATH' not in self.env:
            self.env['CURL_PATH'] = '/usr/bin/curl'

        platform = self.env.get('platform', DEFAULT_PLATFORM)
        re_searchurl = re.compile(
            r'%s/thank-you\?thank-you=personal&os=%s&nid=[0-9]+' %
            (DOWNLOAD_URL, platform))

        search_url, smatch_dict = self.get_url_and_search(
            DOWNLOAD_URL, re_searchurl)

        if not search_url:
            raise ProcessorError("Can't find search_url from %s." %
                                 DOWNLOAD_URL)
        # Decode HTML entities (e.g. &amp;) before using the URL.
        search_url = HTMLParser.HTMLParser().unescape(search_url)
        self.output("Search URL: %s" % search_url)

        return search_url
Пример #20
0
    def fetch_manifest(self, manifest_url):
        """Fetch the manifest.xml at manifest_url which contains asset download and proxy data information.
        Not all products have a proxy_data element

        :returns A tuple of (manifest, proxy) ElementTree objects
        """
        self.output('Fetching manifest.xml from {}'.format(manifest_url))
        request = urllib2.Request(manifest_url, headers=HEADERS)
        content = urllib2.urlopen(request).read()

        # Write out the manifest for debugging purposes
        with open('{}/manifest.xml'.format(self.env['RECIPE_CACHE_DIR']), 'w+') as fd:
            fd.write(content)

        manifest = ElementTree.fromstring(content)

        # proxy_data is required by this processor even though some
        # products' manifests omit it.
        proxy_data_url_el = manifest.find('asset_list/asset/proxy_data')
        if proxy_data_url_el is None:
            raise ProcessorError('Could not find proxy data URL in manifest, aborting since your package requires it.')

        proxy_data = self.fetch_proxy_data(proxy_data_url_el.text)

        return manifest, proxy_data
Пример #21
0
    def _find_pkginfo_files_in_repo(self, pkginfo, file_extension="plist"):
        """Returns the full path to pkginfo file in the repo."""

        destination_path = os.path.join(self.env["MUNKI_REPO"], "pkgsinfo")

        if not os.path.exists(destination_path):
            raise ProcessorError(
                f"Did not find pkgsinfo directory at {destination_path}"
            )

        if len(file_extension) > 0:
            file_extension = "." + file_extension.strip(".")

        pkginfo_basename = f"{pkginfo['name']}-{pkginfo['version'].strip()}"

        file_list = glob.iglob(
            os.path.join(
                destination_path, "**", f"{pkginfo_basename}*{file_extension}"
            ),
            recursive=True,
        )

        return file_list
Пример #22
0
    def main(self):
        """Return a download URL and info for a Unity3D component"""
        revision, version = self.get_latest_version()

        # The per-release ini file lists every installable component.
        ini_url = "%s/%s/unity-%s-osx.ini" % (BASE_URL, revision, version)
        try:
            data = urllib2.urlopen(ini_url)
        except BaseException as err:
            raise ProcessorError(
                "Unexpected error retrieving ini file: '%s'" % err)

        parser = SafeConfigParser()
        parser.readfp(data)

        component_name = self.env.get('component_name', DEFAULT_COMPONENT)

        for key, value in parser.items(component_name):
            # 'url' is relative in the ini; everything else passes through.
            if key == 'url':
                self.env['url'] = "%s/%s/%s" % (BASE_URL, revision, value)
            else:
                self.env[key] = value

            self.output("%s: %s = %s" % (component_name, key, self.env.get(key)))
Пример #23
0
    def main(self):
        """Provide a yWorks product download URL"""
        product_name = self.env["product_name"]
        # The current version lives in a plain text file, e.g.
        # http://www.yworks.com/products/yed/demo/yEd-CurrentVersion.txt
        base_url = "http://www.yworks.com/products"
        check_url = "%s/%s/demo/%s-CurrentVersion.txt" % (
            base_url, product_name.lower(), product_name)

        # Get the text file
        try:
            fref = urllib2.urlopen(check_url)
            txt = fref.read()
            fref.close()
        except BaseException as err:
            raise ProcessorError("Can't download %s: %s" % (check_url, err))

        # Create download link from the published version string
        latest = txt.rstrip()
        base_prod_url = "http://www.yworks.com/products"
        download_url = "%s/%s/demo/%s-%s_with-JRE8.dmg" % (
            base_prod_url, product_name.lower(), product_name, latest)
        self.env["url"] = download_url
        self.output("Found URL as %s" % self.env["url"])
    def curl_filename(self, url, curl_path=None):
        """Follow redirects with curl and return the effective URL plus
        its final path component (the real download filename)."""
        curl_args = [
            '--silent', '--location', '--head', '--write-out',
            '%{url_effective}', '--url', url, '--output', '/dev/null'
        ]

        if curl_path is None:
            curl_path = [self.env['CURL_PATH']]

        curl_cmd = curl_path + curl_args
        self.output(' '.join(curl_cmd), verbose_level=3)
        proc = subprocess.Popen(curl_cmd,
                                shell=False,
                                bufsize=1,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)

        file_url, stderr_out = proc.communicate()
        if stderr_out:
            raise ProcessorError(stderr_out)
        # Everything after the final slash is the filename.
        return file_url, file_url.rpartition("/")[2]
Пример #25
0
    def get_version(self, filepath):
        """Read a version from a plist (or an app bundle's Info.plist).

        Args:
            filepath: Path to a plist file or a .app bundle.

        Returns:
            The value of env "plist_version_key" (default
            CFBundleShortVersionString), or None if filepath is missing.

        Raises:
            ProcessorError: if the plist cannot be read or parsed.
        """
        if os.path.exists(filepath):
            # try to determine the version

            version_basename = os.path.basename(filepath)

            # is it an app bundle?
            if version_basename.endswith(".app"):
                filepath = os.path.join(filepath, "Contents", "Info.plist")
        else:
            self.output("Cannot determine version. %s does not exist." % (filepath))
            return None

        try:
            # plistlib.readPlist was removed in Python 3.9; use load().
            with open(filepath, "rb") as f:
                plist = plistlib.load(f)
            version_key = self.env.get("plist_version_key", "CFBundleShortVersionString")
            version = plist.get(version_key, None)
            self.output("Found version %s in file %s" % (version, filepath))

        except Exception as err:
            raise ProcessorError(err) from err

        return version
Пример #26
0
    def mount_smb_share(self, smb_path, mount_point):
        """Mount the smb share at mount_point, creating it if needed.

        Raises:
            ProcessorError: if /sbin/mount reports an error on stderr.
        """
        # exist_ok avoids a race between an exists() check and makedirs().
        os.makedirs(mount_point, exist_ok=True)

        cmd = [
            "/sbin/mount",
            "-t",
            "smbfs",
            smb_path,
            mount_point,
        ]

        proc = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        (cmd_out, cmd_err) = proc.communicate()
        if cmd_out:
            self.output("Result:\n%s" % (cmd_out.decode("ascii")))
        elif cmd_err:
            raise ProcessorError(cmd_err.decode("ascii"))
        else:
            # mount produced no output at all — assume success
            self.output("%s mounted" % mount_point)
Пример #27
0
 def main(self):
     """Copy env VERSION into the PackageInfo of the pkg matching PKGID.

     Reads the unpacked Distribution file to find the pkg-ref for
     env PKGID, then rewrites that package's PackageInfo 'version'
     attribute.

     Raises:
         ProcessorError: if PKGID is not present in either file.
     """
     distribution_path = "{}/{}".format(self.env['unpacked_path'],
                                        "Distribution")
     self.output(
         "Distribution file: {distribution_path}".format(
             distribution_path=distribution_path), 3)
     distribution = ET.parse(distribution_path)
     # pkg-ref with our id and a version attribute; its text is the
     # '#<name>.pkg' fragment pointing at the component package.
     pkg_ref_xpath = './/pkg-ref[@id="{}"][@version]'.format(
         self.env['PKGID'])
     pkg = getattr(distribution.find(pkg_ref_xpath), 'text', None)
     if pkg is None:
         raise ProcessorError("PKGID was not found.")
     pkg_path = pkg.lstrip('#')
     self.output("Package file: {pkg_path}".format(pkg_path=pkg_path), 3)
     packageinfo_path = "{}/{}/{}".format(self.env['unpacked_path'],
                                          pkg_path, "PackageInfo")
     self.output(
         "PackageInfo file: {packageinfo_path}".format(
             packageinfo_path=packageinfo_path), 3)
     packageinfo = ET.parse(packageinfo_path)
     packageinfo_xpath = '[@identifier="{}"]'.format(self.env['PKGID'])
     pkg_el = packageinfo.find(packageinfo_xpath)
     # BUG FIX: a missing identifier previously crashed with
     # AttributeError on None instead of raising a clear error.
     if pkg_el is None:
         raise ProcessorError("PKGID was not found.")
     pkg_el.set('version', self.env['VERSION'])
     packageinfo.write(packageinfo_path)
Пример #28
0
    def find_path_for_relpath(self, relpath):
        '''Resolve relpath against the known recipe directories.

        Search order is:
            RECIPE_CACHE_DIR
            RECIPE_DIR
            PARENT_RECIPE directories

        Returns:
            The normalized path of the first match.

        Raises:
            ProcessorError: if relpath is found in none of them.
        '''
        cache_dir = self.env.get('RECIPE_CACHE_DIR')
        recipe_dir = self.env.get('RECIPE_DIR')
        # Skip unset dirs: os.path.join(None, ...) raised TypeError before.
        search_dirs = [d for d in (cache_dir, recipe_dir) if d]
        if self.env.get("PARENT_RECIPES"):
            # also look in the directories containing the parent recipes
            parent_recipe_dirs = list(
                set([
                    os.path.dirname(item)
                    for item in self.env["PARENT_RECIPES"]
                ]))
            search_dirs.extend(parent_recipe_dirs)
        for directory in search_dirs:
            test_item = os.path.join(directory, relpath)
            if os.path.exists(test_item):
                return os.path.normpath(test_item)

        raise ProcessorError("Can't find %s" % relpath)
Пример #29
0
    def validate_tools(self, print_path=False):
        """Validate connectivity and version of the FileWave admin tools.

        NOTE(review): Python 2 syntax (print statement, `except X, e`) —
        this block will not compile under Python 3.

        Connects with FWAdminClient, enforces a minimum server major
        version of 10 (downgraded to a warning when FW_RELAX_VERSION is
        set), then confirms filesets can be listed. Results are recorded
        on can_list_filesets, exit_status_message and exception.
        """
        self.relaxed_version_check = self.env.get('FW_RELAX_VERSION', False)

        self.client = FWAdminClient(admin_name=self.env['FW_ADMIN_USER'],
                                    admin_pwd=self.env['FW_ADMIN_PASSWORD'],
                                    server_host=self.env['FW_SERVER_HOST'],
                                    server_port=self.env['FW_SERVER_PORT'],
                                    print_output=False)

        if print_path:
            print "Path to Admin Tool:", FWAdminClient.get_admin_tool_path()

        self.version = self.client.get_version()
        self.major, self.minor, self.patch = self.version.split('.')
        if int(self.major) < 10:
            # With the relaxed check this is only a warning; otherwise fatal.
            if self.relaxed_version_check:
                self.output(
                    "FileWave Version 10.0 must be installed - you have version %s"
                    % (self.version))
            else:
                raise ProcessorError(
                    "FileWave Version 10.0 must be installed - you have version %s"
                    % (self.version))

        # Assume failure until the fileset listing below succeeds.
        self.can_list_filesets = "No"
        self.exit_status_message = "VALIDATION OK"
        self.exception = None

        try:
            the_filesets = self.client.get_filesets()
            # Count the (presumably iterable) filesets to prove we can
            # actually enumerate them — TODO confirm get_filesets() shape.
            count_filesets = sum(1 for i in the_filesets)
            self.can_list_filesets = "Yes" if count_filesets >= 0 else "No"
        except CalledProcessError, e:
            self.exception = e
            self.exit_status_message = FWAdminClient.ExitStatusDescription[
                e.returncode][1]
    def upload_policy(self,
                      jamf_url,
                      enc_creds,
                      policy_name,
                      template_xml,
                      obj_id=None):
        """Upload a policy to Jamf Pro, retrying up to 5 times.

        PUTs to the existing object when obj_id is given, otherwise POSTs
        to id/0 to create a new policy.

        Returns:
            The final response object from self.curl.

        Raises:
            ProcessorError: if the upload still fails after 5 attempts.
        """
        # if we find an object ID we put, if not, we post
        if obj_id:
            url = "{}/JSSResource/policies/id/{}".format(jamf_url, obj_id)
        else:
            url = "{}/JSSResource/policies/id/0".format(jamf_url)

        self.output("Uploading Policy...")

        count = 0
        while True:
            count += 1
            self.output("Policy upload attempt {}".format(count),
                        verbose_level=2)
            method = "PUT" if obj_id else "POST"
            r = self.curl(method, url, enc_creds, template_xml)
            # check HTTP response
            if self.status_check(r, "Policy", policy_name) == "break":
                break
            if count > 5:
                self.output(
                    "WARNING: Policy upload did not succeed after 5 attempts")
                # BUG FIX: previously always reported "POST" even when the
                # request was a PUT.
                self.output("\nHTTP {} Response Code: {}".format(
                    method, r.status_code))
                raise ProcessorError("ERROR: Policy upload failed ")
            # back off before retrying
            sleep(30)

        # clean up temp files
        self.clear_tmp_dir()

        return r