コード例 #1
0
ファイル: Validate.py プロジェクト: SrikanthSrinivasan/Arelle
 def validateRssFeed(self):
     """Validate every instance referenced by the loaded RSS feed document.

     For each RSS item: load its zipped instance, run the instance
     validator over it, record the results on the item, refresh the view,
     and give "Validate.RssItem" plugins a chance to run.  Failures on one
     item are logged and do not stop processing of the remaining items.
     """
     self.modelXbrl.info("info", "RSS Feed", modelDocument=self.modelXbrl)
     from arelle.FileSource import openFileSource
     for rssItem in self.modelXbrl.modelDocument.rssItems:
         self.modelXbrl.info("info", _("RSS Item %(accessionNumber)s %(formType)s %(companyName)s %(period)s"),
             modelObject=rssItem, accessionNumber=rssItem.accessionNumber, formType=rssItem.formType, companyName=rssItem.companyName, period=rssItem.period)
         # pre-set to None so the except/cleanup path below can test it
         # even when ModelXbrl.load itself raised
         modelXbrl = None
         try:
             modelXbrl = ModelXbrl.load(self.modelXbrl.modelManager, 
                                        openFileSource(rssItem.zippedUrl, self.modelXbrl.modelManager.cntlr),
                                        _("validating"))
             self.instValidator.validate(modelXbrl, self.modelXbrl.modelManager.formulaOptions.typedParameters())
             self.instValidator.close()
             rssItem.setResults(modelXbrl)
             # refresh the UI element showing this RSS item's status
             self.modelXbrl.modelManager.viewModelObject(self.modelXbrl, rssItem.objectId())
             for pluginXbrlMethod in pluginClassMethods("Validate.RssItem"):
                 pluginXbrlMethod(self, modelXbrl, rssItem)
             modelXbrl.close()
         except Exception as err:
             self.modelXbrl.error("exception",
                 _("RSS item validation exception: %(error)s, instance: %(instance)s"),
                 modelXbrl=(self.modelXbrl, modelXbrl),
                 instance=rssItem.zippedUrl, error=err,
                 exc_info=True)
             # best-effort cleanup; close() may itself raise after a failure
             try:
                 self.instValidator.close()
                 if modelXbrl is not None:
                     modelXbrl.close()
             except Exception as err:
                 pass
         del modelXbrl  # completely dereference
コード例 #2
0
ファイル: ModelManager.py プロジェクト: acsone/Arelle
 def getReportNameFromSchemaRef(self, entryPoint):
     """Return the report name whose entry-point URL equals *entryPoint*.

     Walks the enabled taxonomy packages (sorted by name) and looks up
     the entry point in each package's nameToUrls table.  Returns None
     when no enabled package defines a matching report.
     """
     from arelle.FileSource import openFileSource
     from arelle.PackageManager import parsePackage, packagesConfig
     for i, packageInfo in enumerate(sorted(packagesConfig.get("packages", []),
                                            key=lambda packageInfo: packageInfo.get("name")),
                                     start=1):
         name = packageInfo.get("name", "package{}".format(i))
         URL = packageInfo.get("URL")
         if name and URL and packageInfo.get("status") == "enabled":
             filesource = openFileSource(URL, cntlr=self.cntlr)
             try:
                 filenames = filesource.dir
                 if filenames is not None:   # an IO or other error can return None
                     metadataFiles = filesource.taxonomyPackageMetadataFiles
                     if not metadataFiles:
                         # fix: previously metadataFiles[0] raised IndexError
                         # on packages without metadata files
                         continue
                     metadataFile = metadataFiles[0]
                     metadata = filesource.url + os.sep + metadataFile
                     taxonomyPackage = parsePackage(self.cntlr, filesource, metadata,
                                                    os.sep.join(os.path.split(metadata)[:-1]) + os.sep)
                     nameToUrls = taxonomyPackage["nameToUrls"]
                     for reportName, reportInfo in nameToUrls.items():
                         if reportInfo[1] == entryPoint:
                             return reportName
             finally:
                 filesource.close()  # fix: file source was previously leaked
     return None
         
     
コード例 #3
0
 def getReportNameFromSchemaRef(self, entryPoint):
     """Return the report name whose entry-point URL equals *entryPoint*.

     Scans the enabled taxonomy packages (sorted by name) for an entry in
     the package's nameToUrls mapping whose URL matches; returns None if
     no enabled package matches.
     """
     from arelle.FileSource import openFileSource
     from arelle.PackageManager import parsePackage, packagesConfig
     for i, packageInfo in enumerate(sorted(
             packagesConfig.get("packages", []),
             key=lambda packageInfo: packageInfo.get("name")),
                                     start=1):
         name = packageInfo.get("name", "package{}".format(i))
         URL = packageInfo.get("URL")
         if name and URL and packageInfo.get("status") == "enabled":
             filesource = openFileSource(URL, cntlr=self.cntlr)
             try:
                 filenames = filesource.dir
                 if filenames is not None:  # an IO or other error can return None
                     metadataFiles = filesource.taxonomyPackageMetadataFiles
                     if not metadataFiles:
                         # fix: guard IndexError on packages with no metadata
                         continue
                     metadataFile = metadataFiles[0]
                     metadata = filesource.url + os.sep + metadataFile
                     taxonomyPackage = parsePackage(
                         self.cntlr, filesource, metadata,
                         os.sep.join(os.path.split(metadata)[:-1]) + os.sep)
                     nameToUrls = taxonomyPackage["nameToUrls"]
                     for reportName, reportInfo in nameToUrls.items():
                         if reportInfo[1] == entryPoint:
                             return reportName
             finally:
                 filesource.close()  # fix: file source was previously leaked
     return None
コード例 #4
0
def deprecatedConceptDatesFile(modelManager, abbrNs, latestTaxonomyDoc):
    """Return the path of a JSON cache mapping deprecated concept names to
    their deprecation dates for the taxonomy abbreviated *abbrNs*.

    If the cache file does not exist yet it is built by loading the
    taxonomy's deprecated-label linkbases and extracting dates from
    deprecated-label text, then saved.
    """
    cntlr = modelManager.cntlr
    _fileName = resourcesFilePath(
        modelManager,
        abbrNs.partition("/")[0] + "-deprecated-concepts.json")
    _deprecatedLabelRole = latestTaxonomyDoc["deprecatedLabelRole"]
    _deprecatedDateMatchPattern = latestTaxonomyDoc["deprecationDatePattern"]
    if os.path.exists(_fileName):
        return _fileName  # cache already built
    # load labels and store file name
    modelManager.addToLog(_("loading {} deprecated concepts into {}").format(
        abbrNs, _fileName),
                          messageCode="info")
    deprecatedConceptDates = {}
    from arelle import ModelXbrl
    for latestTaxonomyLabelFile in flattenSequence(
            latestTaxonomyDoc["deprecatedLabels"]):
        # load without SEC/EFM validation (doc file would not be acceptable)
        priorValidateDisclosureSystem = modelManager.validateDisclosureSystem
        modelManager.validateDisclosureSystem = False
        deprecationsInstance = ModelXbrl.load(
            modelManager,
            # load from zip (especially after caching) is incredibly faster
            openFileSource(latestTaxonomyLabelFile, cntlr),
            _("built deprecations table in cache"))
        modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
        if deprecationsInstance is None:
            # fix: messageArgs previously referenced undefined names
            # ('val' and '_abbrNs'), raising NameError on this path
            modelManager.addToLog(_("%(name)s documentation not loaded"),
                                  messageCode="arelle:notLoaded",
                                  messageArgs={"name": abbrNs})
        else:
            # harvest deprecation dates from deprecated-label text
            for labelRel in deprecationsInstance.relationshipSet(
                    XbrlConst.conceptLabel).modelRelationships:
                modelLabel = labelRel.toModelObject
                conceptName = labelRel.fromModelObject.name
                if modelLabel.role == _deprecatedLabelRole:
                    match = _deprecatedDateMatchPattern.match(modelLabel.text)
                    if match is not None:
                        date = match.group(1)
                        if date:
                            deprecatedConceptDates[conceptName] = date

            jsonStr = _STR_UNICODE(
                json.dumps(
                    OrderedDict(sorted(deprecatedConceptDates.items())),  # sort in json file
                    ensure_ascii=False,
                    indent=0))  # might not be unicode in 2.7
            saveFile(cntlr, _fileName, jsonStr)  # 2.7 gets unicode this way
            deprecationsInstance.close()
            del deprecationsInstance  # dereference closed modelXbrl
    # fix: return the freshly built cache path; previously the first build
    # fell off the end and returned None, unlike the early return above
    return _fileName
コード例 #5
0
ファイル: PackageManager.py プロジェクト: rmutia/Arelle
def packageInfo(URL, reload=False):
    """Return a dict describing the taxonomy package at *URL*
    (name, status, version, fileDate, description, remappings),
    or None if the file cannot be read or is not a package.
    """
    # TODO several directories, eg User Application Data
    packageFilename = _cntlr.webCache.getfilename(URL, reload=reload, normalize=True)
    if packageFilename:
        filesource = None
        try:
            global openFileSource
            if openFileSource is None:
                from arelle.FileSource import openFileSource
            filesource = openFileSource(packageFilename, _cntlr)
            if filesource.isZip:
                metadataFiles = filesource.taxonomyPackageMetadataFiles
                if len(metadataFiles) != 1:
                    raise IOError(
                        _("Taxonomy package contained more than one metadata file: {0}.").format(
                            ", ".join(metadataFiles)
                        )
                    )
                metadataFile = metadataFiles[0]
                metadata = filesource.file(filesource.url + os.sep + metadataFile)[0]
                metadataFilePrefix = os.sep.join(os.path.split(metadataFile)[:-1])
                if metadataFilePrefix:
                    metadataFilePrefix += os.sep
                metadataFilePrefix = filesource.baseurl + os.sep + metadataFilePrefix
            elif os.path.basename(filesource.url) == ".taxonomyPackage.xml":  # individual manifest file
                metadataFile = metadata = filesource.url
                metadataFilePrefix = os.sep.join(os.path.split(metadataFile)[:-1])
                if metadataFilePrefix:
                    metadataFilePrefix += os.sep
            else:
                # fix: previously formatted with 'metadataFile', which is
                # unbound on this branch and raised NameError instead of IOError
                raise IOError(
                    _("File must be a taxonomy package (zip file) or manifest (.taxonomyPackage.xml): {0}.").format(
                        filesource.url
                    )
                )
            parsedPackage = parsePackage(_cntlr, metadata)
            package = {
                "name": parsedPackage["name"],
                "status": "enabled",
                "version": parsedPackage["version"],
                "fileDate": time.strftime("%Y-%m-%dT%H:%M:%S UTC", time.gmtime(os.path.getmtime(packageFilename))),
                "URL": URL,
                "description": parsedPackage["description"],
                # relative remappings are resolved against the metadata prefix
                "remappings": dict(
                    (
                        prefix,
                        remapping if isHttpUrl(remapping) else (metadataFilePrefix + remapping.replace("/", os.sep)),
                    )
                    for prefix, remapping in parsedPackage["remappings"].items()
                ),
            }
            return package
        except EnvironmentError:
            pass  # unreadable/absent package reported as None below
        finally:
            # fix: close on every path (incl. unexpected exceptions), not
            # only on success and EnvironmentError
            if filesource:
                filesource.close()
    return None
コード例 #6
0
def _make_cache(val, ugt, cntlr, ugt_default_dimensions_json_file):
    """
    Create a new cache of the taxonomy's default dimensions.

    :param val: ValidateXbrl to be validated
    :type val: :class: '~arelle.ValidateXbrl.ValidateXbrl'
    :param ugt: Taxonomy to check
    :type ugt: str
    :param ugt_default_dimensions_json_file: location to save json default
        dimensions
    :type ugt_default_dimensions_json_file: str
    :return: no explicit return, but saves caches for dqc_us_0041
    :rtype: None
    """
    started_at = time.time()
    ugt_entry_xsd = ugt["entryXsd"]
    val.usgaapDefaultDimensions = {}
    prior_validate_disclosure_system = (
        val.modelXbrl.modelManager.validateDisclosureSystem)
    # disable disclosure-system validation while loading the entry xsd
    val.modelXbrl.modelManager.validateDisclosureSystem = False
    try:
        ugt_entry_xsd_instance = (
            ModelXbrl.load(
                val.modelXbrl.modelManager,
                openFileSource(ugt_entry_xsd, cntlr),
                _("opened us-gaap entry xsd")  # noqa
            ))
    finally:
        # fix: restore the flag even when loading raises, so a failure here
        # does not leave disclosure-system validation disabled globally
        val.modelXbrl.modelManager.validateDisclosureSystem = (
            prior_validate_disclosure_system)

    if ugt_entry_xsd_instance is None:
        val.modelXbrl.error(
            "arelle:notLoaded",
            _("US-GAAP entry xsd not loaded: %(file)s"),  # noqa
            modelXbrl=val,
            file=os.path.basename(ugt_entry_xsd))

    else:
        # collect dimension-default relationships into the lookup table
        model_relationships = (ugt_entry_xsd_instance.relationshipSet(
            XbrlConst.dimensionDefault).modelRelationships)
        for default_dim_rel in model_relationships:
            if _default_dim_rel_is_instance(default_dim_rel):
                from_name = default_dim_rel.fromModelObject.name
                to_name = default_dim_rel.toModelObject.name
                val.usgaapDefaultDimensions[from_name] = to_name
        json_str = str(
            json.dumps(val.usgaapDefaultDimensions,
                       ensure_ascii=False,
                       indent=0))  # might not be unicode in 2.7
        # 2.7 gets unicode this way
        saveFile(cntlr, ugt_default_dimensions_json_file, json_str)
        ugt_entry_xsd_instance.close()
        del ugt_entry_xsd_instance  # dereference closed modelXbrl
    val.modelXbrl.profileStat(
        _("build default dimensions cache"),  # noqa
        time.time() - started_at)
コード例 #7
0
def runOpenInlineDocumentSetMenuCommand(cntlr,
                                        runInBackground=False,
                                        saveTargetFiling=False):
    """Prompt for one or more inline XBRL files and open them as an
    inline document set (IXDS), expanding archives into their entries.
    """
    filenames = cntlr.uiFileDialog(
        "open",
        multiple=True,
        title=_("arelle - Multi-open inline XBRL file(s)"),
        initialdir=cntlr.config.setdefault("fileOpenDir", "."),
        filetypes=[(_("XBRL files"), "*.*")],
        defaultextension=".xbrl")
    if os.sep == "\\":
        filenames = [f.replace("/", "\\") for f in filenames]

    if not filenames:
        filename = ""
    elif len(filenames) == 1 and (filenames[0].endswith(".zip")
                                  or filenames[0].endswith(".tar.gz")):
        # get archive file names
        from arelle.FileSource import openFileSource
        filesource = openFileSource(filenames[0], cntlr)
        if filesource.isArchive:
            from arelle import DialogOpenArchive
            archiveEntries = DialogOpenArchive.askArchiveFile(cntlr,
                                                              filesource,
                                                              multiselect=True)
            if archiveEntries:
                ixdsFirstFile = archiveEntries[0]
                _archiveFilenameParts = archiveFilenameParts(ixdsFirstFile)
                if _archiveFilenameParts is not None:
                    ixdsDir = _archiveFilenameParts[
                        0]  # it's a zip or package, use zip file name as head of ixds
                else:
                    ixdsDir = os.path.dirname(ixdsFirstFile)
                docsetSurrogatePath = os.path.join(ixdsDir, IXDS_SURROGATE)
                filename = docsetSurrogatePath + IXDS_DOC_SEPARATOR.join(
                    archiveEntries)
            else:
                filename = None  # user cancelled archive-entry selection
        else:
            # fix: 'filename' was previously left unbound on this path,
            # raising UnboundLocalError below; open the file as-is instead
            filename = filenames[0]
        filesource.close()
    elif len(filenames) >= MINIMUM_IXDS_DOC_COUNT:
        ixdsFirstFile = filenames[0]
        _archiveFilenameParts = archiveFilenameParts(ixdsFirstFile)
        if _archiveFilenameParts is not None:
            ixdsDir = _archiveFilenameParts[
                0]  # it's a zip or package, use zip file name as head of ixds
        else:
            ixdsDir = os.path.dirname(ixdsFirstFile)
        docsetSurrogatePath = os.path.join(ixdsDir, IXDS_SURROGATE)
        filename = docsetSurrogatePath + IXDS_DOC_SEPARATOR.join(filenames)
    else:
        filename = filenames[0]
    if filename is not None:
        cntlr.fileOpenFile(filename)
コード例 #8
0
def packageInfo(URL, reload=False):
    """Return a dict describing the taxonomy package at *URL*
    (name, status, version, fileDate, description, remappings),
    or None if the file cannot be read or is not a package/manifest.
    """
    #TODO several directories, eg User Application Data
    packageFilename = _cntlr.webCache.getfilename(URL, reload=reload, normalize=True)
    if packageFilename:
        from arelle.FileSource import TAXONOMY_PACKAGE_FILE_NAMES
        filesource = None
        try:
            global openFileSource
            if openFileSource is None:
                from arelle.FileSource import openFileSource
            filesource = openFileSource(packageFilename, _cntlr)
            if filesource.isZip:
                # multiple metadata files are allowed; use the first
                metadataFile = filesource.taxonomyPackageMetadataFiles[0]
                metadata = filesource.file(filesource.url + os.sep + metadataFile)[0]
                metadataFilePrefix = os.sep.join(os.path.split(metadataFile)[:-1])
                if metadataFilePrefix:
                    metadataFilePrefix += os.sep
                metadataFilePrefix = filesource.baseurl + os.sep +  metadataFilePrefix
            elif os.path.basename(filesource.url) in TAXONOMY_PACKAGE_FILE_NAMES: # individual manifest file
                metadataFile = metadata = filesource.url
                metadataFilePrefix = os.sep.join(os.path.split(metadataFile)[:-1])
                if metadataFilePrefix:
                    metadataFilePrefix += os.sep
            else:
                # fix: 'metadataFile' was unbound here (NameError), and the
                # message's "()" was empty because {1} was missing although
                # the file-name list was passed as a second format argument
                raise IOError(_("File must be a taxonomy package (zip file), catalog file, or manifest ({1}): {0}.")
                              .format(packageFilename, ', '.join(TAXONOMY_PACKAGE_FILE_NAMES)))
            parsedPackage = parsePackage(_cntlr, metadata)
            package = {'name': parsedPackage['name'],
                       'status': 'enabled',
                       'version': parsedPackage['version'],
                       'fileDate': time.strftime('%Y-%m-%dT%H:%M:%S UTC', time.gmtime(os.path.getmtime(packageFilename))),
                       'URL': URL,
                       'description': parsedPackage['description'],
                       # relative remappings resolve against the metadata prefix
                       'remappings': dict(
                            (prefix, 
                             remapping if isHttpUrl(remapping)
                             else (metadataFilePrefix +remapping.replace("/", os.sep)))
                            for prefix, remapping in parsedPackage["remappings"].items()),
                       }
            return package
        except EnvironmentError:
            pass  # unreadable/absent package reported as None below
        finally:
            # fix: close on every path, including unexpected exceptions
            if filesource:
                filesource.close()
    return None
コード例 #9
0
ファイル: PackageManager.py プロジェクト: joyanta/Arelle
def packageInfo(URL, reload=False):
    """Return a dict describing the taxonomy package at *URL*
    (name, status, version, fileDate, description, remappings),
    or None if the file cannot be read or is not a package/manifest.
    """
    #TODO several directories, eg User Application Data
    packageFilename = _cntlr.webCache.getfilename(URL, reload=reload, normalize=True)
    if packageFilename:
        from arelle.FileSource import TAXONOMY_PACKAGE_FILE_NAMES
        filesource = None
        try:
            global openFileSource
            if openFileSource is None:
                from arelle.FileSource import openFileSource
            filesource = openFileSource(packageFilename, _cntlr)
            if filesource.isZip:
                # multiple metadata files are allowed; use the first
                metadataFile = filesource.taxonomyPackageMetadataFiles[0]
                metadata = filesource.file(filesource.url + os.sep + metadataFile)[0]
                metadataFilePrefix = os.sep.join(os.path.split(metadataFile)[:-1])
                if metadataFilePrefix:
                    metadataFilePrefix += os.sep
                metadataFilePrefix = filesource.baseurl + os.sep +  metadataFilePrefix
            elif os.path.basename(filesource.url) in TAXONOMY_PACKAGE_FILE_NAMES: # individual manifest file
                metadataFile = metadata = filesource.url
                metadataFilePrefix = os.sep.join(os.path.split(metadataFile)[:-1])
                if metadataFilePrefix:
                    metadataFilePrefix += os.sep
            else:
                # fix: the message's "()" rendered empty because {1} was
                # missing even though the file-name list was passed as the
                # second format argument
                raise IOError(_("File must be a taxonomy package (zip file), catalog file, or manifest ({1}): {0}.")
                              .format(packageFilename, ', '.join(TAXONOMY_PACKAGE_FILE_NAMES)))
            parsedPackage = parsePackage(_cntlr, metadata)
            package = {'name': parsedPackage['name'],
                       'status': 'enabled',
                       'version': parsedPackage['version'],
                       'fileDate': time.strftime('%Y-%m-%dT%H:%M:%S UTC', time.gmtime(os.path.getmtime(packageFilename))),
                       'URL': URL,
                       'description': parsedPackage['description'],
                       # relative remappings resolve against the metadata prefix
                       'remappings': dict(
                            (prefix, 
                             remapping if isHttpUrl(remapping)
                             else (metadataFilePrefix +remapping.replace("/", os.sep)))
                            for prefix, remapping in parsedPackage["remappings"].items()),
                       }
            return package
        except EnvironmentError:
            pass  # unreadable/absent package reported as None below
        finally:
            # fix: close on every path, including unexpected exceptions
            if filesource:
                filesource.close()
    return None
コード例 #10
0
ファイル: CntlrWinMain.py プロジェクト: 8maki/Arelle
 def webOpen(self, *ignore):
     """Prompt for a web URL and load the XBRL document it addresses
     in a background thread, letting the user pick an entry when the
     URL turns out to be an archive without a pre-selected file."""
     if not self.okayToContinue():
         return
     url = DialogURL.askURL(self.parent)
     if not url:
         return
     self.updateFileHistory(url, False)
     filesource = openFileSource(url,self)
     # archive with no selected entry: ask the user which file to open
     if filesource.isArchive and not filesource.selection: # or filesource.isRss:
         from arelle import DialogOpenArchive
         url = DialogOpenArchive.askArchiveFile(self, filesource)
     self.updateFileHistory(url, False)
     loaderThread = threading.Thread(target=lambda: self.backgroundLoadXbrl(filesource,False))
     loaderThread.daemon = True
     loaderThread.start()
コード例 #11
0
def _create_config(val):
    """
    Creates the configs needed for dqc_us_0001

    :param val: ValidateXbrl needed in order to save the cache
    :type val: :class: '~arelle.ValidateXbrl.ValidateXbrl'
    :return: no explicit return but creates and saves configs in
        dqc_us_rule\resources\DQC_US_0001
    :rtype: None
    """
    import copy  # local import: only needed when (re)building configs
    val.ugtNamespace = None
    cntlr = val.modelXbrl.modelManager.cntlr
    year = _EARLIEST_US_GAAP_YEAR
    config = _load_config(_DQC_01_AXIS_FILE)
    # Create a list of axes in the base config file

    for ugt in _UGT_DOCS:
        # create taxonomy specific name
        config_json_file = os.path.join(os.path.dirname(__file__), 'resources',
                                        'DQC_US_0001',
                                        'dqc_0001_{}.json'.format(str(year)))
        # fix: actually copy the base config; the previous assignment only
        # aliased it, so every taxonomy year mutated the shared dict and
        # members from earlier years leaked into later files
        working_json_file = copy.deepcopy(config)
        ugtEntryXsd = ugt["entryXsd"]
        # load entry xsd without disclosure-system validation
        prior_vds = val.modelXbrl.modelManager.validateDisclosureSystem
        val.modelXbrl.modelManager.validateDisclosureSystem = False
        dimLoadingInstance = ModelXbrl.load(val.modelXbrl.modelManager,
                                            openFileSource(ugtEntryXsd, cntlr),
                                            ("built us-gaap member cache"))
        val.modelXbrl.modelManager.validateDisclosureSystem = prior_vds

        for axis, info in working_json_file.items():
            info['defined_members'] = defaultdict(set)
            axisConcept = dimLoadingInstance.nameConcepts.get(axis,
                                                              (None, ))[0]
            if axisConcept is not None:
                working_json_file[axis]['defined_members'] = sorted(
                    _tr_mem(val, ugt, dimLoadingInstance, axisConcept,
                            XbrlConst.dimensionDomain, None, set()))
        json_str = str(
            json.dumps(OrderedDict(sorted(working_json_file.items())),
                       ensure_ascii=False,
                       indent=4))
        saveFile(cntlr, config_json_file, json_str)
        dimLoadingInstance.close()
        del dimLoadingInstance  # dereference closed modelXbrl
        year += 1
コード例 #12
0
def runOpenInlineDocumentSetMenuCommand(cntlr,
                                        filenames,
                                        runInBackground=False,
                                        saveTargetFiling=False):
    """Open *filenames* as an inline XBRL document set (IXDS),
    expanding a single archive file into its selected entries.
    """
    if os.sep == "\\":
        filenames = [f.replace("/", "\\") for f in filenames]

    if not filenames:
        filename = ""
    elif len(filenames) == 1 and any(filenames[0].endswith(s)
                                     for s in archiveFilenameSuffixes):
        # get archive file names
        from arelle.FileSource import openFileSource
        filesource = openFileSource(filenames[0], cntlr)
        if filesource.isArchive:
            from arelle import DialogOpenArchive
            archiveEntries = DialogOpenArchive.askArchiveFile(cntlr,
                                                              filesource,
                                                              multiselect=True)
            if archiveEntries:
                ixdsFirstFile = archiveEntries[0]
                _archiveFilenameParts = archiveFilenameParts(ixdsFirstFile)
                if _archiveFilenameParts is not None:
                    ixdsDir = _archiveFilenameParts[
                        0]  # it's a zip or package, use zip file name as head of ixds
                else:
                    ixdsDir = os.path.dirname(ixdsFirstFile)
                docsetSurrogatePath = os.path.join(ixdsDir, IXDS_SURROGATE)
                filename = docsetSurrogatePath + IXDS_DOC_SEPARATOR.join(
                    archiveEntries)
            else:
                filename = None  # user cancelled archive-entry selection
        else:
            # fix: 'filename' was previously left unbound on this path,
            # raising UnboundLocalError below; open the file as-is instead
            filename = filenames[0]
        filesource.close()
    elif len(filenames) >= MINIMUM_IXDS_DOC_COUNT:
        ixdsFirstFile = filenames[0]
        _archiveFilenameParts = archiveFilenameParts(ixdsFirstFile)
        if _archiveFilenameParts is not None:
            ixdsDir = _archiveFilenameParts[
                0]  # it's a zip or package, use zip file name as head of ixds
        else:
            ixdsDir = os.path.dirname(ixdsFirstFile)
        docsetSurrogatePath = os.path.join(ixdsDir, IXDS_SURROGATE)
        filename = docsetSurrogatePath + IXDS_DOC_SEPARATOR.join(filenames)
    else:
        filename = filenames[0]
    if filename is not None:
        cntlr.fileOpenFile(filename)
コード例 #13
0
def unpackEIS(cntlr, eisFile, unpackToDir):
    """Unpack every file of SEC EIS archive *eisFile* into *unpackToDir*
    and log the list of extracted files.
    """
    from arelle.FileSource import openFileSource
    filesource = openFileSource(eisFile, cntlr, checkIfXmlIsEis=True)
    if not filesource.isArchive:
        # consistency fix: log the rejection (as the sibling implementation
        # does) instead of returning silently
        cntlr.addToLog("[info:unpackEIS] Not recognized as an EIS file: " + eisFile)
        return
    import os  # 'io' was imported here but never used

    unpackedFiles = []

    for file in filesource.dir:
        fIn, encoding = filesource.file(os.path.join(eisFile,file))
        with open(os.path.join(unpackToDir, file), "w", encoding=encoding) as fOut:
            fOut.write(fIn.read())
            unpackedFiles.append(file)
        fIn.close()

    cntlr.addToLog("[info:unpackEIS] Unpacked files " + ', '.join(unpackedFiles))
コード例 #14
0
ファイル: NewSolvencyReport.py プロジェクト: acsone/Arelle
def getReportNameFromEntryPoint(cntlr, entryPoint):
    """Return the report name whose entry-point URL equals *entryPoint*
    in the currently enabled taxonomy package, or None if not found.
    """
    packageInfo = getCurrentEnabledTaxonomyPackageInfo()
    if packageInfo is not None:
        URL = packageInfo.get("URL")
        filesource = openFileSource(URL, cntlr=cntlr)
        try:
            filenames = filesource.dir
            if filenames is not None:   # an IO or other error can return None
                metadataFiles = filesource.taxonomyPackageMetadataFiles
                if metadataFiles:
                    # fix: guard IndexError on packages without metadata files
                    metadataFile = metadataFiles[0]
                    metadata = filesource.url + os.sep + metadataFile
                    taxonomyPackage = parsePackage(cntlr, filesource, metadata,
                                                   os.sep.join(os.path.split(metadata)[:-1]) + os.sep)
                    nameToUrls = taxonomyPackage["nameToUrls"]
                    for reportName, reportInfo in nameToUrls.items():
                        if reportInfo[1] == entryPoint:
                            return reportName
        finally:
            filesource.close()  # fix: file source was previously leaked
    return None
コード例 #15
0
def unpackEIS(cntlr, eisFile, unpackToDir):
    """Extract every file from SEC EIS archive *eisFile* into
    *unpackToDir*, logging either the rejection (when the file is not
    recognized as an EIS archive) or the list of extracted files."""
    from arelle.FileSource import openFileSource
    filesource = openFileSource(eisFile, cntlr, checkIfXmlIsEis=True)
    if not filesource.isArchive:
        cntlr.addToLog("[info:unpackEIS] Not recognized as an EIS file: " + eisFile)
        return
    import os, io

    extracted = []

    for entryName in filesource.dir:
        source, encoding = filesource.file(os.path.join(eisFile,entryName))
        targetPath = os.path.join(unpackToDir, entryName)
        # write out as text using the archive entry's own encoding
        with open(targetPath, "w", encoding=encoding) as target:
            target.write(source.read())
            extracted.append(entryName)
        source.close()

    cntlr.addToLog("[info:unpackEIS] Unpacked files " + ', '.join(extracted))
コード例 #16
0
def getReportNameFromEntryPoint(cntlr, entryPoint):
    """Return the report name whose entry-point URL equals *entryPoint*
    in the currently enabled taxonomy package, or None if not found.
    """
    packageInfo = getCurrentEnabledTaxonomyPackageInfo()
    if packageInfo is not None:
        URL = packageInfo.get("URL")
        filesource = openFileSource(URL, cntlr=cntlr)
        try:
            filenames = filesource.dir
            if filenames is not None:  # an IO or other error can return None
                metadataFiles = filesource.taxonomyPackageMetadataFiles
                if metadataFiles:
                    # fix: guard IndexError on packages without metadata files
                    metadataFile = metadataFiles[0]
                    metadata = filesource.url + os.sep + metadataFile
                    taxonomyPackage = parsePackage(
                        cntlr, filesource, metadata,
                        os.sep.join(os.path.split(metadata)[:-1]) + os.sep)
                    nameToUrls = taxonomyPackage["nameToUrls"]
                    for reportName, reportInfo in nameToUrls.items():
                        if reportInfo[1] == entryPoint:
                            return reportName
        finally:
            filesource.close()  # fix: file source was previously leaked
    return None
コード例 #17
0
ファイル: CntlrWinMain.py プロジェクト: 8maki/Arelle
 def fileOpenFile(self, filename, importToDTS=False):
     """Open *filename* (optionally importing it into the current DTS),
     resolving archive files via a selection dialog, updating the
     open-directory config and file history, then loading in a
     background thread.
     """
     if filename:
         filesource = None
         # check for archive files
         filesource = openFileSource(filename,self)
         if filesource.isArchive and not filesource.selection: # or filesource.isRss:
             from arelle import DialogOpenArchive
             filename = DialogOpenArchive.askArchiveFile(self, filesource)

     if filename:
         if importToDTS:
             self.config["importOpenDir"] = os.path.dirname(filename)
         else:
             # fix: also recognize https so web URLs never overwrite the
             # local fileOpenDir setting (startswith accepts a tuple)
             if not filename.startswith(("http://", "https://")):
                 self.config["fileOpenDir"] = os.path.dirname(filename)
         self.updateFileHistory(filename, importToDTS)
         thread = threading.Thread(target=lambda: self.backgroundLoadXbrl(filesource,importToDTS))
         thread.daemon = True
         thread.start()
コード例 #18
0
ファイル: Util.py プロジェクト: Arelle/Arelle
def deprecatedConceptDatesFile(modelManager, abbrNs, latestTaxonomyDoc):
    """Return the path of a cached JSON file mapping deprecated concept
    names to deprecation dates, building it from the taxonomy's
    documentation linkbase on first use.

    :param modelManager: ModelManager used for loading and logging.
    :param abbrNs: abbreviated namespace (e.g. "us-gaap/2021"); falsy skips.
    :param latestTaxonomyDoc: dict providing "deprecatedLabels" (URL),
        "deprecatedLabelRole" and compiled "deprecationDatePattern";
        None skips.
    :return: the JSON file path, or None when inputs are unusable.
    """
    if latestTaxonomyDoc is None:
        return None
    if not abbrNs: # none for an unexpected namespace pattern
        return None
    cntlr = modelManager.cntlr
    _fileName = resourcesFilePath(modelManager, abbrNs.partition("/")[0] + "-deprecated-concepts.json")
    _deprecatedLabelRole = latestTaxonomyDoc["deprecatedLabelRole"]
    _deprecatedDateMatchPattern = latestTaxonomyDoc["deprecationDatePattern"]
    if os.path.exists(_fileName):
        return _fileName  # already cached
    # load labels and store file name
    modelManager.addToLog(_("loading {} deprecated concepts into {}").format(abbrNs, _fileName), messageCode="info")
    deprecatedConceptDates = {}
    # load without SEC/EFM validation (doc file would not be acceptable)
    priorValidateDisclosureSystem = modelManager.validateDisclosureSystem
    modelManager.validateDisclosureSystem = False
    from arelle import ModelXbrl
    deprecationsInstance = ModelXbrl.load(modelManager, 
          # load from zip (especially after caching) is incredibly faster
          openFileSource(latestTaxonomyDoc["deprecatedLabels"], cntlr), 
          _("built deprecations table in cache"))
    modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
    if deprecationsInstance is None:
        # fix: previous code referenced undefined names (val, _abbrNs) in
        # messageArgs here, raising NameError instead of logging the failure
        modelManager.addToLog(
            _("%(name)s documentation not loaded"),
            messageCode="arelle:notLoaded", messageArgs={"name": abbrNs})
        return None
    # collect deprecation dates from deprecated-label relationships
    for labelRel in deprecationsInstance.relationshipSet(XbrlConst.conceptLabel).modelRelationships:
        modelLabel = labelRel.toModelObject
        conceptName = labelRel.fromModelObject.name
        if modelLabel.role == _deprecatedLabelRole:
            match = _deprecatedDateMatchPattern.match(modelLabel.text)
            if match is not None:
                date = match.group(1)
                if date:
                    deprecatedConceptDates[conceptName] = date
    jsonStr = _STR_UNICODE(json.dumps(deprecatedConceptDates, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
    saveFile(cntlr, _fileName, jsonStr)  # 2.7 gets unicode this way
    deprecationsInstance.close()
    del deprecationsInstance # dereference closed modelXbrl
    # fix: the original fell off the end and returned None even after
    # successfully building the cache file; return its path as on cache hit
    return _fileName
コード例 #19
0
ファイル: Validate.py プロジェクト: leedstyh/Arelle
 def validateRssFeed(self):
     """Validate every item of an RSS feed document.

     For each item: load its zipped instance, run the "RssItem.Xbrl.Loaded"
     plugin hooks, run the instance validator, record results on the item,
     run "Validate.RssItem" hooks, and close the instance model.  Items
     flagged skipRssItem or doNotProcessRSSitem are skipped.
     """
     self.modelXbrl.info("info", "RSS Feed", modelDocument=self.modelXbrl)
     from arelle.FileSource import openFileSource
     # propagate a feed-level reloadCache request to each item load
     reloadCache = getattr(self.modelXbrl, "reloadCache", False)
     for rssItem in self.modelXbrl.modelDocument.rssItems:
         if getattr(rssItem, "skipRssItem", False):
             self.modelXbrl.info("info", _("skipping RSS Item %(accessionNumber)s %(formType)s %(companyName)s %(period)s"),
                 modelObject=rssItem, accessionNumber=rssItem.accessionNumber, formType=rssItem.formType, companyName=rssItem.companyName, period=rssItem.period)
             continue
         self.modelXbrl.info("info", _("RSS Item %(accessionNumber)s %(formType)s %(companyName)s %(period)s"),
             modelObject=rssItem, accessionNumber=rssItem.accessionNumber, formType=rssItem.formType, companyName=rssItem.companyName, period=rssItem.period)
         modelXbrl = None
         try:
             modelXbrl = ModelXbrl.load(self.modelXbrl.modelManager, 
                                        openFileSource(rssItem.zippedUrl, self.modelXbrl.modelManager.cntlr, reloadCache=reloadCache),
                                        _("validating"), rssItem=rssItem)
             # hooks may set doNotProcessRSSitem to veto this entry
             for pluginXbrlMethod in pluginClassMethods("RssItem.Xbrl.Loaded"):  
                 pluginXbrlMethod(modelXbrl, {}, rssItem)      
             if getattr(rssItem, "doNotProcessRSSitem", False) or modelXbrl.modelDocument is None:
                 modelXbrl.close()
                 continue # skip entry based on processing criteria
             self.instValidator.validate(modelXbrl, self.modelXbrl.modelManager.formulaOptions.typedParameters())
             self.instValidator.close()
             rssItem.setResults(modelXbrl)
             self.modelXbrl.modelManager.viewModelObject(self.modelXbrl, rssItem.objectId())
             for pluginXbrlMethod in pluginClassMethods("Validate.RssItem"):
                 pluginXbrlMethod(self, modelXbrl, rssItem)
             modelXbrl.close()
         except Exception as err:
             # report against both the feed model and the item instance
             self.modelXbrl.error("exception:" + type(err).__name__,
                 _("RSS item validation exception: %(error)s, instance: %(instance)s"),
                 modelXbrl=(self.modelXbrl, modelXbrl),
                 instance=rssItem.zippedUrl, error=err,
                 exc_info=True)
             try:
                 # best-effort cleanup; secondary failures are ignored so the
                 # remaining feed items can still be processed
                 self.instValidator.close()
                 if modelXbrl is not None:
                     modelXbrl.close()
             except Exception as err:
                 pass
         del modelXbrl  # completely dereference
コード例 #20
0
ファイル: Validate.py プロジェクト: Arelle/Arelle
 def validateRssFeed(self):
     """Validate each item of an RSS feed document.

     Loads every item's zipped instance, runs plugin hooks and the instance
     validator, records the results on the item and closes the instance.
     Items marked skipRssItem or doNotProcessRSSitem are passed over.
     """
     self.modelXbrl.info("info", "RSS Feed", modelDocument=self.modelXbrl)
     from arelle.FileSource import openFileSource
     reloadCache = getattr(self.modelXbrl, "reloadCache", False)
     for item in self.modelXbrl.modelDocument.rssItems:
         if getattr(item, "skipRssItem", False):
             self.modelXbrl.info("info", _("skipping RSS Item %(accessionNumber)s %(formType)s %(companyName)s %(period)s"),
                 modelObject=item, accessionNumber=item.accessionNumber, formType=item.formType, companyName=item.companyName, period=item.period)
             continue
         self.modelXbrl.info("info", _("RSS Item %(accessionNumber)s %(formType)s %(companyName)s %(period)s"),
             modelObject=item, accessionNumber=item.accessionNumber, formType=item.formType, companyName=item.companyName, period=item.period)
         instanceXbrl = None
         try:
             itemSource = openFileSource(item.zippedUrl, self.modelXbrl.modelManager.cntlr, reloadCache=reloadCache)
             instanceXbrl = ModelXbrl.load(self.modelXbrl.modelManager,
                                           itemSource,
                                           _("validating"), rssItem=item)
             # hooks may veto this entry by setting doNotProcessRSSitem
             for hook in pluginClassMethods("RssItem.Xbrl.Loaded"):
                 hook(instanceXbrl, {}, item)
             if getattr(item, "doNotProcessRSSitem", False) or instanceXbrl.modelDocument is None:
                 instanceXbrl.close()
                 continue # skip entry based on processing criteria
             typedParams = self.modelXbrl.modelManager.formulaOptions.typedParameters(self.modelXbrl.prefixedNamespaces)
             self.instValidator.validate(instanceXbrl, typedParams)
             self.instValidator.close()
             item.setResults(instanceXbrl)
             self.modelXbrl.modelManager.viewModelObject(self.modelXbrl, item.objectId())
             for hook in pluginClassMethods("Validate.RssItem"):
                 hook(self, instanceXbrl, item)
             instanceXbrl.close()
         except Exception as err:
             self.modelXbrl.error("exception:" + type(err).__name__,
                 _("RSS item validation exception: %(error)s, instance: %(instance)s"),
                 modelXbrl=(self.modelXbrl, instanceXbrl),
                 instance=item.zippedUrl, error=err,
                 exc_info=True)
             try:
                 # best-effort cleanup; ignore secondary failures
                 self.instValidator.close()
                 if instanceXbrl is not None:
                     instanceXbrl.close()
             except Exception:
                 pass
         del instanceXbrl  # completely dereference
コード例 #21
0
def buildUgtFullRelsFiles(modelXbrl, dqcRules):
    """Build and cache JSON files of US-GAAP (UGT) relationships for DQC
    rule evaluation.

    For each taxonomy year in latestEntireUgt: loads the entire UGT to
    collect calculation relationships ("calcs") and the dimension-domain
    members of the axes referenced by DQC.US.0001 ("axes"); then, when a
    DQC rules taxonomy URL is present, loads it to collect the DQC 0015
    non-negative concept/member exclusions; finally saves the combined
    result as ``us-gaap-rels-<year>.json`` in the resources directory.

    :param modelXbrl: model whose modelManager/cntlr are used for loading.
    :param dqcRules: parsed DQC rules dict (reads "DQC.US.0001" axes).
    """
    from arelle import ModelXbrl
    modelManager = modelXbrl.modelManager
    cntlr = modelXbrl.modelManager.cntlr
    # arcroles and linkrole for the DQC 0015 rule relationships
    conceptRule = (
        "http://fasb.org/dqcrules/arcrole/concept-rule",  # FASB arcrule
        "http://fasb.org/dqcrules/arcrole/rule-concept")
    rule0015 = "http://fasb.org/us-gaap/role/dqc/0015"
    # load without SEC/EFM validation (doc file would not be acceptable)
    priorValidateDisclosureSystem = modelManager.validateDisclosureSystem
    modelManager.validateDisclosureSystem = False
    for ugtAbbr, (ugtEntireUrl, dqcrtUrl) in latestEntireUgt.items():
        modelManager.addToLog(_("loading {} Entire UGT {}").format(ugtAbbr, ugtEntireUrl),
                              messageCode="info")
        ugtRels = {}
        ugtRels["calcs"] = ugtCalcs = {}
        ugtRels["axes"] = ugtAxes = defaultdict(set)
        ugtInstance = ModelXbrl.load(
            modelManager,
            # load from zip (especially after caching) is incredibly faster
            openFileSource(ugtEntireUrl, cntlr),
            _("built dqcrt table in cache"))
        if ugtInstance is None:
            # fix: messageArgs previously referenced undefined name `val`,
            # raising NameError instead of logging the load failure
            modelManager.addToLog(_("%(name)s documentation not loaded"),
                                  messageCode="arelle:notLoaded",
                                  messageArgs={"name": ugtAbbr})
            continue  # nothing to save for this year
        # calcs: weight -> fromPrefix -> fromLocalName -> toPrefix -> [toLocalNames]
        calcRelSet = ugtInstance.relationshipSet(XbrlConst.summationItem)
        for rel in calcRelSet.modelRelationships:
            _fromQn = rel.fromModelObject.qname
            _toQn = rel.toModelObject.qname
            ugtCalcs.setdefault(rel.weight, {}
                    ).setdefault(_fromQn.prefix, {}
                    ).setdefault(_fromQn.localName, {}
                    ).setdefault(_toQn.prefix, set()).add(_toQn.localName)
        for w in ugtCalcs.values():
            for fNs in w.values():
                for fLn in fNs.values():
                    for tNs in fLn.keys():
                        fLn[tNs] = sorted(fLn[tNs])  # change set to array for json
        # axes: collect domain members of the axes DQC.US.0001 cares about
        dimDomRelSet = ugtInstance.relationshipSet(XbrlConst.dimensionDomain)
        axesOfInterest = set()
        for rule in dqcRules["DQC.US.0001"]["rules"].values():
            axesOfInterest.add(rule["axis"])
            for ruleAxesEntry in ("additional-axes", "unallowed-axes"):
                for additionalAxis in rule.get(ruleAxesEntry, ()):
                    axesOfInterest.add(additionalAxis)
        for rel in dimDomRelSet.modelRelationships:
            axisConcept = rel.fromModelObject
            if axisConcept.name in axesOfInterest:
                addDomMems(rel, ugtAxes[axisConcept.name], True)
        for axis in tuple(ugtAxes.keys()):
            ugtAxes[axis] = sorted(ugtAxes[axis])  # change set to array for json
        ugtInstance.close()
        del ugtInstance  # dereference closed modelXbrl

        if dqcrtUrl:  # none for 2019
            modelManager.addToLog(_("loading {} DQC Rules {}").format(ugtAbbr, dqcrtUrl),
                                  messageCode="info")
            dqcrtInstance = ModelXbrl.load(
                modelManager,
                # load from zip (especially after caching) is incredibly faster
                openFileSource(dqcrtUrl, cntlr),
                _("built dqcrt table in cache"))
            if dqcrtInstance is None:
                # fix: same undefined-`val` NameError as above
                modelManager.addToLog(_("%(name)s documentation not loaded"),
                                      messageCode="arelle:notLoaded",
                                      messageArgs={"name": ugtAbbr})
                # fix: close()/sortDqcLists() previously ran on the None
                # instance / undefined dqc0015, crashing on load failure
            else:
                ugtRels["DQC.US.0015"] = dqc0015 = defaultdict(dict)
                # load DQC 0015 (reuse the arcrole/role constants above,
                # which were previously duplicated inline and left unused)
                dqcRelSet = dqcrtInstance.relationshipSet(conceptRule, rule0015)
                for dqc0015obj, headEltName in (
                        ("conceptNames", "Dqc_0015_ListOfElements"),
                        ("excludedMemberNames", "Dqc_0015_ExcludeNonNegMembersAbstract"),
                        ("excludedAxesMembers", "Dqc_0015_ExcludeNonNegAxisAbstract"),
                        ("excludedAxesMembers", "Dqc_0015_ExcludeNonNegAxisMembersAbstract"),
                        ("excludedMemberStrings", "Dqc_0015_ExcludeNonNegMemberStringsAbstract")):
                    for headElt in dqcrtInstance.nameConcepts.get(headEltName, ()):
                        if dqc0015obj == "excludedMemberStrings":
                            # substrings come from reference parts, not rels
                            for refRel in dqcrtInstance.relationshipSet(
                                    XbrlConst.conceptReference).fromModelObject(headElt):
                                for refPart in refRel.toModelObject.iterchildren("{*}allowableSubString"):
                                    for subStr in refPart.text.split():
                                        # applies to any namespace
                                        dqc0015[dqc0015obj].setdefault("*", []).append(subStr)
                        else:
                            for ruleRel in dqcRelSet.fromModelObject(headElt):
                                elt = ruleRel.toModelObject
                                if dqc0015obj in ("conceptNames", "excludedMemberNames"):
                                    dqc0015[dqc0015obj].setdefault(elt.qname.prefix, []).append(elt.name)
                                else:
                                    l = dqc0015[dqc0015obj].setdefault(
                                        elt.qname.prefix, {}).setdefault(elt.name, {})
                                    if headEltName == "Dqc_0015_ExcludeNonNegAxisAbstract":
                                        l["*"] = None  # entire axis excluded
                                    else:
                                        for memRel in dqcRelSet.fromModelObject(elt):
                                            l.setdefault(memRel.toModelObject.qname.prefix,
                                                         []).append(memRel.toModelObject.name)
                dqc0015["conceptRuleIDs"] = conceptRuleIDs = {}
                for rel in dqcrtInstance.relationshipSet(
                        XbrlConst.conceptReference).modelRelationships:
                    if rel.toModelObject.role == "http://fasb.org/us-gaap/role/dqc/ruleID":
                        # fix: previously keyed on stale `elt` left over from
                        # the loop above; key on this relationship's source
                        conceptRuleIDs.setdefault(
                            rel.fromModelObject.qname.prefix,
                            {})[rel.fromModelObject.name] = int(
                                rel.toModelObject.stringValue.rpartition(".")[2])

                dqcrtInstance.close()
                del dqcrtInstance  # dereference closed modelXbrl

                def sortDqcLists(obj):
                    # recursively sort all lists for deterministic JSON output
                    if isinstance(obj, list):
                        obj.sort()
                    elif isinstance(obj, dict):
                        for objVal in obj.values():
                            sortDqcLists(objVal)

                sortDqcLists(dqc0015)
        jsonStr = _STR_UNICODE(json.dumps(ugtRels, ensure_ascii=False, indent=2))  # might not be unicode in 2.7
        _ugtRelsFileName = resourcesFilePath(
            modelManager,
            "us-gaap-rels-{}.json".format(ugtAbbr.rpartition("/")[2]))
        saveFile(cntlr, _ugtRelsFileName, jsonStr)  # 2.7 gets unicode this way

    modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
コード例 #22
0
def packageInfo(cntlr, URL, reload=False, packageManifestName=None, errors=None):
    """Resolve a taxonomy package at *URL* and return its descriptor dict.

    Locates (via the module web cache) the package file, finds its metadata
    manifest(s), parses them, and returns a dict of package properties
    (name, version, entry points, remappings, ...), or None on failure.

    :param cntlr: controller used for logging.  NOTE(review): file access
        goes through the module-level ``_cntlr`` web cache, not this
        parameter -- confirm that asymmetry is intentional.
    :param URL: location of the package (zip) or manifest file.
    :param reload: force re-download through the web cache.
    :param packageManifestName: fnmatch pattern for pre-PWD manifest files.
    :param errors: optional list that collects error codes encountered.
    """
    # fix: `errors=[]` was a mutable default shared (and accumulating)
    # across calls; create a fresh list per call instead
    if errors is None:
        errors = []
    #TODO several directories, eg User Application Data
    packageFilename = _cntlr.webCache.getfilename(URL, reload=reload, normalize=True)
    if not packageFilename:
        return None
    from arelle.FileSource import TAXONOMY_PACKAGE_FILE_NAMES
    filesource = None
    try:
        global openFileSource
        if openFileSource is None:
            from arelle.FileSource import openFileSource
        filesource = openFileSource(packageFilename, _cntlr)
        # allow multiple manifests [[metadata, prefix]...] for multiple catalogs
        packages = []
        packageFiles = []
        if filesource.isZip:
            validateTaxonomyPackage(cntlr, filesource, packageFiles, errors)
            # fix: hoisted out of the `not packageFiles` branch so it is
            # always defined for the prefix filters below
            _metaInf = '{}/META-INF/'.format(
                os.path.splitext(os.path.basename(packageFilename))[0])
            if not packageFiles:
                # look for pre-PWD packages
                _dir = filesource.dir
                if packageManifestName:
                    # pre-pwd
                    packageFiles = [
                        fileName for fileName in _dir
                        if fnmatch(fileName, packageManifestName)
                    ]
                elif _metaInf + 'taxonomyPackage.xml' in _dir:
                    # PWD taxonomy packages
                    packageFiles = [_metaInf + 'taxonomyPackage.xml']
                elif 'META-INF/taxonomyPackage.xml' in _dir:
                    # root-level META-INF taxonomy packages
                    packageFiles = ['META-INF/taxonomyPackage.xml']
            if len(packageFiles) < 1:
                raise IOError(
                    _("Taxonomy package contained no metadata file: {0}.").
                    format(', '.join(packageFiles)))
            # if current package files found, remove any nonconforming package files
            # fix: the first filter compared against the literal string
            # '_metaInf' instead of the _metaInf prefix, so it never matched
            if any(pf.startswith(_metaInf) for pf in packageFiles) and any(
                    not pf.startswith(_metaInf) for pf in packageFiles):
                packageFiles = [pf for pf in packageFiles if pf.startswith(_metaInf)]
            elif any(pf.startswith('META-INF/') for pf in packageFiles) and any(
                    not pf.startswith('META-INF/') for pf in packageFiles):
                packageFiles = [pf for pf in packageFiles if pf.startswith('META-INF/')]

            for packageFile in packageFiles:
                packageFileUrl = filesource.url + os.sep + packageFile
                packageFilePrefix = os.sep.join(os.path.split(packageFile)[:-1])
                if packageFilePrefix:
                    packageFilePrefix += os.sep
                packageFilePrefix = filesource.baseurl + os.sep + packageFilePrefix
                packages.append([packageFileUrl, packageFilePrefix, packageFile])
        else:
            cntlr.addToLog(_("Taxonomy package is not a zip file."),
                           messageCode="tpe:invalidArchiveFormat",
                           file=os.path.basename(packageFilename),
                           level=logging.ERROR)
            errors.append("tpe:invalidArchiveFormat")
            # a bare manifest/catalog file is still accepted (with the
            # error recorded above)
            if (os.path.basename(filesource.url) in TAXONOMY_PACKAGE_FILE_NAMES
                    or  # individual manifest file
                (os.path.basename(filesource.url) == "taxonomyPackage.xml"
                 and os.path.basename(os.path.dirname(filesource.url)) == "META-INF")):
                packageFile = packageFileUrl = filesource.url
                packageFilePrefix = os.path.dirname(packageFile)
                if packageFilePrefix:
                    packageFilePrefix += os.sep
                packages.append([packageFileUrl, packageFilePrefix, ""])
            else:
                # NOTE(review): format string only uses {0}; the joined
                # file-name list argument is never interpolated -- confirm
                raise IOError(
                    _("File must be a taxonomy package (zip file), catalog file, or manifest (): {0}."
                      ).format(packageFilename, ', '.join(TAXONOMY_PACKAGE_FILE_NAMES)))
        remappings = {}
        packageNames = []
        descriptions = []
        parsedPackage = None  # defensive: packages should never be empty here
        for packageFileUrl, packageFilePrefix, packageFile in packages:
            parsedPackage = parsePackage(_cntlr, filesource, packageFileUrl,
                                         packageFilePrefix, errors)
            if parsedPackage:
                packageNames.append(parsedPackage['name'])
                if parsedPackage.get('description'):
                    descriptions.append(parsedPackage['description'])
                for prefix, remapping in parsedPackage["remappings"].items():
                    if prefix not in remappings:
                        remappings[prefix] = remapping
                    else:
                        cntlr.addToLog(
                            "Package mapping duplicate rewrite start string %(rewriteStartString)s",
                            messageArgs={"rewriteStartString": prefix},
                            messageCode="arelle.packageDuplicateMapping",
                            file=os.path.basename(URL),
                            level=logging.ERROR)
                        errors.append("arelle.packageDuplicateMapping")
        if not parsedPackage:
            filesource.close()  # fix: early return previously leaked the file source
            return None
        package = {
            'name': ", ".join(packageNames),
            'status': 'enabled',
            'version': parsedPackage.get('version'),
            'license': parsedPackage.get('license'),
            'fileDate': time.strftime('%Y-%m-%dT%H:%M:%S UTC',
                                      time.gmtime(os.path.getmtime(packageFilename))),
            'URL': URL,
            'entryPoints': parsedPackage.get('entryPoints', {}),
            'manifestName': packageManifestName,
            'description': "; ".join(descriptions),
            'publisher': parsedPackage.get('publisher'),
            'publisherURL': parsedPackage.get('publisherURL'),
            'publisherCountry': parsedPackage.get('publisherCountry'),
            'publicationDate': parsedPackage.get('publicationDate'),
            'supersededTaxonomyPackages': parsedPackage.get('supersededTaxonomyPackages'),
            'versioningReports': parsedPackage.get('versioningReports'),
            'remappings': remappings,
        }
        filesource.close()
        return package
    except (EnvironmentError, etree.XMLSyntaxError):
        pass  # deliberate best-effort: unreadable package reports as None
    if filesource:
        filesource.close()
    return None
コード例 #23
0
def _create_cache(val):
    """
    Creates the caches needed for dqc_us_0018.

    Iterates the US-GAAP taxonomy years (``ugtDocs``), starting at
    ``_EARLIEST_US_GAAP_YEAR``; for any year whose deprecated-concepts JSON
    file is missing, loads that year's documentation linkbase and saves a
    JSON mapping of concept name to its deprecation label/date texts under
    ``resources/DQC_US_0018``.

    :param val: ValidateXbrl needed in order to save the cache
    :type val: :class: '~arelle.ValidateXbrl.ValidateXbrl'
    :return: no explicit return but creates and saves a cache in
        ``dqc_us_rules/resources/DQC_US_0018``
    :rtype: None
    """
    val.ugtNamespace = None
    cntlr = val.modelXbrl.modelManager.cntlr
    year = _EARLIEST_US_GAAP_YEAR

    for ugt in ugtDocs:
        # one JSON cache file per taxonomy year
        deprecations_json_file = os.path.join(
            os.path.dirname(__file__),
            'resources',
            'DQC_US_0018',
            '{}_deprecated-concepts.json'.format(str(year))
        )

        if not os.path.isfile(deprecations_json_file):
            ugt_doc_lb = ugt["docLB"]
            val.usgaapDeprecations = {}
            disclosure_system = (
                val.modelXbrl.modelManager.validateDisclosureSystem
            )

            # disable SEC/EFM validation while loading the doc linkbase
            # (the documentation file itself would not be acceptable),
            # restoring the prior setting afterwards
            prior_validate_disclosure_system = disclosure_system
            val.modelXbrl.modelManager.validateDisclosureSystem = False
            deprecations_instance = ModelXbrl.load(
                val.modelXbrl.modelManager,
                openFileSource(ugt_doc_lb, cntlr),
                _("built deprecations table in cache")  # noqa
            )
            val.modelXbrl.modelManager.validateDisclosureSystem = (
                prior_validate_disclosure_system
            )

            if deprecations_instance is not None:
                dep_label = 'http://www.xbrl.org/2009/role/deprecatedLabel'
                dep_date_label = (
                    'http://www.xbrl.org/2009/role/deprecatedDateLabel'
                )
                concept_label = XbrlConst.conceptLabel
                relationship_set = (
                    deprecations_instance.relationshipSet(concept_label)
                )
                model_relationships = relationship_set.modelRelationships

                # merge deprecation label text and date text per concept;
                # NOTE(review): the two branches place the new text first
                # and the previously stored text second in either case, so
                # element order depends on which label is seen last --
                # confirm downstream consumers expect this
                for labelRel in model_relationships:
                    model_documentation = labelRel.toModelObject
                    concept = labelRel.fromModelObject.name

                    if model_documentation.role == dep_label:
                        val.usgaapDeprecations[concept] = (
                            model_documentation.text,
                            val.usgaapDeprecations.get(concept, ('', ''))[0]
                        )
                    elif model_documentation.role == dep_date_label:
                        val.usgaapDeprecations[concept] = (
                            model_documentation.text,
                            val.usgaapDeprecations.get(concept, ('', ''))[1]
                        )
                json_str = str(
                    json.dumps(
                        val.usgaapDeprecations,
                        ensure_ascii=False, indent=0
                    )
                )  # might not be unicode in 2.7
                saveFile(cntlr, deprecations_json_file, json_str)
                deprecations_instance.close()
                del deprecations_instance  # dereference closed modelXbrl
        year += 1
コード例 #24
0
ファイル: PackageManager.py プロジェクト: PabTorre/Arelle
def packageInfo(cntlr, URL, reload=False, packageManifestName=None):
    """Resolve a taxonomy package at *URL* and return its descriptor dict.

    Locates the package file via the module-level ``_cntlr`` web cache,
    finds its metadata manifest(s), parses them, and returns a dict of
    package properties (name, version, remappings, ...); returns None on
    environment errors or when no package file can be resolved.

    :param cntlr: controller used for logging duplicate-mapping errors.
    :param URL: location of the package (zip) or manifest file.
    :param reload: force re-download through the web cache.
    :param packageManifestName: fnmatch pattern for pre-PWD manifest files.
    """
    #TODO several directories, eg User Application Data
    packageFilename = _cntlr.webCache.getfilename(URL, reload=reload, normalize=True)
    if packageFilename:
        from arelle.FileSource import TAXONOMY_PACKAGE_FILE_NAMES
        filesource = None
        try:
            # openFileSource is lazily imported into a module global
            global openFileSource
            if openFileSource is None:
                from arelle.FileSource import openFileSource
            filesource = openFileSource(packageFilename, _cntlr)
            # allow multiple manifests [[metadata, prefix]...] for multiple catalogs
            packages = []
            if filesource.isZip:
                _dir = filesource.dir
                # PWD-style metadata lives under <zipname>/META-INF/
                _metaInf = '{}/META-INF/'.format(
                            os.path.splitext(os.path.basename(packageFilename))[0])
                if packageManifestName:
                    # explicit (pre-PWD) manifest name pattern
                    packageFiles = [fileName
                                    for fileName in filesource.dir
                                    if fnmatch(fileName, packageManifestName)]
                elif _metaInf + 'taxonomyPackage.xml' in _dir:
                    # PWD taxonomy packages
                    packageFiles = [_metaInf + 'taxonomyPackage.xml']
                else:
                    # early generation taxonomy packages
                    packageFiles = filesource.taxonomyPackageMetadataFiles
                if len(packageFiles) < 1:
                    raise IOError(_("Taxonomy package contained no metadata file: {0}.")
                                  .format(', '.join(packageFiles)))
                # if current package files found, remove any nonconforming package files
                if any(pf.startswith(_metaInf) for pf in packageFiles) and any(not pf.startswith(_metaInf) for pf in packageFiles):
                    packageFiles = [pf for pf in packageFiles if pf.startswith(_metaInf)]
                    
                for packageFile in packageFiles:
                    packageFileUrl = filesource.url + os.sep + packageFile
                    packageFilePrefix = os.sep.join(os.path.split(packageFile)[:-1])
                    if packageFilePrefix:
                        packageFilePrefix += os.sep
                    packageFilePrefix = filesource.baseurl + os.sep +  packageFilePrefix
                    packages.append([packageFileUrl, packageFilePrefix, packageFile])
            elif (os.path.basename(filesource.url) in TAXONOMY_PACKAGE_FILE_NAMES or # individual manifest file
                  (os.path.basename(filesource.url) == "taxonomyPackage.xml" and 
                   os.path.basename(os.path.dirname(filesource.url)) == "META-INF")):
                # a bare manifest/catalog file (not zipped) is accepted too
                packageFile = packageFileUrl = filesource.url
                packageFilePrefix = os.path.dirname(packageFile)
                if packageFilePrefix:
                    packageFilePrefix += os.sep
                packages.append([packageFileUrl, packageFilePrefix, ""])
            else:
                raise IOError(_("File must be a taxonomy package (zip file), catalog file, or manifest (): {0}.")
                              .format(packageFilename, ', '.join(TAXONOMY_PACKAGE_FILE_NAMES)))
            remappings = {}
            packageNames = []
            descriptions = []
            # first remapping for a prefix wins; duplicates are logged
            for packageFileUrl, packageFilePrefix, packageFile in packages:
                parsedPackage = parsePackage(_cntlr, filesource, packageFileUrl, packageFilePrefix)
                packageNames.append(parsedPackage['name'])
                if parsedPackage.get('description'):
                    descriptions.append(parsedPackage['description'])
                for prefix, remapping in parsedPackage["remappings"].items():
                    if prefix not in remappings:
                        remappings[prefix] = remapping
                    else:
                        cntlr.addToLog("Package mapping duplicate rewrite start string %(rewriteStartString)s",
                                       messageArgs={"rewriteStartString": prefix},
                                       messageCode="arelle.packageDuplicateMapping",
                                       file=os.path.basename(URL),
                                       level=logging.ERROR)
            # NOTE(review): 'version' is indexed directly (unlike
            # 'description' above) and would raise KeyError if absent from
            # the last parsed manifest -- confirm manifests always carry it
            package = {'name': ", ".join(packageNames),
                       'status': 'enabled',
                       'version': parsedPackage['version'],
                       'fileDate': time.strftime('%Y-%m-%dT%H:%M:%S UTC', time.gmtime(os.path.getmtime(packageFilename))),
                       'URL': URL,
                       'manifestName': packageManifestName,
                       'description': "; ".join(descriptions),
                       'remappings': remappings,
                       }
            filesource.close()
            return package
        except EnvironmentError:
            # best effort: unreadable package reports as None below
            pass
        if filesource:
            filesource.close()
    return None
コード例 #25
0
def setup(val):
    """Build (or load from JSON cache) the us-gaap support tables used by
    later validation passes: deprecated concepts, calculation trees, and
    default dimensions.

    Caches are stored next to the us-gaap schema document in the web cache
    (``deprecated-concepts.json``, ``ugt-calculations.json``,
    ``ugt-default-dimensions.json``).  When a cache file is missing or
    unreadable it is rebuilt by loading the taxonomy's documentation
    linkbase / entry-point schema, then saved for subsequent runs.

    :param val: validation object; gains the attributes
        ``usgaapDeprecations`` (conceptName -> (date label, deprecation label)),
        ``usgaapCalculations``, ``usgaapDefaultDimensions``,
        ``deprecatedFactConcepts``/``deprecatedDimensions``/``deprecatedMembers``
        and the two link-role regexes below.
    """
    # NOTE(review): "linroleDefinitionIsDisclosure" looks like a typo of
    # "linkroleDefinitionIsDisclosure" -- left as-is because other modules may
    # reference the attribute as spelled here; confirm before renaming.
    val.linroleDefinitionIsDisclosure = re.compile(r"-\s+Disclosure\s+-\s",
                                                   re.IGNORECASE)
    val.linkroleDefinitionStatementSheet = re.compile(r"[^-]+-\s+Statement\s+-\s+.*", # no restriction to type of statement
                                                      re.IGNORECASE)
    val.ugtNamespace = None
    cntlr = val.modelXbrl.modelManager.cntlr
    # load deprecated concepts for filed year of us-gaap
    for ugt in ugtDocs:
        ugtNamespace = ugt["namespace"]
        if ugtNamespace in val.modelXbrl.namespaceDocs and len(val.modelXbrl.namespaceDocs[ugtNamespace]) > 0:
            val.ugtNamespace = ugtNamespace
            usgaapDoc = val.modelXbrl.namespaceDocs[ugtNamespace][0]
            deprecationsJsonFile = usgaapDoc.filepathdir + os.sep + "deprecated-concepts.json"
            file = None
            # fast path: reuse a previously built deprecations cache
            try:
                file = openFileStream(cntlr, deprecationsJsonFile, 'rt', encoding='utf-8')
                val.usgaapDeprecations = json.load(file)
                file.close()
            except Exception:
                # cache miss or unreadable cache: rebuild it from the taxonomy
                if file:
                    file.close()
                val.modelXbrl.modelManager.addToLog(_("loading us-gaap {0} deprecated concepts into cache").format(ugt["year"]))
                startedAt = time.time()
                ugtDocLB = ugt["docLB"]
                val.usgaapDeprecations = {}
                # load without SEC/EFM validation (doc file would not be acceptable)
                priorValidateDisclosureSystem = val.modelXbrl.modelManager.validateDisclosureSystem
                val.modelXbrl.modelManager.validateDisclosureSystem = False
                deprecationsInstance = ModelXbrl.load(val.modelXbrl.modelManager, 
                      # "http://xbrl.fasb.org/us-gaap/2012/elts/us-gaap-doc-2012-01-31.xml",
                      # load from zip (especially after caching) is incredibly faster
                      openFileSource(ugtDocLB, cntlr), 
                      _("built deprecations table in cache"))
                val.modelXbrl.modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
                if deprecationsInstance is None:
                    val.modelXbrl.error("arelle:notLoaded",
                        _("US-GAAP documentation not loaded: %(file)s"),
                        modelXbrl=val, file=os.path.basename(ugtDocLB))
                else:   
                    # load deprecations
                    # cache entry layout: (deprecated-date label text, deprecated label text);
                    # each branch preserves the other tuple slot since the two
                    # label roles may be encountered in either order
                    for labelRel in deprecationsInstance.relationshipSet(XbrlConst.conceptLabel).modelRelationships:
                        modelDocumentation = labelRel.toModelObject
                        conceptName = labelRel.fromModelObject.name
                        if modelDocumentation.role == 'http://www.xbrl.org/2009/role/deprecatedLabel':
                            val.usgaapDeprecations[conceptName] = (val.usgaapDeprecations.get(conceptName, ('',''))[0], modelDocumentation.text)
                        elif modelDocumentation.role == 'http://www.xbrl.org/2009/role/deprecatedDateLabel':
                            val.usgaapDeprecations[conceptName] = (modelDocumentation.text, val.usgaapDeprecations.get(conceptName, ('',''))[1])
                    jsonStr = _STR_UNICODE(json.dumps(val.usgaapDeprecations, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
                    saveFile(cntlr, deprecationsJsonFile, jsonStr)  # 2.7 gets unicode this way
                    deprecationsInstance.close()
                    del deprecationsInstance # dereference closed modelXbrl
                val.modelXbrl.profileStat(_("build us-gaap deprecated concepts cache"), time.time() - startedAt)
            ugtCalcsJsonFile = usgaapDoc.filepathdir + os.sep + "ugt-calculations.json"
            ugtDefaultDimensionsJsonFile = usgaapDoc.filepathdir + os.sep + "ugt-default-dimensions.json"
            file = None
            # fast path: reuse previously built calculations/default-dimension caches
            try:
                file = openFileStream(cntlr, ugtCalcsJsonFile, 'rt', encoding='utf-8')
                val.usgaapCalculations = json.load(file)
                file.close()
                file = openFileStream(cntlr, ugtDefaultDimensionsJsonFile, 'rt', encoding='utf-8')
                val.usgaapDefaultDimensions = json.load(file)
                file.close()
            except Exception:
                # either cache missing/unreadable: rebuild both from the entry-point schema
                if file:
                    file.close()
                val.modelXbrl.modelManager.addToLog(_("loading us-gaap {0} calculations and default dimensions into cache").format(ugt["year"]))
                startedAt = time.time()
                ugtEntryXsd = ugt["entryXsd"]
                val.usgaapCalculations = {}
                val.usgaapDefaultDimensions = {}
                # load without SEC/EFM validation (doc file would not be acceptable)
                priorValidateDisclosureSystem = val.modelXbrl.modelManager.validateDisclosureSystem
                val.modelXbrl.modelManager.validateDisclosureSystem = False
                calculationsInstance = ModelXbrl.load(val.modelXbrl.modelManager, 
                      # "http://xbrl.fasb.org/us-gaap/2012/entire/us-gaap-entryPoint-std-2012-01-31.xsd",
                      # load from zip (especially after caching) is incredibly faster
                      openFileSource(ugtEntryXsd, cntlr), 
                      _("built us-gaap calculations cache"))
                val.modelXbrl.modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
                if calculationsInstance is None:
                    val.modelXbrl.error("arelle:notLoaded",
                        _("US-GAAP calculations not loaded: %(file)s"),
                        modelXbrl=val, file=os.path.basename(ugtEntryXsd))
                else:   
                    # load calculations
                    # one dict per extended link role: "#roots", "#definition",
                    # "#isStatementSheet", plus conceptName -> [child concept names]
                    for ELR in calculationsInstance.relationshipSet(XbrlConst.summationItem).linkRoleUris:
                        elrRelSet = calculationsInstance.relationshipSet(XbrlConst.summationItem, ELR)
                        definition = ""
                        for roleType in calculationsInstance.roleTypes.get(ELR,()):
                            definition = roleType.definition
                            break
                        isStatementSheet = bool(val.linkroleDefinitionStatementSheet.match(definition))
                        elrUgtCalcs = {"#roots": [c.name for c in elrRelSet.rootConcepts],
                                       "#definition": definition,
                                       "#isStatementSheet": isStatementSheet}
                        for relFrom, rels in elrRelSet.fromModelObjects().items():
                            elrUgtCalcs[relFrom.name] = [rel.toModelObject.name for rel in rels]
                        val.usgaapCalculations[ELR] = elrUgtCalcs
                    jsonStr = _STR_UNICODE(json.dumps(val.usgaapCalculations, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
                    saveFile(cntlr, ugtCalcsJsonFile, jsonStr)  # 2.7 gets unicode this way
                    # load default dimensions
                    for defaultDimRel in calculationsInstance.relationshipSet(XbrlConst.dimensionDefault).modelRelationships:
                        if defaultDimRel.fromModelObject is not None and defaultDimRel.toModelObject is not None:
                            val.usgaapDefaultDimensions[defaultDimRel.fromModelObject.name] = defaultDimRel.toModelObject.name
                    jsonStr = _STR_UNICODE(json.dumps(val.usgaapDefaultDimensions, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
                    saveFile(cntlr, ugtDefaultDimensionsJsonFile, jsonStr)  # 2.7 gets unicode this way
                    calculationsInstance.close()
                    del calculationsInstance # dereference closed modelXbrl
                val.modelXbrl.profileStat(_("build us-gaap calculations and default dimensions cache"), time.time() - startedAt)
            # only the first matching us-gaap namespace is processed
            break
    val.deprecatedFactConcepts = defaultdict(list)
    val.deprecatedDimensions = defaultdict(list)
    val.deprecatedMembers = defaultdict(list)
コード例 #26
0
def _create_cache(val):
    """
    Creates the per-year deprecated-concept caches needed for dqc_us_0018

    For each taxonomy in ``ugtDocs`` (assumed ordered by year ascending from
    ``_EARLIEST_US_GAAP_YEAR``), loads the us-gaap documentation linkbase and
    saves a JSON file mapping each deprecated concept name to a
    ``(deprecation date, deprecation label)`` pair.  Years whose cache file
    already exists are skipped.

    :param val: ValidateXbrl needed in order to save the cache
    :type val: :class: '~arelle.ValidateXbrl.ValidateXbrl'
    :return: no explicit return but creates and saves a cache in
        dqc_us_rule/resources/DQC_US_0018
    :rtype: None
    """
    val.ugtNamespace = None
    cntlr = val.modelXbrl.modelManager.cntlr
    year = _EARLIEST_US_GAAP_YEAR

    for ugt in ugtDocs:
        deprecations_json_file = os.path.join(
            os.path.dirname(__file__),
            'resources',
            'DQC_US_0018',
            '{}_deprecated-concepts.json'.format(str(year))
        )

        if not os.path.isfile(deprecations_json_file):
            ugt_doc_lb = ugt["docLB"]
            val.usgaapDeprecations = {}
            # temporarily disable disclosure-system validation; the
            # documentation linkbase alone would not be an acceptable filing
            disclosure_system = (
                val.modelXbrl.modelManager.validateDisclosureSystem
            )

            prior_validate_disclosure_system = disclosure_system
            val.modelXbrl.modelManager.validateDisclosureSystem = False
            deprecations_instance = ModelXbrl.load(
                val.modelXbrl.modelManager,
                openFileSource(ugt_doc_lb, cntlr),
                _("built deprecations table in cache")  # noqa
            )
            val.modelXbrl.modelManager.validateDisclosureSystem = (
                prior_validate_disclosure_system
            )

            if deprecations_instance is not None:
                dep_label = 'http://www.xbrl.org/2009/role/deprecatedLabel'
                dep_date_label = (
                    'http://www.xbrl.org/2009/role/deprecatedDateLabel'
                )
                concept_label = XbrlConst.conceptLabel
                relationship_set = (
                    deprecations_instance.relationshipSet(concept_label)
                )
                model_relationships = relationship_set.modelRelationships

                # Each cache entry is (deprecation date, deprecation label).
                # The two label roles may be encountered in either order, so
                # each branch preserves the other tuple slot.
                for labelRel in model_relationships:
                    model_documentation = labelRel.toModelObject
                    concept = labelRel.fromModelObject.name

                    if model_documentation.role == dep_label:
                        # BUGFIX: keep the date in slot 0 and put the
                        # deprecation label text in slot 1; previously both
                        # roles wrote slot 0 and clobbered each other,
                        # inconsistent with the (date, label) layout used by
                        # the dep_date_label branch below.
                        val.usgaapDeprecations[concept] = (
                            val.usgaapDeprecations.get(concept, ('', ''))[0],
                            model_documentation.text
                        )
                    elif model_documentation.role == dep_date_label:
                        val.usgaapDeprecations[concept] = (
                            model_documentation.text,
                            val.usgaapDeprecations.get(concept, ('', ''))[1]
                        )
                json_str = str(
                    json.dumps(
                        val.usgaapDeprecations,
                        ensure_ascii=False, indent=0
                    )
                )  # might not be unicode in 2.7
                saveFile(cntlr, deprecations_json_file, json_str)
                deprecations_instance.close()
                del deprecations_instance  # dereference closed modelXbrl
        # NOTE(review): assumes ugtDocs is sorted by year ascending with no
        # gaps -- confirm against the ugtDocs declaration.
        year += 1
コード例 #27
0
def _make_cache(val, ugt, cntlr, ugt_default_dimensions_json_file):
    """
    Creates a new cache of the taxonomy's default dimensions

    :param val: ValidateXbrl to be validated
    :type val: :class: '~arelle.ValidateXbrl.ValidateXbrl'
    :param ugt: Taxonomy to check; its "entryXsd" entry locates the
        entry-point schema to load
    :type ugt: dict
    :param cntlr: controller used to open the taxonomy entry-point schema
    :type cntlr: :class: '~arelle.Cntlr.Cntlr'
    :param ugt_default_dimensions_json_file: location to save json default
        dimensions
    :type ugt_default_dimensions_json_file: str
    :return: no explicit return, but saves caches for dqc_us_0041
    :rtype: None
    """
    started_at = time.time()
    ugt_entry_xsd = ugt["entryXsd"]
    val.usgaapDefaultDimensions = {}
    # temporarily disable disclosure-system validation while loading the
    # entry-point schema, then restore the prior setting
    prior_validate_disclosure_system = (
        val.modelXbrl.modelManager.validateDisclosureSystem
    )
    val.modelXbrl.modelManager.validateDisclosureSystem = False
    ugt_entry_xsd_instance = (
        ModelXbrl.load(
            val.modelXbrl.modelManager,
            openFileSource(ugt_entry_xsd, cntlr),
            _("opened us-gaap entry xsd")  # noqa
        )
    )
    val.modelXbrl.modelManager.validateDisclosureSystem = (
        prior_validate_disclosure_system
    )

    if ugt_entry_xsd_instance is None:
        val.modelXbrl.error(
            "arelle:notLoaded",
            _("US-GAAP entry xsd not loaded: %(file)s"),  # noqa
            modelXbrl=val,
            file=os.path.basename(ugt_entry_xsd)
        )

    else:
        # collect dimension-default arcs; _default_dim_rel_is_instance (a
        # module-level helper) filters out relationships with missing ends
        model_relationships = (
            ugt_entry_xsd_instance.relationshipSet(
                XbrlConst.dimensionDefault
            ).modelRelationships
        )
        for default_dim_rel in model_relationships:
            if _default_dim_rel_is_instance(default_dim_rel):
                from_name = default_dim_rel.fromModelObject.name
                to_name = default_dim_rel.toModelObject.name
                val.usgaapDefaultDimensions[from_name] = to_name
        json_str = str(
            json.dumps(
                val.usgaapDefaultDimensions,
                ensure_ascii=False,
                indent=0
            )
        )  # might not be unicode in 2.7
        # 2.7 gets unicode this way
        saveFile(cntlr, ugt_default_dimensions_json_file, json_str)
        ugt_entry_xsd_instance.close()
        del ugt_entry_xsd_instance  # dereference closed modelXbrl
    val.modelXbrl.profileStat(
        _("build default dimensions cache"),  # noqa
        time.time() - started_at
    )
コード例 #28
0
 def watchCycle(self):
     """Background-thread loop of the RSS watcher.

     Repeatedly reloads the configured RSS feed, then (depending on
     rssWatchOptions) validates each new filing, runs watch plugins, matches
     fact text, and sends e-mail alerts.  Sleeps 10 minutes between cycles;
     exits when self.stopRequested is set, clearing self.thread on the way
     out.  UI updates go through modelManager since this runs off the UI
     thread.
     """
     while not self.stopRequested:
         rssWatchOptions = self.rssModelXbrl.modelManager.rssWatchOptions

         # check rss expiration
         rssHeaders = self.cntlr.webCache.getheaders(self.rssModelXbrl.modelManager.rssWatchOptions.get("feedSourceUri"))
         expires = parseRfcDatetime(rssHeaders.get("expires"))
         # NOTE(review): the expires-based reload condition is commented out,
         # so the feed is unconditionally re-fetched each cycle; `expires` is
         # currently unused.
         reloadNow = True # texpires and expires > datetime.datetime.now()

         # reload rss feed
         self.rssModelXbrl.reload('checking RSS items', reloadCache=reloadNow)
         if self.stopRequested: break
         # setup validator
         postLoadActions = []
         if rssWatchOptions.get("validateDisclosureSystemRules"):
             self.instValidator = ValidateFiling.ValidateFiling(self.rssModelXbrl)
             postLoadActions.append(_("validating"))
         elif rssWatchOptions.get("validateXbrlRules") or rssWatchOptions.get("validateFormulaAssertions"):
             self.instValidator = ValidateXbrl.ValidateXbrl(self.rssModelXbrl)
             postLoadActions.append(_("validating"))
             if (rssWatchOptions.get("validateFormulaAssertions")):
                 postLoadActions.append(_("running formulas"))
         else:
             self.instValidator = None

         matchTextExpr = rssWatchOptions.get("matchTextExpr") 
         if matchTextExpr:
             matchPattern = re.compile(matchTextExpr)
             postLoadActions.append(_("matching text"))
         else:
             matchPattern= None
         postLoadAction = ', '.join(postLoadActions)

         # anything to check new filings for
         if (rssWatchOptions.get("validateDisclosureSystemRules") or
             rssWatchOptions.get("validateXbrlRules") or
             rssWatchOptions.get("validateCalcLinkbase") or
             rssWatchOptions.get("validateFormulaAssertions") or
             rssWatchOptions.get("alertMatchedFactText") or
             any(pluginXbrlMethod(rssWatchOptions)
                 for pluginXbrlMethod in pluginClassMethods("RssWatch.HasWatchAction"))
             ):
             # form keys in ascending order of pubdate
             pubDateRssItems = []
             for rssItem in self.rssModelXbrl.modelDocument.rssItems:
                 pubDateRssItems.append((rssItem.pubDate,rssItem.objectId()))

             for pubDate, rssItemObjectId in sorted(pubDateRssItems):
                 rssItem = self.rssModelXbrl.modelObject(rssItemObjectId)
                 # update ui thread via modelManager (running in background here)
                 self.rssModelXbrl.modelManager.viewModelObject(self.rssModelXbrl, rssItem.objectId())
                 if self.stopRequested:
                     break
                 # skip items already processed in a prior cycle
                 latestPubDate = XmlUtil.datetimeValue(rssWatchOptions.get("latestPubDate"))
                 if (latestPubDate and 
                     rssItem.pubDate < latestPubDate):
                     continue
                 try:
                     # try zipped URL if possible, else expanded instance document
                     modelXbrl = ModelXbrl.load(self.rssModelXbrl.modelManager, 
                                                openFileSource(rssItem.zippedUrl, self.cntlr),
                                                postLoadAction)
                     if self.stopRequested:
                         modelXbrl.close()
                         break

                     emailAlert = False
                     if modelXbrl.modelDocument is None:
                         modelXbrl.error("arelle.rssWatch",
                                         _("RSS item %(company)s %(form)s document not loaded: %(date)s"),
                                         modelXbrl=modelXbrl, company=rssItem.companyName, 
                                         form=rssItem.formType, date=rssItem.filingDate)
                         rssItem.status = "not loadable"
                     else:
                         # validate schema, linkbase, or instance
                         if self.stopRequested:
                             modelXbrl.close()
                             break
                         if self.instValidator:
                             self.instValidator.validate(modelXbrl)
                             # NOTE(review): "alertValiditionError" is misspelled but is a
                             # runtime option key; it must stay in sync with the code that
                             # writes the option -- do not correct in isolation.
                             if modelXbrl.errors and rssWatchOptions.get("alertValiditionError"):
                                 emailAlert = True
                         for pluginXbrlMethod in pluginClassMethods("RssWatch.DoWatchAction"):  
                             pluginXbrlMethod(modelXbrl, rssWatchOptions, rssItem)      
                         # check match expression
                         if matchPattern:
                             for fact in modelXbrl.factsInInstance:
                                 v = fact.value
                                 if v is not None:
                                     m = matchPattern.search(v)
                                     if m:
                                         # report the match with ~20 chars of surrounding context
                                         fr, to = m.span()
                                         msg = _("Fact Variable {0}\n context {1}\n matched text: {2}").format( 
                                                 fact.qname, fact.contextID, v[max(0,fr-20):to+20])
                                         modelXbrl.info("arelle.rssInfo",
                                                        msg,
                                                        modelXbrl=modelXbrl) # msg as code passes it through to the status
                                         if rssWatchOptions.get("alertMatchedFactText"):
                                             emailAlert = True

                         if (rssWatchOptions.get("formulaFileUri") and rssWatchOptions.get("validateFormulaAssertions") and
                             self.instValidator): 
                             # attach formulas
                             ModelDocument.load(modelXbrl, rssWatchOptions["formulaFileUri"])
                             ValidateFormula.validate(self.instValidator)

                     rssItem.setResults(modelXbrl)
                     modelXbrl.close()
                     del modelXbrl  # completely dereference
                     self.rssModelXbrl.modelManager.viewModelObject(self.rssModelXbrl, rssItem.objectId())
                     if rssItem.assertionUnsuccessful and rssWatchOptions.get("alertAssertionUnsuccessful"):
                         emailAlert = True

                     msg = _("Filing CIK {0}\n "
                              "company {1}\n "
                              "published {2}\n "
                              "form type {3}\n "
                              "filing date {4}\n "
                              "period {5}\n "
                              "year end {6}\n "
                              "results: {7}").format(
                              rssItem.cikNumber,
                              rssItem.companyName,
                              rssItem.pubDate,
                              rssItem.formType,
                              rssItem.filingDate,
                              rssItem.period,
                              rssItem.fiscalYearEnd,
                              rssItem.status)
                     self.rssModelXbrl.info("arelle:rssWatch", msg, modelXbrl=self.rssModelXbrl)
                     emailAddress = rssWatchOptions.get("emailAddress")
                     if emailAlert and emailAddress:
                         # self-addressed alert mail
                         self.rssModelXbrl.modelManager.showStatus(_("sending e-mail alert"))
                         import smtplib
                         from email.mime.text import MIMEText
                         emailMsg = MIMEText(msg)
                         emailMsg["Subject"] = _("Arelle RSS Watch alert on {0}").format(rssItem.companyName)
                         emailMsg["From"] = emailAddress
                         emailMsg["To"] = emailAddress
                         # NOTE(review): SMTP() is constructed without a host and no
                         # connect() is issued before sendmail() -- confirm a default
                         # local SMTP server is intended; otherwise this raises and is
                         # swallowed by the except below.
                         smtp = smtplib.SMTP()
                         smtp.sendmail(emailAddress, [emailAddress], emailMsg.as_string())
                         smtp.quit()
                     self.rssModelXbrl.modelManager.showStatus(_("RSS item {0}, {1} completed, status {2}").format(rssItem.companyName, rssItem.formType, rssItem.status), 3500)
                     # persist high-water mark so this item is skipped next cycle
                     self.rssModelXbrl.modelManager.cntlr.rssWatchUpdateOption(rssItem.pubDate.strftime('%Y-%m-%dT%H:%M:%S'))
                 except Exception as err:
                     # log and continue with the next RSS item
                     self.rssModelXbrl.error("arelle.rssError",
                                             _("RSS item %(company)s, %(form)s, %(date)s, exception: %(error)s"),
                                             modelXbrl=self.rssModelXbrl, company=rssItem.companyName, 
                                             form=rssItem.formType, date=rssItem.filingDate, error=err,
                                             exc_info=True)
                 if self.stopRequested: break
         if self.stopRequested: 
             self.cntlr.showStatus(_("RSS watch, stop requested"), 10000)
         else:
             import time
             # wait 10 minutes before polling the feed again
             time.sleep(600)

     self.thread = None  # close thread
     self.stopRequested = False
     
             
コード例 #29
0
ファイル: PackageManager.py プロジェクト: asteria277/Arelle
def packageInfo(cntlr, URL, reload=False, packageManifestName=None, errors=None):
    """Read a taxonomy package and return a dict describing it.

    Fetches *URL* via the web cache (or re-uses the cached copy), locates the
    package's taxonomyPackage.xml manifest(s), parses them, and returns a
    package-info dict (name, version, remappings, publisher details, ...).
    Returns None when the package cannot be read or parsed.

    :param cntlr: controller used for logging validation problems
    :param URL: URL or path of the package (zip) file
    :param reload: when True, bypass the web cache and re-download
    :param packageManifestName: fnmatch pattern naming a pre-PWD manifest
    :param errors: optional list; tpe:/arelle: error codes are appended to it
    :return: package info dict, or None
    """
    #TODO several directories, eg User Application Data
    if errors is None:
        # BUGFIX: previously a mutable default ([]) accumulated error codes
        # across unrelated calls
        errors = []
    packageFilename = _cntlr.webCache.getfilename(URL, reload=reload, normalize=True)
    if packageFilename:
        from arelle.FileSource import TAXONOMY_PACKAGE_FILE_NAMES
        filesource = None
        try:
            global openFileSource
            if openFileSource is None:
                from arelle.FileSource import openFileSource
            # NOTE(review): this module uses the global _cntlr for file access
            # but the cntlr parameter for logging -- presumably intentional.
            filesource = openFileSource(packageFilename, _cntlr)
            # allow multiple manifests [[metadata, prefix]...] for multiple catalogs
            packages = []
            packageFiles = []
            parsedPackage = None  # robustness: defined even if packages is empty
            # prefix of a conforming package's META-INF directory, derived from
            # the zip's base name (defined up front; used both to find pre-PWD
            # manifests and to filter nonconforming manifests below)
            _metaInf = '{}/META-INF/'.format(
                        os.path.splitext(os.path.basename(packageFilename))[0])
            if filesource.isZip:
                _dir = filesource.dir
                # single top level directory
                topLevelDirectories = set(f.partition('/')[0] for f in _dir)
                if len(topLevelDirectories) != 1:
                    cntlr.addToLog(_("Taxonomy package contains %(count)s top level directories:  %(topLevelDirectories)s"),
                                   messageArgs={"count": len(topLevelDirectories),
                                                "topLevelDirectories": ', '.join(sorted(topLevelDirectories))},
                                   messageCode="tpe:invalidDirectoryStructure",
                                   file=os.path.basename(packageFilename),
                                   level=logging.ERROR)
                    errors.append("tpe:invalidDirectoryStructure")
                elif not any('/META-INF/' in f for f in _dir):
                    cntlr.addToLog(_("Taxonomy package does not contain a subdirectory META-INF"),
                                   messageCode="tpe:metadataDirectoryNotFound",
                                   file=os.path.basename(packageFilename),
                                   level=logging.ERROR)
                    errors.append("tpe:metadataDirectoryNotFound")
                elif any(f.endswith('/META-INF/taxonomyPackage.xml') for f in _dir):
                    packageFiles = [f for f in _dir if f.endswith('/META-INF/taxonomyPackage.xml')]
                else:
                    cntlr.addToLog(_("Taxonomy package does not contain a metadata file */META-INF/taxonomyPackage.xml"),
                                   messageCode="tpe:metadataFileNotFound",
                                   file=os.path.basename(packageFilename),
                                   level=logging.ERROR)
                    errors.append("tpe:metadataFileNotFound")
                if not packageFiles:
                    # look for pre-PWD packages
                    if packageManifestName:
                        # pre-pwd
                        packageFiles = [fileName
                                        for fileName in _dir
                                        if fnmatch(fileName, packageManifestName)]
                    elif _metaInf + 'taxonomyPackage.xml' in _dir:
                        # PWD taxonomy packages
                        packageFiles = [_metaInf + 'taxonomyPackage.xml']
                    elif 'META-INF/taxonomyPackage.xml' in _dir:
                        # root-level META-INF taxonomy packages
                        packageFiles = ['META-INF/taxonomyPackage.xml']
                    else:
                        # early generation taxonomy packages
                        packageFiles = filesource.taxonomyPackageMetadataFiles
                if len(packageFiles) < 1:
                    raise IOError(_("Taxonomy package contained no metadata file: {0}.")
                                  .format(', '.join(packageFiles)))
                # if current package files found, remove any nonconforming package files
                # BUGFIX: compare against the _metaInf prefix variable; the original
                # compared against the literal string '_metaInf', so this filter
                # never applied (and _metaInf could be unbound here)
                if any(pf.startswith(_metaInf) for pf in packageFiles) and any(not pf.startswith(_metaInf) for pf in packageFiles):
                    packageFiles = [pf for pf in packageFiles if pf.startswith(_metaInf)]
                elif any(pf.startswith('META-INF/') for pf in packageFiles) and any(not pf.startswith('META-INF/') for pf in packageFiles):
                    packageFiles = [pf for pf in packageFiles if pf.startswith('META-INF/')]

                for packageFile in packageFiles:
                    packageFileUrl = filesource.url + os.sep + packageFile
                    packageFilePrefix = os.sep.join(os.path.split(packageFile)[:-1])
                    if packageFilePrefix:
                        packageFilePrefix += os.sep
                    packageFilePrefix = filesource.baseurl + os.sep +  packageFilePrefix
                    packages.append([packageFileUrl, packageFilePrefix, packageFile])
            else:
                cntlr.addToLog(_("Taxonomy package is not a zip file."),
                               messageCode="tpe:invalidArchiveFormat",
                               file=os.path.basename(packageFilename),
                               level=logging.ERROR)
                errors.append("tpe:invalidArchiveFormat")
                if (os.path.basename(filesource.url) in TAXONOMY_PACKAGE_FILE_NAMES or # individual manifest file
                      (os.path.basename(filesource.url) == "taxonomyPackage.xml" and 
                       os.path.basename(os.path.dirname(filesource.url)) == "META-INF")):
                    packageFile = packageFileUrl = filesource.url
                    packageFilePrefix = os.path.dirname(packageFile)
                    if packageFilePrefix:
                        packageFilePrefix += os.sep
                    packages.append([packageFileUrl, packageFilePrefix, ""])
                else:
                    # BUGFIX: format string previously rendered "manifest (): <file>."
                    # and silently dropped the list of acceptable manifest names
                    raise IOError(_("File must be a taxonomy package (zip file), catalog file, or manifest ({0}): {1}.")
                                  .format(', '.join(TAXONOMY_PACKAGE_FILE_NAMES), packageFilename))
            remappings = {}
            packageNames = []
            descriptions = []
            for packageFileUrl, packageFilePrefix, packageFile in packages:
                parsedPackage = parsePackage(_cntlr, filesource, packageFileUrl, packageFilePrefix, errors)
                packageNames.append(parsedPackage['name'])
                if parsedPackage.get('description'):
                    descriptions.append(parsedPackage['description'])
                for prefix, remapping in parsedPackage["remappings"].items():
                    if prefix not in remappings:
                        remappings[prefix] = remapping
                    else:
                        cntlr.addToLog("Package mapping duplicate rewrite start string %(rewriteStartString)s",
                                       messageArgs={"rewriteStartString": prefix},
                                       messageCode="arelle.packageDuplicateMapping",
                                       file=os.path.basename(URL),
                                       level=logging.ERROR)
                        errors.append("arelle.packageDuplicateMapping")
            if not parsedPackage:
                return None
            # summary fields come from the last parsed manifest; names and
            # descriptions are joined across all manifests
            package = {'name': ", ".join(packageNames),
                       'status': 'enabled',
                       'version': parsedPackage.get('version'),
                       'license': parsedPackage.get('license'),
                       'fileDate': time.strftime('%Y-%m-%dT%H:%M:%S UTC', time.gmtime(os.path.getmtime(packageFilename))),
                       'URL': URL,
                       'manifestName': packageManifestName,
                       'description': "; ".join(descriptions),
                       'publisher': parsedPackage.get('publisher'), 
                       'publisherURL': parsedPackage.get('publisherURL'),
                       'publisherCountry': parsedPackage.get('publisherCountry'), 
                       'publicationDate': parsedPackage.get('publicationDate'),
                       'supersededTaxonomyPackages': parsedPackage.get('supersededTaxonomyPackages'), 
                       'versioningReports': parsedPackage.get('versioningReports'),
                       'remappings': remappings,
                       }
            filesource.close()
            return package
        except (EnvironmentError, etree.XMLSyntaxError):
            # best-effort: an unreadable or unparseable package is reported as absent
            pass
        if filesource:
            filesource.close()
    return None
コード例 #30
0
ファイル: PackageManager.py プロジェクト: 1zaak/Arelle
def packageInfo(URL, reload=False, packageManifestName=None):
    """Return a metadata dict for the taxonomy package at URL, or None.

    Locates (or downloads) the package via the controller's web cache, opens
    it as a file source, parses one or more package manifests, and merges
    their names, descriptions and URL remappings into a single dict.

    :param URL: package URL (or local path) to describe
    :param reload: True to force re-retrieval through the web cache
    :param packageManifestName: optional fnmatch pattern selecting manifest
        files inside the zip (default: standard taxonomy package metadata)
    :return: dict with name/status/version/fileDate/URL/manifestName/
        description/remappings keys, or None on any environment error
    """
    #TODO several directories, eg User Application Data
    packageFilename = _cntlr.webCache.getfilename(URL,
                                                  reload=reload,
                                                  normalize=True)
    if packageFilename:
        from arelle.FileSource import TAXONOMY_PACKAGE_FILE_NAMES
        filesource = None
        try:
            global openFileSource
            if openFileSource is None:
                from arelle.FileSource import openFileSource
            filesource = openFileSource(packageFilename, _cntlr)
            # allow multiple manifests [[metadata, prefix]...] for multiple catalogs
            packages = []
            if filesource.isZip:
                if packageManifestName:
                    packageFiles = [
                        fileName for fileName in filesource.dir
                        if fnmatch(fileName, packageManifestName)
                    ]
                else:
                    packageFiles = filesource.taxonomyPackageMetadataFiles
                if len(packageFiles) < 1:
                    # fixed: the original formatted the (necessarily empty)
                    # packageFiles list, producing a blank message; report the
                    # offending package file name instead
                    raise IOError(
                        _("Taxonomy package contained no metadata file: {0}.").
                        format(packageFilename))
                for packageFile in packageFiles:
                    packageFileUrl = filesource.file(filesource.url + os.sep +
                                                     packageFile)[0]
                    packageFilePrefix = os.sep.join(
                        os.path.split(packageFile)[:-1])
                    if packageFilePrefix:
                        packageFilePrefix += os.sep
                    packageFilePrefix = filesource.baseurl + os.sep + packageFilePrefix
                    packages.append([packageFileUrl, packageFilePrefix])
            elif os.path.basename(
                    filesource.url
            ) in TAXONOMY_PACKAGE_FILE_NAMES:  # individual manifest file
                packageFile = packageFileUrl = filesource.url
                packageFilePrefix = os.sep.join(
                    os.path.split(packageFile)[:-1])
                if packageFilePrefix:
                    packageFilePrefix += os.sep
                packages.append([packageFileUrl, packageFilePrefix])
            else:
                # fixed: the format string was missing the {1} placeholder, so
                # the list of acceptable manifest names passed to .format()
                # was silently dropped and the message showed "manifest ()"
                raise IOError(
                    _("File must be a taxonomy package (zip file), catalog file, or manifest ({1}): {0}."
                      ).format(packageFilename,
                               ', '.join(TAXONOMY_PACKAGE_FILE_NAMES)))
            remappings = {}
            packageNames = []
            descriptions = []
            for packageFileUrl, packageFilePrefix in packages:
                parsedPackage = parsePackage(_cntlr, packageFileUrl)
                packageNames.append(parsedPackage['name'])
                if parsedPackage.get('description'):
                    descriptions.append(parsedPackage['description'])
                for prefix, remapping in parsedPackage["remappings"].items():
                    # absolute http(s) remappings are kept verbatim; relative
                    # ones are resolved against this manifest's prefix path
                    remappings[prefix] = (remapping
                                          if isHttpUrl(remapping) else
                                          (packageFilePrefix +
                                           remapping.replace("/", os.sep)))
            package = {
                'name':
                ", ".join(packageNames),
                'status':
                'enabled',
                'version':
                parsedPackage['version'],
                'fileDate':
                time.strftime('%Y-%m-%dT%H:%M:%S UTC',
                              time.gmtime(os.path.getmtime(packageFilename))),
                'URL':
                URL,
                'manifestName':
                packageManifestName,
                'description':
                "; ".join(descriptions),
                'remappings':
                remappings,
            }
            filesource.close()
            return package
        except EnvironmentError:
            # best-effort: unreadable or absent package yields None
            pass
        if filesource:
            filesource.close()
    return None
コード例 #31
0
def setup(val):
    """Prepare semantic (logging-style) US-GAAP validation data on *val*.

    Builds, or loads from per-taxonomy JSON cache files next to the filed
    us-gaap taxonomy, three tables used by the semantic checks: deprecated
    concepts, UGT calculation relationships, and UGT default dimensions.
    No-op unless val.validateLoggingSemantic is set.
    """
    if not val.validateLoggingSemantic:  # all checks herein are SEMANTIC
        return

    # NOTE(review): attribute spells "linrole" (not "linkrole"); preserved
    # as-is because other modules may read it by this exact name -- confirm
    val.linroleDefinitionIsDisclosure = re.compile(r"-\s+Disclosure\s+-\s",
                                                   re.IGNORECASE)
    val.linkroleDefinitionStatementSheet = re.compile(r"[^-]+-\s+Statement\s+-\s+.*", # no restriction to type of statement
                                                      re.IGNORECASE)
    val.ugtNamespace = None
    cntlr = val.modelXbrl.modelManager.cntlr
    # load deprecated concepts for filed year of us-gaap
    for ugt in ugtDocs:
        ugtNamespace = ugt["namespace"]
        # only the UGT year actually referenced by the filing is processed
        if ugtNamespace in val.modelXbrl.namespaceDocs and len(val.modelXbrl.namespaceDocs[ugtNamespace]) > 0:
            val.ugtNamespace = ugtNamespace
            usgaapDoc = val.modelXbrl.namespaceDocs[ugtNamespace][0]
            deprecationsJsonFile = usgaapDoc.filepathdir + os.sep + "deprecated-concepts.json"
            file = None
            try:
                # fast path: previously built deprecations cache
                file = openFileStream(cntlr, deprecationsJsonFile, 'rt', encoding='utf-8')
                val.usgaapDeprecations = json.load(file)
                file.close()
            except Exception:
                # cache miss or unreadable: rebuild from the UGT documentation
                # linkbase and save for next run
                if file:
                    file.close()
                val.modelXbrl.modelManager.addToLog(_("loading us-gaap {0} deprecated concepts into cache").format(ugt["year"]))
                startedAt = time.time()
                ugtDocLB = ugt["docLB"]
                val.usgaapDeprecations = {}
                # load without SEC/EFM validation (doc file would not be acceptable)
                priorValidateDisclosureSystem = val.modelXbrl.modelManager.validateDisclosureSystem
                val.modelXbrl.modelManager.validateDisclosureSystem = False
                deprecationsInstance = ModelXbrl.load(val.modelXbrl.modelManager, 
                      # "http://xbrl.fasb.org/us-gaap/2012/elts/us-gaap-doc-2012-01-31.xml",
                      # load from zip (especially after caching) is incredibly faster
                      openFileSource(ugtDocLB, cntlr), 
                      _("built deprecations table in cache"))
                val.modelXbrl.modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
                if deprecationsInstance is None:
                    val.modelXbrl.error("arelle:notLoaded",
                        _("US-GAAP documentation not loaded: %(file)s"),
                        modelXbrl=val, file=os.path.basename(ugtDocLB))
                else:   
                    # load deprecations
                    # table entry per concept name: (deprecated-date label text,
                    # deprecated label text), filled from whichever role appears
                    for labelRel in deprecationsInstance.relationshipSet(XbrlConst.conceptLabel).modelRelationships:
                        modelDocumentation = labelRel.toModelObject
                        conceptName = labelRel.fromModelObject.name
                        if modelDocumentation.role == 'http://www.xbrl.org/2009/role/deprecatedLabel':
                            val.usgaapDeprecations[conceptName] = (val.usgaapDeprecations.get(conceptName, ('',''))[0], modelDocumentation.text)
                        elif modelDocumentation.role == 'http://www.xbrl.org/2009/role/deprecatedDateLabel':
                            val.usgaapDeprecations[conceptName] = (modelDocumentation.text, val.usgaapDeprecations.get(conceptName, ('',''))[1])
                    jsonStr = _STR_UNICODE(json.dumps(val.usgaapDeprecations, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
                    saveFile(cntlr, deprecationsJsonFile, jsonStr)  # 2.7 gets unicode this way
                    deprecationsInstance.close()
                    del deprecationsInstance # dereference closed modelXbrl
                val.modelXbrl.profileStat(_("build us-gaap deprecated concepts cache"), time.time() - startedAt)
            ugtCalcsJsonFile = usgaapDoc.filepathdir + os.sep + "ugt-calculations.json"
            ugtDefaultDimensionsJsonFile = usgaapDoc.filepathdir + os.sep + "ugt-default-dimensions.json"
            file = None
            try:
                # fast path: previously built calculations + default-dimensions caches
                file = openFileStream(cntlr, ugtCalcsJsonFile, 'rt', encoding='utf-8')
                val.usgaapCalculations = json.load(file)
                file.close()
                file = openFileStream(cntlr, ugtDefaultDimensionsJsonFile, 'rt', encoding='utf-8')
                val.usgaapDefaultDimensions = json.load(file)
                file.close()
            except Exception:
                # cache miss or unreadable: rebuild both tables from the UGT
                # standard entry point and save for next run
                if file:
                    file.close()
                val.modelXbrl.modelManager.addToLog(_("loading us-gaap {0} calculations and default dimensions into cache").format(ugt["year"]))
                startedAt = time.time()
                ugtEntryXsd = ugt["entryXsd"]
                val.usgaapCalculations = {}
                val.usgaapDefaultDimensions = {}
                # load without SEC/EFM validation (doc file would not be acceptable)
                priorValidateDisclosureSystem = val.modelXbrl.modelManager.validateDisclosureSystem
                val.modelXbrl.modelManager.validateDisclosureSystem = False
                calculationsInstance = ModelXbrl.load(val.modelXbrl.modelManager, 
                      # "http://xbrl.fasb.org/us-gaap/2012/entire/us-gaap-entryPoint-std-2012-01-31.xsd",
                      # load from zip (especially after caching) is incredibly faster
                      openFileSource(ugtEntryXsd, cntlr), 
                      _("built us-gaap calculations cache"))
                val.modelXbrl.modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
                if calculationsInstance is None:
                    val.modelXbrl.error("arelle:notLoaded",
                        _("US-GAAP calculations not loaded: %(file)s"),
                        modelXbrl=val, file=os.path.basename(ugtEntryXsd))
                else:   
                    # load calculations: one dict per extended link role with
                    # root concepts, the role definition, a statement-sheet
                    # flag, and parent -> [children] name lists
                    for ELR in calculationsInstance.relationshipSet(XbrlConst.summationItem).linkRoleUris:
                        elrRelSet = calculationsInstance.relationshipSet(XbrlConst.summationItem, ELR)
                        definition = ""
                        for roleType in calculationsInstance.roleTypes.get(ELR,()):
                            definition = roleType.definition
                            break
                        isStatementSheet = bool(val.linkroleDefinitionStatementSheet.match(definition))
                        elrUgtCalcs = {"#roots": [c.name for c in elrRelSet.rootConcepts],
                                       "#definition": definition,
                                       "#isStatementSheet": isStatementSheet}
                        for relFrom, rels in elrRelSet.fromModelObjects().items():
                            elrUgtCalcs[relFrom.name] = [rel.toModelObject.name for rel in rels]
                        val.usgaapCalculations[ELR] = elrUgtCalcs
                    jsonStr = _STR_UNICODE(json.dumps(val.usgaapCalculations, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
                    saveFile(cntlr, ugtCalcsJsonFile, jsonStr)  # 2.7 gets unicode this way
                    # load default dimensions
                    for defaultDimRel in calculationsInstance.relationshipSet(XbrlConst.dimensionDefault).modelRelationships:
                        if defaultDimRel.fromModelObject is not None and defaultDimRel.toModelObject is not None:
                            val.usgaapDefaultDimensions[defaultDimRel.fromModelObject.name] = defaultDimRel.toModelObject.name
                    jsonStr = _STR_UNICODE(json.dumps(val.usgaapDefaultDimensions, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
                    saveFile(cntlr, ugtDefaultDimensionsJsonFile, jsonStr)  # 2.7 gets unicode this way
                    calculationsInstance.close()
                    del calculationsInstance # dereference closed modelXbrl
                val.modelXbrl.profileStat(_("build us-gaap calculations and default dimensions cache"), time.time() - startedAt)
            break
    # accumulators filled during fact validation elsewhere
    val.deprecatedFactConcepts = defaultdict(list)
    val.deprecatedDimensions = defaultdict(list)
    val.deprecatedMembers = defaultdict(list)
コード例 #32
0
def loadDqc0015signwarningRules(modelXbrl):
    """Return the DQC rule 0015 sign-warning tables, building and caching
    them as JSON on first use.

    If a previously built signwarnings.json exists it is loaded and returned
    directly.  Otherwise the signwarnings-template.json is loaded, each DQC
    rules taxonomy in latestDqcrtDocs is opened (without disclosure-system
    validation), the rule 0015 concept/axis/member exclusions are extracted,
    and the merged result is saved back to the cache file.

    :param modelXbrl: model whose modelManager/cntlr provide caching & logging
    :return: dict keyed by rule-object name (conceptNames, excludedMemberNames,
        excludedAxesMembers, excludedMemberStrings) of per-namespace entries
    """
    conceptRule = "http://fasb.org/dqcrules/arcrole/concept-rule" # FASB arcrule
    rule0015 = "http://fasb.org/us-gaap/role/dqc/0015"
    modelManager = modelXbrl.modelManager
    cntlr = modelXbrl.modelManager.cntlr
    # check for cached completed signwarnings
    _signwarningsFileName = resourcesFilePath(modelManager, "signwarnings.json")
    if os.path.exists(_signwarningsFileName): 
        _file = openFileStream(modelManager.cntlr, _signwarningsFileName, 'rt', encoding='utf-8')
        signwarnings = json.load(_file) # {localName: date, ...}
        _file.close()
        return signwarnings
    # load template rules
    # NOTE(review): if resourcesFilePath returned a falsy value, signwarnings
    # would be unbound below -- assumed always truthy; confirm
    _fileName = resourcesFilePath(modelManager, "signwarnings-template.json")
    if _fileName:
        _file = openFileStream(modelXbrl.modelManager.cntlr, _fileName, 'rt', encoding='utf-8')
        signwarnings = json.load(_file, object_pairs_hook=OrderedDict) # {localName: date, ...}
        _file.close()

    # load rules and add to signwarnings template
    for dqcAbbr, dqcrtUrl in latestDqcrtDocs.items():
        modelManager.addToLog(_("loading {} DQC Rules {}").format(dqcAbbr, dqcrtUrl), messageCode="info")
        # load without SEC/EFM validation (doc file would not be acceptable)
        priorValidateDisclosureSystem = modelManager.validateDisclosureSystem
        modelManager.validateDisclosureSystem = False
        from arelle import ModelXbrl
        dqcrtInstance = ModelXbrl.load(modelManager, 
              # "http://xbrl.fasb.org/us-gaap/2012/elts/us-gaap-doc-2012-01-31.xml",
              # load from zip (especially after caching) is incredibly faster
              openFileSource(dqcrtUrl, cntlr), 
              _("built dqcrt table in cache"))
        modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
        if dqcrtInstance is None:
            # fixed: messageArgs referenced undefined name "val" (NameError
            # whenever the load failed); use the modelXbrl parameter instead
            modelManager.addToLog(
                _("%(name)s documentation not loaded"),
                messageCode="arelle:notLoaded", messageArgs={"modelXbrl": modelXbrl, "name": dqcAbbr})
        else:   
            # load signwarnings from DQC 0015
            dqcRelSet = dqcrtInstance.relationshipSet(conceptRule, rule0015)
            for signWrnObj, headEltName in (("conceptNames", "Dqc_0015_ListOfElements"),
                                            ("excludedMemberNames", "Dqc_0015_ExcludeNonNegMembersAbstract"),
                                            ("excludedAxesMembers", "Dqc_0015_ExcludeNonNegAxisAbstract"),
                                            ("excludedAxesMembers", "Dqc_0015_ExcludeNonNegAxisMembersAbstract"),
                                            ("excludedMemberStrings", "Dqc_0015_ExcludeNonNegMemberStringsAbstract")):
                headElts = dqcrtInstance.nameConcepts.get(headEltName,())
                for headElt in headElts:
                    # fixed: nsAbbr was unbound in the excludedMemberStrings
                    # branch (NameError on first use, stale value otherwise);
                    # derive it from the head element's own namespace here.
                    # The else branch still recomputes it per rule element.
                    nsAbbr = abbreviatedNamespace(headElt.qname.namespaceURI)
                    if signWrnObj == "excludedMemberStrings":
                        for refRel in dqcrtInstance.relationshipSet(XbrlConst.conceptReference).fromModelObject(headElt):
                            for refPart in refRel.toModelObject.iterchildren("{*}allowableSubString"):
                                for subStr in refPart.text.split():
                                    signwarnings[signWrnObj].setdefault(nsAbbr, []).append(subStr)
                    else:
                        for ruleRel in dqcRelSet.fromModelObject(headElt):
                            elt = ruleRel.toModelObject
                            nsAbbr = abbreviatedNamespace(elt.qname.namespaceURI)
                            if signWrnObj in ("conceptNames", "excludedMemberNames"):
                                signwarnings[signWrnObj].setdefault(nsAbbr, []).append(elt.name)
                            else:
                                l = signwarnings[signWrnObj].setdefault(nsAbbr, {}).setdefault(elt.name, [])
                                if headEltName == "Dqc_0015_ExcludeNonNegAxisAbstract":
                                    # "*" marks the whole axis as excluded
                                    l.append("*")
                                else:
                                    for memRel in dqcRelSet.fromModelObject(elt):
                                        l.append(memRel.toModelObject.name)
            jsonStr = _STR_UNICODE(json.dumps(signwarnings, ensure_ascii=False, indent=2)) # might not be unicode in 2.7
            saveFile(cntlr, _signwarningsFileName, jsonStr)  # 2.7 gets unicode this way
            dqcrtInstance.close()
            del dqcrtInstance # dereference closed modelXbrl
    return signwarnings
コード例 #33
0
ファイル: PackageManager.py プロジェクト: namitkewat/Arelle
def packageInfo(URL, reload=False, packageManifestName=None):
    """Return a metadata dict for the taxonomy package at URL, or None.

    Locates (or downloads) the package via the controller's web cache, opens
    it as a file source, parses one or more package manifests, and merges
    their names, descriptions and URL remappings into a single dict.

    :param URL: package URL (or local path) to describe
    :param reload: True to force re-retrieval through the web cache
    :param packageManifestName: optional fnmatch pattern selecting manifest
        files inside the zip (default: standard taxonomy package metadata)
    :return: dict with name/status/version/fileDate/URL/manifestName/
        description/remappings keys, or None on any environment error
    """
    #TODO several directories, eg User Application Data
    packageFilename = _cntlr.webCache.getfilename(URL, reload=reload, normalize=True)
    if packageFilename:
        from arelle.FileSource import TAXONOMY_PACKAGE_FILE_NAMES
        filesource = None
        try:
            global openFileSource
            if openFileSource is None:
                from arelle.FileSource import openFileSource
            filesource = openFileSource(packageFilename, _cntlr)
            # allow multiple manifests [[metadata, prefix]...] for multiple catalogs
            packages = []
            if filesource.isZip:
                if packageManifestName:
                    packageFiles = [fileName
                                    for fileName in filesource.dir
                                    if fnmatch(fileName, packageManifestName)]
                else:
                    packageFiles = filesource.taxonomyPackageMetadataFiles
                if len(packageFiles) < 1:
                    # fixed: the original formatted the (necessarily empty)
                    # packageFiles list, producing a blank message; report the
                    # offending package file name instead
                    raise IOError(_("Taxonomy package contained no metadata file: {0}.")
                                  .format(packageFilename))
                for packageFile in packageFiles:
                    packageFileUrl = filesource.file(filesource.url + os.sep + packageFile)[0]
                    packageFilePrefix = os.sep.join(os.path.split(packageFile)[:-1])
                    if packageFilePrefix:
                        packageFilePrefix += os.sep
                    packageFilePrefix = filesource.baseurl + os.sep +  packageFilePrefix
                    packages.append([packageFileUrl, packageFilePrefix])
            elif os.path.basename(filesource.url) in TAXONOMY_PACKAGE_FILE_NAMES: # individual manifest file
                packageFile = packageFileUrl = filesource.url
                packageFilePrefix = os.sep.join(os.path.split(packageFile)[:-1])
                if packageFilePrefix:
                    packageFilePrefix += os.sep
                packages.append([packageFileUrl, packageFilePrefix])
            else:
                # fixed: the format string was missing the {1} placeholder, so
                # the list of acceptable manifest names passed to .format()
                # was silently dropped and the message showed "manifest ()"
                raise IOError(_("File must be a taxonomy package (zip file), catalog file, or manifest ({1}): {0}.")
                              .format(packageFilename, ', '.join(TAXONOMY_PACKAGE_FILE_NAMES)))
            remappings = {}
            packageNames = []
            descriptions = []
            for packageFileUrl, packageFilePrefix in packages:    
                parsedPackage = parsePackage(_cntlr, packageFileUrl)
                packageNames.append(parsedPackage['name'])
                if parsedPackage.get('description'):
                    descriptions.append(parsedPackage['description'])
                for prefix, remapping in parsedPackage["remappings"].items():
                    # absolute http(s) remappings are kept verbatim; relative
                    # ones are resolved against this manifest's prefix path
                    remappings[prefix] = (remapping if isHttpUrl(remapping)
                                          else (packageFilePrefix +remapping.replace("/", os.sep)))
            package = {'name': ", ".join(packageNames),
                       'status': 'enabled',
                       'version': parsedPackage['version'],
                       'fileDate': time.strftime('%Y-%m-%dT%H:%M:%S UTC', time.gmtime(os.path.getmtime(packageFilename))),
                       'URL': URL,
                       'manifestName': packageManifestName,
                       'description': "; ".join(descriptions),
                       'remappings': remappings,
                       }
            filesource.close()
            return package
        except EnvironmentError:
            # best-effort: unreadable or absent package yields None
            pass
        if filesource:
            filesource.close()
    return None