Beispiel #1
0
def loadAuthorityValidations(modelXbrl):
    """Load the authority validation rules from authority-validations.json.

    :param modelXbrl: model whose modelManager locates the resources directory
    :return: deserialized JSON structure of authority validation rules
    """
    # context manager guarantees the stream closes even if json.load raises
    with openFileStream(modelXbrl.modelManager.cntlr,
                        resourcesFilePath(modelXbrl.modelManager,
                                          "authority-validations.json"),
                        'rt',
                        encoding='utf-8') as _file:
        return json.load(_file)
Beispiel #2
0
def loadOtherStandardTaxonomies(modelXbrl, val):
    """Return namespaces of loaded docs which are "other standard taxonomies".

    A namespace qualifies when it is not in the disclosure system's standard
    taxonomies dict but starts with one of the prefixes listed under
    "taxonomyPrefixes" in other-standard-taxonomies.json.

    :param modelXbrl: model whose urlDocs are examined
    :param val: validation object carrying the disclosureSystem
    :return: set of qualifying target namespace URIs
    """
    # context manager guarantees the stream closes even if json.load raises
    with openFileStream(modelXbrl.modelManager.cntlr,
                        resourcesFilePath(modelXbrl.modelManager,
                                          "other-standard-taxonomies.json"),
                        'rt', encoding='utf-8') as _file:
        otherStandardTaxonomies = json.load(_file)
    # iterating the mapping yields its namespace-prefix keys
    otherStandardNsPrefixes = otherStandardTaxonomies.get("taxonomyPrefixes", {})
    return set(doc.targetNamespace
               for doc in modelXbrl.urlDocs.values()
               if doc.targetNamespace and
               doc.targetNamespace not in val.disclosureSystem.standardTaxonomiesDict
               and any(doc.targetNamespace.startswith(nsPrefix) for nsPrefix in otherStandardNsPrefixes))
Beispiel #3
0
def loadDeprecatedConceptDates(val, deprecatedConceptDates):
    """Populate deprecatedConceptDates with {qname: deprecation-date} entries.

    For each loaded document's namespace, looks up the corresponding
    deprecated-concepts JSON resource (if any) and maps each deprecated
    local name to its deprecation date.  Mutates deprecatedConceptDates
    in place; returns None.

    :param val: validation object providing the modelXbrl
    :param deprecatedConceptDates: dict to fill with {qname: date}
    """
    for modelDocument in val.modelXbrl.urlDocs.values():
        ns = modelDocument.targetNamespace
        abbrNs = abbreviatedWildNamespace(ns)
        latestTaxonomyDoc = latestTaxonomyDocs.get(abbrNs)  # module-level registry
        _fileName = deprecatedConceptDatesFile(val.modelXbrl.modelManager, abbrNs, latestTaxonomyDoc)
        if _fileName:
            # context manager guarantees the stream closes even if json.load raises
            with openFileStream(val.modelXbrl.modelManager.cntlr, _fileName, 'rt', encoding='utf-8') as _file:
                _deprecatedConceptDates = json.load(_file)  # {localName: date, ...}
            for localName, date in _deprecatedConceptDates.items():
                deprecatedConceptDates[qname(ns, localName)] = date
Beispiel #4
0
def loadDeprecatedConceptDates(val, deprecatedConceptDates):
    """Fill *deprecatedConceptDates* with {qname: deprecation-date} entries
    read from the per-taxonomy deprecated-concepts JSON resources."""
    manager = val.modelXbrl.modelManager
    for doc in val.modelXbrl.urlDocs.values():
        namespace = doc.targetNamespace
        nsAbbreviation = abbreviatedWildNamespace(namespace)
        taxonomyDoc = latestTaxonomyDocs.get(nsAbbreviation)
        resourcePath = deprecatedConceptDatesFile(manager, nsAbbreviation, taxonomyDoc)
        if not resourcePath:
            continue  # no deprecated-concepts resource for this namespace
        stream = openFileStream(manager.cntlr, resourcePath, 'rt', encoding='utf-8')
        datesByLocalName = json.load(stream)  # {localName: date, ...}
        stream.close()
        for name, deprecationDate in datesByLocalName.items():
            deprecatedConceptDates[qname(namespace, name)] = deprecationDate
Beispiel #5
0
def loadTaxonomyCompatibility(modelXbrl):
    """Load taxonomy-compatibility.json and compile its class references.

    Expands "@class" references in "compatible-classes" into flat sets and
    adds a "checked-taxonomies" set built from all compatible-class items.

    :return: compatibility structure (OrderedDict, preserving key order)
    """
    # context manager guarantees the stream closes even if json.load raises
    with openFileStream(modelXbrl.modelManager.cntlr,
                        resourcesFilePath(modelXbrl.modelManager, "taxonomy-compatibility.json"),
                        'rt', encoding='utf-8') as _file:
        compat = json.load(_file, object_pairs_hook=OrderedDict)  # preserve order of keys
    tc = compat["taxonomy-classes"]
    cc = compat["compatible-classes"]
    def refTx(txAbbrs):
        # recursively expand "@name" references via the taxonomy-classes table
        return [refTx(tc[txAbbr[1:]]) if txAbbr.startswith("@") else txAbbr for txAbbr in txAbbrs]
    for k in cc.keys():
        cc[k] = set(flattenSequence(refTx(cc[k])))
    compat["checked-taxonomies"] = set(flattenSequence([t for t in cc.items()]))
    return compat
Beispiel #6
0
def loadIxTransformRegistries(modelXbrl):
    """Load the inline XBRL transform registries resource.

    :return: OrderedDict of registries with the JSON "copyright" and
        "description" metadata entries removed
    """
    # context manager guarantees the stream closes even if json.load raises
    with openFileStream(modelXbrl.modelManager.cntlr,
                        resourcesFilePath(
                            modelXbrl.modelManager,
                            "ixbrl-transform-registries.json"),
                        'rt',
                        encoding='utf-8') as _file:
        ixTrRegistries = json.load(
            _file, object_pairs_hook=OrderedDict)  # preserve order of keys
    ixTrRegistries.pop("copyright", None)
    ixTrRegistries.pop("description", None)
    return ixTrRegistries
Beispiel #7
0
def loadCustomAxesReplacements(modelXbrl): # returns match expression, standard patterns
    """Load axiswarnings.json and compile custom-axis match patterns.

    Each non-metadata entry maps a standard axis name to a regex pattern
    matching custom axis names that should be replaced by it.

    :return: attrdict with standardAxes {groupName: standardAxis} and
        customNamePatterns, one alternation regex over all patterns
    """
    # context manager guarantees the stream closes even if json.load raises
    with openFileStream(modelXbrl.modelManager.cntlr,
                        resourcesFilePath(modelXbrl.modelManager, "axiswarnings.json"),
                        'rt', encoding='utf-8') as _file:
        axiswarnings = json.load(_file)
    standardAxes = {}
    matchPattern = []
    for i, (standardAxis, customAxisPattern) in enumerate(axiswarnings.items()):
        if standardAxis not in ("#", "copyright", "description"):  # skip metadata entries
            # named group ties each alternation branch back to its standard axis
            patternName = "_{}".format(i)
            standardAxes[patternName] = standardAxis
            matchPattern.append("(?P<{}>^{}$)".format(patternName, customAxisPattern))
    return attrdict(standardAxes=standardAxes,
                    customNamePatterns=re.compile("|".join(matchPattern)))
Beispiel #8
0
def loadCustomAxesReplacements(modelXbrl): # returns match expression, standard patterns
    """Compile the axiswarnings resource into a combined custom-axis regex
    plus a lookup from regex group name to standard axis name."""
    stream = openFileStream(modelXbrl.modelManager.cntlr, resourcesFilePath(modelXbrl.modelManager, "axiswarnings.json"), 'rt', encoding='utf-8')
    warningsConfig = json.load(stream) # {localName: date, ...}
    stream.close()
    axisByGroupName = {}
    groupPatterns = []
    for index, (stdAxisName, customPattern) in enumerate(warningsConfig.items()):
        if stdAxisName in ("#", "copyright", "description"):
            continue  # metadata entries, not axis patterns
        groupName = "_{}".format(index)
        axisByGroupName[groupName] = stdAxisName
        groupPatterns.append("(?P<{}>^{}$)".format(groupName, customPattern))
    return attrdict(standardAxes=axisByGroupName,
                    customNamePatterns=re.compile("|".join(groupPatterns)))
Beispiel #9
0
def loadNonNegativeFacts(modelXbrl, dqcRules, ugtRels):
    """Load the non-negative fact warning configuration for this filing.

    For us-gaap 2020 filings without the dqcrt-2021-usgaap-2020 option, the
    UGT relationships' DQC.US.0015 data is used and the DQC rules are
    suppressed; for other DQCRT-era filings None is returned so all DQCRT
    tests apply.  Otherwise the legacy EFM signwarnings.json resource is
    compiled into lookup sets keyed by the loaded taxonomy namespaces.

    :param modelXbrl: model whose documents determine applicable namespaces
    :param dqcRules: loaded DQC rules dict (cleared when the 0015 fallback is used)
    :param ugtRels: UGT relationship data, or falsy when unavailable
    :return: DQC.US.0015 data, None, or an attrdict of compiled sets/patterns
    """
    # for us-gaap newer than 2020 use DQCRT non-negative facts.
    if dqcRules and ugtRels:  # not used before 2020
        if usgaapYear(modelXbrl) == "2020" and "dqcrt-2021-usgaap-2020" not in (
                modelXbrl.modelManager.disclosureSystem.options or ""):
            dqcRules.clear()  # remove dqc rules
            return ugtRels["DQC.US.0015"]  # use 20.1 2020 nonNegFacts test and warning
        return None  # use all available DQCRT tests
    # for us-gaap < dqcyear use EFM non-negative warning instead of DQC rule
    # context manager guarantees the stream closes even if json.load raises
    with openFileStream(modelXbrl.modelManager.cntlr,
                        resourcesFilePath(modelXbrl.modelManager,
                                          "signwarnings.json"),
                        'rt',
                        encoding='utf-8') as _file:
        signwarnings = json.load(_file)
    concepts = set()
    excludedMembers = set()
    excludedMemberStrings = set()
    excludedAxesMembers = defaultdict(set)
    for modelDocument in modelXbrl.urlDocs.values():
        ns = modelDocument.targetNamespace  # set up non neg lookup by full NS
        # try the exact-year abbreviation first, then the wildcard-year form
        for abbrNs in (abbreviatedNamespace(ns),
                       abbreviatedNamespace(ns, WILD)):
            nsMatch = False
            for exName, exSet, isQName in (("conceptNames", concepts, True),
                                           ("excludedMemberNames", excludedMembers, True),
                                           ("excludedMemberStrings", excludedMemberStrings, False)):
                for localName in signwarnings[exName].get(abbrNs, ()):
                    exSet.add(qname(ns, localName) if isQName else localName)
                    nsMatch = True
            for localDimName, localMemNames in signwarnings[
                    "excludedAxesMembers"].get(abbrNs, EMPTY_DICT).items():
                for localMemName in localMemNames:
                    # "*" entry excludes every member of the axis
                    excludedAxesMembers[qname(ns, localDimName)].add(
                        qname(ns, localMemName) if localMemName != "*" else "*"
                    )
                    nsMatch = True
            if nsMatch:
                break  # use explicit year rules if available, else generic year rules
    return attrdict(concepts=concepts,
                    excludedAxesMembers=excludedAxesMembers,
                    excludedMembers=excludedMembers,
                    excludedMemberNamesPattern=re.compile(
                        "|".join(excludedMemberStrings), re.IGNORECASE)
                    if excludedMemberStrings else None)
Beispiel #10
0
def loadNonNegativeFacts(modelXbrl):
    """Compile signwarnings.json into non-negative fact lookup structures.

    :return: attrdict with concepts (set of qnames that should not be
        negative) and excludedAxesMembers {axis qname: set of member qnames,
        with None standing for the "*" wildcard member}
    """
    # context manager guarantees the stream closes even if json.load raises
    with openFileStream(modelXbrl.modelManager.cntlr,
                        resourcesFilePath(modelXbrl.modelManager, "signwarnings.json"),
                        'rt', encoding='utf-8') as _file:
        signwarnings = json.load(_file)
    concepts = set()
    excludedAxesMembers = defaultdict(set)
    for modelDocument in modelXbrl.urlDocs.values():
        ns = modelDocument.targetNamespace # set up non neg lookup by full NS
        for abbrNs in (abbreviatedNamespace(ns), abbreviatedWildNamespace(ns)):
            for localName in signwarnings["conceptNames"].get(abbrNs, ()):
                concepts.add(qname(ns, localName))
            for localDimName, localMemNames in signwarnings["excludedAxesMembers"].get(abbrNs, EMPTY_DICT).items():
                for localMemName in localMemNames:
                    # None marks the "*" wildcard entry covering every member
                    excludedAxesMembers[qname(ns, localDimName)].add(qname(ns, localMemName) if localMemName != "*" else None)
    return attrdict(concepts=concepts, excludedAxesMembers=excludedAxesMembers)
Beispiel #11
0
def loadNonNegativeFacts(modelXbrl):
    """Compile signwarnings.json into non-negative fact lookup structures.

    :return: attrdict with concepts (set of qnames that should not be
        negative) and excludedAxesMembers {axis qname: set of member qnames,
        with "*" standing for any member of the axis}
    """
    # context manager guarantees the stream closes even if json.load raises
    with openFileStream(modelXbrl.modelManager.cntlr,
                        resourcesFilePath(modelXbrl.modelManager, "signwarnings.json"),
                        'rt', encoding='utf-8') as _file:
        signwarnings = json.load(_file)
    concepts = set()
    excludedAxesMembers = defaultdict(set)
    for modelDocument in modelXbrl.urlDocs.values():
        ns = modelDocument.targetNamespace # set up non neg lookup by full NS
        for abbrNs in (abbreviatedNamespace(ns), abbreviatedWildNamespace(ns)):
            for localName in signwarnings["conceptNames"].get(abbrNs, ()):
                concepts.add(qname(ns, localName))
            for localDimName, localMemNames in signwarnings["excludedAxesMembers"].get(abbrNs, EMPTY_DICT).items():
                for localMemName in localMemNames:
                    # "*" entry excludes every member of the axis
                    excludedAxesMembers[qname(ns, localDimName)].add(qname(ns, localMemName) if localMemName != "*" else "*")
    return attrdict(concepts=concepts, excludedAxesMembers=excludedAxesMembers)
def _load_cache(val):
    """
    Loads the cached taxonomy default dimensions for each us-gaap taxonomy
    year referenced by the filing.  Years whose cache file is absent are
    silently skipped.

    :param val: ValidateXbrl to be validated
    :type val: :class: '~arelle.ValidateXbrl.ValidateXbrl'
    :return: no explicit return, but loads caches for dqc_us_0041
    :rtype: None
    """
    val.linroleDefinitionIsDisclosure = (
        re.compile(r"-\s+Disclosure\s+-\s", re.IGNORECASE)
    )

    val.linkroleDefinitionStatementSheet = (
        re.compile(r"[^-]+-\s+Statement\s+-\s+.*", re.IGNORECASE)
    )  # no restriction to type of statement

    val.ugtNamespace = None
    cntlr = val.modelXbrl.modelManager.cntlr

    # ugtDocs and year advance in lockstep: index i is year _EARLIEST_GAAP_YEAR + i
    year = _EARLIEST_GAAP_YEAR

    for ugt in ugtDocs:
        ugt_namespace = ugt["namespace"]
        if _is_in_namespace(val, ugt_namespace):
            ugt_default_dimensions_json_file = os.path.join(
                os.path.dirname(__file__),
                'resources',
                'DQC_US_0041',
                '{}_ugt-default-dimensions.json'.format(str(year))
            )

            try:
                file = openFileStream(
                    cntlr, ugt_default_dimensions_json_file,
                    'rt', encoding='utf-8'
                )
            except FileNotFoundError:  # noqa
                pass  # no cache available for this year; try the next
            else:
                try:
                    val.usgaapDefaultDimensions = json.load(file)
                finally:
                    # close even if json.load raises, avoiding a leaked stream
                    file.close()
        year += 1
def _load_cache(val):
    """
    Loads the needed deprecated concepts cache into memory

    :param val: ValidateXbrl to load the concepts into
    :type val: :class:'~arelle.ValidateXbrl.ValidateXbrl'
    :return: Return True if cache exists, False otherwise.
    :rtype: bool
    """
    cntlr = val.modelXbrl.modelManager.cntlr
    # ugtDocs and year advance in lockstep: index i is year _EARLIEST_US_GAAP_YEAR + i
    year = _EARLIEST_US_GAAP_YEAR

    for ugt in ugtDocs:
        ugt_namespace = ugt["namespace"]
        # only the ugt year actually referenced by the filing is loaded
        if (ugt_namespace in val.modelXbrl.namespaceDocs and
                len(val.modelXbrl.namespaceDocs[ugt_namespace]) > 0):
            val.ugtNamespace = ugt_namespace
            deprecations_json_file = os.path.join(
                os.path.dirname(__file__),
                'resources',
                'DQC_US_0018',
                '{}_deprecated-concepts.json'.format(str(year))
            )

            try:
                file = openFileStream(
                    cntlr,
                    deprecations_json_file,
                    'rt',
                    encoding='utf-8'
                )
            except FileNotFoundError:  # noqa
                # year should be cached. It is not, so return False
                return False
            try:
                val.usgaapDeprecations = json.load(file)
            finally:
                # close even if json.load raises, avoiding a leaked stream
                file.close()
            # year should be cached, and is. Return True
            return True
        year += 1
    # checked all years. No cache found.
    return False
Beispiel #14
0
def loadDqcRules(modelXbrl): # returns match expression, standard patterns
    """Load the DQC US rules resource when this is a us-gaap filing.

    Taxonomy usage is determined by facts: there must be more us-gaap facts
    than ifrs facts (some ifrs filings may have a few us-gaap facts or
    us-gaap concepts loaded but are not us-gaap filings), or no facts of
    either kind, and the us-gaap year must be >= 2020.

    :return: OrderedDict of DQC rules, or {} when not applicable
    """
    namespaceUsage = {}
    for f in modelXbrl.facts:
        ns = f.qname.namespaceURI
        namespaceUsage[ns] = namespaceUsage.get(ns, 0) + 1
    numUsGaapFacts = sum(n for ns, n in namespaceUsage.items() if "us-gaap" in ns)
    numIfrsFacts = sum(n for ns, n in namespaceUsage.items() if "ifrs" in ns)
    # string comparison is intentional: usgaapYear yields a 4-digit year string
    if (usgaapYear(modelXbrl) >= "2020" and
        ((numUsGaapFacts == 0 and numIfrsFacts == 0) or (numUsGaapFacts > numIfrsFacts))):
        # found us-gaap facts present (more than ifrs facts present), load us-gaap DQC.US rules
        # context manager guarantees the stream closes even if json.load raises
        with openFileStream(modelXbrl.modelManager.cntlr,
                            resourcesFilePath(modelXbrl.modelManager, "dqc-us-rules.json"),
                            'rt', encoding='utf-8') as _file:
            return json.load(_file, object_pairs_hook=OrderedDict)  # preserve order of keys
    return {}
Beispiel #15
0
def _load_cache(val):
    """
    Loads the needed deprecated concepts cache into memory

    :param val: ValidateXbrl to load the concepts into
    :type val: :class:'~arelle.ValidateXbrl.ValidateXbrl'
    :return: Return True if cache exists, False otherwise.
    :rtype: bool
    """
    cntlr = val.modelXbrl.modelManager.cntlr
    # ugtDocs and year advance in lockstep: index i is year _EARLIEST_US_GAAP_YEAR + i
    year = _EARLIEST_US_GAAP_YEAR

    for ugt in ugtDocs:
        ugt_namespace = ugt["namespace"]
        # only the ugt year actually referenced by the filing is loaded
        if (ugt_namespace in val.modelXbrl.namespaceDocs and
                len(val.modelXbrl.namespaceDocs[ugt_namespace]) > 0):
            val.ugtNamespace = ugt_namespace
            deprecations_json_file = os.path.join(
                os.path.dirname(__file__),
                'resources',
                'DQC_US_0018',
                '{}_deprecated-concepts.json'.format(str(year))
            )
            try:
                file = openFileStream(
                    cntlr,
                    deprecations_json_file,
                    'rt',
                    encoding='utf-8'
                )
            except FileNotFoundError:
                # year should be cached. It is not, so return False
                return False
            try:
                val.usgaapDeprecations = json.load(file)
            finally:
                # close even if json.load raises, avoiding a leaked stream
                file.close()
            # year should be cached, and is. Return True
            return True
        year += 1
    # checked all years. No cache found.
    return False
Beispiel #16
0
def _load_cache(val):
    """
    Loads the cached taxonomy default dimensions for each us-gaap taxonomy
    year referenced by the filing.  Years whose cache file is absent are
    silently skipped.

    :param val: ValidateXbrl to be validated
    :type val: :class: '~arelle.ValidateXbrl.ValidateXbrl'
    :return: no explicit return, but loads caches for dqc_us_0041
    :rtype: None
    """
    val.linroleDefinitionIsDisclosure = (re.compile(r"-\s+Disclosure\s+-\s",
                                                    re.IGNORECASE))

    val.linkroleDefinitionStatementSheet = (re.compile(
        r"[^-]+-\s+Statement\s+-\s+.*",
        re.IGNORECASE))  # no restriction to type of statement

    val.ugtNamespace = None
    cntlr = val.modelXbrl.modelManager.cntlr

    # ugtDocs and year advance in lockstep: index i is year _EARLIEST_GAAP_YEAR + i
    year = _EARLIEST_GAAP_YEAR

    for ugt in ugtDocs:
        ugt_namespace = ugt["namespace"]
        if _is_in_namespace(val, ugt_namespace):
            ugt_default_dimensions_json_file = os.path.join(
                os.path.dirname(__file__), 'resources', 'DQC_US_0041',
                '{}_ugt-default-dimensions.json'.format(str(year)))

            try:
                file = openFileStream(cntlr,
                                      ugt_default_dimensions_json_file,
                                      'rt',
                                      encoding='utf-8')
            except FileNotFoundError:  # noqa
                pass  # no cache available for this year; try the next
            else:
                try:
                    val.usgaapDefaultDimensions = json.load(file)
                finally:
                    # close even if json.load raises, avoiding a leaked stream
                    file.close()
        year += 1
Beispiel #17
0
def moduleModuleInfo(moduleURL, reload=False):
    """Read a plugin module's source and return its __pluginInfo__ metadata.

    The module is parsed with ast rather than imported, so collecting the
    metadata executes none of the module's code.  Returns a dict of the
    declared string entries plus 'classMethods', 'moduleURL', 'status' and
    'fileDate', or None when the file cannot be read or declares no
    __pluginInfo__ assignment.
    """
    #TODO several directories, eg User Application Data
    moduleFilename = _cntlr.webCache.getfilename(moduleURL,
                                                 reload=reload,
                                                 normalize=True,
                                                 base=_pluginBase)
    if moduleFilename:
        f = None
        try:
            # if moduleFilename is a directory containing an __init__.py file, open that instead
            if os.path.isdir(moduleFilename) and os.path.isfile(
                    os.path.join(moduleFilename, "__init__.py")):
                moduleFilename = os.path.join(moduleFilename, "__init__.py")
            f = openFileStream(_cntlr, moduleFilename)
            tree = ast.parse(f.read(), filename=moduleFilename)
            # scan top-level statements for the __pluginInfo__ = {...} assignment
            for item in tree.body:
                if isinstance(item, ast.Assign):
                    attr = item.targets[0].id
                    if attr == "__pluginInfo__":
                        f.close()
                        moduleInfo = {}
                        classMethods = []
                        # walk the dict literal: string values are metadata,
                        # name values are plugin class-method hooks
                        for i, key in enumerate(item.value.keys):
                            _key = key.s
                            _value = item.value.values[i]
                            _valueType = _value.__class__.__name__
                            if _valueType == 'Str':
                                moduleInfo[_key] = _value.s
                            elif _valueType == 'Name':
                                classMethods.append(_key)
                        moduleInfo['classMethods'] = classMethods
                        moduleInfo["moduleURL"] = moduleURL
                        moduleInfo["status"] = 'enabled'
                        moduleInfo["fileDate"] = time.strftime(
                            '%Y-%m-%dT%H:%M:%S UTC',
                            time.gmtime(os.path.getmtime(moduleFilename)))
                        return moduleInfo
        except EnvironmentError:
            # unreadable or unparsable module file: treated as absent
            pass
        if f:
            f.close()
    return None
Beispiel #18
0
def loadNonNegativeFacts(modelXbrl):
    """Compile signwarnings.json into non-negative fact lookup structures.

    :return: attrdict with concepts (qnames that should not be negative),
        excludedMembers, excludedAxesMembers {axis qname: member qnames or
        "*" wildcard} and excludedMemberNamesPattern (case-insensitive
        alternation regex, or None when no member strings are configured)
    """
    # context manager guarantees the stream closes even if json.load raises
    with openFileStream(modelXbrl.modelManager.cntlr,
                        resourcesFilePath(modelXbrl.modelManager,
                                          "signwarnings.json"),
                        'rt',
                        encoding='utf-8') as _file:
        signwarnings = json.load(_file)
    concepts = set()
    excludedMembers = set()
    excludedMemberStrings = set()
    excludedAxesMembers = defaultdict(set)
    for modelDocument in modelXbrl.urlDocs.values():
        ns = modelDocument.targetNamespace  # set up non neg lookup by full NS
        # try the exact-year abbreviation first, then the wildcard-year form
        for abbrNs in (abbreviatedNamespace(ns),
                       abbreviatedNamespace(ns, WILD)):
            nsMatch = False
            for exName, exSet, isQName in (("conceptNames", concepts, True),
                                           ("excludedMemberNames", excludedMembers, True),
                                           ("excludedMemberStrings", excludedMemberStrings, False)):
                for localName in signwarnings[exName].get(abbrNs, ()):
                    exSet.add(qname(ns, localName) if isQName else localName)
                    nsMatch = True
            for localDimName, localMemNames in signwarnings[
                    "excludedAxesMembers"].get(abbrNs, EMPTY_DICT).items():
                for localMemName in localMemNames:
                    # "*" entry excludes every member of the axis
                    excludedAxesMembers[qname(ns, localDimName)].add(
                        qname(ns, localMemName) if localMemName != "*" else "*"
                    )
                    nsMatch = True
            if nsMatch:
                break  # use explicit year rules if available, else generic year rules
    return attrdict(concepts=concepts,
                    excludedAxesMembers=excludedAxesMembers,
                    excludedMembers=excludedMembers,
                    excludedMemberNamesPattern=re.compile(
                        "|".join(excludedMemberStrings), re.IGNORECASE)
                    if excludedMemberStrings else None)
Beispiel #19
0
def loadDqcRules(modelXbrl):  # returns match expression, standard patterns
    """Load the DQC US rules resource when this is a us-gaap filing.

    Taxonomy usage is determined by facts: there must be more us-gaap facts
    than ifrs facts (some ifrs filings may have a few us-gaap facts or
    us-gaap concepts loaded but are not us-gaap filings), and the EDGAR
    release must be >= 21.1 (disclosure system version[0] >= 56).

    :return: OrderedDict of DQC rules, or {} when not applicable
    """
    namespaceUsage = {}
    for f in modelXbrl.facts:
        ns = f.qname.namespaceURI
        namespaceUsage[ns] = namespaceUsage.get(ns, 0) + 1
    if (modelXbrl.modelManager.disclosureSystem.version[0] >= 56  # EDGAR release >= 21.1
            # mostly us-gaap elements, not ifrs elements
            and sum(n for ns, n in namespaceUsage.items() if "us-gaap" in ns)
            > sum(n for ns, n in namespaceUsage.items() if "ifrs" in ns)):
        # found us-gaap facts present (more than ifrs facts present), load us-gaap DQC.US rules
        # context manager guarantees the stream closes even if json.load raises
        with openFileStream(modelXbrl.modelManager.cntlr,
                            resourcesFilePath(modelXbrl.modelManager,
                                              "dqc-us-rules.json"),
                            'rt',
                            encoding='utf-8') as _file:
            return json.load(
                _file, object_pairs_hook=OrderedDict)  # preserve order of keys
    return {}
Beispiel #20
0
def moduleModuleInfo(moduleURL, reload=False):
    """Read a plugin module's source and return its __pluginInfo__ metadata.

    The module is parsed with ast rather than imported, so collecting the
    metadata executes none of the module's code.  Returns a dict of the
    declared string entries plus 'classMethods', 'moduleURL', 'status' and
    'fileDate', or None when the file cannot be read or declares no
    __pluginInfo__ assignment.
    """
    #TODO several directories, eg User Application Data
    moduleFilename = _cntlr.webCache.getfilename(moduleURL, reload=reload, normalize=True, base=_pluginBase)
    if moduleFilename:
        f = None
        try:
            # if moduleFilename is a directory containing an __init__.py file, open that instead
            if os.path.isdir(moduleFilename) and os.path.isfile(os.path.join(moduleFilename, "__init__.py")):
                moduleFilename = os.path.join(moduleFilename, "__init__.py")
            f = openFileStream(_cntlr, moduleFilename)
            tree = ast.parse(f.read(), filename=moduleFilename)
            # scan top-level statements for the __pluginInfo__ = {...} assignment
            for item in tree.body:
                if isinstance(item, ast.Assign):
                    attr = item.targets[0].id
                    if attr == "__pluginInfo__":
                        f.close()
                        moduleInfo = {}
                        classMethods = []
                        # walk the dict literal: string values are metadata,
                        # name values are plugin class-method hooks
                        for i, key in enumerate(item.value.keys):
                            _key = key.s
                            _value = item.value.values[i]
                            _valueType = _value.__class__.__name__
                            if _valueType == 'Str':
                                moduleInfo[_key] = _value.s
                            elif _valueType == 'Name':
                                classMethods.append(_key)
                        moduleInfo['classMethods'] = classMethods
                        moduleInfo["moduleURL"] = moduleURL
                        moduleInfo["status"] = 'enabled'
                        moduleInfo["fileDate"] = time.strftime('%Y-%m-%dT%H:%M:%S UTC', time.gmtime(os.path.getmtime(moduleFilename)))
                        return moduleInfo
        except EnvironmentError:
            # unreadable or unparsable module file: treated as absent
            pass
        if f:
            f.close()
    return None
Beispiel #21
0
def loadDeiValidations(modelXbrl, isInlineXbrl):
    """Load dei-validations.json and compile it for this filing.

    Verifies the resource's internal cross references (required fields,
    validation/axis/message codes, form classes), reporting each
    inconsistency as an arelle:loadDeiValidations error.  For the
    validations applicable to this filing's source (inline vs non-inline)
    it also pre-compiles language patterns, option sets, the xbrl-names
    present in the loaded dei taxonomy, and the expanded form set.

    :param isInlineXbrl: whether the filing is inline XBRL
    :return: the compiled validations structure
    """
    # context manager guarantees the stream closes even if json.load raises
    with openFileStream(modelXbrl.modelManager.cntlr,
                        resourcesFilePath(modelXbrl.modelManager, "dei-validations.json"),
                        'rt', encoding='utf-8') as _file:
        validations = json.load(_file)
    #print ("original validations size {}".format(pyObjectSize(validations)))
    # get dei namespaceURI of the dei taxonomy loaded with this filing, if any
    deiNamespaceURI = None
    for doc in modelXbrl.urlDocs.values():
        if doc.targetNamespace and doc.targetNamespace.startswith("http://xbrl.sec.gov/dei/"):
            deiNamespaceURI = doc.targetNamespace
            break
    # compile form-classes
    fc = validations["form-classes"]
    def compileFormSet(forms, formSet=None, visitedClasses=None):
        # recursively expand "@class" references, reporting missing or circular classes
        if visitedClasses is None: visitedClasses = set()
        if formSet is None: formSet = set()
        for form in flattenSequence(forms):
            if form.startswith("@"):
                referencedClass = form[1:]
                if referencedClass not in fc:
                    modelXbrl.error("arelle:loadDeiValidations", _("Missing declaration for %(referencedClass)s."), referencedClass=form)
                elif form in visitedClasses:
                    modelXbrl.error("arelle:loadDeiValidations",
                                    _("Circular reference to %(formClass)s in %(formClasses)s."),
                                    formClass=referencedClass, formClasses=sorted(visitedClasses))
                else:
                    visitedClasses.add(form)
                    compileFormSet(fc[referencedClass], formSet, visitedClasses)
            else:
                formSet.add(form)
        return formSet
    for fev in validations["form-element-validations"]:
        # every validation entry must carry these fields
        for field in ("xbrl-names", "validation", "efm", "source"):
            if field not in fev:
                modelXbrl.error("arelle:loadDeiValidations",
                                _("Missing form-element-validation[\"%(field)s\"] from %(validation)s."),
                                field=field, validation=fev)
        if "severity" in fev and not any(field.startswith("message") for field in fev):
            modelXbrl.error("arelle:loadDeiValidations",
                            _("Missing form-element-validation[\"%(field)s\"] from %(validation)s."),
                            field="message*", validation=fev)
        validationCode = fev.get("validation")
        # these validation codes require a "references" entry
        if validationCode in ("f2", "og", "ol1", "ol2", "oph", "ar", "sr", "oth", "t", "tb", "t1") and "references" not in fev:
            modelXbrl.error("arelle:loadDeiValidations",
                            _("Missing form-element-validation[\"references\"] from %(validation)s."),
                            field=field, validation=fev)
        if validationCode in ("ru", "ou"):
            if isinstance(fev.get("value"), list):
                fev["value"] = set(fev["value"]) # change options list into set
            else:
                modelXbrl.error("arelle:loadDeiValidations",
                                _("Missing form-element-validation[\"%(validation)s\"] from %(validation)s, must be a list.") if False else _("Missing form-element-validation[\"value\"] from %(validation)s, must be a list."),
                                field=field, validation=fev)
        if validationCode in ():  # NOTE: intentionally empty tuple -- no codes currently use reference-value sets
            if isinstance(fev.get("reference-value"), list):
                fev["reference-value"] = set(fev["reference-value"]) # change options list into set
            else:
                modelXbrl.error("arelle:loadDeiValidations",
                                _("Missing form-element-validation[\"value\"] from %(validation)s, must be a list."),
                                field=field, validation=fev)
        if validationCode not in validations["validations"]:
            modelXbrl.error("arelle:loadDeiValidations", _("Missing validation[\"%(validationCode)s\"]."), validationCode=validationCode)
        axisCode = fev.get("axis")
        if axisCode and axisCode not in validations["axis-validations"]:
            modelXbrl.error("arelle:loadDeiValidations", _("Missing axis[\"%(axisCode)s\"]."), axisCode=axisCode)
        if "lang" in fev:
            fev["langPattern"] = re.compile(fev["lang"])
        s = fev.get("source")
        if s not in ("inline", "non-inline", "both"):
            modelXbrl.error("arelle:loadDeiValidations", _("Invalid source [\"%(source)s\"]."), source=s)
        elif (isInlineXbrl and s in ("inline", "both")) or (not isInlineXbrl and s in ("non-inline", "both")):
            # compile only the validations applicable to this filing's source
            messageKey = fev.get("message")
            if messageKey and messageKey not in validations["messages"]:
                modelXbrl.error("arelle:loadDeiValidations", _("Missing message[\"%(messageKey)s\"]."), messageKey=messageKey)
            # only include dei names in current dei taxonomy
            fev["xbrl-names"] = [name
                                 for name in flattenSequence(fev.get("xbrl-names", ()))
                                 if qname(deiNamespaceURI, name) in modelXbrl.qnameConcepts]
            formSet = compileFormSet(fev.get("forms", (fev.get("form",()),)))
            if "*" in formSet:
                formSet = "all" # change to string for faster testing in Filing.py
            fev["formSet"] = formSet

    for axisKey, axisValidation in validations["axis-validations"].items():
        messageKey = axisValidation.get("message")
        if messageKey and messageKey not in validations["messages"]:
            modelXbrl.error("arelle:loadDeiValidations", _("Missing axis \"%(axisKey)s\" message[\"%(messageKey)s\"]."),
                            axisKey=axisKey, messageKey=messageKey)
    for valKey, validation in validations["validations"].items():
        messageKey = validation.get("message")
        if messageKey and messageKey not in validations["messages"]:
            modelXbrl.error("arelle:loadDeiValidations", _("Missing validation \"%(valKey)s\" message[\"%(messageKey)s\"]."),
                            valKey=valKey, messageKey=messageKey)
    #print ("compiled validations size {}".format(pyObjectSize(validations)))
    return validations
Beispiel #22
0
def moduleModuleInfo(moduleURL, reload=False, parentImportsSubtree=False):
    """Statically scan a plug-in module for its ``__pluginInfo__`` dict.

    The module source is parsed with ``ast`` (never imported or executed)
    and the ``__pluginInfo__`` assignment, if found, is translated into a
    moduleInfo dict carrying name, moduleURL, status, fileDate,
    classMethods and a recursively resolved ``imports`` list.

    Parameters:
        moduleURL: URL or file path of the plug-in module or package directory.
        reload: if True, force the web cache to re-retrieve the file.
        parentImportsSubtree: True when the importing parent requested
            ``module_import_subtree``, so relative-import discovery propagates.

    Returns:
        moduleInfo dict, or None when no ``__pluginInfo__`` is present or
        the file cannot be read.
    """
    #TODO several directories, eg User Application Data
    moduleFilename = _cntlr.webCache.getfilename(moduleURL, reload=reload, normalize=True, base=_pluginBase)
    if moduleFilename:
        f = None
        try:
            # if moduleFilename is a directory containing an __init__.py file, open that instead
            if os.path.isdir(moduleFilename) and os.path.isfile(os.path.join(moduleFilename, "__init__.py")):
                moduleFilename = os.path.join(moduleFilename, "__init__.py")
            moduleDir = os.path.dirname(moduleFilename)
            f = openFileStream(_cntlr, moduleFilename)
            tree = ast.parse(f.read(), filename=moduleFilename)
            # relative (level-1) imports discovered in the module source
            moduleImports = []
            for item in tree.body:
                if isinstance(item, ast.Assign):
                    attr = item.targets[0].id
                    if attr == "__pluginInfo__":
                        f.close()
                        moduleInfo = {"name":None}
                        classMethods = []
                        importURLs = []
                        for i, key in enumerate(item.value.keys):
                            _key = key.s
                            _value = item.value.values[i]
                            _valueType = _value.__class__.__name__
                            if _key == "import":
                                if _valueType == 'Str':
                                    importURLs.append(_value.s)
                                elif _valueType in ("List", "Tuple"):
                                    for elt in _value.elts:
                                        importURLs.append(elt.s)
                            elif _valueType == 'Str':
                                moduleInfo[_key] = _value.s
                            elif _valueType == 'Name':
                                # a Name value designates a plug-in class-method hook
                                classMethods.append(_key)
                            elif _key == "imports" and _valueType in ("List", "Tuple"):
                                importURLs = [elt.s for elt in _value.elts]
                        moduleInfo['classMethods'] = classMethods
                        moduleInfo["moduleURL"] = moduleURL
                        moduleInfo["status"] = 'enabled'
                        moduleInfo["fileDate"] = time.strftime('%Y-%m-%dT%H:%M:%S UTC', time.gmtime(os.path.getmtime(moduleFilename)))
                        mergedImportURLs = []
                        _moduleImportsSubtree = False
                        for _url in importURLs:
                            if _url.startswith("module_import"):
                                # "module_import[_subtree]" placeholder expands to the
                                # module's own relative imports collected above
                                for moduleImport in moduleImports:
                                    mergedImportURLs.append(moduleImport + ".py")
                                if _url == "module_import_subtree":
                                    _moduleImportsSubtree = True
                            else:
                                mergedImportURLs.append(_url)
                        if parentImportsSubtree and not _moduleImportsSubtree:
                            _moduleImportsSubtree = True
                            for moduleImport in moduleImports:
                                mergedImportURLs.append(moduleImport + ".py")
                        imports = []
                        for _url in mergedImportURLs:
                            _importURL = (_url if isAbsolute(_url) or os.path.isabs(_url)
                                          else os.path.join(os.path.dirname(moduleURL), _url))
                            # recurse to build each imported module's moduleInfo
                            _importModuleInfo = moduleModuleInfo(_importURL, reload, _moduleImportsSubtree)
                            if _importModuleInfo:
                                _importModuleInfo["isImported"] = True
                                imports.append(_importModuleInfo)
                        moduleInfo["imports"] =  imports
                        return moduleInfo
                elif isinstance(item, ast.ImportFrom):
                    if item.level == 1: # starts with .
                        if item.module is None:  # from . import module1, module2, ...
                            for importee in item.names:
                                if (os.path.isfile(os.path.join(moduleDir, importee.name + ".py"))
                                    and importee.name not in moduleImports):
                                    moduleImports.append(importee.name)
                        else:
                            # from .pkg.mod import name1, ... -> record module and prefixed names
                            modulePkgs = item.module.split('.')
                            modulePath = os.path.join(*modulePkgs)
                            if (os.path.isfile(os.path.join(moduleDir, modulePath) + ".py")
                                and modulePath not in moduleImports):
                                    moduleImports.append(modulePath)
                            for importee in item.names:
                                _importeePfxName = os.path.join(modulePath, importee.name)
                                if (os.path.isfile(os.path.join(moduleDir, _importeePfxName) + ".py")
                                    and _importeePfxName not in moduleImports):
                                        moduleImports.append(_importeePfxName)
        except EnvironmentError:
            # unreadable file or failed stat: treat as "no plug-in info"
            pass
        if f:
            f.close()
    return None
Example #23
0
def moduleModuleInfo(moduleURL, reload=False, parentImportsSubtree=False):
    """Statically scan a plug-in module for its ``__pluginInfo__`` dict.

    The module source is parsed with ``ast`` (never imported or executed)
    and the ``__pluginInfo__`` assignment, if found, is translated into a
    moduleInfo dict carrying name, moduleURL, status, fileDate,
    classMethods and a recursively resolved ``imports`` list.

    Parameters:
        moduleURL: URL or file path of the plug-in module or package directory.
        reload: if True, force the web cache to re-retrieve the file.
        parentImportsSubtree: True when the importing parent requested
            ``module_import_subtree``, so relative-import discovery propagates.

    Returns:
        moduleInfo dict, or None when no ``__pluginInfo__`` is present or
        the file cannot be read.
    """
    #TODO several directories, eg User Application Data
    moduleFilename = _cntlr.webCache.getfilename(moduleURL,
                                                 reload=reload,
                                                 normalize=True,
                                                 base=_pluginBase)
    if moduleFilename:
        f = None
        try:
            # if moduleFilename is a directory containing an __init__.py file, open that instead
            if os.path.isdir(moduleFilename) and os.path.isfile(
                    os.path.join(moduleFilename, "__init__.py")):
                moduleFilename = os.path.join(moduleFilename, "__init__.py")
            moduleDir = os.path.dirname(moduleFilename)
            f = openFileStream(_cntlr, moduleFilename)
            tree = ast.parse(f.read(), filename=moduleFilename)
            # relative (level-1) imports discovered in the module source
            moduleImports = []
            for item in tree.body:
                if isinstance(item, ast.Assign):
                    attr = item.targets[0].id
                    if attr == "__pluginInfo__":
                        f.close()
                        moduleInfo = {"name": None}
                        classMethods = []
                        importURLs = []
                        for i, key in enumerate(item.value.keys):
                            _key = key.s
                            _value = item.value.values[i]
                            _valueType = _value.__class__.__name__
                            if _key == "import":
                                if _valueType == 'Str':
                                    importURLs.append(_value.s)
                                elif _valueType in ("List", "Tuple"):
                                    for elt in _value.elts:
                                        importURLs.append(elt.s)
                            elif _valueType == 'Str':
                                moduleInfo[_key] = _value.s
                            elif _valueType == 'Name':
                                # a Name value designates a plug-in class-method hook
                                classMethods.append(_key)
                            elif _key == "imports" and _valueType in ("List",
                                                                      "Tuple"):
                                importURLs = [elt.s for elt in _value.elts]
                        moduleInfo['classMethods'] = classMethods
                        moduleInfo["moduleURL"] = moduleURL
                        moduleInfo["status"] = 'enabled'
                        moduleInfo["fileDate"] = time.strftime(
                            '%Y-%m-%dT%H:%M:%S UTC',
                            time.gmtime(os.path.getmtime(moduleFilename)))
                        mergedImportURLs = []
                        _moduleImportsSubtree = False
                        for _url in importURLs:
                            if _url.startswith("module_import"):
                                # "module_import[_subtree]" placeholder expands to
                                # the module's own relative imports collected above
                                for moduleImport in moduleImports:
                                    mergedImportURLs.append(moduleImport +
                                                            ".py")
                                if _url == "module_import_subtree":
                                    _moduleImportsSubtree = True
                            else:
                                mergedImportURLs.append(_url)
                        if parentImportsSubtree and not _moduleImportsSubtree:
                            _moduleImportsSubtree = True
                            for moduleImport in moduleImports:
                                mergedImportURLs.append(moduleImport + ".py")
                        imports = []
                        for _url in mergedImportURLs:
                            _importURL = (
                                _url if isAbsolute(_url)
                                or os.path.isabs(_url) else os.path.join(
                                    os.path.dirname(moduleURL), _url))
                            # recurse to build each imported module's moduleInfo
                            _importModuleInfo = moduleModuleInfo(
                                _importURL, reload, _moduleImportsSubtree)
                            if _importModuleInfo:
                                _importModuleInfo["isImported"] = True
                                imports.append(_importModuleInfo)
                        moduleInfo["imports"] = imports
                        return moduleInfo
                elif isinstance(item, ast.ImportFrom):
                    if item.level == 1:  # starts with .
                        if item.module is None:  # from . import module1, module2, ...
                            for importee in item.names:
                                if (os.path.isfile(
                                        os.path.join(moduleDir,
                                                     importee.name + ".py"))
                                        and importee.name
                                        not in moduleImports):
                                    moduleImports.append(importee.name)
                        else:
                            # from .pkg.mod import name1, ... -> record module and prefixed names
                            modulePkgs = item.module.split('.')
                            modulePath = os.path.join(*modulePkgs)
                            if (os.path.isfile(
                                    os.path.join(moduleDir, modulePath) +
                                    ".py")
                                    and modulePath not in moduleImports):
                                moduleImports.append(modulePath)
                            for importee in item.names:
                                _importeePfxName = os.path.join(
                                    modulePath, importee.name)
                                if (os.path.isfile(
                                        os.path.join(moduleDir,
                                                     _importeePfxName) + ".py")
                                        and _importeePfxName
                                        not in moduleImports):
                                    moduleImports.append(_importeePfxName)
        except EnvironmentError:
            # unreadable file or failed stat: treat as "no plug-in info"
            pass
        if f:
            f.close()
    return None
def setup(val):
    """Prepare semantic-validation state for a us-gaap filing.

    Locates the filing's us-gaap taxonomy year and loads three lookup
    tables, building JSON caches next to the taxonomy on first use:
    deprecated concepts, summation-item calculations, and default
    dimensions.

    Parameters:
        val: validation object; gains ugtNamespace, usgaapDeprecations,
            usgaapCalculations, usgaapDefaultDimensions and the
            deprecated* tracking dicts as attributes.
    """
    if not val.validateLoggingSemantic:  # all checks herein are SEMANTIC
        return

    # NOTE(review): "linrole..." looks like a typo for "linkrole..." but the
    # attribute name is kept as-is since other modules may reference it.
    val.linroleDefinitionIsDisclosure = re.compile(r"-\s+Disclosure\s+-\s",
                                                   re.IGNORECASE)
    val.linkroleDefinitionStatementSheet = re.compile(r"[^-]+-\s+Statement\s+-\s+.*", # no restriction to type of statement
                                                      re.IGNORECASE)
    val.ugtNamespace = None
    cntlr = val.modelXbrl.modelManager.cntlr
    # load deprecated concepts for filed year of us-gaap
    for ugt in ugtDocs:
        ugtNamespace = ugt["namespace"]
        if ugtNamespace in val.modelXbrl.namespaceDocs and len(val.modelXbrl.namespaceDocs[ugtNamespace]) > 0:
            val.ugtNamespace = ugtNamespace
            usgaapDoc = val.modelXbrl.namespaceDocs[ugtNamespace][0]
            deprecationsJsonFile = usgaapDoc.filepathdir + os.sep + "deprecated-concepts.json"
            file = None
            try:
                # fast path: previously built JSON cache alongside the taxonomy
                file = openFileStream(cntlr, deprecationsJsonFile, 'rt', encoding='utf-8')
                val.usgaapDeprecations = json.load(file)
                file.close()
            except Exception:
                # cache miss or unreadable cache: build the table from the
                # us-gaap documentation linkbase and save it as JSON
                if file:
                    file.close()
                val.modelXbrl.modelManager.addToLog(_("loading us-gaap {0} deprecated concepts into cache").format(ugt["year"]))
                startedAt = time.time()
                ugtDocLB = ugt["docLB"]
                val.usgaapDeprecations = {}
                # load without SEC/EFM validation (doc file would not be acceptable)
                priorValidateDisclosureSystem = val.modelXbrl.modelManager.validateDisclosureSystem
                val.modelXbrl.modelManager.validateDisclosureSystem = False
                deprecationsInstance = ModelXbrl.load(val.modelXbrl.modelManager, 
                      # "http://xbrl.fasb.org/us-gaap/2012/elts/us-gaap-doc-2012-01-31.xml",
                      # load from zip (especially after caching) is incredibly faster
                      openFileSource(ugtDocLB, cntlr), 
                      _("built deprecations table in cache"))
                val.modelXbrl.modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
                if deprecationsInstance is None:
                    val.modelXbrl.error("arelle:notLoaded",
                        _("US-GAAP documentation not loaded: %(file)s"),
                        modelXbrl=val, file=os.path.basename(ugtDocLB))
                else:   
                    # load deprecations: map concept name -> (deprecation date, deprecation label)
                    for labelRel in deprecationsInstance.relationshipSet(XbrlConst.conceptLabel).modelRelationships:
                        modelDocumentation = labelRel.toModelObject
                        conceptName = labelRel.fromModelObject.name
                        if modelDocumentation.role == 'http://www.xbrl.org/2009/role/deprecatedLabel':
                            val.usgaapDeprecations[conceptName] = (val.usgaapDeprecations.get(conceptName, ('',''))[0], modelDocumentation.text)
                        elif modelDocumentation.role == 'http://www.xbrl.org/2009/role/deprecatedDateLabel':
                            val.usgaapDeprecations[conceptName] = (modelDocumentation.text, val.usgaapDeprecations.get(conceptName, ('',''))[1])
                    jsonStr = _STR_UNICODE(json.dumps(val.usgaapDeprecations, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
                    saveFile(cntlr, deprecationsJsonFile, jsonStr)  # 2.7 gets unicode this way
                    deprecationsInstance.close()
                    del deprecationsInstance # dereference closed modelXbrl
                val.modelXbrl.profileStat(_("build us-gaap deprecated concepts cache"), time.time() - startedAt)
            ugtCalcsJsonFile = usgaapDoc.filepathdir + os.sep + "ugt-calculations.json"
            ugtDefaultDimensionsJsonFile = usgaapDoc.filepathdir + os.sep + "ugt-default-dimensions.json"
            file = None
            try:
                # fast path: both caches must load; a failure on either rebuilds both
                file = openFileStream(cntlr, ugtCalcsJsonFile, 'rt', encoding='utf-8')
                val.usgaapCalculations = json.load(file)
                file.close()
                file = openFileStream(cntlr, ugtDefaultDimensionsJsonFile, 'rt', encoding='utf-8')
                val.usgaapDefaultDimensions = json.load(file)
                file.close()
            except Exception:
                if file:
                    file.close()
                val.modelXbrl.modelManager.addToLog(_("loading us-gaap {0} calculations and default dimensions into cache").format(ugt["year"]))
                startedAt = time.time()
                ugtEntryXsd = ugt["entryXsd"]
                val.usgaapCalculations = {}
                val.usgaapDefaultDimensions = {}
                # load without SEC/EFM validation (doc file would not be acceptable)
                priorValidateDisclosureSystem = val.modelXbrl.modelManager.validateDisclosureSystem
                val.modelXbrl.modelManager.validateDisclosureSystem = False
                calculationsInstance = ModelXbrl.load(val.modelXbrl.modelManager, 
                      # "http://xbrl.fasb.org/us-gaap/2012/entire/us-gaap-entryPoint-std-2012-01-31.xsd",
                      # load from zip (especially after caching) is incredibly faster
                      openFileSource(ugtEntryXsd, cntlr), 
                      _("built us-gaap calculations cache"))
                val.modelXbrl.modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
                if calculationsInstance is None:
                    val.modelXbrl.error("arelle:notLoaded",
                        _("US-GAAP calculations not loaded: %(file)s"),
                        modelXbrl=val, file=os.path.basename(ugtEntryXsd))
                else:   
                    # load calculations: per ELR, roots/definition/statement flag
                    # plus parent concept name -> list of child concept names
                    for ELR in calculationsInstance.relationshipSet(XbrlConst.summationItem).linkRoleUris:
                        elrRelSet = calculationsInstance.relationshipSet(XbrlConst.summationItem, ELR)
                        definition = ""
                        for roleType in calculationsInstance.roleTypes.get(ELR,()):
                            definition = roleType.definition
                            break
                        isStatementSheet = bool(val.linkroleDefinitionStatementSheet.match(definition))
                        elrUgtCalcs = {"#roots": [c.name for c in elrRelSet.rootConcepts],
                                       "#definition": definition,
                                       "#isStatementSheet": isStatementSheet}
                        for relFrom, rels in elrRelSet.fromModelObjects().items():
                            elrUgtCalcs[relFrom.name] = [rel.toModelObject.name for rel in rels]
                        val.usgaapCalculations[ELR] = elrUgtCalcs
                    jsonStr = _STR_UNICODE(json.dumps(val.usgaapCalculations, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
                    saveFile(cntlr, ugtCalcsJsonFile, jsonStr)  # 2.7 gets unicode this way
                    # load default dimensions: dimension concept name -> default member name
                    for defaultDimRel in calculationsInstance.relationshipSet(XbrlConst.dimensionDefault).modelRelationships:
                        if defaultDimRel.fromModelObject is not None and defaultDimRel.toModelObject is not None:
                            val.usgaapDefaultDimensions[defaultDimRel.fromModelObject.name] = defaultDimRel.toModelObject.name
                    jsonStr = _STR_UNICODE(json.dumps(val.usgaapDefaultDimensions, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
                    saveFile(cntlr, ugtDefaultDimensionsJsonFile, jsonStr)  # 2.7 gets unicode this way
                    calculationsInstance.close()
                    del calculationsInstance # dereference closed modelXbrl
                val.modelXbrl.profileStat(_("build us-gaap calculations and default dimensions cache"), time.time() - startedAt)
            break
    val.deprecatedFactConcepts = defaultdict(list)
    val.deprecatedDimensions = defaultdict(list)
    val.deprecatedMembers = defaultdict(list)
Example #25
0
def loadDqc0015signwarningRules(modelXbrl):
    """Load (building and caching on first use) the DQC 0015 sign-warning rules.

    Returns a dict keyed by rule-object kind ("conceptNames",
    "excludedMemberNames", "excludedAxesMembers", "excludedMemberStrings"),
    each mapping an abbreviated namespace to names or substrings collected
    from the FASB DQC rules taxonomies.  A completed table is cached as
    signwarnings.json in the resources directory; when that cache exists it
    is returned directly without loading any taxonomy.

    Parameters:
        modelXbrl: model whose modelManager/cntlr provide file access and logging.

    Returns:
        dict of sign-warning rule structures (see above).
    """
    conceptRule = "http://fasb.org/dqcrules/arcrole/concept-rule" # FASB arcrule
    rule0015 = "http://fasb.org/us-gaap/role/dqc/0015"
    modelManager = modelXbrl.modelManager
    cntlr = modelXbrl.modelManager.cntlr
    # check for cached completed signwarnings
    _signwarningsFileName = resourcesFilePath(modelManager, "signwarnings.json")
    if os.path.exists(_signwarningsFileName): 
        _file = openFileStream(modelManager.cntlr, _signwarningsFileName, 'rt', encoding='utf-8')
        signwarnings = json.load(_file) # {ruleObjectKind: {nsAbbr: [name, ...]}, ...}
        _file.close()
        return signwarnings
    # load template rules; an unconditional open (the prior "if _fileName" guard
    # could leave signwarnings unbound) so a missing template fails loudly here
    _fileName = resourcesFilePath(modelManager, "signwarnings-template.json")
    _file = openFileStream(modelXbrl.modelManager.cntlr, _fileName, 'rt', encoding='utf-8')
    signwarnings = json.load(_file, object_pairs_hook=OrderedDict) # preserve key order for stable saved JSON
    _file.close()

    # local import as in surrounding code; hoisted out of the loop below
    from arelle import ModelXbrl

    # load rules and add to signwarnings template
    for dqcAbbr, dqcrtUrl in latestDqcrtDocs.items():
        modelManager.addToLog(_("loading {} DQC Rules {}").format(dqcAbbr, dqcrtUrl), messageCode="info")
        # load without SEC/EFM validation (doc file would not be acceptable)
        priorValidateDisclosureSystem = modelManager.validateDisclosureSystem
        modelManager.validateDisclosureSystem = False
        dqcrtInstance = ModelXbrl.load(modelManager, 
              # load from zip (especially after caching) is incredibly faster
              openFileSource(dqcrtUrl, cntlr), 
              _("built dqcrt table in cache"))
        modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
        if dqcrtInstance is None:
            # fix: previously passed undefined name "val" in messageArgs,
            # raising NameError whenever the load failed
            modelManager.addToLog(
                _("%(name)s documentation not loaded"),
                messageCode="arelle:notLoaded", messageArgs={"modelXbrl": modelXbrl, "name": dqcAbbr})
        else:
            # collect sign-warning rule objects from DQC 0015 relationships
            dqcRelSet = dqcrtInstance.relationshipSet(conceptRule, rule0015)
            for signWrnObj, headEltName in (("conceptNames", "Dqc_0015_ListOfElements"),
                                            ("excludedMemberNames", "Dqc_0015_ExcludeNonNegMembersAbstract"),
                                            ("excludedAxesMembers", "Dqc_0015_ExcludeNonNegAxisAbstract"),
                                            ("excludedAxesMembers", "Dqc_0015_ExcludeNonNegAxisMembersAbstract"),
                                            ("excludedMemberStrings", "Dqc_0015_ExcludeNonNegMemberStringsAbstract")):
                headElts = dqcrtInstance.nameConcepts.get(headEltName,())
                for headElt in headElts:
                    if signWrnObj == "excludedMemberStrings":
                        # substrings come from reference parts, not concept relationships
                        for refRel in dqcrtInstance.relationshipSet(XbrlConst.conceptReference).fromModelObject(headElt):
                            for refPart in refRel.toModelObject.iterchildren("{*}allowableSubString"):
                                for subStr in refPart.text.split():
                                    # NOTE(review): nsAbbr leaks here from a prior
                                    # iteration's else-branch and would be unbound if
                                    # this kind were processed first — TODO confirm
                                    # the intended namespace key
                                    signwarnings[signWrnObj].setdefault(nsAbbr, []).append(subStr)
                    else:
                        for ruleRel in dqcRelSet.fromModelObject(headElt):
                            elt = ruleRel.toModelObject
                            nsAbbr = abbreviatedNamespace(elt.qname.namespaceURI)
                            if signWrnObj in ("conceptNames", "excludedMemberNames"):
                                signwarnings[signWrnObj].setdefault(nsAbbr, []).append(elt.name)
                            else:
                                l = signwarnings[signWrnObj].setdefault(nsAbbr, {}).setdefault(elt.name, [])
                                if headEltName == "Dqc_0015_ExcludeNonNegAxisAbstract":
                                    l.append("*")  # whole axis excluded, any member
                                else:
                                    for memRel in dqcRelSet.fromModelObject(elt):
                                        l.append(memRel.toModelObject.name)
            jsonStr = _STR_UNICODE(json.dumps(signwarnings, ensure_ascii=False, indent=2)) # might not be unicode in 2.7
            saveFile(cntlr, _signwarningsFileName, jsonStr)  # 2.7 gets unicode this way
            dqcrtInstance.close()
            del dqcrtInstance # dereference closed modelXbrl
    return signwarnings
def setup(val):
    """Prepare validation state for a us-gaap filing.

    Locates the filing's us-gaap taxonomy year and loads three lookup
    tables, building JSON caches next to the taxonomy on first use:
    deprecated concepts, summation-item calculations, and default
    dimensions.

    Parameters:
        val: validation object; gains ugtNamespace, usgaapDeprecations,
            usgaapCalculations, usgaapDefaultDimensions and the
            deprecated* tracking dicts as attributes.
    """
    # NOTE(review): "linrole..." looks like a typo for "linkrole..." but the
    # attribute name is kept as-is since other modules may reference it.
    val.linroleDefinitionIsDisclosure = re.compile(r"-\s+Disclosure\s+-\s",
                                                   re.IGNORECASE)
    val.linkroleDefinitionStatementSheet = re.compile(r"[^-]+-\s+Statement\s+-\s+.*", # no restriction to type of statement
                                                      re.IGNORECASE)
    val.ugtNamespace = None
    cntlr = val.modelXbrl.modelManager.cntlr
    # load deprecated concepts for filed year of us-gaap
    for ugt in ugtDocs:
        ugtNamespace = ugt["namespace"]
        if ugtNamespace in val.modelXbrl.namespaceDocs and len(val.modelXbrl.namespaceDocs[ugtNamespace]) > 0:
            val.ugtNamespace = ugtNamespace
            usgaapDoc = val.modelXbrl.namespaceDocs[ugtNamespace][0]
            deprecationsJsonFile = usgaapDoc.filepathdir + os.sep + "deprecated-concepts.json"
            file = None
            try:
                # fast path: previously built JSON cache alongside the taxonomy
                file = openFileStream(cntlr, deprecationsJsonFile, 'rt', encoding='utf-8')
                val.usgaapDeprecations = json.load(file)
                file.close()
            except Exception:
                # cache miss or unreadable cache: build the table from the
                # us-gaap documentation linkbase and save it as JSON
                if file:
                    file.close()
                val.modelXbrl.modelManager.addToLog(_("loading us-gaap {0} deprecated concepts into cache").format(ugt["year"]))
                startedAt = time.time()
                ugtDocLB = ugt["docLB"]
                val.usgaapDeprecations = {}
                # load without SEC/EFM validation (doc file would not be acceptable)
                priorValidateDisclosureSystem = val.modelXbrl.modelManager.validateDisclosureSystem
                val.modelXbrl.modelManager.validateDisclosureSystem = False
                deprecationsInstance = ModelXbrl.load(val.modelXbrl.modelManager, 
                      # "http://xbrl.fasb.org/us-gaap/2012/elts/us-gaap-doc-2012-01-31.xml",
                      # load from zip (especially after caching) is incredibly faster
                      openFileSource(ugtDocLB, cntlr), 
                      _("built deprecations table in cache"))
                val.modelXbrl.modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
                if deprecationsInstance is None:
                    val.modelXbrl.error("arelle:notLoaded",
                        _("US-GAAP documentation not loaded: %(file)s"),
                        modelXbrl=val, file=os.path.basename(ugtDocLB))
                else:   
                    # load deprecations: map concept name -> (deprecation date, deprecation label)
                    for labelRel in deprecationsInstance.relationshipSet(XbrlConst.conceptLabel).modelRelationships:
                        modelDocumentation = labelRel.toModelObject
                        conceptName = labelRel.fromModelObject.name
                        if modelDocumentation.role == 'http://www.xbrl.org/2009/role/deprecatedLabel':
                            val.usgaapDeprecations[conceptName] = (val.usgaapDeprecations.get(conceptName, ('',''))[0], modelDocumentation.text)
                        elif modelDocumentation.role == 'http://www.xbrl.org/2009/role/deprecatedDateLabel':
                            val.usgaapDeprecations[conceptName] = (modelDocumentation.text, val.usgaapDeprecations.get(conceptName, ('',''))[1])
                    jsonStr = _STR_UNICODE(json.dumps(val.usgaapDeprecations, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
                    saveFile(cntlr, deprecationsJsonFile, jsonStr)  # 2.7 gets unicode this way
                    deprecationsInstance.close()
                    del deprecationsInstance # dereference closed modelXbrl
                val.modelXbrl.profileStat(_("build us-gaap deprecated concepts cache"), time.time() - startedAt)
            ugtCalcsJsonFile = usgaapDoc.filepathdir + os.sep + "ugt-calculations.json"
            ugtDefaultDimensionsJsonFile = usgaapDoc.filepathdir + os.sep + "ugt-default-dimensions.json"
            file = None
            try:
                # fast path: both caches must load; a failure on either rebuilds both
                file = openFileStream(cntlr, ugtCalcsJsonFile, 'rt', encoding='utf-8')
                val.usgaapCalculations = json.load(file)
                file.close()
                file = openFileStream(cntlr, ugtDefaultDimensionsJsonFile, 'rt', encoding='utf-8')
                val.usgaapDefaultDimensions = json.load(file)
                file.close()
            except Exception:
                if file:
                    file.close()
                val.modelXbrl.modelManager.addToLog(_("loading us-gaap {0} calculations and default dimensions into cache").format(ugt["year"]))
                startedAt = time.time()
                ugtEntryXsd = ugt["entryXsd"]
                val.usgaapCalculations = {}
                val.usgaapDefaultDimensions = {}
                # load without SEC/EFM validation (doc file would not be acceptable)
                priorValidateDisclosureSystem = val.modelXbrl.modelManager.validateDisclosureSystem
                val.modelXbrl.modelManager.validateDisclosureSystem = False
                calculationsInstance = ModelXbrl.load(val.modelXbrl.modelManager, 
                      # "http://xbrl.fasb.org/us-gaap/2012/entire/us-gaap-entryPoint-std-2012-01-31.xsd",
                      # load from zip (especially after caching) is incredibly faster
                      openFileSource(ugtEntryXsd, cntlr), 
                      _("built us-gaap calculations cache"))
                val.modelXbrl.modelManager.validateDisclosureSystem = priorValidateDisclosureSystem
                if calculationsInstance is None:
                    val.modelXbrl.error("arelle:notLoaded",
                        _("US-GAAP calculations not loaded: %(file)s"),
                        modelXbrl=val, file=os.path.basename(ugtEntryXsd))
                else:   
                    # load calculations: per ELR, roots/definition/statement flag
                    # plus parent concept name -> list of child concept names
                    for ELR in calculationsInstance.relationshipSet(XbrlConst.summationItem).linkRoleUris:
                        elrRelSet = calculationsInstance.relationshipSet(XbrlConst.summationItem, ELR)
                        definition = ""
                        for roleType in calculationsInstance.roleTypes.get(ELR,()):
                            definition = roleType.definition
                            break
                        isStatementSheet = bool(val.linkroleDefinitionStatementSheet.match(definition))
                        elrUgtCalcs = {"#roots": [c.name for c in elrRelSet.rootConcepts],
                                       "#definition": definition,
                                       "#isStatementSheet": isStatementSheet}
                        for relFrom, rels in elrRelSet.fromModelObjects().items():
                            elrUgtCalcs[relFrom.name] = [rel.toModelObject.name for rel in rels]
                        val.usgaapCalculations[ELR] = elrUgtCalcs
                    jsonStr = _STR_UNICODE(json.dumps(val.usgaapCalculations, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
                    saveFile(cntlr, ugtCalcsJsonFile, jsonStr)  # 2.7 gets unicode this way
                    # load default dimensions: dimension concept name -> default member name
                    for defaultDimRel in calculationsInstance.relationshipSet(XbrlConst.dimensionDefault).modelRelationships:
                        if defaultDimRel.fromModelObject is not None and defaultDimRel.toModelObject is not None:
                            val.usgaapDefaultDimensions[defaultDimRel.fromModelObject.name] = defaultDimRel.toModelObject.name
                    jsonStr = _STR_UNICODE(json.dumps(val.usgaapDefaultDimensions, ensure_ascii=False, indent=0)) # might not be unicode in 2.7
                    saveFile(cntlr, ugtDefaultDimensionsJsonFile, jsonStr)  # 2.7 gets unicode this way
                    calculationsInstance.close()
                    del calculationsInstance # dereference closed modelXbrl
                val.modelXbrl.profileStat(_("build us-gaap calculations and default dimensions cache"), time.time() - startedAt)
            break
    val.deprecatedFactConcepts = defaultdict(list)
    val.deprecatedDimensions = defaultdict(list)
    val.deprecatedMembers = defaultdict(list)
Example #27
0
def moduleModuleInfo(moduleURL, reload=False, parentImportsSubtree=False):
    """Scan a plug-in module's source for its ``__pluginInfo__`` metadata.

    The module file is parsed with :mod:`ast` (never imported), so no plug-in
    code executes during discovery.  Relative ``from .`` imports seen before
    the ``__pluginInfo__`` assignment are collected so that ``"import":
    "module_import..."`` entries can expand to them.  Declared import URLs are
    resolved and scanned recursively into ``moduleInfo["imports"]``.

    :param moduleURL: URL or path of the plug-in module (or package directory)
    :param reload: pass-through to the web cache to force re-retrieval
    :param parentImportsSubtree: True when a parent plug-in declared
        ``module_import_subtree``; this module's relative imports are then
        merged in even if it did not declare them itself
    :return: dict of plug-in metadata, or None if no ``__pluginInfo__`` found
        (any exception during scanning is logged/printed and yields None)
    """
    #TODO several directories, eg User Application Data
    # resolve the URL to a local cached filename (may download via web cache)
    moduleFilename = _cntlr.webCache.getfilename(moduleURL, reload=reload, normalize=True, base=_pluginBase)
    if moduleFilename:
        f = None
        try:
            # if moduleFilename is a directory containing an __init__.py file, open that instead
            if os.path.isdir(moduleFilename) and os.path.isfile(os.path.join(moduleFilename, "__init__.py")):
                moduleFilename = os.path.join(moduleFilename, "__init__.py")
            moduleDir = os.path.dirname(moduleFilename)
            if PLUGIN_TRACE_FILE:
                with open(PLUGIN_TRACE_FILE, "at", encoding='utf-8') as fh:
                    fh.write("Scanning module for plug-in info: {}\n".format(moduleFilename))
            f = openFileStream(_cntlr, moduleFilename)
            # parse source without executing it; plug-in info is read from the AST
            tree = ast.parse(f.read(), filename=moduleFilename)
            moduleImports = []  # names from relative "from ." imports, in source order
            for item in tree.body:
                if isinstance(item, ast.Assign):
                    attr = item.targets[0].id
                    if attr == "__pluginInfo__":
                        f.close()
                        moduleInfo = {"name":None}
                        classMethods = []
                        importURLs = []
                        # walk the dict literal's key/value pairs in parallel
                        for i, key in enumerate(item.value.keys):
                            _key = key.s
                            _value = item.value.values[i]
                            _valueType = _value.__class__.__name__
                            if _key == "import":
                                # "import" accepts a single string or a list/tuple of URLs
                                if _valueType == 'Str':
                                    importURLs.append(_value.s)
                                elif _valueType in ("List", "Tuple"):
                                    for elt in _value.elts:
                                        importURLs.append(elt.s)
                            elif _valueType == 'Str':
                                moduleInfo[_key] = _value.s
                            elif _valueType == 'Name':
                                # a bare name value designates a plug-in class method hook
                                classMethods.append(_key)
                            elif _key == "imports" and _valueType in ("List", "Tuple"):
                                # "imports" (plural) replaces any previously gathered URLs
                                importURLs = [elt.s for elt in _value.elts]
                        moduleInfo['classMethods'] = classMethods
                        moduleInfo["moduleURL"] = moduleURL
                        moduleInfo["status"] = 'enabled'
                        moduleInfo["fileDate"] = time.strftime('%Y-%m-%dT%H:%M:%S UTC', time.gmtime(os.path.getmtime(moduleFilename)))
                        # expand the special "module_import*" pseudo-URLs into the
                        # relative imports collected from the AST above
                        mergedImportURLs = []
                        _moduleImportsSubtree = False
                        for _url in importURLs:
                            if _url.startswith("module_import"):
                                for moduleImport in moduleImports:
                                    mergedImportURLs.append(moduleImport + ".py")
                                if _url == "module_import_subtree":
                                    _moduleImportsSubtree = True
                            else:
                                mergedImportURLs.append(_url)
                        # parent's subtree declaration propagates down to this module
                        if parentImportsSubtree and not _moduleImportsSubtree:
                            _moduleImportsSubtree = True
                            for moduleImport in moduleImports:
                                mergedImportURLs.append(moduleImport + ".py")
                        imports = []
                        for _url in mergedImportURLs:
                            if isAbsolute(_url) or os.path.isabs(_url):
                                _importURL = _url # URL is absolute http or local file system
                            else: # check if exists relative to this module's directory
                                _importURL = os.path.join(os.path.dirname(moduleURL), _url)
                                if not os.path.exists(_importURL): # not relative to this plugin, assume standard plugin base
                                    _importURL = os.path.join(_pluginBase, _url)
                            # recurse to scan the imported plug-in module
                            _importModuleInfo = moduleModuleInfo(_importURL, reload, _moduleImportsSubtree)
                            if _importModuleInfo:
                                _importModuleInfo["isImported"] = True
                                imports.append(_importModuleInfo)
                        moduleInfo["imports"] =  imports
                        return moduleInfo
                elif isinstance(item, ast.ImportFrom):
                    if item.level == 1: # starts with .
                        if item.module is None:  # from . import module1, module2, ...
                            for importee in item.names:
                                # record sibling .py modules (deduplicated)
                                if (os.path.isfile(os.path.join(moduleDir, importee.name + ".py"))
                                    and importee.name not in moduleImports):
                                    moduleImports.append(importee.name)
                        else:
                            # from .pkg[.subpkg] import name, ... -- record both the
                            # package module and any importee that is itself a module
                            modulePkgs = item.module.split('.')
                            modulePath = os.path.join(*modulePkgs)
                            if (os.path.isfile(os.path.join(moduleDir, modulePath) + ".py")
                                and modulePath not in moduleImports):
                                    moduleImports.append(modulePath)
                            for importee in item.names:
                                _importeePfxName = os.path.join(modulePath, importee.name)
                                if (os.path.isfile(os.path.join(moduleDir, _importeePfxName) + ".py")
                                    and _importeePfxName not in moduleImports):
                                        moduleImports.append(_importeePfxName)
            if PLUGIN_TRACE_FILE:
                with open(PLUGIN_TRACE_FILE, "at", encoding='utf-8') as fh:
                    fh.write("Successful module plug-in info: " + moduleFilename + '\n')
        except Exception as err:
            # broad catch: any parse/IO failure is reported but must not abort
            # the caller's plug-in discovery loop
            _msg = _("Exception obtaining plug-in module info: {error}\n{traceback}").format(
                    error=err, traceback=traceback.format_tb(sys.exc_info()[2]))
            if PLUGIN_TRACE_FILE:
                with open(PLUGIN_TRACE_FILE, "at", encoding='utf-8') as fh:
                    fh.write(_msg + '\n')
            else:
                print(_msg, file=sys.stderr)

        if f:
            f.close()
    return None
Beispiel #28
0
def moduleModuleInfo(moduleURL, reload=False, parentImportsSubtree=False):
    """Scan a plug-in module's source for its ``__pluginInfo__`` metadata.

    The module file is parsed with :mod:`ast` (never imported), so no plug-in
    code executes during discovery.  Module-level string constants and
    function definitions are tracked so ``__pluginInfo__`` values may
    reference them by name; relative ``from .`` imports are collected so that
    ``"import": "module_import..."`` entries can expand to them.  Declared
    import URLs are resolved and scanned recursively into
    ``moduleInfo["imports"]``.

    :param moduleURL: URL or path of the plug-in module (or package directory)
    :param reload: pass-through to the web cache to force re-retrieval
    :param parentImportsSubtree: True when a parent plug-in declared
        ``module_import_subtree``; this module's relative imports are then
        merged in even if it did not declare them itself
    :return: dict of plug-in metadata, or None if no ``__pluginInfo__`` found
        (any exception during scanning is logged/printed and yields None)
    """
    #TODO several directories, eg User Application Data
    moduleFilename = _cntlr.webCache.getfilename(moduleURL,
                                                 reload=reload,
                                                 normalize=True,
                                                 base=_pluginBase)
    if moduleFilename:
        f = None
        try:
            # if moduleFilename is a directory containing an __init__.py file, open that instead
            if os.path.isdir(moduleFilename):
                if os.path.isfile(os.path.join(moduleFilename, "__init__.py")):
                    moduleFilename = os.path.join(moduleFilename, "__init__.py")
                else:  # impossible to get a moduleinfo from a directory without an __init__.py
                    return None
            elif (not moduleFilename.endswith(".py")
                  and not os.path.exists(moduleFilename)
                  and os.path.exists(moduleFilename + ".py")):
                moduleFilename += ".py"  # extension module without .py suffix
            moduleDir, moduleName = os.path.split(moduleFilename)
            if PLUGIN_TRACE_FILE:
                with open(PLUGIN_TRACE_FILE, "at", encoding='utf-8') as fh:
                    fh.write("Scanning module for plug-in info: {}\n".format(moduleFilename))
            f = openFileStream(_cntlr, moduleFilename)
            # parse source without executing it; plug-in info is read from the AST
            tree = ast.parse(f.read(), filename=moduleFilename)
            constantStrings = {}      # module-level str constants referencable in __pluginInfo__
            functionDefNames = set()  # module-level function names usable as class-method hooks
            moduleImports = []        # names from relative "from ." imports, in source order
            for item in tree.body:
                if isinstance(item, ast.Assign):
                    attr = item.targets[0].id
                    if attr == "__pluginInfo__":
                        f.close()
                        moduleInfo = {"name": None}
                        classMethods = []
                        importURLs = []
                        # walk the dict literal's key/value pairs in parallel
                        for i, key in enumerate(item.value.keys):
                            _key = key.s
                            _value = item.value.values[i]
                            _valueType = _value.__class__.__name__
                            if _key == "import":
                                # "import" accepts a single string or a list/tuple of URLs
                                if _valueType == 'Str':
                                    importURLs.append(_value.s)
                                elif _valueType in ("List", "Tuple"):
                                    for elt in _value.elts:
                                        importURLs.append(elt.s)
                            elif _valueType == 'Str':
                                moduleInfo[_key] = _value.s
                            elif _valueType == 'Name':
                                # a bare name is either a prior string constant
                                # (inlined) or a function def (a class-method hook)
                                if _value.id in constantStrings:
                                    moduleInfo[_key] = constantStrings[_value.id]
                                elif _value.id in functionDefNames:
                                    classMethods.append(_key)
                            elif _key == "imports" and _valueType in ("List", "Tuple"):
                                # "imports" (plural) replaces any previously gathered URLs
                                importURLs = [elt.s for elt in _value.elts]
                        moduleInfo['classMethods'] = classMethods
                        moduleInfo["moduleURL"] = moduleURL
                        moduleInfo["status"] = 'enabled'
                        moduleInfo["fileDate"] = time.strftime(
                            '%Y-%m-%dT%H:%M:%S UTC',
                            time.gmtime(os.path.getmtime(moduleFilename)))
                        # expand the special pseudo-URLs into concrete import targets
                        mergedImportURLs = []
                        _moduleImportsSubtree = False
                        for _url in importURLs:
                            if _url.startswith("module_import"):
                                for moduleImport in moduleImports:
                                    mergedImportURLs.append(moduleImport + ".py")
                                if _url == "module_import_subtree":
                                    _moduleImportsSubtree = True
                            elif _url == "module_subtree":
                                # import every subdirectory as a plug-in package
                                for _dir in os.listdir(moduleDir):
                                    _subtreeModule = os.path.join(moduleDir, _dir)
                                    if os.path.isdir(_subtreeModule) and _dir != "__pycache__":
                                        mergedImportURLs.append(_subtreeModule)
                            else:
                                mergedImportURLs.append(_url)
                        # parent's subtree declaration propagates down to this module
                        if parentImportsSubtree and not _moduleImportsSubtree:
                            _moduleImportsSubtree = True
                            for moduleImport in moduleImports:
                                mergedImportURLs.append(moduleImport + ".py")
                        imports = []
                        for _url in mergedImportURLs:
                            if isAbsolute(_url) or os.path.isabs(_url):
                                _importURL = _url  # URL is absolute http or local file system
                            else:  # check if exists relative to this module's directory
                                _importURL = os.path.join(os.path.dirname(moduleURL),
                                                          os.path.normpath(_url))
                                if not os.path.exists(_importURL):  # not relative to this plugin, assume standard plugin base
                                    _importURL = _url  # moduleModuleInfo adjusts relative URL to plugin base
                            # recurse to scan the imported plug-in module
                            _importModuleInfo = moduleModuleInfo(_importURL, reload, _moduleImportsSubtree)
                            if _importModuleInfo:
                                _importModuleInfo["isImported"] = True
                                imports.append(_importModuleInfo)
                        moduleInfo["imports"] = imports
                        return moduleInfo
                    elif isinstance(item.value, ast.Str):  # possible constant used in plugininfo, such as VERSION
                        for assignmentName in item.targets:
                            constantStrings[assignmentName.id] = item.value.s
                elif isinstance(item, ast.ImportFrom):
                    if item.level == 1:  # starts with .
                        if item.module is None:  # from . import module1, module2, ...
                            for importee in item.names:
                                if importee.name == '*':  # import all sibling submodules
                                    # BUG FIX: original referenced undefined name
                                    # `moduleFile` (NameError), tested isfile()
                                    # relative to the CWD instead of moduleDir, and
                                    # appended names still carrying ".py" which the
                                    # merge loop above would suffix again; record
                                    # sibling modules by extension-less name.
                                    for _fileName in os.listdir(moduleDir):
                                        if (_fileName != moduleName
                                            and _fileName.endswith(".py")
                                            and os.path.isfile(os.path.join(moduleDir, _fileName))
                                            and _fileName[:-3] not in moduleImports):
                                            moduleImports.append(_fileName[:-3])
                                elif (os.path.isfile(os.path.join(moduleDir, importee.name + ".py"))
                                      and importee.name not in moduleImports):
                                    moduleImports.append(importee.name)
                        else:
                            # from .pkg[.subpkg] import name, ... -- record both the
                            # package module and any importee that is itself a module
                            modulePkgs = item.module.split('.')
                            modulePath = os.path.join(*modulePkgs)
                            if (os.path.isfile(os.path.join(moduleDir, modulePath) + ".py")
                                and modulePath not in moduleImports):
                                moduleImports.append(modulePath)
                            for importee in item.names:
                                _importeePfxName = os.path.join(modulePath, importee.name)
                                if (os.path.isfile(os.path.join(moduleDir, _importeePfxName) + ".py")
                                    and _importeePfxName not in moduleImports):
                                    moduleImports.append(_importeePfxName)
                elif isinstance(item, ast.FunctionDef):  # possible functionDef used in plugininfo
                    functionDefNames.add(item.name)
            if PLUGIN_TRACE_FILE:
                with open(PLUGIN_TRACE_FILE, "at", encoding='utf-8') as fh:
                    fh.write("Successful module plug-in info: " + moduleFilename + '\n')
        except Exception as err:
            # broad catch: any parse/IO failure is reported but must not abort
            # the caller's plug-in discovery loop
            _msg = _("Exception obtaining plug-in module info: {moduleFilename}\n{error}\n{traceback}").format(
                error=err,
                moduleFilename=moduleFilename,
                traceback=traceback.format_tb(sys.exc_info()[2]))
            if PLUGIN_TRACE_FILE:
                with open(PLUGIN_TRACE_FILE, "at", encoding='utf-8') as fh:
                    fh.write(_msg + '\n')
            else:
                print(_msg, file=sys.stderr)

        if f:
            f.close()
    return None
Beispiel #29
0
def loadUgtRelQnames(modelXbrl, dqcRules):
    """Load cached US-GAAP taxonomy (UGT) relationships keyed by QName.

    Reads the ``us-gaap-rels-<year>.json`` resource for the filing's us-gaap
    taxonomy year (building it first via buildUgtFullRelsFiles if absent) and
    converts its namespace/local-name string structure into QName-keyed
    structures resolved against this DTS.

    :param modelXbrl: the loaded model whose documents determine the us-gaap year
    :param dqcRules: DQC rules config; falsy means not a us-gaap filing
    :return: {} when not applicable, else a dict with keys
        "calcs"  -- {weight(float): {fromQname: set(toQname)}}
        "axes"   -- {axis concept name: set(member QName)}
        and, when present in the JSON, "DQC.US.0015" -- an attrdict of
        concept/member exclusion sets for rule DQC.US.0015.
    """
    if not dqcRules:
        return {}  # not a us-gaap filing
    # find the us-gaap namespace abbreviation (e.g. "us-gaap/2022") of this DTS
    abbrNs = ""
    for modelDocument in modelXbrl.urlDocs.values():
        abbrNs = abbreviatedNamespace(modelDocument.targetNamespace)
        if abbrNs and abbrNs.startswith("us-gaap/"):
            break
    if not abbrNs:  # no gaap/ifrs taxonomy for this filing
        return {}
    _ugtRelsFileName = resourcesFilePath(
        modelXbrl.modelManager,
        "us-gaap-rels-{}.json".format(abbrNs.rpartition("/")[2]))
    if not os.path.exists(_ugtRelsFileName):
        # cache file missing: build it, then re-check (build may fail silently)
        buildUgtFullRelsFiles(modelXbrl, dqcRules)
    if not os.path.exists(_ugtRelsFileName):
        return {}
    _file = openFileStream(modelXbrl.modelManager.cntlr,
                           _ugtRelsFileName,
                           'rt',
                           encoding='utf-8')
    ugtRels = json.load(_file)  # {localName: date, ...}
    _file.close()

    def qn(nsPrefix, localName):
        # resolve a prefixed name against this DTS's prefix/namespace map
        return qname(nsPrefix + ":" + localName, modelXbrl.prefixedNamespaces)

    # JSON "calcs" layout appears to be
    # {weight: {fromNs: {fromName: {toNs: [toName, ...]}}}} -- TODO confirm
    # against buildUgtFullRelsFiles; entries not resolvable in this DTS are dropped
    ugtCalcsByQnames = defaultdict(
        dict
    )  # store as concept indices to avoid using memory for repetitive strings
    for wgt, fromNSes in ugtRels["calcs"].items():
        calcWgtObj = ugtCalcsByQnames.setdefault(
            float(wgt), {})  # json weight object needs to be float
        for fromNs, fromObjs in fromNSes.items():
            for fromName, toNSes in fromObjs.items():
                fromConcept = modelXbrl.qnameConcepts.get(qn(fromNs, fromName))
                if fromConcept is not None:
                    calcFromObj = calcWgtObj.setdefault(
                        fromConcept.qname, set())
                    for toNs, toNames in toNSes.items():
                        for toName in toNames:
                            toConcept = modelXbrl.qnameConcepts.get(
                                qn(toNs, toName))
                            if toConcept is not None:
                                calcFromObj.add(toConcept.qname)
    # JSON "axes" layout: {axisLocalName: [memberLocalName, ...]}; note the
    # outer key here is the concept's local NAME, members are QNames
    ugtAxesByQnames = defaultdict(
        set
    )  # store as concept indices to avoid using memory for repetitive strings
    for axisName, memNames in ugtRels["axes"].items():
        for axisConcept in modelXbrl.nameConcepts.get(axisName, ()):
            axisObj = ugtAxesByQnames[axisConcept.name]
            for memName in memNames:
                for memConcept in modelXbrl.nameConcepts.get(memName, ()):
                    axisObj.add(memConcept.qname)
    ugt = {"calcs": ugtCalcsByQnames, "axes": ugtAxesByQnames}
    # dqc0015
    if "DQC.US.0015" in ugtRels:
        dqc0015 = ugtRels["DQC.US.0015"]
        concepts = set()
        excludedMembers = set()
        excludedMemberStrings = set()
        excludedAxesMembers = defaultdict(set)
        conceptRuleIDs = {}
        # three {ns: [localName, ...]} sections; the first two resolve to
        # QNames, the string section stays as raw substrings for regex matching
        for exName, exSet, isQName in (("conceptNames", concepts, True),
                                       ("excludedMemberNames", excludedMembers,
                                        True), ("excludedMemberStrings",
                                                excludedMemberStrings, False)):
            for ns, names in dqc0015[exName].items():
                for localName in names:
                    exSet.add(qn(ns, localName) if isQName else localName)
        # "*" at either level is a wildcard (all members / any member namespace)
        for localDimNs, localDimMems in dqc0015["excludedAxesMembers"].items():
            for localDimName, localMemObjs in localDimMems.items():
                for localMemNs, localMemNames in localMemObjs.items():
                    if localMemNs == "*":
                        excludedAxesMembers[qn(localDimNs,
                                               localDimName)].add("*")
                    else:
                        for localMemName in localMemNames:
                            excludedAxesMembers[qn(
                                localDimNs, localDimName)].add(
                                    qn(localMemNs, localMemName
                                       ) if localMemName != "*" else "*")
        #if abbrNs < "us-gaap/2021": # no rel ids in us-gaap/2020
        #    _ugtRelsFileName = resourcesFilePath(modelXbrl.modelManager, "us-gaap-rels-2021.json")
        #    _file = openFileStream(modelXbrl.modelManager.cntlr, _ugtRelsFileName, 'rt', encoding='utf-8')
        #    ugtRels = json.load(_file) # {localName: date, ...}
        #    _file.close()
        for conceptNs, conceptNameIDs in ugtRels["DQC.US.0015"][
                "conceptRuleIDs"].items():
            for conceptName, conceptID in conceptNameIDs.items():
                conceptRuleIDs[qn(conceptNs, conceptName)] = conceptID
        # bundle into an attribute-access object; the member-strings pattern is
        # a single case-insensitive alternation, or None when no strings given
        ugt["DQC.US.0015"] = attrdict(
            concepts=concepts,
            excludedAxesMembers=excludedAxesMembers,
            excludedMembers=excludedMembers,
            excludedMemberNamesPattern=re.compile(
                "|".join(excludedMemberStrings), re.IGNORECASE)
            if excludedMemberStrings else None,
            conceptRuleIDs=conceptRuleIDs)
    return ugt