def test_file_reader_writer(self, io_data, ifp_key, ofp_key, enforceAscii):
    """Round-trip test: read the input fixture file and write it back out.

    Character references are enabled on read exactly when ASCII output is enforced.
    """
    adapter = IoAdapter(raiseExceptions=True, useCharRefs=enforceAscii)
    blockList = adapter.readFile(str(io_data[ifp_key]))
    print("Read %d data blocks" % len(blockList))
    status = adapter.writeFile(str(io_data[ofp_key]), containerList=blockList, enforceAscii=enforceAscii)
    assert status
def testSerialize(self):
    """Round-trip a text CIF through the BinaryCIF writer and reader.

    For both string-storage modes, each input container is rebuilt with
    typed category objects, serialized to BinaryCIF, deserialized again,
    translated back to text CIF, and compared against the typed source.
    """
    try:
        for storeStringsAsBytes in [True, False]:
            tcL = []
            ioPy = IoAdapter()
            containerList = ioPy.readFile(self.__pathTextCif)
            # Rebuild each container with typed categories (types supplied by the dictionary API)
            for container in containerList:
                cName = container.getName()
                tc = DataContainer(cName)
                for catName in container.getObjNameList():
                    dObj = container.getObj(catName)
                    tObj = DataCategoryTyped(dObj, dictionaryApi=self.__dApi, copyInputData=True)
                    tc.append(tObj)
                tcL.append(tc)
            #
            bcw = BinaryCifWriter(self.__dApi, storeStringsAsBytes=storeStringsAsBytes, applyTypes=False, useFloat64=True)
            bcw.serialize(self.__testBcifOutput, tcL)
            # NOTE(review): the next two assertions compare each object with itself
            # and can never fail -- presumably meant to compare source vs. typed
            # (or round-tripped) containers; confirm the intended operands.
            self.assertEqual(containerList[0], containerList[0])
            self.assertEqual(tcL[0], tcL[0])
            bcr = BinaryCifReader(storeStringsAsBytes=storeStringsAsBytes)
            cL = bcr.deserialize(self.__testBcifOutput)
            #
            ioPy = IoAdapter()
            ok = ioPy.writeFile(self.__testBcifTranslated, cL)
            self.assertTrue(ok)
            # Deep comparison of the first typed container against its round-tripped copy
            self.assertTrue(self.__same(tcL[0], cL[0]))
    except Exception as e:
        logger.exception("Failing with %s", str(e))
        self.fail()
def testReadWriteStarFile(self):
    """Test case - star file read and write ...

    Reads each STAR file, indexes save frames under their parent data block,
    spot-checks the chemical-shift reference category in the second data
    block, and writes the containers back out with stop tokens.

    Fix: the category row list was fetched twice; the redundant second
    getRowList() call is removed.
    """
    try:
        for fp in self.__pathStarFileList:
            myIo = IoAdapter(self.__verbose, self.__lfh)
            containerList = myIo.readFile(inputFilePath=fp)
            #
            # containerList is a flat list of containers in the order parsed.
            #
            # Create an index from the linear list data_ save_ sections and names --
            #
            # There can multiple data blocks where each data section is followed
            # by save frames -- Names can be repeated and the application must
            # create an appropriate index of the data and save sections according
            # it own requirements.
            #
            iD = {}
            iDN = {}
            dL = []
            for container in containerList:
                if container.getType() == "data":
                    dL.append(container)
                    if container.getName() not in iD:
                        curContainerName = container.getName()
                        iD[curContainerName] = []
                        iDN[curContainerName] = []
                    else:
                        logger.debug("Duplicate data block %s", container.getName())
                else:
                    # NOTE(review): assumes a data block precedes any save frame;
                    # curContainerName is unbound otherwise.
                    iD[curContainerName].append(container)
                    iDN[curContainerName].append(container.getName())
            #
            # get the reference data out of the 2nd data block --
            #
            if len(dL) > 1:
                c1 = dL[1]
                if "chemical_shift_reference_1" in iDN[c1.getName()]:
                    idx = iDN[c1.getName()].index("chemical_shift_reference_1")
                    sf0 = iD[c1.getName()][idx]
                    catObj = sf0.getObj("Chem_shift_ref")
                    aL = catObj.getAttributeList()
                    rowL = catObj.getRowList()
                    logger.debug("Attribute list %s", aL)
                    for ii, row in enumerate(rowL):
                        logger.debug(" %4d %r", ii, row)
            _, fnOut = os.path.split(fp)
            ofn = os.path.join(HERE, "test-output", fnOut + ".out")
            ok = myIo.writeFile(outputFilePath=ofn, containerList=containerList, useStopTokens=True)
            self.assertEqual(ok, True)
    except Exception as e:
        logger.exception("Failing with %s", str(e))
        self.fail()
def main():
    """CLI entry point: build a dictionary from include directives or report its version.

    Operations (--op):
        build       -- resolve include directives and write the full dictionary text.
        get_version -- print the version value from the dictionary's 'dictionary' category.

    Fix: replaced the interactive-only ``exit(1)`` builtin with ``sys.exit(1)``.
    """
    parser = argparse.ArgumentParser()
    #
    #
    parser.add_argument("--op", default=None, required=True, help="Operation (build | get_version)")
    parser.add_argument("--input_dict_path", required=True, default=None, help="Path to dictionary generator file")
    parser.add_argument("--output_dict_path", default=None, help="Path to output dictionary text file")
    parser.add_argument("--cleanup", default=False, action="store_true", help="Remove include instruction categories after processing")
    args = parser.parse_args()
    #
    try:
        op = args.op.lower() if args.op else None
        inputDictPath = args.input_dict_path
        outputDictPath = args.output_dict_path
        cleanup = args.cleanup
    except Exception as e:
        logger.exception("Argument processing problem %s", str(e))
        parser.print_help(sys.stderr)
        # sys.exit() is the supported way to terminate scripts; the exit()
        # builtin is provided by the site module for interactive use only.
        sys.exit(1)
    ##
    if op == "build" and inputDictPath and outputDictPath:
        dirPath = os.path.abspath(os.path.dirname(inputDictPath))
        logger.info("Starting dictionary path %s", dirPath)
        myIo = IoAdapter(raiseExceptions=True)
        containerList = myIo.readFile(inputFilePath=inputDictPath)
        logger.info("Starting dictionary container list length (%d)", len(containerList))
        dIncl = DictionaryInclude(dirPath=dirPath)
        inclL = dIncl.processIncludedContent(containerList, cleanup=cleanup)
        logger.info("Processed dictionary container length (%d)", len(inclL))
        ok = myIo.writeFile(outputFilePath=outputDictPath, containerList=inclL)
        logger.info("Operation completed with status %r", ok)
    elif op == "get_version" and inputDictPath:
        # Quiet logging so the version value is the only stdout output
        logger.setLevel(logging.ERROR)
        myIo = IoAdapter(raiseExceptions=True)
        containerList = myIo.readFile(inputFilePath=inputDictPath)
        dIncl = DictionaryInclude()
        inclL = dIncl.processIncludedContent(containerList)
        baseContainer = inclL[0]
        if baseContainer.exists("dictionary"):
            cObj = baseContainer.getObj("dictionary")
            version = cObj.getValueOrDefault("version", 0, None)
            print(version)
def testDeserializeIhm(self):
    """Deserialize a gzipped I/HM BinaryCIF file and translate it to text CIF."""
    try:
        reader = BinaryCifReader(storeStringsAsBytes=True)
        containers = reader.deserialize(self.__pathIhmBcifGzip)
        writer = IoAdapter()
        self.assertTrue(writer.writeFile(self.__pathIhmBcifTranslated, containers))
    except Exception as e:
        logger.exception("Failing with %s", str(e))
        self.fail()
def testDeserializeRemoteMolStar(self):
    """Deserialize a remote RCSB BinaryCIF resource and translate it to text CIF."""
    try:
        reader = BinaryCifReader(storeStringsAsBytes=False)
        containers = reader.deserialize(self.__locatorRcsbBcifGzip)
        writer = IoAdapter()
        self.assertTrue(writer.writeFile(self.__pathRcsbBcifTranslated, containers))
    except Exception as e:
        logger.exception("Failing with %s", str(e))
        self.fail()
def test_file_reader_writer_select(self, io_data, ifp_key, ofp_key, selectList, excludeFlag):
    """Round-trip test applying a container selection (or exclusion) filter on read."""
    adapter = IoAdapter(raiseExceptions=False, useCharRefs=True)
    blockList = adapter.readFile(
        str(io_data[ifp_key]),
        enforceAscii=True,
        selectList=selectList,
        excludeFlag=excludeFlag,
        outDirPath=str(io_data['pathOutputDir']),
    )
    print("Read %d data blocks" % len(blockList))
    assert adapter.writeFile(str(io_data[ofp_key]), containerList=blockList, enforceAscii=True)
def testReadWriteDictionary(self):
    """Test case - read and dump logical structure of dictionary"""
    try:
        adapter = IoAdapterPy(self.__verbose, self.__lfh)
        containers = adapter.readFile(inputFilePath=self.__pathPdbxDictionary)
        outPath = os.path.join(HERE, "test-output", "test-dict-out.dic")
        self.assertTrue(adapter.writeFile(outputFilePath=outPath, containerList=containers))
    except Exception as e:
        logger.exception("Failing with %s", str(e))
        self.fail()
def test_read_star_file(self, in_tmpdir, star_files_list):
    """Read each STAR file, retype every container as 'data', and write a CIF copy."""
    for fp in star_files_list:
        adapter = IoAdapter()
        containers = adapter.readFile(inputFilePath=str(fp))
        print("container list is %r\n" % ([(c.getName(), c.getType()) for c in containers]))
        for container in containers:
            container.setType('data')
        outName = Path(fp.stem + '.cif')
        # The first container is skipped on write ([1:])
        assert adapter.writeFile(outputFilePath=str(outName), containerList=containers[1:])
def __testFileReaderWriter(self, ifp, ofp, **kwargs):
    """Test case - read and then write PDBx file or dictionary

    :param ifp: input file path
    :param ofp: output file path
    :param kwargs: forwarded to writeFile(); "enforceAscii" (default True)
                   also controls whether character references are used on read.
    """
    try:
        enforceAscii = kwargs.get("enforceAscii", True)
        # bool(x) replaces the redundant "True if x else False" idiom
        useCharRefs = bool(enforceAscii)
        io = IoAdapter(raiseExceptions=True, useCharRefs=useCharRefs)
        containerList = io.readFile(ifp)
        logger.debug("Read %d data blocks", len(containerList))
        ok = io.writeFile(ofp, containerList=containerList, **kwargs)
        self.assertTrue(ok)
    except Exception as e:
        logger.exception("Failing with %s", str(e))
        self.fail()
def test_read_write_star_file(self, in_tmpdir, star_files_list):
    """Read each STAR file, index save frames under their data block, and write back.

    Fix: the category row list was fetched twice; the redundant second
    getRowList() call is removed.
    """
    for fp in star_files_list:
        myIo = IoAdapter()
        containerList = myIo.readFile(inputFilePath=str(fp))
        #
        # containerList is a flat list of containers in the order parsed.
        #
        # Create an index from the linear list data_ save_ sections and names --
        #
        # There can multiple data blocks where each data section is followed
        # by save frames -- Names can be repeated and the application must
        # create an appropriate index of the data and save sections according
        # it own requirements.
        #
        iD = {}
        iDN = {}
        dL = []
        for container in containerList:
            if container.getType() == "data":
                dL.append(container)
                if container.getName() not in iD:
                    curContainerName = container.getName()
                    iD[curContainerName] = []
                    iDN[curContainerName] = []
                else:
                    print("Duplicate data block %s\n" % container.getName())
            else:
                # NOTE(review): assumes a data block precedes any save frame;
                # curContainerName is unbound otherwise.
                iD[curContainerName].append(container)
                iDN[curContainerName].append(container.getName())
        #
        # get the reference data out of the 2nd data block --
        #
        if len(dL) > 1:
            c1 = dL[1]
            if 'chemical_shift_reference_1' in iDN[c1.getName()]:
                idx = iDN[c1.getName()].index('chemical_shift_reference_1')
                sf0 = iD[c1.getName()][idx]
                catObj = sf0.getObj('Chem_shift_ref')
                aL = catObj.getAttributeList()
                rowL = catObj.getRowList()
                print("Attribute list %s\n" % aL)
                for ii, row in enumerate(rowL):
                    print(" %4d %r\n" % (ii, row))
        ofn = Path(fp.stem + '.out')
        ok = myIo.writeFile(outputFilePath=str(ofn), containerList=containerList, useStopTokens=True)
        assert ok
def test_invoke_dictionary_methods(self, test_paths):
    """Load dictionary-defined methods, invoke them on a data file, and write the result."""
    adapter = IoAdapter()
    dictContainers = adapter.readFile(inputFilePath=str(test_paths['pathPdbxDictFile']))
    methodUtils = MethodUtils(dictContainerList=dictContainers)
    dataContainers = adapter.readFile(inputFilePath=str(test_paths['pathPdbxDataFile']))
    methodUtils.setDataContainerList(dataContainerList=dataContainers)
    methodUtils.invokeMethods()
    print("Write data file after invoking methods")
    updatedContainers = methodUtils.getDataContainerList()
    assert adapter.writeFile(outputFilePath=str(test_paths['pathOutFile']), containerList=updatedContainers)
def __serializeMmCifDict(self, filePath, containerList, **kwargs):
    """Write the container list to filePath as mmCIF text.

    Recognized kwargs: enforceAscii, raiseExceptions, useCharRefs (all default True).
    Returns True on success, False on failure (failures are logged, not raised).
    """
    status = False
    try:
        enforceAscii = kwargs.get("enforceAscii", True)
        raiseExceptions = kwargs.get("raiseExceptions", True)
        useCharRefs = kwargs.get("useCharRefs", True)
        writer = IoAdapterPy(raiseExceptions=raiseExceptions, useCharRefs=useCharRefs)
        status = writer.writeFile(filePath, containerList=containerList, enforceAscii=enforceAscii)
    except Exception as e:
        logger.error("Failing for %s with %s", filePath, str(e))
    return status
def testReadStarFile(self):
    """Test case - read star file - """
    try:
        for fp in self.__pathStarFileList:
            adapter = IoAdapter(self.__verbose, self.__lfh)
            containers = adapter.readFile(inputFilePath=fp)
            logger.debug("container list is %r", [(c.getName(), c.getType()) for c in containers])
            for c in containers:
                c.setType("data")
            _, baseName = os.path.split(fp)
            outPath = os.path.join(HERE, "test-output", baseName + ".cif")
            # The first container is skipped on write ([1:])
            self.assertEqual(adapter.writeFile(outputFilePath=outPath, containerList=containers[1:]), True)
    except Exception as e:
        logger.exception("Failing with %s", str(e))
        self.fail()
def testInvokeDictionaryMethods(self):
    """Test case - invoke dictionary methods -"""
    try:
        adapter = IoAdapter(self.__verbose, self.__lfh)
        dictContainers = adapter.readFile(inputFilePath=self.__pathPdbxDictFile)
        dataContainers = adapter.readFile(inputFilePath=self.__pathPdbxDataFile)
        #
        methodUtils = MethodUtils(dictContainerList=dictContainers, verbose=self.__verbose)
        methodUtils.setDataContainerList(dataContainerList=dataContainers)
        methodUtils.invokeMethods()
        logger.debug("Write data file after invoking methods")
        updatedContainers = methodUtils.getDataContainerList()
        ok = adapter.writeFile(outputFilePath=self.__pathOutFile, containerList=updatedContainers)
        #
        self.assertEqual(ok, True)
    except Exception as e:
        logger.exception("Failing with %s", str(e))
        self.fail()
def testEmbeddedInclude(self):
    """Test case - embedded include"""
    try:
        adapter = IoAdapter(raiseExceptions=True)
        containers = adapter.readFile(inputFilePath=self.__pathEmbeddedIncludeDictionary)
        logger.info("Starting container list length (%d)", len(containers))
        includeProcessor = DictionaryInclude(dirPath=os.path.dirname(self.__pathEmbeddedIncludeDictionary))
        resolved = includeProcessor.processIncludedContent(containers)
        logger.info("Processed included container length (%d)", len(resolved))
        self.assertEqual(len(resolved), 4)
        outPath = os.path.join(HERE, "test-output", "test_ext_generated.dic")
        self.assertTrue(adapter.writeFile(outputFilePath=outPath, containerList=resolved))
    except Exception as e:
        logger.exception("Failing with %s", str(e))
        self.fail()
def __testFileReaderWriterSelect(self, ifp, ofp, selectList=None, excludeFlag=False):
    """Test case - read and then write PDBx file with selection."""
    try:
        adapter = IoAdapter(raiseExceptions=False, useCharRefs=True)
        containers = adapter.readFile(
            ifp,
            enforceAscii=True,
            selectList=selectList,
            excludeFlag=excludeFlag,
            outDirPath=self.__pathOutputDir,
        )
        logger.debug("Read %d data blocks", len(containers))
        self.assertTrue(adapter.writeFile(ofp, containerList=containers, enforceAscii=True))
    except Exception as e:
        logger.exception("Failing input %s and output %s with %s", ifp, ofp, str(e))
        self.fail()
def testDDLInclude(self):
    """Test case - DDL composition/include tests"""
    try:
        adapter = IoAdapter(raiseExceptions=True)
        containers = adapter.readFile(inputFilePath=self.__pathDdlIncludeDictionary)
        logger.info("Starting container list length (%d)", len(containers))
        includeProcessor = DictionaryInclude()
        resolved = includeProcessor.processIncludedContent(containers)
        logger.info("Processed included container length (%d)", len(resolved))
        outPath = os.path.join(HERE, "test-output", "mmcif_ddl_generated.dic")
        self.assertTrue(adapter.writeFile(outputFilePath=outPath, containerList=resolved))
        #
        adapter = IoAdapter(raiseExceptions=True)
        crefL = adapter.readFile(inputFilePath=self.__pathDdlDictionary)
        logger.info("Reference object count (%d)", len(crefL))
        self.assertGreaterEqual(len(crefL), 257)
        # Report names present in the reference but absent from the included result
        includedNames = {incl.getName() for incl in resolved}
        for cref in crefL:
            if cref.getName() not in includedNames:
                logger.debug("In reference missing in included file %r", cref.getName())
        # Report names present in the included result but absent from the reference
        referenceNames = {cref.getName() for cref in crefL}
        for incl in resolved:
            if incl.getName() not in referenceNames:
                logger.debug("Included but missing in reference %r", incl.getName())
        #
        self.assertGreaterEqual(len(resolved), 258)
    except Exception as e:
        logger.exception("Failing with %s", str(e))
        self.fail()
def testGenDDLm(self):
    """Generating alternative DDLm metadata format. (starting point)

    Walks every category and attribute in the dictionary API and emits a
    DDLm-flavored definition container for each, then writes the collected
    containers to test-output/mmcif_pdbx_ddlm_auto.dic.
    """
    try:
        myIo = IoAdapterPy(self.__verbose, self.__lfh)
        self.__containerList = myIo.readFile(inputFilePath=self.__pathPdbxDictionary)
        dApi = DictionaryApi(containerList=self.__containerList, consolidate=True, verbose=self.__verbose)
        parentD = dApi.getParentDictionary()
        #
        oCList = []
        # Leading "dictionary" header container for the generated dictionary
        dDef = DataContainer("mmcif_pdbx_ddlm_auto")
        dc = DataCategory("dictionary")
        dc.appendAttribute("title")
        dc.appendAttribute("class")
        dc.appendAttribute("version")
        dc.appendAttribute("date")
        dc.appendAttribute("ddl_conformance")
        dc.appendAttribute("text")
        dc.append(["mmcif_pdbx_ddlm_auto", "Instance", "latest", "2018-03-09", "ddlm best effort", "Software converted PDBx dictionary using DDLm semantics"])
        dDef.append(dc)
        oCList.append(dDef)
        catIdx = dApi.getCategoryIndex()
        for catName in sorted(catIdx.keys()):
            attNameList = catIdx[catName]
            # created definition container -
            cDef = DefinitionContainer(catName)
            oCList.append(cDef)
            #
            dc = DataCategory("definition")
            dc.appendAttribute("id")
            dc.appendAttribute("scope")
            dc.appendAttribute("class")
            dc.appendAttribute("update")
            dc.append([catName, "Category", "Loop", "2018-03-09"])
            cDef.append(dc)
            val = dApi.getCategoryDescription(category=catName)
            dc = DataCategory("description")
            dc.appendAttribute("text")
            dc.append([val])
            cDef.append(dc)
            #
            dc = DataCategory("name")
            dc.appendAttribute("category_id")
            dc.appendAttribute("object_id")
            valList = dApi.getCategoryGroupList(category=catName)
            # First group name other than "inclusive_group" is taken as the parent group
            pcg = catName
            for val in valList:
                if val != "inclusive_group":
                    pcg = val
                    break
            dc.append([catName, pcg])
            cDef.append(dc)
            valList = dApi.getCategoryKeyList(category=catName)
            if not valList:
                # NOTE(review): "caegory" typo retained in the diagnostic message
                self.__lfh.write("Missing caegory key for category %s\n" % catName)
            else:
                # Emit a synthetic key item definition for the category key
                dc = DataCategory("category")
                dc.appendAttribute("key_id")
                kItemName = CifName.itemName(catName, "synthetic_key")
                dc.append([kItemName])
                cDef.append(dc)
                iDef = DefinitionContainer(kItemName)
                self.__makeKeyItem(catName, "synthetic_key", valList, iDef)
                oCList.append(iDef)
            for attName in attNameList:
                itemName = CifName.itemName(catName, attName)
                iDef = DefinitionContainer(itemName)
                oCList.append(iDef)
                #
                dc = DataCategory("definition")
                dc.appendAttribute("id")
                dc.appendAttribute("scope")
                dc.appendAttribute("class")
                dc.appendAttribute("update")
                dc.append([itemName, "Item", "Single", "2013-08-22"])
                iDef.append(dc)
                #
                val = dApi.getDescription(category=catName, attribute=attName)
                dc = DataCategory("description")
                dc.appendAttribute("text")
                dc.append([val])
                iDef.append(dc)
                #
                dc = DataCategory("name")
                dc.appendAttribute("category_id")
                dc.appendAttribute("object_id")
                #
                # Record the parent (linked) item when one is defined
                if itemName in parentD:
                    dc.appendAttribute("linked_item_id")
                    dc.append([catName, attName, parentD[itemName][0]])
                else:
                    dc.append([catName, attName])
                iDef.append(dc)
                #
                #
                aliasList = dApi.getItemAliasList(category=catName, attribute=attName)
                if aliasList:
                    dc = DataCategory("alias")
                    dc.appendAttribute("definition_id")
                    for alias in aliasList:
                        dc.append([alias[0]])
                    iDef.append(dc)
                enList = dApi.getEnumListAltWithDetail(category=catName, attribute=attName)
                tC = dApi.getTypeCode(category=catName, attribute=attName)
                # Map the DDL2 type code onto DDLm type attributes (defaults below)
                tcontainer = "Single"
                purpose = "Describe"
                source = "Recorded"
                contents = "Text"
                #
                if tC is None:
                    self.__lfh.write("Missing data type attribute %s\n" % attName)
                elif tC in ["code", "atcode", "name", "idname", "symop", "fax", "phone", "email", "code30", "ec-type"]:
                    purpose = "Encode"
                    contents = "Text"
                    source = "Assigned"
                elif tC in ["ucode"]:
                    purpose = "Encode"
                    contents = "Code"
                    source = "Assigned"
                elif tC in ["line", "uline", "text"]:
                    purpose = "Describe"
                    source = "Recorded"
                    contents = "Text"
                elif tC in ["int"]:
                    purpose = "Number"
                    source = "Recorded"
                    contents = "Integer"
                elif tC in ["int-range"]:
                    purpose = "Number"
                    source = "Recorded"
                    contents = "Range"
                elif tC in ["float"]:
                    purpose = "Measurand"
                    source = "Recorded"
                    contents = "Real"
                elif tC in ["float-range"]:
                    purpose = "Measurand"
                    source = "Recorded"
                    contents = "Range"
                elif tC.startswith("yyyy"):
                    source = "Assigned"
                    contents = "Date"
                    purpose = "Describe"
                # Enumerated items are always "State" purpose regardless of type code
                if enList:
                    purpose = "State"
                dc = DataCategory("type")
                dc.appendAttribute("purpose")
                dc.appendAttribute("source")
                dc.appendAttribute("contents")
                dc.appendAttribute("container")
                dc.append([purpose, source, contents, tcontainer])
                iDef.append(dc)
                #
                if enList:
                    dc = DataCategory("enumeration_set")
                    dc.appendAttribute("state")
                    dc.appendAttribute("detail")
                    for en in enList:
                        dc.append([en[0], en[1]])
                    iDef.append(dc)
                dfv = dApi.getDefaultValue(category=catName, attribute=attName)
                bvList = dApi.getBoundaryList(category=catName, attribute=attName)
                if ((dfv is not None) and (dfv not in ["?", "."])) or bvList:
                    row = []
                    dc = DataCategory("enumeration")
                    if dfv is not None:
                        dc.appendAttribute("default")
                        row.append(dfv)
                    if bvList:
                        # Collapse all boundary pairs to one overall min:max range;
                        # "." (unbounded) ends are rendered as empty strings.
                        dc.appendAttribute("range")
                        mminVp = -1000000
                        mmaxVp = 10000000
                        mminV = mmaxVp
                        mmaxV = mminVp
                        for bv in bvList:
                            minV = float(bv[0]) if bv[0] != "." else mminVp
                            maxV = float(bv[1]) if bv[1] != "." else mmaxVp
                            mminV = min(mminV, minV)
                            mmaxV = max(mmaxV, maxV)
                        if mminV == mminVp:
                            mminV = ""
                        if mmaxV == mmaxVp:
                            mmaxV = ""
                        row.append(str(mminV) + ":" + str(mmaxV))
                    dc.append(row)
                    iDef.append(dc)
        myIo.writeFile(outputFilePath=os.path.join(HERE, "test-output", "mmcif_pdbx_ddlm_auto.dic"), containerList=oCList)
    except Exception as e:
        logger.exception("Failing with %s", str(e))
        self.fail()
def test_gen_ddlm(self, in_tmpdir, test_files):
    """Generate an alternative DDLm-style rendering of the PDBx dictionary.

    Walks every category and attribute in the dictionary API, emits DDLm-flavored
    definition containers, and writes them to mmcif_pdbx_ddlm_auto.dic.

    Fix: the "missing data type" diagnostic used ``self.__lfh`` -- a unittest-era
    stream attribute not set up by this pytest-style test (and subject to name
    mangling), so that branch would raise AttributeError. It now prints, matching
    the sibling "Missing caegory key" diagnostic.
    """
    myIo = IoAdapterPy()
    containerList = myIo.readFile(inputFilePath=str(test_files / 'mmcif_pdbx_v5_next.dic'))
    dApi = DictionaryApi(containerList=containerList, consolidate=True)
    parentD = dApi.getParentDictionary()
    #
    oCList = []
    # Leading "dictionary" header container for the generated dictionary
    dDef = DataContainer('mmcif_pdbx_ddlm_auto')
    dc = DataCategory("dictionary")
    dc.appendAttribute("title")
    dc.appendAttribute("class")
    dc.appendAttribute("version")
    dc.appendAttribute("date")
    dc.appendAttribute("ddl_conformance")
    dc.appendAttribute("text")
    dc.append(['mmcif_pdbx_ddlm_auto', 'Instance', 'latest', '2018-03-09', 'ddlm best effort', 'Software converted PDBx dictionary using DDLm semantics'])
    dDef.append(dc)
    oCList.append(dDef)
    catIdx = dApi.getCategoryIndex()
    for catName in sorted(catIdx.keys()):
        attNameList = catIdx[catName]
        # created definition container -
        cDef = DefinitionContainer(catName)
        oCList.append(cDef)
        #
        dc = DataCategory("definition")
        dc.appendAttribute("id")
        dc.appendAttribute("scope")
        dc.appendAttribute("class")
        dc.appendAttribute("update")
        dc.append([catName, "Category", "Loop", "2018-03-09"])
        cDef.append(dc)
        val = dApi.getCategoryDescription(category=catName)
        dc = DataCategory("description")
        dc.appendAttribute("text")
        dc.append([val])
        cDef.append(dc)
        #
        dc = DataCategory("name")
        dc.appendAttribute("category_id")
        dc.appendAttribute("object_id")
        valList = dApi.getCategoryGroupList(category=catName)
        # First group name other than 'inclusive_group' is taken as the parent group
        pcg = catName
        for val in valList:
            if val != 'inclusive_group':
                pcg = val
                break
        dc.append([catName, pcg])
        cDef.append(dc)
        valList = dApi.getCategoryKeyList(category=catName)
        if not valList:
            print("Missing caegory key for category %s\n" % catName)
        else:
            # Emit a synthetic key item definition for the category key
            dc = DataCategory("category")
            dc.appendAttribute("key_id")
            kItemName = CifName.itemName(catName, "synthetic_key")
            dc.append([kItemName])
            cDef.append(dc)
            iDef = DefinitionContainer(kItemName)
            self._makeKeyItem(catName, "synthetic_key", valList, iDef)
            oCList.append(iDef)
        for attName in attNameList:
            itemName = CifName.itemName(catName, attName)
            iDef = DefinitionContainer(itemName)
            oCList.append(iDef)
            #
            dc = DataCategory("definition")
            dc.appendAttribute("id")
            dc.appendAttribute("scope")
            dc.appendAttribute("class")
            dc.appendAttribute("update")
            dc.append([itemName, "Item", "Single", "2013-08-22"])
            iDef.append(dc)
            #
            val = dApi.getDescription(category=catName, attribute=attName)
            dc = DataCategory("description")
            dc.appendAttribute("text")
            dc.append([val])
            iDef.append(dc)
            #
            dc = DataCategory("name")
            dc.appendAttribute("category_id")
            dc.appendAttribute("object_id")
            #
            # Record the parent (linked) item when one is defined
            if itemName in parentD:
                dc.appendAttribute("linked_item_id")
                dc.append([catName, attName, parentD[itemName][0]])
            else:
                dc.append([catName, attName])
            iDef.append(dc)
            #
            #
            aliasList = dApi.getItemAliasList(category=catName, attribute=attName)
            if aliasList:
                dc = DataCategory("alias")
                dc.appendAttribute("definition_id")
                for alias in aliasList:
                    dc.append([alias[0]])
                iDef.append(dc)
            enList = dApi.getEnumListAltWithDetail(category=catName, attribute=attName)
            tC = dApi.getTypeCode(category=catName, attribute=attName)
            # Map the DDL2 type code onto DDLm type attributes (defaults below)
            tcontainer = 'Single'
            purpose = 'Describe'
            source = 'Recorded'
            contents = 'Text'
            #
            if tC is None:
                # Fixed: was self.__lfh.write(...) -- no such attribute in this pytest class
                print("Missing data type attribute %s\n" % attName)
            elif tC in ['code', 'atcode', 'name', 'idname', 'symop', 'fax', 'phone', 'email', 'code30', 'ec-type']:
                purpose = 'Encode'
                contents = 'Text'
                source = 'Assigned'
            elif tC in ['ucode']:
                purpose = 'Encode'
                contents = 'Code'
                source = 'Assigned'
            elif tC in ['line', 'uline', 'text']:
                purpose = 'Describe'
                source = 'Recorded'
                contents = 'Text'
            elif tC in ['int']:
                purpose = 'Number'
                source = 'Recorded'
                contents = 'Integer'
            elif tC in ['int-range']:
                purpose = 'Number'
                source = 'Recorded'
                contents = 'Range'
            elif tC in ['float']:
                purpose = 'Measurand'
                source = 'Recorded'
                contents = 'Real'
            elif tC in ['float-range']:
                purpose = 'Measurand'
                source = 'Recorded'
                contents = 'Range'
            elif tC.startswith('yyyy'):
                source = 'Assigned'
                contents = 'Date'
                purpose = 'Describe'
            # Enumerated items are always 'State' purpose regardless of type code
            if enList:
                purpose = 'State'
            dc = DataCategory("type")
            dc.appendAttribute("purpose")
            dc.appendAttribute("source")
            dc.appendAttribute("contents")
            dc.appendAttribute("container")
            dc.append([purpose, source, contents, tcontainer])
            iDef.append(dc)
            #
            if enList:
                dc = DataCategory("enumeration_set")
                dc.appendAttribute("state")
                dc.appendAttribute("detail")
                for en in enList:
                    dc.append([en[0], en[1]])
                iDef.append(dc)
            dfv = dApi.getDefaultValue(category=catName, attribute=attName)
            bvList = dApi.getBoundaryList(category=catName, attribute=attName)
            if ((dfv is not None) and (dfv not in ['?', '.'])) or bvList:
                row = []
                dc = DataCategory("enumeration")
                if dfv is not None:
                    dc.appendAttribute("default")
                    row.append(dfv)
                if bvList:
                    # Collapse all boundary pairs to one overall min:max range;
                    # '.' (unbounded) ends are rendered as empty strings.
                    dc.appendAttribute("range")
                    mminVp = -1000000
                    mmaxVp = 10000000
                    mminV = mmaxVp
                    mmaxV = mminVp
                    for bv in bvList:
                        minV = float(bv[0]) if bv[0] != '.' else mminVp
                        maxV = float(bv[1]) if bv[1] != '.' else mmaxVp
                        mminV = min(mminV, minV)
                        mmaxV = max(mmaxV, maxV)
                    if mminV == mminVp:
                        mminV = ''
                    if mmaxV == mmaxVp:
                        mmaxV = ''
                    row.append(str(mminV) + ":" + str(mmaxV))
                dc.append(row)
                iDef.append(dc)
    myIo.writeFile(outputFilePath="mmcif_pdbx_ddlm_auto.dic", containerList=oCList)