Example #1
0
def convertJson(inputJsonFile, date, MRepositDir, patchRepositDir,
                generateHTML, generatePDF, outDir=None, pdfOutDir=None,
                local=False):
    if not generateHTML and not generatePDF:
        raise Exception("Nothing to generate!")

    global DOX_URL
    global VIVIAN_URL
    DOX_URL = getDOXURL(local)
    VIVIAN_URL = getViViaNURL(local)

    if generateHTML:
        if not outDir:
            raise Exception("Must specify Output directory")
        if not os.path.exists(outDir):
            # Will also create intermediate directories if needed
            os.makedirs(outDir)

    if generatePDF:
        if not pdfOutDir:
            raise Exception("Must specify PDF Output directory")
        # Will also create intermediate directories if needed
        if not os.path.exists(pdfOutDir):
            os.makedirs(pdfOutDir)

    crossRef = parseCrossReferenceGeneratorArgs(MRepositDir,
                                                patchRepositDir)
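    # Build a name-to-IEN mapping for the RPC file from the schema
    # (RPC_FILE_NO and RPC_NAME_FIELD_NO are assumed to be module-level constants)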
    global RPC_NAME_TO_IEN_MAPPING
    RPC_NAME_TO_IEN_MAPPING = generateSingleFileFieldToIenMappingBySchema(MRepositDir,
                                                                          crossRef,
                                                                          RPC_FILE_NO,
                                                                          RPC_NAME_FIELD_NO)


    with open(inputJsonFile, 'r') as inputFile:
        pkgJson = {} # group by package
        allpkgJson = []
        inputJson = json.load(inputFile)
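        # For each ICR entry: generate an individual page (PDF and/or HTML)
        # and collect summary info, grouped by custodial package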
        for icrEntry in inputJson:
            if 'NUMBER' not in icrEntry:
                logger.error("Could not parse entry: " + str(icrEntry))
                continue
            if generatePDF:
                _generateICRIndividualPagePDF(icrEntry, date, pdfOutDir)
            if generateHTML:
                _generateICRIndividualPage(icrEntry, date, outDir, crossRef)
                summaryInfo = _convertICREntryToSummaryInfo(icrEntry, crossRef)
                allpkgJson.append(summaryInfo)
                if 'CUSTODIAL PACKAGE' in icrEntry:
                    pkgJson.setdefault(icrEntry['CUSTODIAL PACKAGE'],[]).append(summaryInfo)
        if generateHTML:
            _generateICRSummaryPageImpl(allpkgJson, 'ICR List', 'All', date,
                                        outDir, isForAll=True)
            for pkgName, outJson in pkgJson.iteritems():
                _generateICRSummaryPageImpl(outJson, 'ICR List', pkgName, date,
                                            outDir)
            logger.warn('Total # entry in PACKAGE_MAP is [%s]', len(PACKAGE_MAP))
            logger.warn('Total # entry in pkgJson is [%s]', len(pkgJson))
            _generatePkgDepSummaryPage(inputJson, date, outDir, crossRef)
Example #2
0
def run(args):
    from InitCrossReferenceGenerator import parseCrossRefGeneratorWithArgs
    from FileManDataToHtml import FileManDataToHtml

    crossRef = parseCrossRefGeneratorWithArgs(args)
    _doxURL = getDOXURL(args.local)
    _vivianURL = getViViaNURL(False)
    glbDataParser = FileManGlobalDataParser(args.MRepositDir, crossRef)
    assert '0' in glbDataParser.allFiles and '1' in glbDataParser.allFiles and set(
        args.fileNos).issubset(glbDataParser.allFiles)

    # Populate glbDataParser.globalLocationMap
    glbDataParser.parseZWRGlobalFileBySchemaV2(
        glbDataParser.allFiles['1']['path'], '1', '^DIC(')
    for fileNo in args.fileNos:
        assert fileNo in glbDataParser.globalLocationMap
    del glbDataParser.outFileManData['1']

    glbDataParser.outdir = args.outDir

    glbDataParser.patchDir = args.patchRepositDir
    htmlGen = FileManDataToHtml(crossRef, args.outDir, _doxURL, _vivianURL)
    isolatedFiles = glbDataParser.schemaParser.isolatedFiles
    if not args.all or set(args.fileNos).issubset(isolatedFiles):
        for fileNo in args.fileNos:
            gdFile = glbDataParser.allFiles[fileNo]['path']
            logger.info("Parsing file: %s at %s" % (fileNo, gdFile))
            glbDataParser.parseZWRGlobalFileBySchemaV2(gdFile, fileNo)

            htmlGen.outputFileManDataAsHtml(glbDataParser)

            del glbDataParser.outFileManData[fileNo]
    else:
        # Generate all required files
        sccSet = glbDataParser.schemaParser.sccSet
        fileSet = set(args.fileNos)
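        # Walk the SCCs (assumed to be ordered by dependency) until every
        # requested file is covered, then take all files in the SCCs seen so
        # far, keeping only files present on disk and skipping file '757'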
        for idx, value in enumerate(sccSet):
            fileSet.difference_update(value)
            if not fileSet:
                break
        for i in xrange(0, idx + 1):
            fileSet = fileSet.union(sccSet[i])
            fileSet &= set(glbDataParser.allFiles.keys())
            fileSet.discard('757')
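        # When more than one file is selected, build a key index for each
        # file before parsing its data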
        if len(fileSet) > 1:
            for file in fileSet:
                zwrFile = glbDataParser.allFiles[file]['path']
                globalSub = glbDataParser.allFiles[file]['name']
                glbDataParser.generateFileIndex(zwrFile, file)
        for file in fileSet:
            zwrFile = glbDataParser.allFiles[file]['path']
            globalSub = glbDataParser.allFiles[file]['name']
            logger.info("Parsing file: %s at %s" % (file, zwrFile))
            glbDataParser.parseZWRGlobalFileBySchemaV2(zwrFile, file)
            htmlGen.outputFileManDataAsHtml(glbDataParser)
            del glbDataParser.outFileManData[file]

    glbDataParser.outRtnReferenceDict()
Example #3
0
def convertJson(inputJsonFile, date, MRepositDir, patchRepositDir,
                generateHTML, generatePDF, outDir=None, pdfOutDir=None,
                local=False):
    if not generateHTML and not generatePDF:
        raise Exception("Nothing to generate!")

    global DOX_URL
    global VIVIAN_URL
    DOX_URL = getDOXURL(local)
    VIVIAN_URL = getViViaNURL(local)

    if generateHTML:
        if not outDir:
            raise Exception("Must specify Output directory")
        if not os.path.exists(outDir):
            # Will also create intermediate directories if needed
            os.makedirs(outDir)

    if generatePDF:
        if not pdfOutDir:
            raise Exception("Must specify PDF Output directory")
        # Will also create intermediate directories if needed
        if not os.path.exists(pdfOutDir):
            os.makedirs(pdfOutDir)

    crossRef = parseCrossReferenceGeneratorArgs(MRepositDir,
                                                patchRepositDir)
    global RPC_NAME_TO_IEN_MAPPING
    RPC_NAME_TO_IEN_MAPPING = generateSingleFileFieldToIenMappingBySchema(MRepositDir,
                                                                          crossRef,
                                                                          RPC_FILE_NO,
                                                                          RPC_NAME_FIELD_NO)


    with open(inputJsonFile, 'r') as inputFile:
        pkgJson = {} # group by package
        allpkgJson = []
        inputJson = json.load(inputFile)
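        # For each ICR entry: generate an individual page (PDF and/or HTML)
        # and collect summary info, grouped by custodial package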
        for icrEntry in inputJson:
            if generatePDF:
                _generateICRIndividualPagePDF(icrEntry, date, pdfOutDir)
            if generateHTML:
                _generateICRIndividualPage(icrEntry, date, outDir, crossRef)
                summaryInfo = _convertICREntryToSummaryInfo(icrEntry, crossRef)
                allpkgJson.append(summaryInfo)
                if 'CUSTODIAL PACKAGE' in icrEntry:
                    pkgJson.setdefault(icrEntry['CUSTODIAL PACKAGE'],[]).append(summaryInfo)
        if generateHTML:
            _generateICRSummaryPageImpl(allpkgJson, 'ICR List', 'All', date,
                                        outDir, isForAll=True)
            for pkgName, outJson in pkgJson.iteritems():
                _generateICRSummaryPageImpl(outJson, 'ICR List', pkgName, date,
                                            outDir)
            logger.warn('Total # entry in PACKAGE_MAP is [%s]', len(PACKAGE_MAP))
            logger.warn('Total # entry in pkgJson is [%s]', len(pkgJson))
            _generatePkgDepSummaryPage(inputJson, date, outDir, crossRef)
Example #4
0
def run(args):
    from InitCrossReferenceGenerator import parseCrossRefGeneratorWithArgs
    from FileManDataToHtml import FileManDataToHtml

    # Ensure that output directory exists
    if not os.path.exists(os.path.join(args.outDir, "dox")):
        os.makedirs(os.path.join(args.outDir, "dox"))

    crossRef = parseCrossRefGeneratorWithArgs(args)

    # Populate glbDataParser and its globalLocationMap
    glbDataParser = FileManGlobalDataParser(args.MRepositDir, crossRef)
    glbDataParser.parseZWRGlobalFileBySchemaV2(
        glbDataParser.allFiles['1']['path'][0], '1', '^DIC(')
    del glbDataParser.outFileManData['1']
    glbDataParser.outdir = args.outDir
    glbDataParser.patchDir = args.patchRepositDir

    _doxURL = getDOXURL(args.local)
    _vivianURL = getViViaNURL(args.local)
    htmlGen = FileManDataToHtml(crossRef, glbDataParser.schemaParser,
                                args.outDir, _doxURL, _vivianURL)

    if not args.all:
        assert set(args.fileNos).issubset(glbDataParser.allFiles)
        for fileNo in args.fileNos:
            assert fileNo in glbDataParser.globalLocationMap

        processFiles(glbDataParser, htmlGen, args.fileNos)
    else:
        # Start with 'Strongly connected components'
        fileSet = glbDataParser.schemaParser.sccSet

        # Add files we're specifically interested in
        fileSet.add('101')  #Protocol
        fileSet.add('8994')  #Remote Procedure
        fileSet.add('19')  #Option
        fileSet.add('779.2')  #HLO Application
        fileSet.add('9.7')  #Install
        fileSet.add('.5')  #Function
        fileSet.add('409.61')  #List Template
        fileSet.add('19.1')  #Security Key
        fileSet.add('9.2')  #Help Frame
        fileSet.add('.403')  #Form
        fileSet.add('.401')  #Sort Template
        fileSet.add('771')  #HL7 APPLICATION PARAMETER

        # Make sure to only use files that are in glbDataParser.allFiles.keys()
        fileSet &= set(glbDataParser.allFiles.keys())
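        # Build a key index for each selected file before its data is parsed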
        for file in fileSet:
            for zwrFile in glbDataParser.allFiles[file]['path']:
                glbDataParser.generateFileIndex(zwrFile, file)
        processFiles(glbDataParser, htmlGen, fileSet)

    glbDataParser.outRtnReferenceDict()
Example #5
0
def run(args):
  from InitCrossReferenceGenerator import parseCrossRefGeneratorWithArgs
  from FileManDataToHtml import FileManDataToHtml

  crossRef = parseCrossRefGeneratorWithArgs(args)
  _doxURL = getDOXURL(args.local)
  glbDataParser = FileManGlobalDataParser(args.MRepositDir, crossRef)
  assert '0' in glbDataParser.allFiles and '1' in glbDataParser.allFiles and set(args.fileNos).issubset(glbDataParser.allFiles)

  # Populate glbDataParser.globalLocationMap
  glbDataParser.parseZWRGlobalFileBySchemaV2(glbDataParser.allFiles['1']['path'], '1', '^DIC(')
  for fileNo in args.fileNos:
    assert fileNo in glbDataParser.globalLocationMap
  del glbDataParser.outFileManData['1']

  glbDataParser.outdir = args.outdir

  glbDataParser.patchDir = args.patchRepositDir
  htmlGen = FileManDataToHtml(crossRef, args.outdir, _doxURL)
  isolatedFiles = glbDataParser.schemaParser.isolatedFiles
  if not args.all or set(args.fileNos).issubset(isolatedFiles):
    for fileNo in args.fileNos:
      gdFile = glbDataParser.allFiles[fileNo]['path']
      logging.info("Parsing file: %s at %s" % (fileNo, gdFile))
      glbDataParser.parseZWRGlobalFileBySchemaV2(gdFile, fileNo)
      htmlGen.outputFileManDataAsHtml(glbDataParser)
      del glbDataParser.outFileManData[fileNo]
  else:
    # Generate all required files
    sccSet = glbDataParser.schemaParser.sccSet
    logging.debug("sccSet: %s" % sccSet)
    fileSet = set(args.fileNos)
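    # Walk the SCCs (assumed to be ordered by dependency) until every
    # requested file is covered, then take all files in the SCCs seen so far,
    # keeping only files present on disk and skipping file '757'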
    for idx, value in enumerate(sccSet):
      fileSet.difference_update(value)
      if not fileSet:
        break
    for i in xrange(0,idx+1):
      fileSet = fileSet.union(sccSet[i])
      fileSet &= set(glbDataParser.allFiles.keys())
      fileSet.discard('757')
    if len(fileSet) > 1:
      for file in fileSet:
        zwrFile = glbDataParser.allFiles[file]['path']
        globalSub = glbDataParser.allFiles[file]['name']
        logging.info("Generate file key index for: %s at %s" % (file, zwrFile))
        glbDataParser.generateFileIndex(zwrFile, file)
    for file in fileSet:
      zwrFile = glbDataParser.allFiles[file]['path']
      globalSub = glbDataParser.allFiles[file]['name']
      logging.info("Parsing file: %s at %s" % (file, zwrFile))
      glbDataParser.parseZWRGlobalFileBySchemaV2(zwrFile, file)
      htmlGen.outputFileManDataAsHtml(glbDataParser)
      del glbDataParser.outFileManData[file]

  glbDataParser.outRtnReferenceDict()
Example #6
0
def run(args):
  from InitCrossReferenceGenerator import parseCrossRefGeneratorWithArgs
  from FileManDataToHtml import FileManDataToHtml

  # Ensure that output directory exists
  if not os.path.exists(os.path.join(args.outDir, "dox")):
    os.makedirs(os.path.join(args.outDir, "dox"))

  crossRef = parseCrossRefGeneratorWithArgs(args)

  # Populate glbDataParser and its globalLocationMap
  glbDataParser = FileManGlobalDataParser(args.MRepositDir, crossRef)
  glbDataParser.parseZWRGlobalFileBySchemaV2(glbDataParser.allFiles['1']['path'][0], '1', '^DIC(')
  del glbDataParser.outFileManData['1']
  glbDataParser.outdir = args.outDir
  glbDataParser.patchDir = args.patchRepositDir

  _doxURL = getDOXURL(args.local)
  _vivianURL = getViViaNURL(args.local)
  htmlGen = FileManDataToHtml(crossRef, glbDataParser.schemaParser,
                              args.outDir, _doxURL, _vivianURL)

  if not args.all:
    assert set(args.fileNos).issubset(glbDataParser.allFiles)
    for fileNo in args.fileNos:
        assert fileNo in glbDataParser.globalLocationMap

    processFiles(glbDataParser, htmlGen, args.fileNos)
  else:
    # Start with 'Strongly connected components'
    fileSet = glbDataParser.schemaParser.sccSet

    # Add files we're specifically interested in
    fileSet.add('101')    #Protocol
    fileSet.add('8994')   #Remote Procedure
    fileSet.add('19')     #Option
    fileSet.add('779.2')  #HLO Application
    fileSet.add('9.7')    #Install
    fileSet.add('.5')     #Function
    fileSet.add('409.61') #List Template
    fileSet.add('19.1')   #Security Key
    fileSet.add('9.2')    #Help Frame
    fileSet.add('.403')   #Form
    fileSet.add('.401')   #Sort Template
    fileSet.add('771')    #HL7 APPLICATION PARAMETER

    # Make sure to only use files that are in glbDataParser.allFiles.keys()
    fileSet &= set(glbDataParser.allFiles.keys())
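    # Build a key index for each selected file before its data is parsed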
    for file in fileSet:
      for zwrFile in glbDataParser.allFiles[file]['path']:
        glbDataParser.generateFileIndex(zwrFile, file)
    processFiles(glbDataParser, htmlGen, fileSet)

  glbDataParser.outRtnReferenceDict()
Example #7
0
def run(args):
    global DOX_URL
    global VIVIAN_URL
    global rpcNameToIenMapping
    crossRef = parseCrossReferenceGeneratorArgs(args.MRepositDir,
                                                args.patchRepositDir)
    rpcNameToIenMapping = createRemoteProcedureMapping(args.MRepositDir, crossRef)
    icrJsonToHtml = ICRJsonToHtml(crossRef, args.outdir, args.pdfOutdir, args.pdf)
    DOX_URL = getDOXURL(args.local)
    VIVIAN_URL = getViViaNURL(args.local)
    if hasattr(args, 'date'):
        date = args.date
    else:
        date = None
    icrJsonToHtml.convertJsonToHtml(args.icrJsonFile, date)
Example #8
0
def run(args):
  from InitCrossReferenceGenerator import parseCrossRefGeneratorWithArgs
  from FileManDataToHtml import FileManDataToHtml

  logger.progress("Start FileMan Global Data Parser")

  # Ensure that output directory exists
  if not os.path.exists(os.path.join(args.outDir, "dox")):
    os.makedirs(os.path.join(args.outDir, "dox"))

  crossRef = parseCrossRefGeneratorWithArgs(args)

  # Populate glbDataParser and its globalLocationMap
  logger.progress("Populate global data parser")
  glbDataParser = FileManGlobalDataParser(args.MRepositDir, crossRef)
  glbDataParser.parseZWRGlobalFileBySchemaV2(glbDataParser.allFiles['1']['path'][0], '1', '^DIC(')
  del glbDataParser.outFileManData['1']
  glbDataParser.outdir = args.outDir
  glbDataParser.patchDir = args.patchRepositDir

  logger.progress("Fileman data to html")
  _doxURL = getDOXURL(args.local)
  _vivianURL = getViViaNURL(args.local)
  _filesURL = getFilesURL(args.local)
  htmlGen = FileManDataToHtml(crossRef, glbDataParser.schemaParser,
                              args.outDir, _doxURL, _vivianURL, _filesURL)

  if not args.all:
    logger.progress("Checking files...")
    assert set(args.fileNos).issubset(glbDataParser.allFiles)
    for fileNo in args.fileNos:
        assert fileNo in glbDataParser.globalLocationMap
    logger.progress("Process files...")
    processFiles(glbDataParser, htmlGen, args.fileNos)
  else:
    # Start with 'Strongly connected components'
    fileSet = glbDataParser.schemaParser.sccSet

    # Add files we're specifically interested in
    fileSet.add('101')    #Protocol
    fileSet.add('8994')   #Remote Procedure
    fileSet.add('19')     #Option
    fileSet.add('779.2')  #HLO Application
    fileSet.add('9.6')    #Build (needs to be before install)
    fileSet.add('9.7')    #Install
    fileSet.add('.5')     #Function
    fileSet.add('409.61') #List Template
    fileSet.add('19.1')   #Security Key
    fileSet.add('9.2')    #Help Frame
    fileSet.add('.403')   #Form
    fileSet.add('.401')   #Sort Template
    fileSet.add('771')    #HL7 APPLICATION PARAMETER

    # Make sure to only use files that are in glbDataParser.allFiles.keys()
    fileSet &= set(glbDataParser.allFiles.keys())
    n = 0
    numFiles = len(fileSet)
    fileList = list(fileSet)
    # HACK: sort by file number coerced to float. This resolves ordering
    # ("dependency") issues between files whose data is needed later:
    #
    #      Depends on
    #  9.7    =>   9.6, to write out dependency information for patches
    #  19     =>   9.4, to find and query the Package names of options found

    fileList = sorted(fileList, key=lambda x: float(x))
    for file in fileList:
      n += 1
      logger.progress("Processing %s (file %d/%d)" % (file, n, numFiles))
      for zwrFile in glbDataParser.allFiles[file]['path']:
        glbDataParser.generateFileIndex(zwrFile, file)
    logger.progress("Process files...")
    processFiles(glbDataParser, htmlGen, fileList)

  glbDataParser.outRtnReferenceDict()
Example #9
0
def convertJson(inputJsonFile, date, MRepositDir, patchRepositDir,
                generateHTML, generatePDF, outDir=None, pdfOutDir=None,
                local=False):
    if not generateHTML and not generatePDF:
        raise Exception("Nothing to generate!")

    global DOX_URL
    global VIVIAN_URL
    DOX_URL = getDOXURL(local)
    VIVIAN_URL = getViViaNURL(local)

    if generateHTML:
        if not outDir:
            raise Exception("Must specify Output directory")
        if not os.path.exists(outDir):
            # Will also create intermediate directories if needed
            os.makedirs(outDir)

    if generatePDF:
        if not pdfOutDir:
            raise Exception("Must specify PDF Output directory")
        # Will also create intermediate directories if needed
        if not os.path.exists(pdfOutDir):
            os.makedirs(pdfOutDir)

    from InitCrossReferenceGenerator import parseCrossReferenceGeneratorArgs
    crossRef = parseCrossReferenceGeneratorArgs(MRepositDir,
                                                patchRepositDir)
    global RPC_NAME_TO_IEN_MAPPING
    RPC_NAME_TO_IEN_MAPPING = generateSingleFileFieldToIenMappingBySchema(MRepositDir,
                                                                          crossRef)


    with open(inputJsonFile, 'r') as inputFile:
        pkgJson = {} # group by package
        allpkgJson = []
        inputJson = json.load(inputFile)

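        # For each ICR entry: register any custodial package missing from the
        # package name map, then generate an individual page (PDF and/or HTML)
        # and collect summary info, grouped by custodial package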
        for icrEntry in inputJson:
            if 'NUMBER' not in icrEntry:
                logger.error("Could not parse entry: " + str(icrEntry))
                continue

            if 'CUSTODIAL PACKAGE' in icrEntry:
                pkgName = icrEntry['CUSTODIAL PACKAGE']
                if crossRef.getMappedPackageName(pkgName) is None:
                    crossRef.addMappedPackage(pkgName,
                                              crossRef.normalizePackageName(pkgName).title())
                    logger.warning("Adding package " + pkgName + " to package name map.")
            if generatePDF:
                _generateICRIndividualPagePDF(icrEntry, date, pdfOutDir)
            if generateHTML:
                _generateICRIndividualPage(icrEntry, date, outDir, crossRef)
                summaryInfo = _convertICREntryToSummaryInfo(icrEntry, crossRef)
                allpkgJson.append(summaryInfo)
                if 'CUSTODIAL PACKAGE' in icrEntry:
                    pkgJson.setdefault(icrEntry['CUSTODIAL PACKAGE'], []).append(summaryInfo)
        if generateHTML:
            _generateICRSummaryPageImpl(allpkgJson, 'ICR List', 'All', date,
                                        outDir, crossRef, isForAll=True)
            for pkgName, outJson in iteritems(pkgJson):
                _generateICRSummaryPageImpl(outJson, 'ICR List', pkgName, date,
                                            outDir, crossRef)
            logger.warn('Total # entry in pkgJson is [%s]', len(pkgJson))
            _generatePkgDepSummaryPage(inputJson, date, outDir, crossRef)