def loadVSMatrices(prefix='cmj', loadTFIDF=True, loadLSI=True):
    """Load and return the vector space matrices of the documents."""
    global vecTFIDF, vecLSI
    vecTFIDF, vecLSI = None, None
    if loadTFIDF:
        vecTFIDF = matutils.loadMatrix(common.matrixFile(prefix + 'TFIDF.mm'))
    if loadLSI:
        vecLSI = matutils.loadMatrix(common.matrixFile(prefix + 'LSI_VT.mm'))
    return vecTFIDF, vecLSI
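# Usage sketch (illustrative, not part of the original module): load both vector
# space matrices for the default 'cmj' prefix and log their shapes. The
# prefix + 'TFIDF.mm' / 'LSI_VT.mm' naming convention follows the function above;
# the helper name is hypothetical.
def _exampleLoadVSMatrices():
    tfidf, lsi = loadVSMatrices(prefix='cmj')
    logging.info("loaded TFIDF matrix %s and LSI V^T matrix %s" % (tfidf.shape, lsi.shape))
    return tfidf, lsi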
def buildDocDoc(arts, type, language):
    ipyutils.loadDicts(prefix="gensim_" + language)
    # keep only articles that are present in the document id mapping
    arts = [art for art in arts if art.id_int in ipyutils.rdocids]
    assert len(arts) == len(ipyutils.rdocids)

    logging.info("loading msc<->id mapping")
    cats, rcats = loadMsc2Id(language)
    mscsFile = common.matrixFile("mscs_%s.mm" % type)
    matMsc = matutils.loadMatrix(mscsFile)

    # map each document id to the integer ids of its top-level MSC categories
    mscDict = {}
    for art in arts:
        artId = ipyutils.rdocids[art.id_int]
        mscIds = [rcats[mscs.niceMSC(msc)[0]] for msc in art.msc]
        mscDict[artId] = mscIds

    logging.info("computing doc*doc similarity matrix based on %s" % mscsFile)
    docdoc = numpy.zeros((len(arts), len(arts)), numpy.float32)
    for i in xrange(len(arts)):
        if i % 100 == 0:
            logging.info("PROGRESS: %i/%i" % (i, len(arts)))
        art1Id = ipyutils.rdocids[arts[i].id_int]
        for j in xrange(i, len(arts)):
            art2Id = ipyutils.rdocids[arts[j].id_int]
            # document-document similarity = best similarity over all their MSC pairs
            bestScore = 0.0
            for msc1Id in mscDict[art1Id]:
                for msc2Id in mscDict[art2Id]:
                    bestScore = max(bestScore, matMsc[msc1Id, msc2Id])
            docdoc[art1Id, art2Id] = docdoc[art2Id, art1Id] = bestScore

    matutils.saveMatrix(docdoc, common.matrixFile("docdoc_" + language + "_%s.mm" % type), sparse=False)
    return docdoc
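# Illustrative driver (an assumption, not from the original source): build the
# MSC-based doc*doc similarity matrix for the English articles. It assumes that
# ARTS_FILE and docsim.getArts are available as in buildMscCentroidMatrix below,
# and that a precomputed MSC similarity matrix exists as mscs_centroid_eng.mm,
# so type='centroid_eng' makes buildDocDoc load that file.
def _exampleBuildDocDoc():
    arts = [art for art in docsim.getArts(ARTS_FILE, acceptNoBody=False, acceptNoMsc=False)
            if art.language == 'eng']
    return buildDocDoc(arts, type='centroid_eng', language='eng')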
# (fragment starts mid-loop; the loop head is missing from the source -- it
#  iterates over art2msc and records a break whenever the MSC code changes)
        breaks.append((i, msc))
        oldMsc = msc

logging.info("%i breaks" % len(breaks))
print "breaks:", breaks

# print individual category matrices
breaks.append((i, msc))  # append the last category
print "breaks with end:", breaks

new2old = {}
for i, (msc, id) in enumerate(art2msc):
    new2old[i] = ipyutils.rdocids[id]
print '==first 100 new2old:', new2old.items()[:100]

logging.info("loading cossim matrix")
mat = matutils.loadMatrix(common.matrixFile("gensim_eng%ssim.mm" % SIM_TYPE))

for i in xrange(len(breaks) - 1):
    numArts = breaks[i + 1][0] - breaks[i][0]
    logging.debug("%i articles in category %s" % (numArts, breaks[i][1]))
    matId = numpy.zeros((numArts, numArts), numpy.float)
    for i1 in xrange(numArts):
        print breaks[i][0] + i1, i1, art2mscOld[breaks[i][0] + i1]
        for i2 in xrange(numArts):
            pos1, pos2 = breaks[i][0] + i1, breaks[i][0] + i2
            matId[i1, i2] = mat[new2old[pos1], new2old[pos2]]
    processMsc(matId, breaks[i][1][0])

sys.exit(0)

# dead code below the sys.exit(0) call; the fragment is truncated here in the source
# new2old = {}
# for i, (msc, id) in enumerate(art2msc):
def loadConcepts(prefix='cmj'):
    """Load and return the LSI_U matrix of concepts."""
    global concepts
    concepts = matutils.loadMatrix(common.matrixFile(prefix + 'LSI_U.mm'))
    return concepts
def loadSimMatrices(prefix='cmj'):
    """Load and return the TFIDF and LSI document similarity matrices."""
    global m, l
    m = matutils.loadMatrix(common.matrixFile(prefix + 'TFIDFsim.mm'))
    l = matutils.loadMatrix(common.matrixFile(prefix + 'LSIsim.mm'))
    return m, l
def rmseFile(matfile1, matfile2):
    """Return the RMSE between two matrices stored in the given matrix files."""
    data1 = matutils.loadMatrix(common.matrixFile(matfile1))
    data2 = matutils.loadMatrix(common.matrixFile(matfile2))
    return matutils.rmse(data1, data2)
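# Usage sketch (illustrative, not part of the original module): measure how well
# the LSI similarity matrix approximates the TFIDF one for a given corpus prefix.
# The filenames follow the prefix + 'TFIDFsim.mm' / 'LSIsim.mm' convention used
# by loadSimMatrices above; the helper name is hypothetical.
def _exampleRmse(prefix='cmj'):
    err = rmseFile(prefix + 'TFIDFsim.mm', prefix + 'LSIsim.mm')
    logging.info("RMSE between %sTFIDFsim and %sLSIsim: %f" % (prefix, prefix, err))
    return err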
def buildMscCentroidMatrix(language):
    logging.info("building MSC centroid matrix from %s" % ARTS_FILE)
    arts = [art for art in docsim.getArts(ARTS_FILE, acceptNoBody=False, acceptNoMsc=False)
            if art.language == language or language == 'any']

    prefix = 'mscs_serial_%s_' % language
    matFile = common.matrixFile(prefix + 'TFIDF_T.mm')
    if os.path.exists(matFile):
        logging.warning('SKIPPING creating TFIDF matrix for %s (file %s present). Is this what you wanted?' % (language, matFile))
        tfidf = matutils.loadMatrix(matFile).tocsr()
    else:
        logging.info('creating TFIDF matrix for %s to %s' % (language, matFile))
        tfidf = docsim.buildTFIDFMatrices(arts, prefix=prefix, saveMatrices=False).tocsr()

    ipyutils.loadDicts(prefix=prefix)
    # remove articles that had an empty body (according to their tfidf vector)
    arts = [art for art in arts if art.id_int in ipyutils.rdocids]
    if len(ipyutils.rdocids) != len(arts):
        logging.error("no. of TFIDF documents = %i, but there are %i documents in the database (mismatch)" %
                      (len(ipyutils.rdocids), len(arts)))
        raise Exception("different size of database/dictionary; version mismatch?")

    cats, rcats = loadMsc2Id(language)  # from buildPure
    # print "mscs:", cats

    logging.info("loading tfidf collection matrix (for centroids)")
    tfidf = matutils.loadMatrix(common.matrixFile('gensim_' + language + 'TFIDF_T.mm')).tocsr()
    logging.debug("loaded %ix%i matrix" % tfidf.shape)

    logging.info("computing centroids")
    centroids = numpy.zeros((len(cats), tfidf.shape[1]), numpy.float)
    # print "centroids.shape =", centroids.shape
    num = numpy.zeros((len(cats),), numpy.int)
    artCnt = 0
    for art in arts:
        if not art.id_int in ipyutils.rdocids:
            logging.warning("article not found among docids: %s" % art)
            continue
        artCnt += 1
        artId = ipyutils.rdocids[art.id_int]
        tops = [mscs.niceMSC(msc)[0] for msc in art.msc]
        # only count each top-level category once (comment out this line to count
        # e.g. 30H55 and 30.13 twice for this article, as cat. 30)
        tops = set(tops)
        for top in tops:
            mscId = rcats[top]
            vec = tfidf[artId].toarray()
            vec.shape = (vec.size,)
            # print "vec.shape = ", vec.shape
            centroids[mscId] += vec
            num[mscId] += 1
        if artCnt < 10 or artCnt % 1000 == 0:
            logging.debug("sanity check - article %s has id %i and has mscs=%s, mscsIds=%s" %
                          (art.id_int, artId, art.msc, [rcats[mscs.niceMSC(msc)[0]] for msc in art.msc]))
    if not artCnt == tfidf.shape[0]:
        raise Exception("not all articles used; database/matrix mismatch?")

    for i, vec in enumerate(centroids):
        logging.info("centroid for msc %s (id %i) is an average of %i vectors" % (cats[i], i, num[i]))
        if numpy.sum(numpy.abs(vec)) == 0:
            logging.warning("empty centroid for msc %s (msc int id %i)" % (cats[i], i))
    for mscId in cats.iterkeys():
        centroids[mscId] /= num[mscId]
    logging.info("used %i articles for %i vectors (articles may have more than one msc and so can be counted more than once)" %
                 (artCnt, sum(num)))

    logging.info("computing MSC centroid matrix")
    resultCentroid = numpy.zeros((len(cats), len(cats)), dtype=numpy.float32)
    for idi, cati in cats.iteritems():
        for idj, catj in cats.iteritems():
            # print idi, cati, idj, catj
            sim = matutils.cossim(centroids[idi], centroids[idj])
            if numpy.isfinite(sim):
                resultCentroid[idi, idj] = sim
            else:
                resultCentroid[idi, idj] = 0.0
    matutils.saveMatrix(resultCentroid, common.matrixFile("mscs_centroid_%s.mm" % language), sparse=False)
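# Visualisation sketch (illustrative, not from the original source): render the
# centroid similarity matrix the same way the standalone script below plots the
# TFIDF document similarity matrix. The language code and output filename are
# assumptions; only the file saved by buildMscCentroidMatrix above is reused.
def _examplePlotCentroids(language='eng'):
    import pylab
    mat = matutils.loadMatrix(common.matrixFile("mscs_centroid_%s.mm" % language))
    pylab.figure()
    pylab.imshow(mat, cmap=pylab.cm.gray, interpolation="nearest")
    pylab.savefig("mscs_centroid_%s" % language)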
#!/usr/bin/env python2.5

import logging
logging.root.level = 10

import common
import pylab
import numpy
import matutils

mat = matutils.loadMatrix(common.matrixFile("gensim_engTFIDFsim.mm"))
#mat = numpy.zeros((10, 20), float)
#mat[2] = 0.2
#mat[8] = 0.9
logging.info("%ix%i matrix loaded" % mat.shape)

pylab.figure()
pylab.imshow(mat, cmap=pylab.cm.gray, interpolation="nearest")
pylab.savefig("tfidf_sim")