def __download(self, identifier, server):
    """Download the export archive named by identifier from server.

    Builds the download URL from the server's base path plus the
    identifier's filename, saves it into the export directory, and
    verifies it two ways: the MD5 of the downloaded bytes must match
    identifier.hash(), and the package file inside the zip must describe
    the same identifier.  On success the identifier is enriched
    (package ID, local path, URL merged in), registered via addExport,
    and returned.  On any mismatch the local file is deleted and None
    is returned.
    """
    url = server.rsplit('/', 1)[0] + '/' + identifier.filename()
    localPath = os.path.join(self.__exportDir, identifier.filename())
    source = urllib2.urlopen(url)
    # 'wb', not 'w': the export is a zip archive; text mode would corrupt
    # it on platforms that translate line endings.
    destination = open(localPath, 'wb')
    try:
        # transferAndHash result indexing: [1] = hashes (vs line count),
        # [0] = first hasher, [1] = digest (vs algorithm name).
        # NOTE(review): keyword was misspelled 'detextText' in the original;
        # the sibling create() passes 'detectText' to the same function.
        hash = bArchive.transferAndHash(
            input=source,
            output=destination,
            hashers=bArchive.kMD5Hash,
            blockTransferSize=bConstants.kReadBlockSize,
            detectText=False,
            changeLineEndingsTo=None,
        )[1][0][1]
    finally:
        destination.close()
        source.close()
    hashMatch = hash.lower() == identifier.hash().lower()
    idMatch = False
    if hashMatch:
        # Contents hash out; now confirm the embedded package metadata
        # actually describes the identifier we asked for.
        exportZip = zipfile.ZipFile(localPath, 'r')
        package = bPackage.Package(exportZip.read(bConstants.kPackageFileName))
        idMatch = package.asID().equals(identifier)
        exportZip.close()
        if idMatch:
            identifier.merge(package.asID())
            identifier.merge(bID.ID(localPath))
            identifier.merge(bID.ID(url))
            self.addExport(identifier.filename())
            return identifier
    # Verification failed: don't leave a bad archive in the export dir.
    os.remove(localPath)
    return None
def upgrade(self, dependency, upgraded):
    """Rewrite this package's XML, replacing the dependency entry that
    matches `dependency` with `upgraded`'s filename.

    Only the first matching <dependency> tag under <dependencies> is
    changed; the document is then written back to self.__path and the
    DOM is released.
    """
    packageXML = bDOM.link(self.__path)
    dependencyList = bDOM.findTagByPath(packageXML, "dependencies")
    if dependencyList:
        for dependencyTag in dependencyList.getElementsByTagName("dependency"):
            candidate = bID.ID(bDOM.extractTextFromTagContents(dependencyTag))
            if candidate.equals(dependency):
                bDOM.changeTagContents(packageXML, dependencyTag,
                                       upgraded.filename())
                break
    with open(self.__path, 'w') as packageFile:
        packageXML.writexml(packageFile)
    packageXML.unlink()
def __haveLocal(self, identifier=None, upgrade=False):
    """Scan the export directory for zip files matching the criteria.

    Each *.zip filename is checked via __matchesIdentifier; matches are
    returned as a list of identifiers, each merged with an ID derived
    from its local path.
    """
    matches = []
    for exportPath in glob.glob(os.path.join(self.__exportDir, "*.zip")):
        exportName = os.path.split(exportPath)[1]
        candidate = self.__matchesIdentifier(exportName, identifier, upgrade)
        if candidate:
            candidate.merge(bID.ID(exportPath))
            matches.append(candidate)
    return matches
def __matchesIdentifier(self, name, identifier=None, upgrade=False):
    """Return a bID.ID for `name` if it satisfies the match criteria.

    Rules:
      - name must look like an export filename (kExportNamePattern);
      - with no identifier given, any export name matches;
      - with upgrade=True, match when it is the same item at a strictly
        newer version than `identifier`;
      - otherwise match only on exact identifier equality.
    Returns None when no rule matches.
    """
    if not bConstants.kExportNamePattern.match(name):
        return None
    thisIdentifier = bID.ID(name)
    if not identifier:
        return thisIdentifier
    if upgrade:
        sameItem = identifier.fullName() == thisIdentifier.fullName()
        if sameItem and identifier.compareVersions(thisIdentifier) < 0:
            return thisIdentifier
    elif identifier.equals(thisIdentifier):
        return thisIdentifier
    return None
def __findInStream(self, readlines, servers=None, identifier=None,
                   upgrade=False, listAll=False, onServer=None):
    """Scan a line stream for server URLs and matching export names.

    Lines starting with http:// are collected (de-duplicated) into
    `servers`.  Other lines that look like export names are matched via
    __matchesIdentifier; matches are accumulated in `found`.  When
    listAll is False the scan stops at the first match.  When onServer
    is given, every export-shaped line is also recorded in
    self.__locationCache (export filename -> list of servers).

    Returns a (found, servers) tuple.
    """
    found = []
    if servers is None:
        servers = []
    while True:
        line = readlines.readline()
        if not line:
            break  # end of stream
        line = line.strip()
        if line.startswith("http://"):
            if line not in servers:
                servers.append(line)
        else:
            if onServer and bConstants.kExportNamePattern.match(line):
                identifierForCache = bID.ID(line).filename()
                # BUG FIX: original read `self.c.has_key(
                # identifierForCache.filename())` -- `self.c` does not
                # exist anywhere in this class, and identifierForCache is
                # already a filename string, so .filename() would fail.
                # The intent (per the assignments below) is a membership
                # test against self.__locationCache.
                if identifierForCache not in self.__locationCache:
                    self.__locationCache[identifierForCache] = [onServer]
                else:
                    self.__locationCache[identifierForCache].append(onServer)
            thisIdentifier = self.__matchesIdentifier(line, identifier, upgrade)
            if thisIdentifier:
                found.append(thisIdentifier)
                if not listAll:
                    break  # we found what we were looking for
    return (found, servers)
def create(self, package, preferences):
    """Build an export zip for `package` and return (path, URL).

    Steps:
      1. Write the archive under a unique temporary name (time, random,
         pid) in preferences['exports'], generating manifest and
         signature entries via bArchive.generate.
      2. MD5-hash the finished archive and rename it to the final
         "<fullName>_<version>_<hash>.zip" form.
      3. Merge path info into the identifier, record it as a previous
         release, bump the package version (phase bump when the script
         was invoked as `<cmd> <arg> phase`), and stamp a placeholder
         entry into the package's changes file.
      4. Register the export and return its local path and public URL.
    """
    identifier = package.asID()
    temporaryName = "%s_%s_%x-%x-%x.zip" % (
        identifier.fullName(),
        identifier.filenameVersion(),
        time.time(),
        random.randrange(0, 100000),
        os.getpid(),
    )
    intermediateExportPath = os.path.join(preferences['exports'], temporaryName)
    intermediateExportFile = bArchive.ZipArchive(intermediateExportPath, 'w')
    manifestFile = intermediateExportFile.open(
        bConstants.kManifestFileNameInExport, 'w')
    signatureFile = intermediateExportFile.open(
        bConstants.kSignatureFileNameInExport, 'w')
    bArchive.generate(package.directory(), manifestFile,
                      bArchive.kAllKnownHashes, bArchive.kStandardCodecs,
                      preferences['key'], signatureFile,
                      intermediateExportFile,
                      detectText=True,
                      blockTransferSize=bConstants.kReadBlockSize,
                      skipPaths=package['filterPaths'],
                      skipExtensions=package['filterExtensions'],
                      skipNames=package['filterNames'])
    signatureFile.close()
    manifestFile.close()
    intermediateExportFile.close()
    # 'rb', not 'r': we are hashing a zip archive; text mode would alter
    # bytes (and thus the hash) on platforms that translate line endings.
    intermediateContents = open(intermediateExportPath, 'rb')
    try:
        # transferAndHash result indexing: [1] = hashes (vs line count),
        # [0] = first hasher, [1] = digest (vs algorithm name).
        hash = bArchive.transferAndHash(
            intermediateContents,
            bArchive.kMD5Hash,
            blockTransferSize=bConstants.kReadBlockSize,
            output=None,
            detectText=False,
            changeLineEndingsTo=None,
        )[1][0][1]
    finally:
        intermediateContents.close()
    finalExportPath = os.path.join(
        preferences['exports'],
        identifier.fullName() + "_" + identifier.filenameVersion() + "_"
        + hash + ".zip")
    os.rename(intermediateExportPath, finalExportPath)
    identifier.merge(bID.ID(finalExportPath))
    exportURL = preferences['base_url'] + "/" + os.path.split(
        finalExportPath)[1]
    package.addPrevious(identifier)
    # Phase bump only when invoked as: <command> <arg> phase
    package.bumpVersion(
        bumpPhase=(len(sys.argv) == 3) and ('phase' == sys.argv[2]))
    # package['changes'] is a '/'-separated relative path inside the
    # package directory.
    changesPath = os.path.join(
        package.directory(), os.path.join(*package['changes'].split('/')))
    changesFile = open(changesPath, "r")
    changes = changesFile.read()
    changesFile.close()
    # Insert a placeholder changelog entry for the freshly bumped version
    # right after the changes-pattern marker.
    changes = changes.replace(
        package.changesPattern(),
        package.changesPattern()
        + "\n\n<li><b>%(version)s</b><br>\nDescription of changes here\n</li><br>\n\n"
        % {
            'version': package['version'],
        })
    changesFile = open(changesPath, "w")
    changesFile.write(changes)
    changesFile.close()
    self.addExport(identifier.filename())
    return (finalExportPath, exportURL)
def parseXMLListOfExports(xml, pathToList, itemName, itemList, warningList):
    """Append a bID.ID to itemList for each itemName tag under pathToList.

    Looks up the container tag at pathToList inside xml; when present,
    every child itemName tag's text content is turned into an ID and
    appended to itemList.  warningList is accepted for interface
    compatibility but is not used here.
    """
    container = bDOM.findTagByPath(xml, pathToList)
    if not container:
        return
    for exportTag in container.getElementsByTagName(itemName):
        itemList.append(bID.ID(bDOM.extractTextFromTagContents(exportTag)))
def asID(self):
    """Return this package's identity as a bID.ID.

    The ID string is composed from the package contents as
    "<full_name>_<version>".
    """
    return bID.ID("%s_%s" % (self.__contents['full_name'],
                             self.__contents['version']))