def get_source_files(self):
    """Return the source tarball filenames for this component, version-sorted.

    Three cases, driven by the bundle's version map:
      * map exists and lists this component -> match only tarballs for that
        version (with any trailing release suffix stripped off first);
      * map exists but this component is absent -> return nothing;
      * no map at all -> consider every tarball and let version_sort pick.
    """
    if self.bundle_obj.version_map_exists and self.name in self.bundle_obj.version_map:
        # Strip release version if it exists (so that a bundle conf may have a
        # release attached, but that it won't apply to the source).
        # Fixed: the old pattern "(.*)-?" was a no-op (greedy .* consumed the
        # whole string, so the release was never stripped). "(.*?)(?:-[^-]*)?$"
        # drops everything after the LAST dash, rpm version-release style.
        ver_wo_rel = re.compile(r"(.*?)(?:-[^-]*)?$").search(
            self.bundle_obj.version_map[self.name]).group(1)
        # Can't include self.name as part of reg because of cases like gtk-sharp-2.x
        # Account for empty string versions, ex: gtk-sharp=""
        if ver_wo_rel == "":
            ver_wo_rel = ".*?"
        reg = re.compile(".*?-%s%s" % (ver_wo_rel, config.sources_ext_re_string))
    elif self.bundle_obj.version_map_exists:
        # There's a version map, but this component isn't listed, return nothing
        # NOTE(review): "" is falsy/iterable like the list returned below, but
        # inconsistent with the other branch — consider returning [] instead.
        return ""
    else:
        # There's no version map, get the latest
        reg = re.compile(".*")
    # Compile the per-pack version selection regex once, outside the loop.
    selection_reg = re.compile(self.get_version_selection_reg())
    candidates = []
    for fname in os.listdir(self.source_fullpath):
        # Also match against the version selection reg for this pack def
        if reg.search(fname) and selection_reg.search(fname):
            candidates.append(fname)
    # TODO: need to use rpm sorting on this?
    return utils.version_sort(candidates)
def get_source_files(self):
    """List source tarballs for this component, in version-sorted order.

    If the bundle carries a version map and names this component, only
    tarballs matching that mapped version are considered; if the map exists
    but omits the component, nothing is returned; with no map at all, every
    tarball in the source directory is a candidate.
    """
    has_map = self.bundle_obj.version_map_exists
    if has_map and self.name in self.bundle_obj.version_map:
        # Strip a trailing release from the mapped version (a bundle conf may
        # attach a release, but it must not constrain the source match).
        mapped = self.bundle_obj.version_map[self.name]
        ver_wo_rel, = re.compile("(.*)-?").search(mapped).groups(1)
        # The component name can't be baked into the pattern (gtk-sharp-2.x
        # style names); an empty mapped version (ex: gtk-sharp="") matches any.
        if not ver_wo_rel:
            ver_wo_rel = ".*?"
        matcher = re.compile(".*?-%s%s" % (ver_wo_rel, config.sources_ext_re_string))
    elif has_map:
        # Version map present but this component isn't in it: nothing to return.
        return ""
    else:
        # No version map: accept everything and let version_sort rank it.
        matcher = re.compile(".*")
    # Keep only names that pass both the version pattern and this pack def's
    # own version selection regex.
    candidates = [
        name
        for name in os.listdir(self.source_fullpath)
        if matcher.search(name)
        and re.compile(self.get_version_selection_reg()).search(name)
    ]
    # TODO: need to use rpm sorting on this?
    return utils.version_sort(candidates)
def get_latest_tarball(self, HEAD_or_RELEASE, package_name):
    """Find latest tarball filename for a component.

    Scans self.info for keys of the form
    HEAD:mono-1.1.13:57664=snapshot_sources/mono-1.1.13/mono-1.1.13.4.57664.tar.gz
    collecting every revision whose tarball creation succeeded, then returns
    the filename recorded for the highest revision, or "" when none exists.
    """
    self.load_info()
    versions = []
    for key in self.info.keys():
        stuff = key.split(":")
        # If this is the release and packagename
        if stuff[0] == HEAD_or_RELEASE and stuff[1] == package_name:
            # If the tarball creation succeeded
            try:
                key2 = self.info[":".join([HEAD_or_RELEASE, package_name, stuff[2]])]
                if key2 != "failure" and key2 != "inprogress":
                    versions.append(stuff[2])
            # Narrowed from bare except: a corrupt key is missing its third
            # field (IndexError) or names an absent entry (KeyError).
            except (IndexError, KeyError):
                print("datastore - src_file_repo: corruption, offending data:")
                print(key)
                print(stuff)
    # TODO: Better error handling here
    try:
        latest = utils.version_sort(versions).pop()
        latest_filename = self.info[":".join([HEAD_or_RELEASE, package_name, latest])]
    # pop() on an empty list raises IndexError; a stale revision raises KeyError.
    except (IndexError, KeyError):
        print("Error getting latest tarball (%s, %s)..." % (HEAD_or_RELEASE, package_name))
        latest_filename = ""
    return latest_filename
def get_tarball_state_info(self, HEAD_or_RELEASE, component, read_info=True):
    """Get info map for the monobuild web view.

    Returns a map with the following keys: name, revisions, state.
    revisions is a reverse sorted array. State is another map with revisions
    as keys and success or failure as values.

    You can set read_info=False if you don't want to read in the data again.
    """
    if read_info:
        self.load_info()
    info = {}
    info['name'] = component
    info['revisions'] = []
    info['state'] = {}
    for key in self.info.keys():
        parts = key.split(":")
        # Defend against corrupt keys the same way get_latest_tarball does:
        # anything that isn't exactly H_or_R:name:revision is skipped.
        if len(parts) != 3:
            continue
        H_or_R, name, revision = parts
        if H_or_R == HEAD_or_RELEASE and name == component:
            state = self.info[key]
            # Anything that isn't an explicit failure/inprogress marker
            # counts as a successful build.
            if state != "failure" and state != "inprogress":
                state = "success"
            info['revisions'].append(revision)
            info['state'][revision] = state
    # Newest revision first for the web view.
    info['revisions'] = utils.version_sort(info['revisions'])
    info['revisions'].reverse()
    return info
def get_tarball_state_info(self, HEAD_or_RELEASE, component, read_info=True):
    """Build the tarball status map consumed by the monobuild web view.

    The result has three keys: name (the component), revisions (newest
    first), and state (revision -> "success"/"failure"/"inprogress").

    Pass read_info=False to reuse already-loaded data instead of re-reading.
    """
    if read_info:
        self.load_info()
    revisions = []
    states = {}
    for key in self.info:
        branch, name, revision = key.split(":")
        if branch != HEAD_or_RELEASE or name != component:
            continue
        raw_state = self.info[key]
        # Everything that isn't explicitly failed/in-progress is a success.
        if raw_state not in ("failure", "inprogress"):
            raw_state = "success"
        revisions.append(revision)
        states[revision] = raw_state
    ordered = utils.version_sort(revisions)
    ordered.reverse()
    return {"name": component, "revisions": ordered, "state": states}
def get_versions(HEAD_or_RELEASE, platform, package):
    """Return the version directory names for a package, version-sorted.

    Looks under config.build_info_dir/HEAD_or_RELEASE/platform/package,
    ignoring .svn bookkeeping dirs. A missing directory yields [].
    """
    package_dir = os.path.join(config.build_info_dir, HEAD_or_RELEASE,
                               platform, package)
    try:
        entries = [e for e in os.listdir(package_dir) if e != ".svn"]
        # The C-extension version sort is ~2x slower than a plain string sort
        # but orders our version strings correctly (a pure-python equivalent
        # measured roughly >= ~6x slower).
        return utils.version_sort(entries)
    except OSError:
        # Directory doesn't exist (nothing built yet): empty result.
        return []
def get_versions(HEAD_or_RELEASE, platform, package):
    """Return the version dir names under build_info_dir for a package.

    Reads config.build_info_dir/HEAD_or_RELEASE/platform/package, skips
    .svn entries, and returns the names sorted with utils.version_sort.
    Returns [] when the directory does not exist.
    """
    versions = []
    try:
        for entry in os.listdir(
                os.path.join(config.build_info_dir, HEAD_or_RELEASE, platform,
                             package)):
            if entry != ".svn":
                versions.append(entry)
        # Using this version sort is 2x slower than python's string sort, but it's accurate for our versions
        # Trying to do the same in straight python (without the c extension) was probably >= ~6x slower
        versions = utils.version_sort(versions)
    except OSError:
        # Missing directory (nothing built for this combination yet) ->
        # fall through and return the empty list.
        pass
    return versions
def clean_installers(installer, path):
    """Remove all but the newest `num_builds` installer trees for `installer`.

    Chdirs into archive_basepath (or config.release_repo_root when unset),
    globs */installer/*, version-sorts the hits, and rmtree's everything
    except the last num_builds entries. The original cwd is always restored.

    NOTE(review): `path` is accepted but never used — kept for interface
    compatibility with callers; confirm before removing.
    """
    cwd = os.getcwd()
    global archive_basepath
    try:
        if archive_basepath:
            os.chdir(archive_basepath)
        else:
            os.chdir(config.release_repo_root)
    except OSError:
        # Base directory missing: nothing to clean.
        return
    try:
        installers = utils.version_sort(glob.glob("*/%s/*" % installer))
        if installers:
            #print "Found installers (sorted): " + " ".join(installers)
            for i in installers[:-num_builds]:
                print("Removing: " + i)
                shutil.rmtree(i)
        else:
            print("No installers found for " + installer)
    finally:
        # Restore the caller's cwd even if rmtree fails partway through
        # (the original only restored it on the success path).
        os.chdir(cwd)
'macosx/output/[[version]]/macos-10-universal', 'md-macosx/output/[[version]]/md-macos-10', ]: #if skip_installers: continue try: _version = archive_version topdir = "archive" if dir.find('md') != -1: _version = md_version topdir = "monodevelop" _dir = os.path.join(dir.replace('[[version]]', _version), '*') candidates = glob.glob(_dir) latest = utils.version_sort(candidates).pop() #installer_dirs.append(latest) cwd = os.getcwd() splitter = os.sep + _version + os.sep (prefix, sync_dir) = latest.split(splitter) os.chdir(prefix) print "Syncing: %s" % os.path.join(_version, sync_dir) cmd = 'rsync -avzR -e ssh %s %s/%s' % (os.path.join( _version, sync_dir), dest, topdir) #print cmd status, output = utils.launch_process(cmd) os.chdir(cwd) except Exception, e:
def run(self):
    """Sync-thread main loop: mirror build/tarball logs to the sync host.

    Each pass gathers the most recent sync_num_builds build and tarball-log
    directories (plus the tarball map and config), then rsyncs them to
    sync_host in chunks that stay under sync_max_arg_len, sleeping
    sync_sleep_time between passes until cancelled() goes true.
    """
    sync_log.log("sync thread starting...\n")
    while not self.cancelled():
        # Refresh state each pass; presumably re-reads the info store —
        # TODO confirm against load_info's definition.
        self.load_info()
        # Must base these dirs off 'trunk/release'
        dirs = []
        # Add tarball_map
        dirs += ["packaging/tarball_map"]
        # Add config
        dirs += ["pyutils/config.py"]
        # sync_log.log(" *** Gathering dirs ***\n")
        # Gather dirs to synchronize
        for i in ["HEAD", "RELEASE"]:
            i_dir = config.build_info_dir + os.sep + i
            if os.path.exists(i_dir):
                for distro in os.listdir(i_dir):
                    distro_dir = i_dir + os.sep + distro
                    if os.path.exists(distro_dir):
                        for component in os.listdir(distro_dir):
                            # Get the last 'num_builds' number of elements from the list
                            versions = build.get_versions(i, distro, component)[-self.sync_num_builds:]
                            for j in versions:
                                dirs.append(os.path.join("monobuild/www/builds", i, distro, component, j))
            # Grab latest num_builds for tarball log files as well
            tarball_path = os.path.join(config.build_info_dir, "..", "tarball_logs", i)
            if os.path.exists(tarball_path):
                for component in os.listdir(tarball_path):
                    component_dir = tarball_path + os.sep + component
                    versions = utils.version_sort(os.listdir(component_dir))
                    for j in versions[-self.sync_num_builds:]:
                        dirs.append(os.path.join("monobuild/www/tarball_logs", i, component, j))
        # conduct a dirs string up to the length of the max arg length, and run
        # rsync for each of those blocks (what's the real maximum?)
        while len(dirs):
            dir_string = ""
            counter = 0
            for i in dirs:
                # +1 is for the space char
                if len(i) + 1 + len(dir_string) < self.sync_max_arg_len:
                    dir_string += " %s" % i
                else:
                    break
                counter += 1
            # Remove counter elements from dirs
            # NOTE(review): if a single path ever reaches sync_max_arg_len,
            # counter stays 0 and this inner loop never terminates — verify
            # sync_max_arg_len is always comfortably larger than any path.
            dirs = dirs[counter:]
            # sync_log.log(" *** Syncing ***\n")
            # For some reason the --delete option crashes when running the second time to go-mono.com and mono.ximian.com ... ?
            # rsync all files over, and don't include the builds... just logs and info.xml
            command = (
                'cd %s; rsync -avzR -e "ssh %s" --exclude "files/downloads" --exclude "files/*.tar.*" --exclude "files/steps/*/*.tar.*" %s %s:%s'
                % (config.release_repo_root, config.ssh_options, dir_string, self.sync_host, self.sync_target_dir)
            )
            # sync_log.log(command + "\n")
            status, output = utils.launch_process(command, print_output=0, output_timeout=600)
            # sync_log.log(output)
            if status:
                sync_log.log("Error running rsync: " + output)
        # sync_log.log(" *** sync Sleeping ***\n")
        time.sleep(self.sync_sleep_time)
    sync_log.log("sync thread shutting down...\n")
#!/usr/bin/python import sys sys.path.append('../pyutils') import utils #numbers = [1,2,3,4,5,6,100000,99900] numbers = ['1','2','3','4','5','6','100000','99900', '1.2.5.6'] numbers = ['1','2','3','4','5','6','100000','99900', '1.2.5.6'] numbers = range(0, 10000) numbers = map(str, numbers) print utils.version_sort(numbers) #utils.version_sort_native(numbers)
def run(self):
    """Sync-thread body: push recent build/tarball logs to the sync host.

    Loops until cancelled(): collects the newest sync_num_builds entries
    per branch/distro/component (plus tarball_map and config.py), batches
    the paths under sync_max_arg_len, rsyncs each batch, then sleeps
    sync_sleep_time before the next pass.
    """
    sync_log.log("sync thread starting...\n")
    while not self.cancelled():
        # Presumably refreshes the datastore each pass — TODO confirm.
        self.load_info()
        # Must base these dirs off 'trunk/release'
        dirs = []
        # Add tarball_map
        dirs += ['packaging/tarball_map']
        # Add config
        dirs += ['pyutils/config.py']
        #sync_log.log(" *** Gathering dirs ***\n")
        # Gather dirs to synchronize
        for i in ['HEAD', 'RELEASE']:
            i_dir = config.build_info_dir + os.sep + i
            if os.path.exists(i_dir):
                for distro in os.listdir(i_dir):
                    distro_dir = i_dir + os.sep + distro
                    if os.path.exists(distro_dir):
                        for component in os.listdir(distro_dir):
                            # Get the last 'num_builds' number of elements from the list
                            versions = build.get_versions(
                                i, distro, component)[-self.sync_num_builds:]
                            for j in versions:
                                dirs.append(
                                    os.path.join('monobuild/www/builds', i,
                                                 distro, component, j))
            # Grab latest num_builds for tarball log files as well
            tarball_path = os.path.join(config.build_info_dir, '..',
                                        'tarball_logs', i)
            if os.path.exists(tarball_path):
                for component in os.listdir(tarball_path):
                    component_dir = tarball_path + os.sep + component
                    versions = utils.version_sort(
                        os.listdir(component_dir))
                    for j in versions[-self.sync_num_builds:]:
                        dirs.append(
                            os.path.join('monobuild/www/tarball_logs', i,
                                         component, j))
        # conduct a dirs string up to the length of the max arg length, and run
        # rsync for each of those blocks (what's the real maximum?)
        while len(dirs):
            dir_string = ""
            counter = 0
            for i in dirs:
                # +1 is for the space char
                if len(i) + 1 + len(dir_string) < self.sync_max_arg_len:
                    dir_string += " %s" % i
                else:
                    break
                counter += 1
            # Remove counter elements from dirs
            # NOTE(review): a single path >= sync_max_arg_len would leave
            # counter at 0, so dirs never shrinks — confirm the limit is
            # always larger than any individual path.
            dirs = dirs[counter:]
            #sync_log.log(" *** Syncing ***\n")
            # For some reason the --delete option crashes when running the second time to go-mono.com and mono.ximian.com ... ?
            # rsync all files over, and don't include the builds... just logs and info.xml
            command = 'cd %s; rsync -avzR -e "ssh %s" --exclude "files/downloads" --exclude "files/*.tar.*" --exclude "files/steps/*/*.tar.*" %s %s:%s' % (
                config.release_repo_root, config.ssh_options, dir_string,
                self.sync_host, self.sync_target_dir)
            #sync_log.log(command + "\n")
            status, output = utils.launch_process(command,
                                                  print_output=0,
                                                  output_timeout=600)
            #sync_log.log(output)
            if status:
                sync_log.log("Error running rsync: " + output)
        #sync_log.log(" *** sync Sleeping ***\n")
        time.sleep(self.sync_sleep_time)
    sync_log.log("sync thread shutting down...\n")
'macosx/output/[[version]]/macos-10-universal', 'md-macosx/output/[[version]]/md-macos-10', ]: #if skip_installers: continue try: _version = archive_version topdir = "archive" if dir.find('md') != -1: _version = md_version topdir = "monodevelop" _dir = os.path.join(dir.replace('[[version]]', _version),'*') candidates = glob.glob(_dir) latest = utils.version_sort(candidates).pop() #installer_dirs.append(latest) cwd = os.getcwd() splitter = os.sep + _version + os.sep (prefix, sync_dir) = latest.split(splitter) os.chdir(prefix) print "Syncing: %s" % os.path.join(_version,sync_dir) cmd ='rsync -avzR -e ssh %s %s/%s' % (os.path.join(_version,sync_dir), dest, topdir) #print cmd status, output = utils.launch_process(cmd) os.chdir(cwd) except Exception, e: print "******* ERROR syncing: " + dir