def setup_linux_x86 (logger, name, x86_cross):
    '''Hack for using 32 bit compiler on linux-64.

    Use linux-x86 cross compiler to compile non-64-bit-clean packages
    such as nsis and odcctools.  A plain 32 bit compiler could also be
    used, but we do not have such a beast.

    Make sure to have 32-bit compatibility installed:

        apt-get install ia32-libs
    '''
    x86_bindir = x86_cross + '/bin'
    x86_cross_bin = x86_cross + '/i686-linux' + '/bin'
    compiler = x86_bindir + '/i686-linux-gcc'
    if not os.path.exists (compiler):
        printf ('error: cannot find 32 bit compiler: %(compiler)s\n' % locals ())
        raise Exception ('Package %(name)s depends on target/linux-x86.' % locals ())
    if os.system ('''echo 'int main () { return 0; }' > 32bit.c && %(compiler)s -o 32bit 32bit.c && ./32bit''' % locals ()):
        printf ('error: cannot run 32 bit executable: 32bit\n')
        raise Exception ('Package %(name)s depends on 32 bit libraries' % locals ())
    os.system ('rm -f 32bit 32bit.c')
    def check_link (src, dest):
        dest = x86_cross_bin + '/' + dest
        if not os.path.exists (dest):
            # duh, must chdir for relative link
            #src = '../../bin/i686-linux-' + src
            src = x86_bindir + '/i686-linux-' + src
            os.link (src, dest)
    check_link ('cpp', 'cpp')
    check_link ('gcc', 'cc')
    check_link ('g++', 'c++')
    check_link ('gcc', 'gcc')
    check_link ('g++', 'g++')

def get_installer (settings, *arguments):
    installer_class = {
        # TODO: ipkg/dpkg
        'debian' : Shar,
        'debian-arm' : Shar,
        'debian-mipsel' : Shar,
        'darwin-ppc' : DarwinBundle,
        'darwin-x86' : DarwinBundle,
        'freebsd-x86' : Shar,
        'freebsd4-x86' : Shar,
        'freebsd6-x86' : Shar,
        'freebsd-64' : Shar,
        'linux-arm-softfloat' : Shar,
        'linux-arm-vfp' : Linux_installer,
        'linux-x86' : Shar,
        'linux-64' : Shar,
        'linux-ppc' : Shar,
        'mingw' : Nsis,
        # 'mingw' : MingwRoot,
    }
    ctor = installer_class[settings.platform]
    printf ('Installer:', ctor)
    installer = ctor (settings, *arguments)
    return installer

def create_local_web_dir (options, source):
    if not os.path.isdir (options.unpack_dir):
        system ('mkdir -p ' + options.unpack_dir)
    printf ('creating web root in', options.unpack_dir)
    os.chdir (options.unpack_dir)
    dir = 'v%s' % '.'.join (options.version)
    system ('rm -rf %s' % dir)
    system ('mkdir -p %s' % dir)
    os.chdir (dir)
    system ('rsync -Wa %s/ . ' % source)
    if options.dry_run:
        return
    printf ('Instrumenting for Google Analytics')
    for f in ['Documentation/web/index.html',
              'Documentation/changes/index.html',
              'Documentation/notation/index.html',
              'Documentation/internals/index.html',
              'Documentation/music-glossary/index.html',
              'Documentation/learning/index.html',
              'input/regression/collated-files.html']:
        do_urchin (f)

def get_constant_substitution_dict (self):
    d = {}
    if self._parent:
        d = self._parent.get_substitution_dict ()
        d = d.copy ()
    members = inspect.getmembers (self)
    member_substs = {}
    for (name, method) in filter (is_subst_method, members):
        val = method ()
        self.__dict__[name] = ConstantCall (val)
        member_substs[name] = val
        if type (val) != str:
            message = ('[' + self.name () + '] non string value: '
                       + str (val) + ' for subst_method: ' + name)
            printf (message)
            raise NonStringExpansion (message)
    string_vars = dict ((k, v) for (k, v) in members if type (v) == str)
    d.update (string_vars)
    d.update (member_substs)
    try:
        d = recurse_substitutions (d)
    except KeyError:
        printf ('self:', self)
        raise
    return d

def get_url_versions (url):
    printf (url)
    opener = urllib.URLopener ()
    index = opener.open (url).read ()
    versions = []
    def note_version (m):
        name = m.group (2)
        version = tuple (map (int, m.group (3).split ('.')))
        build = 0
        build_url = url + re.sub ("(HREF|href)=", '', m.group (0))
        build_url = build_url.replace ('"', "")
        # disregard buildnumber for src tarball.
        if m.group (4):
            build = int (m.group (5))
        versions.append ((name, version, build, build_url))
        return ''
    # [^0-9] is to force that the version number is not swallowed by the
    # name.  Check this for cygwin libfoo3 packages
    re.sub (r'(HREF|href)="(.*[^0-9])-([0-9.]+)(-([0-9]+))?\.[0-9a-z-]+\.[.0-9a-z-]+"',
            note_version, index)
    return versions

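# A minimal, self-contained sketch of what the href-scraping regex above
# extracts; the index line and package name here are hypothetical, not taken
# from a real download index.
def _demo_note_version ():
    import re
    index = '<a href="foo-1.2.3-4.tar.gz">foo-1.2.3-4.tar.gz</a>'
    m = re.search (r'(HREF|href)="(.*[^0-9])-([0-9.]+)(-([0-9]+))?\.[0-9a-z-]+\.[.0-9a-z-]+"',
                   index)
    # name 'foo', version (1, 2, 3), build 4
    return (m.group (2), tuple (map (int, m.group (3).split ('.'))), int (m.group (5)))
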
def __init__(self, dir, source="", branch="", module="", revision=""):
    Repository.__init__(self, dir, source)
    self.checksums = {}
    self.source = source
    if source:
        # urllib is not good at splitting ssh urls
        u = misc.Url(source)
        self.url_host = u.host
        self.url_dir = u.dir.replace("~", "_")
        self.source = self.source.replace("git+file://" + u.host, "")
        # ``I don't handle protocol git+http/http+git''
        self.source = self.source.replace("git+http://", "http://")
        self.source = self.source.replace("http+git://", "http://")
    else:
        # repository proxy determined git vcs from dir
        printf("FIXME: get url from .git dir info")
        assert False
    self.branch = self.filter_branch_arg(branch)
    self.revision = revision
    if self.revision == "" and self.branch == "":
        # note that HEAD doesn't really exist as a branch name.
        self.branch = "master"
    # We cannot do a shallow download if we're not tracking
    # we have to get at least enough history to include the
    # fixed committish ... :-)
    # no shallow# self.shallow = self.is_tracking ()
    self.shallow = False
    assert self.url_host
    assert self.url_dir

def libtool_update_preserve_vars (logger, libtool, vars, file):
    printf ('preserve: ', file)
    old = open (file).read ()
    open (file + '.old', 'w').write (old)
    libtool_update (logger, libtool, file)
    new = open (file).read ()
    open (file + '.new', 'w').write (new)
    def subst_vars (o, n):
        for v in vars:
            v_re = '(?m)^%(v)s="([^"]*)"' % locals ()
            orig_m = re.search (v_re, o)
            if not orig_m:
                # some generated libtool thingies only have the first part
                # but vars in the second part must always be substituted
                printf ('from first part')
                orig_m = re.search (v_re, old)
            if orig_m:
                b = n
                n = re.sub (v_re, orig_m.group (0), n)
                printf ('replace:', orig_m.group (0))
                if b == n:
                    printf ('NODIFF:', v_re)
            else:
                printf ('not found:', v_re)
        return n
    # libtool comes in two parts which define the same/similar variables
    marker = '\nexit '
    n1 = subst_vars (old[:old.find (marker)], new[:new.find (marker)])
    n2 = subst_vars (old[old.find (marker):], new[new.find (marker):])
    open (file, 'w').write (n1 + n2)
    loggedos.chmod (logger, file, octal.o755)

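# Rough, hypothetical illustration of the variable-preserving regex used by
# subst_vars above: the value found in the old libtool script is copied over
# the freshly generated one.  Variable name and values are made up.
def _demo_preserve_var ():
    import re
    v = 'CC'
    v_re = '(?m)^%(v)s="([^"]*)"' % locals ()
    old = 'CC="i686-linux-gcc"\n'
    new = 'CC="gcc"\n'
    # yields 'CC="i686-linux-gcc"\n'
    return re.sub (v_re, re.search (v_re, old).group (0), new)
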
def rewire_mach_o_object_executable_path (self, name):
    orig_libs = ['/usr/lib']
    libs = self.get_libaries (name)
    subs = []
    for f in libs:
        # FIXME: I do not understand this comment
        ## ignore self.
        self.runner.action (os.path.split (f)[1] + ' '
                            + os.path.split (name)[1] + '\n')
        if os.path.split (f)[1] == os.path.split (name)[1]:
            continue
        for o in orig_libs:
            if o in f:
                newpath = re.sub (o, '@executable_path/../lib', f)
                subs.append ((f, newpath))
            elif self.expand ('%(targetdir)s') in f:
                must_skip = [s for s in self.skip if s in f]
                if not must_skip:
                    if 'libgcc_s.1.dylib' in f:
                        newpath = '@executable_path/../lib/libgcc_s.1.dylib'
                        subs.append ((f, newpath))
                    else:
                        raise Exception ('found targetdir in linkage[%(name)s]: %(f)s'
                                         % locals ())
                printf ('FIXME: skipping[%(name)s]: %(f)s, hope this is ok'
                        % locals ())
    self.rewire_mach_o_object (name, subs)

def rewire_mach_o_object_executable_path(self, name):
    orig_libs = ['/usr/lib']
    libs = self.get_libaries(name)
    subs = []
    for f in libs:
        # FIXME: I do not understand this comment
        ## ignore self.
        self.runner.action(
            os.path.split(f)[1] + ' ' + os.path.split(name)[1] + '\n')
        if os.path.split(f)[1] == os.path.split(name)[1]:
            continue
        for o in orig_libs:
            if o in f:
                newpath = re.sub(o, '@executable_path/../lib/', f)
                subs.append((f, newpath))
            elif self.expand('%(targetdir)s') in f:
                must_skip = [s for s in self.skip if s in f]
                if not must_skip:
                    raise Exception(
                        'found targetdir in linkage[%(name)s]: %(f)s'
                        % locals())
                printf(
                    'FIXME: skipping[%(name)s]: %(f)s, hope this is ok'
                    % locals())
    self.rewire_mach_o_object(name, subs)

def install_fonts (self):
    printf ('FIXME: deferred workaround')
    # deferred_dump (self.font_source.update_workdir (fontdir))
    fontdir = self.expand ('%(install_prefix)s/share/ghostscript/fonts')
    def defer (logger):
        self.fonts_source.update_workdir (fontdir)
    self.func (defer)

def get_debian_package (settings, description):
    s = description[:description.find ('\nDescription')]
    d = dict ([line.split (': ', 1)
               for line in list (map (str.strip, s.split ('\n')))])
    # FIXME: should blacklist toplevel bin/gub argument instead of lilypond
    blacklist = [
        'binutils', 'cpp', 'gcc-3.3', 'cpp-3.3', 'gcc', 'gcc-3.4',
        'libgcc1', 'libgcc1-3.4', 'lilypond', 'libstdc++6', 'libstdc++-dev',
        'libtool', 'perl', 'perl-modules', 'perl-base',
        # 'pkg-config',
    ]
    if d['Package'] in blacklist:
        d['Package'] += '::blacklisted'
    package_class = new.classobj (d['Package'], (build.BinaryBuild,), {})
    from gub import repository
    source = repository.DebianPackage (settings.downloads + '/Debian/' + settings.debian_branch,
                                       os.path.join (mirror, d['Filename']),
                                       d['Version'])
    package = package_class (settings, source)
    package.name_dependencies = []
    if 'Depends' in d:
        deps = list (map (str.strip,
                          re.sub (r'\([^\)]*\)', '', d['Depends']).split (', ')))
        # FIXME: BARF, ignore choices
        deps = [x for x in deps if x.find ('|') == -1]
        # FIXME: how to handle Provides: ?
        # FIXME: BARF, fixup libc Provides
        deps = [re.sub ('libc($|-)', 'libc6\\1', x) for x in deps]
        deps = [re.sub ('liba52-dev', 'liba52-0.7.4-dev', x) for x in deps]
        deps = [re.sub ('libpng12-0-dev', 'libpng12-dev', x) for x in deps]
        # FIXME: ugh, skip some
        deps = [x for x in deps if x not in blacklist]
        package.name_dependencies = deps
    def get_build_dependencies (self):
        return self.name_dependencies
    package.get_build_dependencies = misc.bind_method (get_build_dependencies, package)
    pkg_name = d['Package']
    @context.subst_method
    def name (self):
        return pkg_name
    message = 'FIXME: enter .name into package_class; see cygwin.py'
    printf (message)
    raise Exception (message)
    package.name = misc.bind_method (name, package)
    context.subst_method (package.name)
    return package

def main ():
    cli_parser = get_cli_parser ()
    (options, files) = cli_parser.parse_args ()
    if not options.platform or files:
        raise Exception ('barf')
        sys.exit (2)
    settings = Settings (options.platform)
    printf ('\n'.join (as_variables (settings)))

def execute_deferred_commands (self):
    commands = self._deferred_commands
    self._deferred_commands = []
    for cmd in commands:
        cmd.execute (self.logger)
    if self._deferred_commands:
        printf ('*** deferred leftovers:', self._deferred_commands)
    assert self._deferred_commands == list ()

def get_cygwin_packages (settings, package_file, skip=[]):
    dist = 'curr'
    dists = {'test': [], 'curr': [], 'prev' : []}
    chunks = open (package_file).read ().split ('\n\n@ ')
    for i in chunks[1:]:
        lines = i.split ('\n')
        name = lines[0].strip ()
        #name = name.lower ()
        name = name[0].lower () + name[1:]
        # URG, x11 introduces upcase *and* underscore in package name
        #name = name.replace ('libx11-6', 'libX11_6')
        #name = name.replace ('libx11_6', 'libX11_6')
        #name = name.replace ('libx11', 'libX11')
        #name = name.replace ('libxt', 'libXt')
        packages = dists['curr']
        records = {
            'sdesc': name,
            'version': '0-0',
            'install': 'urg 0 0',
        }
        j = 1
        while j < len (lines) and lines[j].strip ():
            if lines[j][0] == '#':
                j = j + 1
                continue
            elif lines[j][0] == '[':
                packages.append (get_cygwin_package (settings, name, records.copy (), skip))
                packages = dists[lines[j][1:5]]
                j = j + 1
                continue
            try:
                key, value = [x.strip () for x in lines[j].split (': ', 1)]
            except:
                printf (lines[j], package_file)
                raise Exception ('URG')
            if (value.startswith ('"')
                and value.find ('"', 1) == -1):
                while 1:
                    j = j + 1
                    value += '\n' + lines[j]
                    if lines[j].find ('"') != -1:
                        break
            records[key] = value
            j = j + 1
        packages.append (get_cygwin_package (settings, name, records, skip))
    # debug
    names = [p.name () for p in dists[dist]]
    names.sort ()
    return dists[dist]

def get_cygwin_packages(settings, package_file, skip=[]):
    dist = 'curr'
    dists = {'test': [], 'curr': [], 'prev': []}
    chunks = open(package_file).read().split('\n\n@ ')
    for i in chunks[1:]:
        lines = i.split('\n')
        name = lines[0].strip()
        #name = name.lower ()
        name = name[0].lower() + name[1:]
        # URG, x11 introduces upcase *and* underscore in package name
        #name = name.replace ('libx11-6', 'libX11_6')
        #name = name.replace ('libx11_6', 'libX11_6')
        #name = name.replace ('libx11', 'libX11')
        #name = name.replace ('libxt', 'libXt')
        packages = dists['curr']
        records = {
            'sdesc': name,
            'version': '0-0',
            'install': 'urg-no-install-key-for-%(name)s 0 0' % locals(),
        }
        j = 1
        while j < len(lines) and lines[j].strip():
            if lines[j][0] == '#':
                j = j + 1
                continue
            elif lines[j][0] == '[':
                packages.append(
                    get_cygwin_package(settings, name, records.copy(), skip))
                packages = dists[lines[j][1:5]]
                j = j + 1
                continue
            try:
                key, value = [x.strip() for x in lines[j].split(': ', 1)]
            except:
                printf(lines[j], package_file)
                raise Exception('URG')
            p = value.find('"') + 1
            if p and value.find('"', p) == -1:
                while 1:
                    j = j + 1
                    value += '\n' + lines[j]
                    if lines[j].find('"') != -1:
                        break
            records[key] = value
            j = j + 1
        packages.append(get_cygwin_package(settings, name, records, skip))
    # debug
    names = [p.name() for p in dists[dist]]
    names.sort()
    return dists[dist]

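# Hypothetical sketch of the setup.ini-style record splitting that both
# variants of get_cygwin_packages above rely on: chunks are separated by
# '\n\n@ ' and the first line of each chunk is the package name.  The sample
# input is made up.
def _demo_split_setup_ini():
    ini = 'setup-timestamp: 0\n\n@ foo\nsdesc: "Foo tool"\nversion: 1.0-1\n\n@ bar\nversion: 2.0-1\n'
    chunks = ini.split('\n\n@ ')
    # ['foo', 'bar']
    return [c.split('\n')[0].strip() for c in chunks[1:]]
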
def expand (self, s, env={}):
    d = self.get_substitution_dict (env)
    try:
        e = s % d
    except:
        t, v, b = sys.exc_info ()
        if t == KeyError or t == ValueError:
            printf ('format string: >>>' + s + '<<<')
            printf ('self:', self)
        raise
    return e

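# Hypothetical illustration of the '%(...)s' dictionary expansion that
# expand above wraps: a missing key raises KeyError, which the method
# reports before re-raising.  The keys and values here are made up.
def _demo_percent_expansion ():
    d = {'prefix': '/usr'}
    ok = '%(prefix)s/bin' % d          # '/usr/bin'
    try:
        '%(missing)s' % d
    except KeyError:
        ok += ' (missing key detected)'
    return ok
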
def main():
    cli_parser = get_cli_parser()
    (options, files) = cli_parser.parse_args()
    if 0:
        pass
    elif options.test:
        test()
    elif options.full_branch_name:
        repo = get_repository_proxy(".", files[0])
        printf(repo.full_branch_name())
    elif options.branch_dir:
        repo = get_repository_proxy(".", files[0])
        printf(repo.branch_dir().replace("//", "/"))

def topologically_sorted_one(todo, done, dependency_getter,
                             recurse_stop_predicate=None):
    sorted = []
    if todo in done:
        return sorted
    done[todo] = 1

    def type_equal(a, b):
        return ((type(a) == type(b))
                or inspect.isclass(type(a)) == inspect.isclass(type(b)))

    deps = dependency_getter(todo)
    for d in deps:
        if recurse_stop_predicate and recurse_stop_predicate(d):
            continue
        if not type_equal(d, todo):
            printf(type(d), '!=', type(todo))
            printf(d.__class__, todo.__class__)
            printf(d.__dict__, todo.__dict__)
            printf(inspect.isclass(type(d)), inspect.isclass(type(todo)))
            assert type_equal(d, todo)
        sorted += topologically_sorted_one(
            d, done, dependency_getter,
            recurse_stop_predicate=recurse_stop_predicate)
    sorted.append(todo)
    return sorted

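# Minimal, hypothetical usage sketch of the sorter above: dependencies are
# plain strings, so the type_equal guard is trivially satisfied and every
# dependency ends up before its dependents.  The dependency graph is made up.
def _demo_topological_sort():
    deps = {'app': ['lib', 'tool'], 'lib': ['libc'], 'tool': ['libc'], 'libc': []}
    # ['libc', 'lib', 'tool', 'app']
    return topologically_sorted_one('app', {}, lambda name: deps[name])
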
def live_hosts (hosts, port=3633):
    live = []
    for h in hosts:
        try:
            t = telnetlib.Telnet (h, port)
            t.close ()
        except socket.error:
            continue
        live.append ('%s:%d' % (h, port))
    if live:
        printf ('DISTCC live hosts: ', live)
    return live

def spec_conflict_resolution(self, spec, pkg):
    pkg_name = pkg.name()
    install_candidate = pkg
    subname = ''
    if spec.name() != pkg_name:
        subname = pkg_name.split('-')[-1]
    manager = self.manager(spec.platform())
    if subname in spec.get_conflict_dict():
        for c in spec.get_conflict_dict()[subname]:
            if manager.is_installed(c):
                printf(' %(c)s conflicts with %(pkg_name)s' % locals())
                conflict_source = manager.source_name(c)
                # FIXME: implicit provides: foo-* provides foo-core,
                # should implement explicit provides
                if conflict_source + '-core' == pkg_name:
                    printf(' non-core %(conflict_source)s already installed'
                           % locals())
                    printf(' skipping request to install %(pkg_name)s'
                           % locals())
                    install_candidate = None
                    continue
                printf(' removing %(c)s' % locals())
                manager.uninstall_package(c)
    return install_candidate

def is_class_subst_method (name, cls):
    try:
        if name in cls.__dict__:
            classmethod (cls.__dict__[name])
    except:
        printf ('self:', cls)
        printf ('name:', name)
        raise
    if (name in cls.__dict__
        and type (cls.__dict__[name]) != type (_C.__init__)
        and classmethod (cls.__dict__[name])
        and 'substitute_me' in cls.__dict__[name].__dict__):
        return True
    return False

def do_urchin (filename):
    s = open (filename).read ()
    if re.search ('UA-68969', s):
        return
    printf (filename)
    urchin_track = """<script src="http://www.google-analytics.com/urchin.js" type="text/javascript">
</script>
<script type="text/javascript">
_uacct = "UA-68969-1";
urchinTracker();
</script>"""
    s = re.sub ("(?i)</head>", urchin_track + '\n</head>', s)
    open (filename, 'w').write (s)

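# Hypothetical sketch of the case-insensitive </head> substitution used by
# do_urchin above: the tracking snippet (here just 'TRACKER') is spliced in
# right before the closing head tag.  The HTML sample is made up.
def _demo_head_substitution ():
    import re
    html = '<html><HEAD><title>t</title></HEAD><body/></html>'
    # '...<title>t</title>TRACKER\n</head><body/>...'
    return re.sub ("(?i)</head>", 'TRACKER' + '\n</head>', html)
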
def test():
    date = '2007-09-14 11:39:21 +0200'
    printf(parse(date))
    printf(format(parse(date)))
    date = '2007-09-14 11:39:21'
    printf(parse(date))
    printf(format(parse(date)))

def test():
    printf(forall(x for x in [1, 1]))
    printf(
        dissect_url(
            'git://anongit.freedesktop.org/git/fontconfig?revision=1234'))
    printf(dissect_url('http://lilypond.org/foo-123.tar.gz&patch=a&patch=b'))
    printf(
        rewrite_url('ftp://foo.com/pub/foo/foo-123.tar.gz',
                    'http://lilypond.org/downloads'))

def main():
    (opts, args) = parse_options()
    lock_file_name = args[0]
    cmd = args[1]
    ## need to include binary too.
    args = args[1:]
    try:
        stat = run_command_with_lock(lock_file_name, cmd, args)
        sys.exit(stat)
    except locker.LockedError:
        printf("Can't acquire lock %s" % lock_file_name)
        if opts.skip:
            sys.exit(0)
        else:
            sys.exit(1)

def set_dict (self, dict, sub_name):
    self._dict = dict.copy ()
    self._dict['sub_name'] = sub_name
    if sub_name:
        sub_name = '-' + sub_name
    try:
        s = ('%(name)s' % dict) + sub_name
    except:
        printf ('NO NAME IN:', dict)
        raise
    self._dict['split_name'] = s
    self._dict['split_ball'] = ('%(packages)s/%(split_name)s%(ball_suffix)s.%(platform)s.gup') % self._dict
    self._dict['split_hdr'] = ('%(packages)s/%(split_name)s%(vc_branch_suffix)s.%(platform)s.hdr') % self._dict
    self._dict['conflicts_string'] = ';'.join (self._conflicts)
    self._dict['dependencies_string'] = ';'.join (self._dependencies)
    self._dict['source_name'] = self.name ()
    if sub_name:
        self._dict['source_name'] = self.name ()[:-len (sub_name)]

def recurse_substitutions (d):
    for (k, v) in list (d.items ()):
        if type (v) != str:
            del d[k]
            continue
        try:
            while v.index ('%(') >= 0:
                v = v % d
        except:
            t, vv, b = sys.exc_info ()
            if t == ValueError:
                pass
            elif t == KeyError or t == ValueError:
                printf ('variable: >>>' + k + '<<<')
                printf ('format string: >>>' + v + '<<<')
                raise
            else:
                raise
        d[k] = v
    return d

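# Hypothetical illustration of the recursive '%(...)s' expansion performed by
# recurse_substitutions above (assuming the enclosing module's sys import):
# values may refer to other keys and are expanded until no '%(' is left.
def _demo_recurse_substitutions ():
    d = {'prefix': '/usr', 'bindir': '%(prefix)s/bin', 'gcc': '%(bindir)s/gcc'}
    # {'prefix': '/usr', 'bindir': '/usr/bin', 'gcc': '/usr/bin/gcc'}
    return recurse_substitutions (d)
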
def download(self):
    if not self.have_client():
        # sorry, no can do [yet]
        return
    if not os.path.isdir(os.path.join(self.dir, "refs")):
        source = self.source
        dir = self.dir
        ### AARGH, GIT forces us to download the full history?  WTF?
        """invoking cd /home/janneke/vc/gub/downloads/ghostscript && git clone --depth 10 -l -s /home/janneke/vc/gub/downloads/ghostscript /home/janneke/vc/gub/target/mingw/src/ghostscript-0.0
Initialized empty Git repository in /home/janneke/vc/gub/target/mingw/src/ghostscript-0.0/.git/
fatal: attempt to fetch/clone from a shallow repository
fatal: The remote end hung up unexpectedly
"""
        if self.shallow:
            self.git("clone --depth 10 --bare %(source)s %(dir)s" % locals(),
                     dir=".")
        else:
            printf("GIT: FIXME: shallow branching broken? -- getting *whole* history...")
            self.git("clone --bare %(source)s %(dir)s" % locals(), dir=".")
    if self.branch and not (self.revision and self.is_downloaded()):
        self.git("fetch %(source)s %(branch)s:refs/heads/%(url_host)s/%(url_dir)s/%(branch)s"
                 % self.__dict__)
        self.checksums = {}

def main ():
    cli_parser = get_cli_parser ()
    (options, commands) = cli_parser.parse_args ()
    global host_spec
    host_spec = options.upload_host
    repo = get_repository (options)
    version_dict = misc.grok_sh_variables_str (repo.read_file ('VERSION'))
    version_tup = tuple (map (version_dict.get,
                              ('MAJOR_VERSION', 'MINOR_VERSION', 'PATCH_LEVEL')))
    version_tup = tuple (map (int, version_tup))
    version_db = versiondb.VersionDataBase (options.version_db)
    cmds = upload_binaries (repo, version_tup, version_db)
    if options.execute:
        cmds = [c for c in cmds if 'test-binary' not in c]
        for cmd in cmds:
            print (cmd)
            system (cmd)
    else:
        printf ('\n\n')
        printf ('\n'.join (cmds))
        printf ('\n\n')

def get_binaries_from_url (self, url):
    package = os.path.basename (os.path.splitext (self.file_name)[0])
    for p in self.platforms:
        if p == 'source':
            continue
        u = '%(url)sbinaries/%(p)s/' % locals ()
        if p == 'cygwin':
            u += 'release/%(package)s/' % locals ()
        try:
            self._db[p] = get_url_versions (u)
        except:
            t, v, b = sys.exc_info ()
            if t == IOError:
                printf ('problem loading', u)
                sys.path.insert (0, 'gub')
                # FIXME: do want to be inside gub framework or not?
                printf (misc.exception_string (v))
                continue
            raise

def test_required (logger):
    if required:
        logger.write ('\n')
        printf ('********************************')
        printf ('Please install required packages')
        for i in required:
            printf ('%s: %s-%s or newer (found: %s %s)\n' % i)
        sys.exit (1)

def test_build(bin):
    if bin.find(':') < 0:
        bin = os.path.abspath(bin)
    base = os.path.split(bin)[1]
    platform = re.search('lilypond-[0-9.]+-[0-9]+.([a-z0-9-]+).*',
                         bin).group(1)
    viewer = 'evince'
    if not platform:
        printf('unknown platform for', bin)
        return
    ending_found = 0
    for e in ['.sh', '.zip', '.exe', 'tar.bz2']:
        ending_found = ending_found or bin.endswith(e)
    if not ending_found:
        printf('unknown extension for', base)
        return
    try:
        os.unlink('typography-demo.pdf')
    except:
        pass
    printf('testing platform %s' % platform)
    logdir = "log/"
    try:
        (uid, host, dir, test_file) = test_settings[platform]
    except KeyError:
        system('touch %(logdir)s/%(base)s.test.pdf' % locals())
        return
    if test_file == None:
        test_file = 'test-lily/typography-demo.ly'
    base_test_file = os.path.split(test_file)[1]
    base_test_file_stem = os.path.splitext(base_test_file)[0]
    system('ssh %(uid)s@%(host)s mkdir %(dir)s' % locals(),
           ignore_error=True)
    system('ssh %(uid)s@%(host)s rm %(dir)s/%(base_test_file_stem)s.*'
           % locals(), ignore_error=True)
    test_platform_script = 'test-%s-gub.sh' % platform_test_script_types.get(
        platform, platform)
    system('scp %(test_file)s test-lily/%(test_platform_script)s '
           ' %(bin)s '
           ' %(uid)s@%(host)s:%(dir)s/' % locals())
    system('ssh %(uid)s@%(host)s sh %(dir)s/%(test_platform_script)s %(dir)s %(base)s %(base_test_file)s'
           % locals())
    system('scp %(uid)s@%(host)s:%(dir)s/%(base_test_file_stem)s.pdf %(logdir)s/%(base)s.test.pdf'
           % locals())
    system('%(viewer)s %(logdir)s/%(base)s.test.pdf' % locals())

def url (self):
    if not self._url:
        self._url = self.build_class ().source
    if not self._url:
        logging.warning ('no source specified in class: '
                         + self.build_class ().__name__ + '\n')
    if not self._url:
        self._url = self.settings.dependency_url (self.name ())
    if not self._url:
        raise Exception ('No URL for: '
                         + misc.with_platform (self._name, self.settings.platform))
    if type (self._url) == str:
        try:
            self._url = self._url % self.settings.__dict__
        except:
            printf ('URL:', self._url)
            raise
    x, parameters = misc.dissect_url (self._url)
    if parameters.get ('patch'):
        self._cls.patches = parameters['patch']
    if parameters.get ('dependency'):
        self._cls.build_dependencies = parameters['dependency']
    return self._url

def compare_test_info (options):
    outputs = glob.glob (options.upload_dir + '/lilypond-*.test-output*')
    outputs += glob.glob (options.regtest_dir + '/lilypond-*.test-output*')
    current_version = tuple (map (int, options.version))
    current_tuple = (current_version, options.build)
    versions_found = []
    current_test_output = ''
    for f in outputs:
        m = re.search ('lilypond-([.0-9]+)-([0-9]+).test-output.tar.bz2', f)
        if not m:
            printf (f)
            assert 0
        version = list (map (int, m.group (1).split ('.')))
        build = int (m.group (2))
        tup = (version, build)
        if tup <= current_tuple:
            versions_found.append ((tup, f))
    versions_found.sort ()
    compare_test_tarballs (options, versions_found[-3:])

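# Hypothetical illustration of the (version, build) ordering that
# compare_test_info above relies on: Python compares the pairs
# lexicographically, so an older version, or a lower build of the same
# version, sorts before the current one.  The version numbers are made up.
def _demo_version_ordering ():
    current = ([2, 12, 3], 1)
    assert ([2, 12, 2], 5) <= current
    assert ([2, 12, 3], 0) <= current
    assert not (([2, 13, 0], 0) <= current)
    return current
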
def check_files(tarball, repo):
    error_found = False
    tarball = os.path.abspath(tarball)
    tarball_dirname = re.sub(r'\.tar.*', '', os.path.split(tarball)[1])
    dir = tempfile.mkdtemp()
    files = popen('cd %(dir)s && tar xzvf %(tarball)s' % locals()).readlines()
    files = [f.strip() for f in files]

    ## .ly files
    ly_files = [f for f in files if re.search(r'\.ly$', f)]
    ly_file_str = ' '.join(ly_files)
    no_version = popen(r"cd %(dir)s && grep '\\version' -L %(ly_file_str)s"
                       % locals()).readlines()
    if no_version:
        printf('Files without \\version: ')
        printf('\n'.join(no_version))
        error_found = True

    ## tarball <-> CVS
    file_dict = dict((f, 1) for f in files)
    entries = repo.all_files()
    exceptions = ['.cvsignore', 'stepmake/.cvsignore']
    for e in entries:
        filename = e
        if filename in exceptions:
            continue
        filename = os.path.join(tarball_dirname, filename)
        if filename not in file_dict:
            printf('file from VC not distributed: %s' % filename)
            error_found = True
    system('rm -rf %(dir)s' % locals())
    if error_found:
        raise Exception('dist error found')

def uninstall_package (self, name):
    gub_log.action ('uninstalling package: %s\n' % name)
    lst = self.package_installed_files (name)
    dirs = []
    files = []
    for i in lst:
        f = os.path.join (self.root, i)
        if os.path.islink (f):
            files.append (f)
        elif (not os.path.exists (f)
              and not self.is_distro):
            printf ('FileManager: uninstall: %s' % name)
            printf ('FileManager: no such file: %s' % f)
        elif os.path.isdir (f):
            dirs.append (f)
        else:
            files.append (f)
    for f in files:
        os.unlink (f)
    for d in reversed (dirs):
        try:
            os.rmdir (d)
        except OSError:
            gub_log.verbose ('warning: %(d)s not empty\n' % locals ())
    for f in lst:
        ## fixme (?) -- when is f == ''
        if not f or f.endswith ('/'):
            continue
        try:
            del self._file_package_db[f]
        except:
            printf ('db delete failing for ', f)
    del self._package_file_db[name]