def prep_simple_make(path=False, configure=False, alt_command=False):
    '''Prepare a dependency with a conventional configure/make build'''
    # optionally descend into a subdirectory given as a list of path components
    if path:
        _os.chdir(_os.path.sep.join(path))
    if configure:
        _subprocess.call(['./configure'])
    if alt_command:
        # alt_command is a single executable name run in place of make
        _subprocess.call([alt_command])
    else:
        _subprocess.call(['make'])
    # climb back up to the starting directory if we descended into one
    if path:
        _os.chdir(_os.path.sep.join([_os.path.pardir] * len(path)))
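# Hedged usage sketch (assumption, not part of the original module): a typical
# preparation step for a tool that builds with "./configure && make". The
# directory name 'example-tool-1.0' is illustrative only; wrapping the call in
# an unused function keeps importing this module free of side effects.
def _example_prep_configure_make():
    # descend into the unpacked source tree, run ./configure then make,
    # then return to the starting directory
    prep_simple_make(path=['example-tool-1.0'], configure=True)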
def get_git(name, description, source, url, commit, checksum, destination,
            preparation, checker):
    ''' Get a dependency from git '''
    if _os.path.realpath(_os.path.curdir) != destination:
        try:
            _os.chdir(destination)
        except OSError:
            _os.makedirs(destination)
            _os.chdir(destination)
    try:
        # clear any previous versions
        _shutil.rmtree(url.split('/')[-1].replace('.git', ''))
    except OSError:
        pass
    git_server = url.replace('https://', '').replace('http://', '').split('/')[0]
    print('Downloading {} via git from {} . . .'.format(name, git_server))
    _subprocess.call(['git', 'clone', url])
    _os.chdir(url.split('/')[-1].replace('.git', ''))
    _subprocess.call(['git', 'checkout', commit])
    # if the repo uses git submodules, set them to the revisions recorded for
    # this commit; does nothing for repos without submodules
    _subprocess.call(['git', 'submodule', 'update', '--init'])
    working_dir = _os.path.sep.join(
            [destination, url.split('/')[-1].replace('.git', '')])
    if preparation is not None:
        for do_this in preparation:
            if isinstance(do_this['arguments'], dict):
                do_this['function'](**do_this['arguments'])
            else:
                do_this['function'](*do_this['arguments'])
            # restore position in path if a prepare changed it
            if working_dir != _os.path.realpath(_os.path.curdir):
                _os.chdir(working_dir)
    _os.chdir(_os.path.pardir)
    _os.chdir(_os.path.pardir)
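# Hedged usage sketch (assumption, not part of the original module): the kind
# of dependency description get_git() expects, inferred from its parameters.
# The tool name, URL, commit and 'external_programs' destination are
# illustrative only; the call itself is commented out so importing this module
# has no side effects.
_example_git_dependency = {
    'name': 'example-tool',
    'description': 'hypothetical tool, for illustration only',
    'source': 'git',
    'url': 'https://github.com/example/example-tool.git',
    'commit': 'abc1234',
    'checksum': None,
    'destination': 'external_programs',
    # run ./configure && make inside the clone after checking out the commit
    'preparation': [{'function': prep_simple_make,
                     'arguments': {'configure': True}}],
    'checker': None,
}
#get_git(**_example_git_dependency)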
def get_download(name, description, source, url, commit, checksum, destination,
                 preparation, checker):
    ''' Download and unpack a dependency '''
    initialdir = _os.path.abspath(_os.curdir)
    try:
        _os.chdir(destination)
    except OSError:
        _os.makedirs(destination)
        _os.chdir(destination)
    if checksum:
        # checksum strings are '<algorithm>=<hex digest>'
        hasher_algorithm = checksum.split('=')[0]
        if hasher_algorithm == 'md5':
            hasher = _hashlib.md5()
        elif hasher_algorithm == 'sha1':
            hasher = _hashlib.sha1()
        elif hasher_algorithm == 'sha224':
            hasher = _hashlib.sha224()
        elif hasher_algorithm == 'sha256':
            hasher = _hashlib.sha256()
        elif hasher_algorithm == 'sha384':
            hasher = _hashlib.sha384()
        elif hasher_algorithm == 'sha512':
            hasher = _hashlib.sha512()
        else:
            print("{} checksums not implemented in Python's hashlib!".format(
                    hasher_algorithm))
    print('Downloading: %s' % url)
    req = _urllib2.urlopen(url)
    CHUNK = 16 * 1024 * 16
    data = _cStringIO.StringIO()
    c = 0
    for chunk in iter(lambda: req.read(CHUNK), ''):
        c += len(chunk)  # count the bytes actually received, not the chunk size
        print("{:,} bytes".format(c))
        data.write(chunk)
    print('Download complete . . .')
    data.seek(0)
    if checksum:
        buff = data.read(65536)
        while len(buff) > 0:
            hasher.update(buff)
            buff = data.read(65536)
        e = '. . . checksum fail!'
        assert hasher.hexdigest() == checksum.split('=')[1], e
        print('. . . checksum passed!')
        data.seek(0)
    if url[-6:] == 'tar.gz':
        archive = _tarfile.open(mode="r:gz", fileobj=data)
    elif url[-7:] == 'tar.bz2':
        archive = _tarfile.open(mode="r:bz2", fileobj=data)
    elif url[-4:] == '.zip':
        archive = _zipfile.ZipFile(data)
    if destination == 'local_packages':
        # extract as a PyPI python package
        release = url.split('/')[-1][:-7]
        print('Extracting {} to {}'.format(
                release, _os.path.sep.join([destination, name])))
        c = 0
        nostrip = {'pysam'}
        if name in nostrip:
            try:
                _shutil.rmtree(archive.getnames()[0])
            except OSError:
                pass
            #_shutil.rmtree(_os.path.sep.join([destination, archive.getnames()[0]]))
            # some python modules should not be stripped . . more complex install
            for member in archive.getmembers():
                if member.isreg():
                    archive.extract(member)
                    print(member.name)
                    c += 1
        else:
            # others don't need additional compilation
            check_path1 = '{}/{}'.format(release, name)
            for member in archive.getmembers():
                if member.isreg() and check_path1 in member.name:
                    # strip the leading release directory from the member path
                    member.name = _os.path.sep.join(
                            member.name.split(_os.path.sep)[1:])
                    archive.extract(member)
                    c += 1
        print('Extracted {} files'.format(c))
    else:
        # extract as a generic external program
        archive.extractall()
    if preparation:
        for do_this in preparation:
            if 'just_packages' in do_this['arguments']:
                # this is the only thing that differentiates this prepare()
                # from others that need some chdir <== this should be improved
                # see dep dict
                curdir = _os.path.abspath(_os.curdir)
                _os.chdir(_os.path.pardir)
                do_this['function'](*do_this['arguments']['package_list'])
                # return to previous folder
                _os.chdir(curdir)
            else:
                extracted_base_dir = archive.getnames()[0].split(_os.path.sep)[0]
                curdir = _os.path.abspath(_os.curdir)
                # go to installed folder
                _os.chdir(_os.path.sep.join([destination, extracted_base_dir]))
                do_this['function'](**do_this['arguments'])
                # return to previous folder
                _os.chdir(curdir)
    _os.chdir(initialdir)
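# Hedged usage sketch (assumption, not part of the original module): how
# get_download() might be driven for a gzipped tarball release verified with an
# MD5 checksum. The URL, checksum value and 'local_packages' destination are
# illustrative only; the call is commented out so importing has no side effects.
_example_download_dependency = {
    'name': 'examplepkg',
    'description': 'hypothetical PyPI-style package, for illustration only',
    'source': 'download',
    'url': 'https://example.org/packages/examplepkg-1.0.0.tar.gz',
    'commit': None,
    # '<algorithm>=<hex digest>', matching the parsing in get_download() above
    'checksum': 'md5=0123456789abcdef0123456789abcdef',
    'destination': 'local_packages',
    'preparation': None,
    'checker': None,
}
#get_download(**_example_download_dependency)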