Ejemplo n.º 1
0
 def create_repository(self, github_repo_name):
     """Create *github_repo_name* in the organization with wiki and issues
     disabled and downloads enabled; report success or pre-existence."""
     success_msg = "Repository " + github_repo_name + " created"
     failure_msg = ("Cannot create repository '" + github_repo_name +
                    "' since it already exists")
     utility.execute(
         lambda: self.github_organization.create_repo(
             github_repo_name, has_wiki=False, has_issues=False,
             has_downloads=True),
         success_msg,
         failure_msg)
Ejemplo n.º 2
0
def main():
    t0 = time.time()
    sherlock, sh_length = pull_sherlock_ebook()
    pull_time = time.time() - t0
    print "Length of Sherlock Holmes Text is " + str(sh_length) + " characters"
    print "[" + str(pull_time) + 's Elapsed]'

    zipped_sherlock = zipped(sherlock)
    zip_time = time.time()
    print "Length of Zipped Sherlock Holmes Text is " + str(
        len(zipped_sherlock)) + " characters"
    print "Zipped in " + str(time.time() - zip_time) + " seconds"

    write_time = time.time()
    utility.write('sherlock.txt', sherlock)
    print 'Wrote uncompressed Sherlock Holmes text to file sherlock.txt in ' +\
        str(time.time()-write_time) + ' seconds'

    encryption_time = time.time()
    encrypted_sherlock_data, key = crypt.encrypt_file('sherlock.txt', False)
    utility.write('encrypted_sherlock.txt', encrypted_sherlock_data)
    print "Encrypted uncompressed text and wrote to file encrypted_sherlock in " +\
          str(time.time() - encryption_time) + ' seconds'

    decrypt_time = time.time()
    utility.write('key_file', key)
    utility.execute('python crypt.py -d encrypted_sherlock.txt  key_file',
                    False)
    print "Decrypted uncompressed text and wrote to file encrypted_sherlock in " + \
          str(time.time() - decrypt_time) + ' seconds'
Ejemplo n.º 3
0
 def generate_new_account(self):
     """Create a new account for this host and return its address (40 hex
     chars, no 0x prefix), or None when account creation fails."""
     host_dir = self.host
     # Write the fixed password used to protect the new keystore.
     utility.execute("echo 123 > ./network-data/{}/pass.txt".format(host_dir))
     command = (
         '{} --datadir "./network-data/{}/data" --password "./network-data/{}/pass.txt" account new'
         .format(self.autonity_path, host_dir, host_dir))
     output = utility.execute(command)
     self.logger.debug(output)
     # The tool prints the new address as 0x<40 hex chars>; capture it.
     matches = re.findall(r'0x(.{40})', output[0], re.MULTILINE)
     if not matches:
         self.logger.error("Aborting - account creation failed")
         return None
     self.coin_base = matches[0]
     return self.coin_base
Ejemplo n.º 4
0
    def edit_repository(self, github_repo_name, description=NotSet,
                        homepage=NotSet, private=NotSet, has_issues=NotSet,
                        has_wiki=NotSet, default_branch=NotSet):
        """Edit the settings of *github_repo_name*; unspecified settings
        default to NotSet and are left unchanged."""
        repo = self.get_repository(github_repo_name)

        # Run every string-valued setting through the same transform, then
        # every boolean-valued one, before handing them to the API call.
        description = Migration.transform_string(description)
        homepage = Migration.transform_string(homepage)
        default_branch = Migration.transform_string(default_branch)
        private = Migration.transform_bool(private)
        has_issues = Migration.transform_bool(has_issues)
        has_wiki = Migration.transform_bool(has_wiki)

        success_msg = "Repository " + github_repo_name + " edited"
        failure_msg = ("Cannot edit repository '" + github_repo_name +
                       "' since it does not exist")
        utility.execute(
            lambda: repo.edit(github_repo_name, description, homepage, private,
                              has_issues, has_wiki, default_branch),
            success_msg,
            failure_msg)
Ejemplo n.º 5
0
def get_song(path_to_song, song, verbose):
    """Copy a song into the working directory, convert it to song.wav via
    ffmpeg, run acquire_song on it, and return the opened wave object.

    NOTE(review): the shell command is built by string concatenation, so a
    path with spaces or shell metacharacters will break the command (or
    allow injection) -- consider quoting, or subprocess with an arg list.
    """
    # Copy the source next to us, transcode to WAV, then clear the screen.
    cmd = 'cp ' + path_to_song + ' $PWD; ffmpeg -i ' + song + ' song.wav;clear'
    utility.execute(cmd)
    song_data = wave.open('song.wav')

    acquire_song('song.wav')

    # Remove the intermediate files. NOTE(review): song.wav is deleted while
    # song_data is still open -- later frame reads may fail; confirm callers
    # only use the header fields queried below.
    utility.execute('rm song.wav; rm deify.mp3')
    if verbose:
        os.system('clear')
        n_frames = song_data.getnframes()
        sample_size = song_data.getsampwidth()
        frame_rate = song_data.getframerate()
        print "N Frames: " + str(n_frames)
        print "Sample Size: " + str(sample_size)
        print "Frame Rate: " + str(frame_rate)

    return song_data
Ejemplo n.º 6
0
 def redirect_system_log(self, log_folder):
     """Unpack this host's log archive and mirror every line of the node
     log into our own logger, then delete the extracted copy."""
     try:
         zip_file = "{}/{}.tgz".format(log_folder, self.host)
         log_file = "{}/{}.log".format(log_folder, self.host)
         # Extract the archived log next to the archive itself.
         utility.execute("tar -zxvf {} --directory {}".format(
             zip_file, log_folder))
         self.logger.info(
             "\t\t\t **** node_%s logs started from here. **** \n\n\n",
             self.host)
         # Re-emit each log line tagged with our node index and host.
         with open(log_file, "r", encoding="utf-8") as fp:
             for line in fp:
                 self.logger.info("NODE_%s_%s: %s", self.index, self.host,
                                  line.encode("utf-8"))
         # Drop the extracted file once it has been mirrored.
         utility.execute("rm -f {}".format(log_file))
     except Exception as e:
         self.logger.error('Exception happens. %s', e)
Ejemplo n.º 7
0
def main():
    """Dispatch memory-inspection demos based on command-line flags.

    'buggy_0' runs p4x.py; 'overfloweth' passes argv[2] to PointToMe;
    'stacksmash' collects argv[2] stack pointers (optional depth in
    argv[3]) and inspects the process memory at each one.
    """
    if 'buggy_0' in sys.argv:
        print utility.execute('python p4x.py -read 420', True)
    if 'overfloweth' in sys.argv:
        print PointToMe(sys.argv[2], 'int')
    if 'stacksmash' in sys.argv:
        stack = []
        # Optional explicit stack depth as the 4th argument (string is fine:
        # it is only ever re-stringified into the shell command below).
        if len(sys.argv) == 4:
            stack_depth = sys.argv[3]
        else:
            stack_depth = 20
        # One probe per requested iteration; sma.sh prints the pointer,
        # which we strip of its '>' decoration.
        for i in range(int(sys.argv[2])):
            inner_pointer = utility.execute('sh sma.sh ' + str(stack_depth), True).pop().replace('>', '')
            stack.append(inner_pointer)
        for stack_pointer in stack:
            # Show the pointer, then dump and log the memory m3m reads there.
            print str(os.system('printf %d ' + stack_pointer)) + " : " + stack_pointer
            process_memory_location = utility.execute('./m3m '+stack_pointer, True)
            log_process_memory_location(process_memory_location, 'log.txt')
            get_stack_mem_pointer(process_memory_location)
            os.system('rm log.txt')
            print '-----------------------------------------'
Ejemplo n.º 8
0
    def generate_enode(self):
        """Derive and return this node's enode URL from its first keystore
        file, writing the decrypted private key to boot.key on the way."""
        host_dir = self.host

        # Decrypt the first keystore with the fixed password; keep the key
        # as hex with the leading '0x' stripped.
        keystores_dir = "./network-data/{}/data/keystore".format(host_dir)
        keystore_file_path = keystores_dir + "/" + os.listdir(keystores_dir)[0]
        with open(keystore_file_path) as keyfile:
            encrypted_key = keyfile.read()
        account_private_key = w3.eth.account.decrypt(encrypted_key,
                                                     "123").hex()[2:]
        with open("./network-data/{}/boot.key".format(host_dir), "w") as bootkey:
            bootkey.write(account_private_key)

        # Ask the bootnode tool for the public address of that key.
        cmd = "{} -writeaddress -nodekey ./network-data/{}/boot.key".format(
            self.bootnode_path, host_dir)
        pub_key = utility.execute(cmd)[0].rstrip()
        self.e_node = "enode://{}@{}:{}".format(pub_key, self.host,
                                                self.p2p_port)
        return self.e_node
Ejemplo n.º 9
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# travis-build.py - build script
# Written in 2015 by MNMLSTC
# To the extent possible under law, the author(s) have dedicated all copyright
# and related and neighboring rights to this software to the public domain
# worldwide. This software is distributed without any warranty. You should have
# received a copy of the CC0 Public Domain Dedication along with this software.
# If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.

from __future__ import print_function
from utility import LocateError
from utility import execute
from utility import which
from utility import exit

if __name__ == '__main__':
    # Build the 'check' target, bailing out cleanly if cmake is missing.
    build_cmd = ('--build', 'build', '--target', 'check')
    try:
        execute(which('cmake'), *build_cmd)
    except LocateError as e:
        exit(e)
Ejemplo n.º 10
0
# BUG FIX: EnvironError, LocateError and execute were used below but never
# imported, so this script died with NameError before doing any work (the
# sibling configure script imports all three).
from utility import EnvironError
from utility import LocateError
from utility import execute
from utility import getenv
from utility import which
from utility import exit
from os.path import join
from os import getcwd
from os import mkdir

if __name__ == '__main__':
    # Pull the required build configuration from the environment.
    try:
        build_type = getenv('BUILD_TYPE')
        version = getenv('PACKAGE')
        cxx = getenv('CXX')
    except EnvironError as e:
        exit(e)
    current = getcwd()
    build = join(current, 'build')
    # cmake cache arguments: compiler, build type and feature switches.
    arguments = [
        current, '-DCMAKE_CXX_COMPILER:STRING={}-{}'.format(cxx, version),
        '-DCMAKE_BUILD_TYPE:STRING={}'.format(build_type),
        '-DBUILD_WITH_LIBCXX:BOOL=OFF', '-DBUILD_TESTING:BOOL=ON'
    ]
    try:
        arguments.insert(0, which('cmake'))
    except LocateError as e:
        exit(e)
    try:
        mkdir('build')
    except OSError as e:
        exit(e)
    execute(*arguments, cwd=build)
Ejemplo n.º 11
0
 def generate_package(self):
     """Bundle this host's network data, the genesis file and the autonity
     binary into ./network-data/<host>.tgz."""
     host_dir = self.host
     # Ship the autonity binary alongside the per-host data first.
     utility.execute('cp {} ./network-data/'.format(self.autonity_path))
     tar_cmd = (
         'tar -zcvf ./network-data/{}.tgz ./network-data/{}/ ./network-data/genesis.json ./network-data/autonity'
         .format(host_dir, host_dir))
     utility.execute(tar_cmd)
Ejemplo n.º 12
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# travis-build.py - build script
# Written in 2015 by MNMLSTC
# To the extent possible under law, the author(s) have dedicated all copyright
# and related and neighboring rights to this software to the public domain
# worldwide. This software is distributed without any warranty. You should have
# received a copy of the CC0 Public Domain Dedication along with this software.
# If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.


from __future__ import print_function
from utility import LocateError
from utility import execute
from utility import which
from utility import exit

if __name__ == '__main__':
    # Run the cmake 'check' build target; report a missing cmake cleanly.
    check_args = ('--build', 'build', '--target', 'check')
    try:
        execute(which('cmake'), *check_args)
    except LocateError as e:
        exit(e)
Ejemplo n.º 13
0
 def delete_repository(self, github_repo_name):
     """Delete *github_repo_name*, reporting success or absence."""
     success_msg = "Repository " + github_repo_name + " deleted"
     failure_msg = ("Cannot delete repository '" + github_repo_name +
                    "' since it does not exist")
     utility.execute(
         lambda: self.get_repository(github_repo_name).delete(),
         success_msg,
         failure_msg)
Ejemplo n.º 14
0
from utility import EnvironError
from utility import LocateError
from utility import execute
from utility import getenv
from utility import which
from utility import exit
from os.path import join
from os import getcwd
from os import mkdir

if __name__ == '__main__':
    # Pull the required build configuration from the environment.
    try:
        build_type = getenv('BUILD_TYPE')
        version = getenv('PACKAGE')
        cxx = getenv('CXX')
    except EnvironError as e:
        exit(e)

    source_dir = getcwd()
    build_dir = join(source_dir, 'build')
    # cmake cache arguments: compiler, build type and feature switches.
    cmake_args = [
        source_dir,
        '-DCMAKE_CXX_COMPILER:STRING={}-{}'.format(cxx, version),
        '-DCMAKE_BUILD_TYPE:STRING={}'.format(build_type),
        '-DBUILD_WITH_LIBCXX:BOOL=OFF',
        '-DBUILD_TESTING:BOOL=ON',
    ]
    try:
        cmake_args.insert(0, which('cmake'))
    except LocateError as e:
        exit(e)
    try:
        mkdir('build')
    except OSError as e:
        exit(e)
    execute(*cmake_args, cwd=build_dir)
Ejemplo n.º 15
0
    # pkgbuild: package the staged root into a component .pkg with the
    # given identifier, install prefix and version.
    pkgbuild_args = [
        pkgbuild,
        '--quiet',
        '--root', join(PKG_ROOT, pkg_name),
        '--identifier', pkg_identifier,
        '--install-location', args.prefix,
        '--version', pkg_version,
        join(PKG_ROOT, '{}.pkg'.format(pkg_name))
    ]

    # productbuild: wrap the component package into a distributable
    # installer described by distribution.xml, with bundled resources.
    productbuild_args = [
        productbuild,
        '--quiet',
        '--distribution', distribution_xml,
        '--resource', resources_dir,
        '--package-path', PKG_ROOT,
        '--version', pkg_version,
        join(package_dir, '{}.pkg'.format(pkg_name))
    ]

    # Stage a clean package root, render the distribution template, then
    # run both build steps from inside the build directory.
    with pushd(build_dir):
        if exists(PKG_ROOT): rmtree(PKG_ROOT)
        makedirs(PKG_ROOT)
        makedirs(resources_dir)
        copytree(join(SRC_ROOT, pkg_name), join(PKG_ROOT, pkg_name))
        copy(license_file, resources_dir)
        with open(distribution_xml, 'w') as dist:
            dist.write(distribution.safe_substitute(data))
        execute(*pkgbuild_args)
        execute(*productbuild_args)
Ejemplo n.º 16
0
    def process(self, scene, out_path, args=None):
        """
        Process a Sentinel-1 scene archive into ARD output.

        Extracts *scene* into a tmp folder under *out_path*, reads its
        metadata, builds a GPT pipeline schema, and -- when the scene
        crosses the antimeridian -- processes it as overlapping east/west
        row-block subsets that are then mosaicked per polarisation.

        scene: path to the scene zip archive.
        out_path: output directory; a 'tmp' subfolder is (re)created here.
        args: optional argument overrides passed to getArguments.
        """

        # update arguments
        self.getArguments(args)
        tmp_path = os.path.join(out_path, 'tmp')

        # Start from a clean temporary workspace.
        if os.path.isdir(tmp_path):
            shutil.rmtree(tmp_path)

        # extract scene zip
        print('Extracting dataset: {}'.format(scene))
        dataset_files = utility.unpackFiles(scene, '(.*?)', tmp_path)
        print('... OK!')

        # load metadata into dictionary
        meta = metadata.getManifest(
            utility.matchFile(dataset_files, '.*\/manifest.safe'))
        meta.update(
            metadata.getAnnotation(
                utility.matchFile(dataset_files,
                                  '.*\/annotation\/s1.*vv.*\.xml')))

        # build pipeline schema
        schema = self.buildSchema(copy.deepcopy(self._base), meta)
        outname = os.path.join(tmp_path, self.getOutName(schema, meta))

        ##### determine if scene crosses antemeridian #####
        # A longitude span wider than _fat_swath is taken as a wrap-around.
        extent = self.getSceneExtent(meta)
        if extent['lon']['max'] - extent['lon']['min'] > self._fat_swath:

            # densify annotated geolocation grid
            self._densify.process(utility.matchFiles(
                dataset_files, '.*\/annotation\/s1.*\.xml'),
                                  grid_pts=250)
            meta.update(
                metadata.getGeolocationGrid(
                    utility.matchFile(dataset_files,
                                      '.*\/annotation\/s1.*vv.*\.xml')))

            ##### set parameters of reader task #####
            parameter = self.getParameterSet(schema, 'Read')
            parameter['file'] = dataset_files[
                0]  # parent path to extracted dataset
            parameter['formatName'] = 'SENTINEL-1'

            ##### insert subset task #####
            schema = self.insertNewTask(schema, 'Subset', after='Read')
            param = self.getParameterSet(schema, 'Subset')
            param['geoRegion'] = ''

            # split gcps into east / west sub-groups
            gcps = self.splitGcps(meta['gcps'])
            # Row-block height so that _chunks blocks cover the image.
            chunk_size = int(
                math.ceil(float(meta['image']['lines']) / float(self._chunks)))

            # process subset blocks either side of antemeridian
            results = []
            for hemisphere in ['east', 'west']:

                # for each row block
                start_row = 0
                offset = 10  # ensure subsets overlap
                while start_row < meta['image']['lines']:

                    # derive subset parameters
                    # Block bounds are clamped to the image and padded by
                    # 'offset' rows so adjacent subsets overlap.
                    block = {
                        'start':
                        max(start_row - offset, 0),
                        'end':
                        min(start_row + chunk_size + offset,
                            meta['image']['lines'] - 1),
                        'samples':
                        meta['image']['samples'],
                        'lines':
                        meta['image']['lines']
                    }

                    subset = self.getSubset(gcps[hemisphere], block)

                    # copy values into schema dictionary
                    param = self.getParameterSet(schema, 'Subset')
                    param['region'] = ','.join(str(int(x)) for x in subset)

                    # create subset-specific output path
                    param = self.getParameterSet(schema, 'Write')
                    subset_name = '_'.join(str(int(x)) for x in subset)

                    param['file'] = os.path.join(outname,
                                                 'subset_' + subset_name)
                    results.append(param['file'])

                    # transform dict back to xml schema
                    out = xmltodict.unparse(schema, pretty=True)

                    # write serialized xml schema to file
                    cfg_pathname = os.path.join(
                        tmp_path, '{}_{}.xml'.format(outname, subset_name))
                    with open(cfg_pathname, 'w+') as file:
                        file.write(out)

                    ##### execute ard processing for subset #####
                    print('Processing {} subset: {}'.format(
                        hemisphere, subset_name))
                    out, err, code = utility.execute(self._gpt, [cfg_pathname])
                    print('... OK!')

                    # move onto next block
                    start_row += chunk_size

            # mosaic subsets into single image
            self.generateImage(out_path, results, 'VV')
            self.generateImage(out_path, results, 'VH')

        #else:

        ##### do usual stuff #####
        # NOTE(review): scenes that do NOT cross the antimeridian currently
        # fall through to here and are not processed -- confirm intended.

        return
Ejemplo n.º 17
0
    # Derive the working paths: the build tree, the final package folder,
    # and the pkg-root staging area holding distribution.xml and resources.
    build_dir = join(getcwd(), args.build)
    package_dir = join(build_dir, 'package')
    distribution_xml = join(PKG_ROOT, 'distribution.xml')
    resources_dir = join(PKG_ROOT, 'resources')

    # pkgbuild: package the staged root into a component .pkg with the
    # given identifier, install prefix and version.
    pkgbuild_args = [
        pkgbuild, '--quiet', '--root',
        join(PKG_ROOT, pkg_name), '--identifier', pkg_identifier,
        '--install-location', args.prefix, '--version', pkg_version,
        join(PKG_ROOT, '{}.pkg'.format(pkg_name))
    ]

    # productbuild: wrap the component package into a distributable
    # installer described by distribution.xml, with bundled resources.
    productbuild_args = [
        productbuild, '--quiet', '--distribution', distribution_xml,
        '--resource', resources_dir, '--package-path', PKG_ROOT, '--version',
        pkg_version,
        join(package_dir, '{}.pkg'.format(pkg_name))
    ]

    # Stage a clean package root, render the distribution template, then
    # run both build steps from inside the build directory.
    with pushd(build_dir):
        if exists(PKG_ROOT): rmtree(PKG_ROOT)
        makedirs(PKG_ROOT)
        makedirs(resources_dir)
        copytree(join(SRC_ROOT, pkg_name), join(PKG_ROOT, pkg_name))
        copy(license_file, resources_dir)
        with open(distribution_xml, 'w') as dist:
            dist.write(distribution.safe_substitute(data))
        execute(*pkgbuild_args)
        execute(*productbuild_args)