Example #1
    def test_it_can_write_to_a_file(self):

        content = "This is some file content"
        sh("save", self.test_root, "test_file.txt", content)

        with open(join(self.test_root, "test_file.txt"), 'r') as file:
            self.assertEqual(content, file.read())
Example #2
    def main(self):
        data = requests.head(self.targetURL)
        #print(data.headers)
        headerDict = ast.literal_eval(str(data.headers))
        filesize = headerDict['Content-Length']
        # print 'Filesize is %s' % filesize

        startPos = 0
        partSize = int(filesize) // self.numWorkers  # integer division: byte offsets must stay ints
        endPos = partSize
        # print 'End Position is %s' % endPos

        threads = []

        for i in range(0, self.numWorkers):
            endPos = partSize * (i + 1)
            if (i + 1) == self.numWorkers:  # Is last iteration?
                endPos = int(filesize)
            # print '%i> Start: %s End: %s' % (i, startPos, endPos)
            t = threading.Thread(target=self.downloadSection,
                                 args=(i, startPos, endPos))
            t.start()
            threads.append(t)
            startPos = int(endPos) + 1
            # endPos = partSize * (i + 1)

        for t in threads:  # map() is lazy in Python 3, so join each thread explicitly
            t.join()
        print('combining all pieces...')
        sh('./combine.sh')
        fileName = self.targetURL.split('/')[-1]
        mv('movie.mkv', fileName)
        print('Moving file to video storage')
        mv(fileName, '/media/storage/videos/')
        return 0
Example #3
    def compile( self, source_dir, build_dir, install_dir ):
        package_source_dir = os.path.join( source_dir, self.dirname )
        assert( os.path.exists( package_source_dir ) )
        package_build_dir = os.path.join( build_dir, self.dirname )

        sh.cd( os.path.join( package_source_dir, 'scripts/Resources' ) )
        sh.sh( './copyresources.sh' )
        # the install target doesn't copy the stuff that copyresources.sh puts in place
        sh.cp( '-v', os.path.join( package_source_dir, 'bin/Release/Readme.txt' ), os.path.join( install_dir, 'Readme.meshy.txt' ) )
        sh.cp( '-v', '-r', os.path.join( package_source_dir, 'bin/Release_Linux/Resources/' ), install_dir )

        sh.mkdir( '-p', package_build_dir )
        sh.cd( package_build_dir )
        if ( platform.system() == 'Darwin' ):
            sh.cmake(
                '-G', 'Xcode',
                '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join( install_dir, 'CMake' ),
                package_source_dir,
                _out = sys.stdout )
            sh.xcodebuild( '-configuration', 'Release', _out = sys.stdout )
        else:
            sh.cmake(
                '-D', 'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                '-D', 'CMAKE_MODULE_PATH=%s' % os.path.join( install_dir, 'lib/OGRE/cmake' ),
                package_source_dir,
                _out = sys.stdout )
            sh.make( '-j4', 'VERBOSE=1', _out = sys.stdout )
            sh.make.install( _out = sys.stdout )
Example #4
    def test_it_knows_how_big_a_file_is(self):
        content = "Test file content"
        sh("save", self.test_root, "test_file.txt", content)

        size = sh("stat -s", self.test_root, "test_file.txt")

        self.assertEqual(len(content), size)
Example #5
    def test_it_can_list_a_dir_with_things(self):

        sh("mkdir", self.test_root, "test1")
        sh("mkdir", self.test_root, "test2")

        content = sh("ls", self.test_root)
        self.assertTrue(len(content) == 2)
        self.assertIn("test1", content)
        self.assertIn("test2", content)
Example #6
    def test_throws_exception_when_file_exists_as_parent(self):

        test_dir = join(self.test_root, "test_dir")
        open(test_dir, "wb").close()

        with self.assertRaises(SHException):
            sh("mkdir -p", test_dir, "target_dir")

        self.assertFalse(exists(join(test_dir, "target_dir")))
Example #7
    def test_it_can_remove_a_file(self):

        test_file = join(self.test_root, "test_file.bin")

        open(test_file, "wb").close()
        self.assertTrue(exists(join(self.test_root, "test_file.bin")))

        sh("rm", test_file)

        self.assertFalse(exists(test_file))
Example #8
    def test_it_can_expand_several_environment_variables(self):

        os.environ['USER'] = "TestUser"
        os.environ['OTHER'] = "OtherThing"
        path = sh('join', '/home/$USER/Desktop/$OTHER')

        self.assertEqual(path, '/home/TestUser/Desktop/OtherThing')
Example #9
def dupsh(sock = 'ebp', os = None):
    """Args: [sock (imm/reg) = ebp]
    Duplicates sock to stdin, stdout and stderr and spawns a shell."""
    if os in ['freebsd', 'linux']:
        return dup(sock), sh()
    else:
        bug('OS was neither linux nor freebsd')
Example #10
 def execute(self, name, command):
     """executes the command on the named host"""
     if name in ["localhost"]:
         r = '\n'.join(sh.sh("-c", command).split()[-1:])
     else:
         r = '\n'.join(sh.ssh(name, command).split()[-1:])
     return r
Example #11
 def get_real_url(self, url):
     ret = None
     m = self.proto_fmt.search(url)
     if m:
         proto = m.group(1).strip().lower()
         if proto == 'file':
             value = m.group(2).strip()
             exists = False
             i = 0
             while i < 2:
                 if not os.path.exists(value):
                     value = os.path.join(prog_home, value)
                     i += 1
                     continue
                 else:
                     exists = True
                     break
             if exists:
                 ret = []
                 try:
                     for line in sh.sh(value): ## exec the script
                         line = self.remove_newline(line) 
                         if line: ret.append( line )
                 except Exception as e:
                     cblog("get_real_url: exception" + repr(e))
                     print(e)
         else:
             ret = url
Example #13
def dupsh(sock='ebp', os=None):
    """Args: [sock (imm/reg) = ebp]
    Duplicates sock to stdin, stdout and stderr and spawns a shell."""
    if os in ['freebsd', 'linux']:
        return dup(sock), sh()
    else:
        bug('OS was neither linux nor freebsd')
Example #14
 def check_symbol(self, env, sofile, symbol):
     nm = env.get('NM', 'nm')
     syms = sh.sh('-c', "{} -gp {} | cut -d' ' -f3".format(
             nm, sofile), _env=env).splitlines()
     if symbol in syms:
         return True
     print('{} missing symbol {}; rebuilding'.format(sofile, symbol))
     return False
Example #16
    def test_it_can_remove_a_file_recursively(self):

        test_dir = join("a", "b", "c", "d")
        sh("mkdir -p", self.test_root, test_dir)

        self.assertTrue(sh("exists", self.test_root, test_dir))

        sh("cd", self.test_root)
        sh("rm -r", ".", test_dir)

        self.assertFalse(sh("exists", self.test_root, test_dir))
Example #17
    def test_it_can_remove_a_single_empty_directory(self):

        target = "target"

        sh("mkdir", self.test_root, target)
        self.assertTrue(sh("isdir", self.test_root, target))
        self.assertTrue(sh("exists", self.test_root, target))

        sh("rm", self.test_root, target)
        self.assertFalse(sh("exists", self.test_root, target))
Example #18
    def test_it_can_list_the_cwd(self):

        sh("mkdir", self.test_root, "test1")
        sh("mkdir", self.test_root, "test2")

        sh("cd", self.test_root)

        content = sh("ls")
        self.assertTrue(len(content) == 2)
        self.assertIn("test1", content)
        self.assertIn("test2", content)
Example #19
    def compile(self, source_dir, build_dir, install_dir):
        package_source_dir = os.path.join(source_dir, self.dirname)
        assert (os.path.exists(package_source_dir))
        package_build_dir = os.path.join(build_dir, self.dirname)

        sh.cd(os.path.join(package_source_dir, 'scripts/Resources'))
        sh.sh('./copyresources.sh')
        # the install target doesn't copy the stuff that copyresources.sh puts in place
        sh.cp('-v', os.path.join(package_source_dir, 'bin/Release/Readme.txt'),
              os.path.join(install_dir, 'Readme.meshy.txt'))
        sh.cp('-v', '-r',
              os.path.join(package_source_dir, 'bin/Release_Linux/Resources/'),
              install_dir)

        sh.mkdir('-p', package_build_dir)
        sh.cd(package_build_dir)
        if (platform.system() == 'Darwin'):
            sh.cmake('-G',
                     'Xcode',
                     '-D',
                     'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                     '-D',
                     'CMAKE_MODULE_PATH=%s' %
                     os.path.join(install_dir, 'CMake'),
                     package_source_dir,
                     _out=sys.stdout)
            sh.xcodebuild('-configuration', 'Release', _out=sys.stdout)
        else:
            sh.cmake('-D',
                     'CMAKE_INSTALL_PREFIX=%s' % install_dir,
                     '-D',
                     'CMAKE_MODULE_PATH=%s' %
                     os.path.join(install_dir, 'lib/OGRE/cmake'),
                     package_source_dir,
                     _out=sys.stdout)
            sh.make('-j4', 'VERBOSE=1', _out=sys.stdout)
            sh.make.install(_out=sys.stdout)
Example #20
def main(args=None):
    parser = gitboots.argparse_setup(globals())

    parser.add_argument("-e", metavar="COMMAND", default="rm -f", help="execute command on each found lock")
    opts = parser.parse_args(args)

    path = git_dir()
    for lock in sh.find(path, "-type", "f", "-name", "index.lock"):
        lock = lock.strip()
        if opts.e != "rm -f":
            if "{}" in opts.e:
                cmd = opts.e.replace("{}", lock)
            else:
                cmd = "{cmd} {lock}".format(cmd=opts.e, lock=lock)
            logger.info(cmd)
            if sh.which("sh"):
                sh.sh("-c", cmd)
            elif sh.which("cmd.exe"):
                sh.cmd("/c", cmd)
            else:
                raise ValueError("Do not know how to run this command: %s" % cmd)
        else:
            logger.info("rm -f %s", lock)
            os.remove(lock)
Example #21
 def _get_routes(self):
     routes = []
     for iface in self.model.db:
         match = self._cmd_regex.match(iface)
         if not match:
             routes.append(iface)
             continue
         try:
             with open("{}/iface_cmd.sh".format(self._path_root), "w") as f:
                 f.write(match.group(1))
             _iface = sh.sh("{}/iface_cmd.sh".format(self._path_root))
             routes.append(str(_iface).rstrip())
         except Exception as e:
             _logger.debug(e)
     return routes
Example #22
    def upload(self):
        """upload converted image to openstack
        Returns:
            bool: True on success, False on failure"""
        from sh import sh
        size_name, size = get_size(self.convert_name)
        # if not all True
        if not all([size_name, size]):
            return False

        openrc_path = os.path.abspath(os.path.curdir)
        source_cmd = "source " + os.path.join(openrc_path, "openrc")

        # logger.debug("use glance command to upload image.")
        cmd = source_cmd + ";glance image-create --name %s --min-disk %s --disk-format raw --container-format bare --file %s --progress"
        # pass vmname, min-disk, file path
        cmd = cmd % (self.vmname + "-" + size_name, int(size),
                     self.convert_name)

        try:
            logger.info("execute command: %s" % cmd)
            logger.info("convert vm: %s" % self.vmname)
            proc = sh("-c", cmd, _iter=True)
            logger.info("glance upload command start.")
        except Exception as e:
            logger.critical("upload image failed.")
            logger.critical("execute command failed: %s" % cmd)
            logger.critical(e)
            return False

        for line in proc:
            logger.debug(line)

        ret = not proc.exit_code

        if ret:
            if not os.path.exists(".vm"):
                os.mkdir(".vm")
            logger.info("upload completed.....")
            open(".vm/" + self.vmname + ".upload", "a").close()
            logger.info("Remove the converted image file")
            # remove original vmdk file
            if VmwareVM.remove(self.convert_name):
                logger.warning("remove converted image file success.")
            else:
                logger.warning("remove converted image file failed.")
        return ret
Example #23
    def virt_v2v_convert(self, out="./"):
        """Convert the vm to raw format with virt-v2v

        Args:
            out(str): path for output, default ./

        Returns:
            bool: True on success, False on failure"""
        from sh import sh
        vm = self.down_name
        cmd = "export LIBGUESTFS_BACKEND=direct; virt-v2v -v -i disk %s -of raw -o local -os %s "
        cmd = cmd % (vm, out)

        try:
            logger.info("execute command: %s" % cmd)
            logger.info("convert vm: %s" % self.vmname)
            proc = sh("-c", cmd, _iter=True)
            logger.info("virt-v2v convert command start.")
        except Exception as e:
            logger.critical("convert image failed.")
            logger.critical("execute command failed: %s" % cmd)
            logger.critical(e)
            return False

        for line in proc:
            logger.debug(line)

        # 0 mean success
        ret = not proc.exit_code

        if ret:
            if not os.path.exists(".vm"):
                os.mkdir(".vm")
            logger.info("convert completed.....")
            open(".vm/" + self.vmname + ".convert", "a").close()
            logger.debug("try remove the original vmdk file")
            # remove original vmdk file
            if VmwareVM.remove(vm):
                logger.info("remove original vmdk file success.")
            else:
                logger.warning("remove original vmdk file failed.")
        return ret
Example #24
def push(name):
    # a terrible CIDR check
    if not any([request.remote_addr.startswith(x) for x in ALLOWED_RANGES]):
        return jsonify({'status': 'access denied'}), 401

    site = mongo.db.sites.find_one({'_id': name})
    if site is None:
        return jsonify({'status': 'invalid repo spec'}), 404

    rev = str(git('rev-parse', 'origin/master', _cwd=site['path']))
    sudo.git.fetch(_cwd=site['path'])
    after_rev = str(git('rev-parse', 'origin/master', _cwd=site['path']))

    if rev != after_rev:
        git_output, deploy, restart, err = [''] * 4
        try:
            git_output = str(sudo.git.pull(_cwd=site['path'])).strip()
            deploy = str(sh('./deploy.sh', _cwd=site['path'])).strip()
            restart = str(sudo('service', name, 'restart'))
        except ErrorReturnCode as e:
            err = str(e)
            print('weird error', err)

        output = {
            'site': name,
            'git_revs': [rev, after_rev],
            'git_output': git_output,
            'deploy': deploy,
            'restart': restart,
            'err': err,
        }
        mongo.db.deploys.insert(output)
        output['_id'] = str(output['_id'])

        return jsonify(output), 201
    return jsonify({
        'status': 'same',
        'rev': rev or '...',
    }), 200
Example #26
    def test_it_converts_all_wslashes_to_uslashes(self):

        path = sh("join", "c:\\my_dir\\my_sub")
        self.assertEqual(path, "c:/my_dir/my_sub")
Example #27
def status():
    try:
        return sh("git status --porcelain 2> /dev/null")
    except ValueError:
        return None
Example #28
for variant_file in os.listdir('simulator/configs'):
    log.info("Building {}".format(variant_file))

    variant = os.path.basename(variant_file)

    build_variant_dir = os.path.join(BUILD_DIR, variant)
    mkdir(build_variant_dir)
    cp('-r', PRISTINE, build_variant_dir)

    with sh.pushd(os.path.join(build_variant_dir, 'simulator')):
        tup('init')
        tup('generate', 'build.sh', '--config',
            os.path.join('configs', variant))
        try:
            errbuf = io.StringIO()
            sh.sh('-x', 'build.sh', _err=errbuf)
        except:
            errbuf.seek(0)
            for l in errbuf.readlines():
                print(l, end='')
            raise
        sh.test('-x', 'simulator')

        log.info("Built {}".format(variant_file))

        any_fail = False

        sim = sh.Command('./simulator')
        for test in glob.iglob('tests/**/*.bin', recursive=True):
            log.info("\t\ttest: %s", test)
            try:
Example #29
 def general_command_run(self, command):
     result = sh.sh(command, _bg=True)
     result.wait()  # wait for the backgrounded command before reading its exit code
     result_status = "Success" if result.exit_code == 0 else "Fail"
     return {"status": result_status, "result": result}
Example #30
#!/usr/bin/python
import json
import base64
import sh

sh.sh("./make-zip.sh")
open('resources/communication-with-storage/request/payload.bin','wb').write(
    json.dumps(
        dict( __nt_name="ExportToKramerius",
              uuid='e65d9072-2c9b-11e5-99fd-b8763f0a3d61',
              b64_data=base64.encodestring(open("/tmp/e65d9072-2c9b-11e5-99fd-b8763f0a3d61.zip","rb").read())
          )
    )
)
Example #31
    def test_it_joins_deep_paths_with_backslashes(self):

        path = sh("join", "c:\\my_file", "a\\b\\c\\d")
        self.assertEqual("c:/my_file/a/b/c/d", path)
Example #32
image = "654874881458.dkr.ecr.us-east-1.amazonaws.com/hoopla/pgbouncer"


# Copy configuration from the default to the project folder.
# hoopla-deployment/pgbouncer/ENVIRONMENT/BUILD/*
# ex: hoopla-deployment/pgbouncer/staging/default/*
pgbouncer = 'hoopla-deployment/pgbouncer'
for environment in os.listdir(pgbouncer):
    environment_folder = "{}/{}".format(pgbouncer, environment)

    for build in os.listdir(environment_folder):
        build_folder = '{}/{}'.format(environment_folder, build)

        files_copied = []
        for configuration_file in os.listdir(build_folder):
            configuration_file_path = '{}/{}'.format(build_folder, configuration_file)
            shutil.copyfile(configuration_file_path, configuration_file)
            files_copied.append(configuration_file)

        # Configuration had been copied, build and publish docker image
        full_image_name = "{}:{}.{}".format(image, environment, build)
        logging.info("Creating and publishing image {}".format(full_image_name))
        sh.sh("-c", "sudo docker build -t {} .".format(full_image_name), _out=log_sh)
        sh.sh("-c", "sudo docker push {}".format(full_image_name))

        logging.info("Deleting temporary configuration files")
        for file_copied in files_copied:
            os.remove(file_copied)

logging.info("Done!")
Example #33
    def test_it_doesnt_butcher_paths(self):
        raw_path = "c:/users/jcsoko~1/appdata/local/temp/tmpkfex8c/test_file.bin"

        path = sh("join", raw_path)
        self.assertEqual(raw_path, path)
Example #34
 def test_short_option(self):
     from sh import sh
     s1 = sh(c="echo test").strip()
     s2 = "test"
     self.assertEqual(s1, s2)
Example #35
def branch():
    """The current git branch."""
    try:
        return sh("git symbolic-ref --short HEAD 2> /dev/null").strip()
    except ValueError:
        return None
Example #36
def get_current_arch():
    return sh(
        "-c",
        f". {PROJECT_DIR / 'shared-build-context/shared/scripts/functions.sh'}; normalize_to_docker_arch",
    ).strip()
Example #37
    def handle(self, *args, **options):
        session = requests.session()
        session.verify = False  # internal network, self-signed cert
        session.headers['User-Agent'] = 'python-requests/ConfigMaster'

        # global lock - remove after Repository integration is done
        global_lock = locking.FileLock("/run/configmaster-esxi-backup")
        global_lock.acquire()

        for name, uri in settings.ESXI_BACKUP:
            self.stdout.write("Acquiring lock for %s..." % name)
            run_lock = locking.FileLock("/run/esxi-%s" % name)
            tempdir = tempfile.mkdtemp('esxi')
            self.stdout.write("Lock acquired, running... (%s)" % tempdir)

            try:
                run_lock.acquire(non_blocking=True)
            except IOError:
                self.stderr.write(
                    "A run is already in progress")
                return

            try:
                resp = session.get(uri, timeout=5)
            except IOError as e:
                self.stderr.write(
                    "Config backup failure for %s: %s" % (name, str(e)))
                continue

            if resp.status_code != 200:
                self.stderr.write(
                    "Config backup failed for %s: %r" % (name, resp.content))
                continue

            stage_1 = os.path.join(tempdir, 'stage1.tgz')

            with open(stage_1, 'wb') as f:
                f.write(resp.content)

            os.chdir(tempdir)

            try:
                stage1_ret = sh.aunpack('stage1.tgz')
            except sh.ErrorReturnCode:
                self.stderr.write(
                    "Config backup failed for %s: failed to unpack stage1"
                    % name)
                continue

            if not "\nstate.tgz" in stage1_ret.stdout or not os.path.exists(
                    'stage1'):
                self.stderr.write(
                    "Config backup failed for %s: invalid stage1" % name)
                continue

            os.chdir('stage1')

            try:
                stage2_ret = sh.aunpack('state.tgz')
            except sh.ErrorReturnCode:
                self.stderr.write(
                    "Config backup failed for %s: failed to unpack state.tgz"
                    % name)
                continue

            if not os.path.exists('local.tgz'):
                self.stderr.write(
                    "Config backup failed for %s: invalid state.tgz, "
                    "local.tgz missing" % name)
                continue

            try:
                stage3_ret = sh.aunpack('local.tgz')
            except sh.ErrorReturnCode:
                self.stderr.write(
                    "Config backup failed for %s: "
                    "failed to unpack local.tgz" % name)
                continue

            if not os.path.exists('etc'):
                self.stderr.write(
                    "Config backup failed for %s: "
                    "invalid local.tgz, etc/ missing" % name)
                continue

            for path in settings.ESXI_FILE_BLACKLIST:
                if os.path.exists(path):
                    os.unlink(path)

            repo_dir = os.path.join(settings.ESXI_BACKUP_REPO, name)

            if not os.path.exists(repo_dir):
                os.mkdir(repo_dir)

            # normalize permissions: directories 750, files 640
            sh.sh('-c', r'/usr/bin/find -type d -exec chmod 750 {} \;')
            sh.sh('-c', r'/usr/bin/find -type f -exec chmod 640 {} \;')

            for root, dirs, files in os.walk(repo_dir):
                for d in dirs:
                    os.chmod(os.path.join(root,d), 0o750)
                for f in files:
                    os.chmod(os.path.join(root,f), 0o640)

            sh.cp('-r', '.', repo_dir+'/')
            os.chdir(repo_dir)

            try:
                sh.git.add('-u', '.')
                sh.git.add('.')
                if NetworkDeviceConfigBackupHandler._git_commit(
                                "ESXi config change (%s)" % name):
                    sh.git.push()
            except sh.ErrorReturnCode as e:
                self.stderr.write(
                    "Git commit or push failed: " + str(e))
                continue

            sh.cp(
                os.path.join(tempdir, 'stage1.tgz'),
                os.path.join(settings.ESXI_BACKUP_REPO_RAW, '%s.tgz' % name)
            )

            os.chdir(settings.ESXI_BACKUP_REPO_RAW)

            try:
                sh.git.add('-u', '.')
                sh.git.add('.')
                if NetworkDeviceConfigBackupHandler._git_commit(
                                "ESXi raw config change (%s)" % name):
                    sh.git.push()
            except sh.ErrorReturnCode as e:
                self.stderr.write(
                    "Git commit or push failed: " + str(e))
                continue

            shutil.rmtree(tempdir)
            run_lock.release()

        for path in (settings.ESXI_BACKUP_REPO_RAW, settings.ESXI_BACKUP_REPO):
            os.chdir(path)
            sh.git.push()

        global_lock.release()
        self.stdout.write("Run completed.")
Example #38
    def test_it_joins_windows_paths(self):

        path = sh("join", "c:/my_dir", "my_file.txt")
        self.assertEqual(path, "c:/my_dir/my_file.txt")
Example #39
que = subprocess.Popen(cmd,stdout=subprocess.PIPE,shell=True).communicate()[0]
queue = json.loads(que)
queue = queue['QueueUrl']

cmd = "aws sqs receive-message --queue-url '"+queue+"'"
done = "aws sqs delete-message --queue-url '"+queue+"' --receipt-handle "

if queue:
	report(env+queue)
	failed_receive_message = 0
	while maxfails == 0 or ( maxfails > 0 and failed_receive_message < maxfails ):
		if maxfails > 0:
			failed_receive_message += 1
		mes = subprocess.Popen(cmd,stdout=subprocess.PIPE,shell=True).communicate()[0]
		if not mes:
			continue
		messages = json.loads(mes)
		for message in messages['Messages']:
			body = urllib.unquote_plus(message['Body']).replace('\n',' ').replace('\r','')
			rcpt = "'"+message['ReceiptHandle']+"'"
			try:
				sh.sh('-c',env+body,_tty_in=True,_out=job+'/out.txt',_err=job+'/err.txt')
				subprocess.call(done+rcpt,shell=True)
				failed_receive_message = 0
			except Exception as e:
				report(str(e) + ': cannot service '+body)
		if maxfails < 0:
			break
	if failed_receive_message > 0:
		report('giving up... nothing received after '+str(failed_receive_message)+' tries')
Example #40
	def execPort(self, code):
		sh(self.__cmd, self.__timeout)
		return ERoutingResult.STOP_OK
Example #41
def setup_canopy(ip_address):
    ssh = digitalocean.get_ssh("root", ip_address, process_out(ip_address))
    # log("creating primary virtual environment")
    # ssh("/root/python/bin/python3 -m venv /root/canopy")
    # ssh(
    #     "cat > /root/runinenv",
    #     stdin=textwrap.dedent(
    #         """\
    #         #!/usr/bin/env bash
    #         VENV=$1
    #         . ${VENV}/bin/activate
    #         shift 1
    #         exec "$@"
    #         deactivate"""
    #     ),
    # )
    ssh("chmod", "+x", "/root/runinenv")
    log("installing canopy")
    ssh("/root/runinenv", "/root/canopy", "pip", "install", "canopy-platform")
    return

    # if self.env_dir.exists():
    #     return
    # log("creating primary virtual environment")
    get_python_sh()("-m", "venv", self.env_dir)
    sh.echo(
        textwrap.dedent("""\
            #!/usr/bin/env bash
            VENV=$1
            . ${VENV}/bin/activate
            shift 1
            exec "$@"
            deactivate"""),
        _out=f"{self.home_dir}/runinenv",
    )
    sh.chmod("+x", self.home_dir / "runinenv")

    # log("installing SQLite")
    # log(" ", "downloading")
    # sh.wget("https://www.sqlite.org/src/tarball/sqlite.tar.gz", _cwd=self.src_dir)
    # log(" ", "extracting")
    # sh.tar("xf", "sqlite.tar.gz", _cwd=self.src_dir)
    # sqlite_dir = self.src_dir / "sqlite"
    # log(" ", "configuring")
    # sh.bash("./configure", _cwd=sqlite_dir)
    # sh.make("sqlite3.c", _cwd=sqlite_dir)
    # sh.git("clone", "https://github.com/coleifer/pysqlite3", _cwd=self.src_dir)
    # pysqlite_dir = self.src_dir / "pysqlite3"
    # sh.cp(sqlite_dir / "sqlite3.c", ".", _cwd=pysqlite_dir)
    # sh.cp(sqlite_dir / "sqlite3.h", ".", _cwd=pysqlite_dir)
    # sh.sh(
    #     self.home_dir / "runinenv",
    #     self.env_dir,
    #     "python",
    #     "setup.py",
    #     "build_static",
    #     _cwd=pysqlite_dir,
    # )
    # sh.sh(
    #     self.home_dir / "runinenv",
    #     self.env_dir,
    #     "python",
    #     "setup.py",
    #     "install",
    #     _cwd=pysqlite_dir,
    # )

    log("installing Gaea")
    sh.sh(
        self.home_dir / "runinenv",
        self.env_dir,
        "pip",
        "install",
        "libgaea",
    )

    log("installing Poetry")
    get_python_sh()(
        sh.wget(
            "https://raw.githubusercontent.com/python-poetry/poetry"
            "/master/install-poetry.py",
            "-q",
            "-O",
            "-",
        ),
        "-",
    )
Example #42
for variant_file in os.listdir('simulator/configs'):
	log.info("Building {}".format(variant_file))

	variant = os.path.basename(variant_file)

	build_variant_dir = os.path.join(BUILD_DIR, variant)
	mkdir(build_variant_dir)
	cp('-r', PRISTINE, build_variant_dir)

	with sh.pushd(os.path.join(build_variant_dir, 'simulator')):
		tup('init')
		tup('generate', 'build.sh', '--config', os.path.join('configs', variant))
		try:
			errbuf = io.StringIO()
			sh.sh('-x', 'build.sh', _err=errbuf)
		except:
			errbuf.seek(0)
			for l in errbuf.readlines():
				print(l, end='')
			raise
		sh.test('-x', 'simulator')

		log.info("Built {}".format(variant_file))

		any_fail = False

		sim = sh.Command('./simulator')
		for test in glob.iglob('tests/**/*.bin', recursive=True):
			log.info("\t\ttest: %s", test)
			try:
Example #43
import sh

sh.cd("ext/deps/")
sh.wget("http://download.nanomsg.org/nanomsg-0.3-beta.tar.gz")
sh.tar("xzf", "nanomsg-0.3-beta.tar.gz")
sh.cd("nanomsg-0.3-beta")
sh.sh("configure")
sh.make()
Example #44
def run(input_file: KGTKFiles,
        output_file: KGTKFiles,
        columns: typing.Optional[typing.List[str]] = None,
        reverse: bool = False,
        extra: typing.Optional[str] = None,
        verbose: bool = False) -> int:
    import os
    from pathlib import Path
    import sh  # type: ignore
    import sys

    from kgtk.io.kgtkreader import KgtkReader

    input_path: Path = KGTKArgumentParser.get_input_file(input_file)
    output_path: Path = KGTKArgumentParser.get_output_file(output_file)

    error_file = sys.stderr

    try:
        header_read_fd: int
        header_write_fd: int
        header_read_fd, header_write_fd = os.pipe()
        os.set_inheritable(header_write_fd, True)
        if verbose:
            print("header pipe: read_fd=%d write_fd=%d" %
                  (header_read_fd, header_write_fd),
                  file=error_file,
                  flush=True)

        sortopt_read_fd: int
        sortopt_write_fd: int
        sortopt_read_fd, sortopt_write_fd = os.pipe()
        os.set_inheritable(sortopt_read_fd, True)
        if verbose:
            print("sort options pipe: read_fd=%d write_fd=%d" %
                  (sortopt_read_fd, sortopt_write_fd),
                  file=error_file,
                  flush=True)

        cmd: str = ""

        if str(input_path) != "-":
            # Feed the named file into the data processing pipeline,
            # otherwise read from standard input.
            cmd += "cat " + repr(str(input_path)) + " | "

        cmd += " { IFS= read -r header ; "  # Read the header line
        cmd += " { printf \"%s\\n\" \"$header\" >&" + str(
            header_write_fd) + " ; } ; "  # Send the header to Python
        cmd += " printf \"%s\\n\" \"$header\" ; "  # Send the header to standard output (which may be redirected to a file, below).
        cmd += " IFS= read -u " + str(
            sortopt_read_fd
        ) + " -r options ; "  # Read the sort command options from Python.
        cmd += " LC_ALL=C sort -t '\t' $options ; } "  # Sort the remaining input lines using the options read from Python.

        if str(output_path) != "-":
            # Feed the sorted output to the named file.  Otherwise, the sorted
            # output goes to standard output without passing through Python.
            cmd += " > " + repr(str(output_path))

        if verbose:
            print("sort command: %s" % cmd, file=error_file, flush=True)

        # Sh version 1.13 or greater is required for _pass_fds.
        proc = sh.sh("-c",
                     cmd,
                     _in=sys.stdin,
                     _out=sys.stdout,
                     _err=sys.stderr,
                     _bg=True,
                     _pass_fds={header_write_fd, sortopt_read_fd})

        if verbose:
            print("Reading the KGTK input file header line with KgtkReader",
                  file=error_file,
                  flush=True)
        kr: KgtkReader = KgtkReader.open(Path("<%d" % header_read_fd))
        if verbose:
            print("KGTK header: %s" % " ".join(kr.column_names),
                  file=error_file,
                  flush=True)

        sort_options: str = ""
        if reverse:
            sort_options += " --reverse"

        if extra is not None and len(extra) > 0:
            sort_options += " " + extra

        sort_idx: int
        if columns is not None and len(columns) > 0:
            # Process the list of column names, including splitting
            # comma-separated lists of column names.
            column_name: str
            for column_name in columns:
                column_name_2: str
                for column_name_2 in column_name.split(","):
                    column_name_2 = column_name_2.strip()
                    if len(column_name_2) == 0:
                        continue
                    if column_name_2.isdigit():
                        sort_idx = int(column_name_2)
                        if sort_idx > len(kr.column_names):
                            proc.kill_group()
                            raise KGTKException(
                                "Invalid column number %d (max %d)." %
                                (sort_idx, len(kr.column_names)))
                    else:
                        if column_name_2 not in kr.column_names:
                            proc.kill_group()
                            raise KGTKException("Unknown column_name %s" %
                                                column_name_2)
                        sort_idx = kr.column_name_map[column_name_2] + 1
                    sort_options += " -k %d,%d" % (sort_idx, sort_idx)
        else:
            if kr.is_node_file:
                sort_idx = kr.id_column_idx + 1
                sort_options += " -k %d,%d" % (sort_idx, sort_idx)

            elif kr.is_edge_file:
                if kr.id_column_idx >= 0:
                    sort_idx = kr.id_column_idx + 1
                    sort_options += " -k %d,%d" % (sort_idx, sort_idx)

                sort_idx = kr.node1_column_idx + 1
                sort_options += " -k %d,%d" % (sort_idx, sort_idx)

                sort_idx = kr.label_column_idx + 1
                sort_options += " -k %d,%d" % (sort_idx, sort_idx)

                sort_idx = kr.node2_column_idx + 1
                sort_options += " -k %d,%d" % (sort_idx, sort_idx)
            else:
                proc.kill_group()
                raise KGTKException(
                    "Unknown KGTK file mode, please specify the sorting columns."
                )

        if verbose:
            print("sort options: %s" % sort_options,
                  file=error_file,
                  flush=True)

        kr.close()  # We are done with the KgtkReader now.

        # Send the sort options back to the data processing pipeline.
        with open(sortopt_write_fd, "w") as options_file:
            options_file.write(sort_options + "\n")

        proc.wait()

        # Clean up the file descriptors, just in case.
        #
        # Note: Should close these when we raise an exception, too.
        try:
            os.close(header_read_fd)
        except os.error:
            pass
        try:
            os.close(header_write_fd)
        except os.error:
            pass

        try:
            os.close(sortopt_read_fd)
        except os.error:
            pass
        try:
            os.close(sortopt_write_fd)
        except os.error:
            pass

        return 0

    except Exception as e:
        #import traceback
        #traceback.print_tb(sys.exc_info()[2], 10)
        raise KGTKException('INTERNAL ERROR: ' + type(e).__module__ + '.' +
                            str(e) + '\n')
Example #45
def main():
    nupdated = 0
    if os.path.exists("wip"):
        print(
            "In-progress commits detected: wip/ exists. Please resolve manually."
        )
        sys.exit(1)

    override_commit = None
    if "--override" in sys.argv:
        override_commit = sys.argv[sys.argv.index("--override") + 1]

    override_msg = None
    if "-m" in sys.argv:
        override_msg = sys.argv[sys.argv.index("-m") + 1]

    for repo, proj in PROJECTS.items():
        osc = proj["cmd"]
        commit = override_commit or proj["version-tag"]

        if "--fetch-changes" in sys.argv:
            f, filename = tempfile.mkstemp('.txt', text=True)
            tf = os.fdopen(f, "w")
            fetch_changelog(osc, proj["version-tag"], tf)
            tf.close()
            print(open(filename).read())
            os.remove(filename)
            sys.exit(0)

        try:
            tarball = osc.api(
                "-X", "GET",
                "/source/{}/{}/update-tarball.sh".format(repo, PACKAGE))
            m = re.search('ROOK_REV="(.+)"', tarball.stdout.decode('utf-8'))
            if m and m.group(1) == commit:
                continue
        except sh.ErrorReturnCode as err:
            print("Error code {}, skipping {}/{}...".format(
                err.exit_code, repo, PACKAGE))
            continue

        print("Updating {}/{} to version {}...".format(repo, PACKAGE, commit))
        curr = os.getcwd()
        try:
            wip = os.path.join(curr, "wip")
            try:
                os.mkdir(wip)
            except os.error:
                pass
            os.chdir(wip)
            osc("bco", repo, PACKAGE)
            os.chdir(os.path.join(wip, BRANCHBASE.format(repo), PACKAGE))
            if proj["version-tag"] == commit:
                update_changelog(osc, proj["version-tag"])
            else:
                update_changelog_with_message(
                    osc, override_msg or "Update to commit {}".format(commit))
            update_tarball(commit)
            for toremove in glob.glob('./rook-*.xz'):
                print("Deleting {}...".format(toremove))
                os.remove(toremove)
            print(sh.sh("./update-tarball.sh"))
            print(osc.ar())
            print(osc.commit("-m", "Update to version {}:".format(commit)))
            nupdated = nupdated + 1

        finally:
            os.chdir(curr)

    if nupdated > 0:
        print("{} updated projects now in:\nwip".format(nupdated))
Example #46
    def test_joining_a_dot_path(self):

        path = sh("join", "./abc", "def")
        self.assertEqual(path, "./abc/def")