Example #1
0
File: gg.py — Project: eorbay/gg
def read_thunk(thunk_hash, s3_bucket=None):
    """Read a thunk blob from the local gg blob store, downloading it from S3
    first if it is not available locally.

    Args:
        thunk_hash: content hash that names the thunk blob.
        s3_bucket: optional S3 bucket to fetch the blob from when missing.

    Returns:
        The parsed gg_pb2.Thunk protobuf message.

    Raises:
        Exception: the thunk is neither local nor downloadable.
        NotAThunkException: the blob does not start with MAGIC_CODE.
    """
    tpath = GGPaths.blob_path(thunk_hash)

    if not os.path.exists(tpath):
        if not s3_bucket:
            raise Exception("thunk is not locally available")
        try:
            s3_client.download_file(Bucket=s3_bucket,
                                    Key=thunk_hash,
                                    Filename=tpath)
            tags = s3_client.get_object_tagging(Bucket=s3_bucket,
                                                Key=thunk_hash)
            tags = tags.get('TagSet', [])

            # Honor the gg:executable tag that was set at upload time.
            for tag in tags:
                if tag['Key'] == 'gg:executable' and tag['Value'] == 'true':
                    make_executable(tpath)
                    break
        except Exception as ex:
            # BUG FIX: the original bare `except:` built an Exception object
            # but never raised it, silently swallowing download failures.
            raise Exception(
                "thunk is not available: {}".format(thunk_hash)) from ex

    with open(tpath, "rb") as fin:
        magic = fin.read(len(MAGIC_CODE))

        if magic != MAGIC_CODE:
            raise NotAThunkException("not a thunk: {}".format(thunk_hash))

        thunk = gg_pb2.Thunk()
        thunk.ParseFromString(fin.read())

        return thunk
Example #2
0
  def test_js_engine_path(self):
    """Running JS commands must work for node, d8, and jsc regardless of the
    engine's filesystem path.

    The fake engines are placed in a directory whose name contains 'd8' to
    verify that engine detection does not key off the path.
    """
    restore_and_set_up()

    script = test_file('print_args.js')

    # Fake some JS engines; note the 'd8' substring inside the directory name.
    fake_dir = self.in_dir('fake', 'abcd8765')
    ensure_dir(fake_dir)

    engines = [('d8',     config.V8_ENGINE),
               ('d8_g',   config.V8_ENGINE),
               ('js',     config.SPIDERMONKEY_ENGINE),
               ('node',   config.NODE_JS),
               ('nodejs', config.NODE_JS)]
    for name, engine in engines:
      try_delete(SANITY_FILE)
      if type(engine) is list:
        engine = engine[0]
      if not engine:
        print('WARNING: Not testing engine %s, not configured.' % (name))
        continue

      print(name, engine)

      # Write a tiny wrapper script that execs the real engine.
      wrapper = os.path.join(fake_dir, name)
      with open(wrapper, 'w') as f:
        f.write('#!/bin/sh\n')
        f.write('exec %s $@\n' % (engine))
      make_executable(wrapper)

      out = self.run_js(script, engine=wrapper, args=['--foo'])

      self.assertEqual('0: --foo', out.strip())
Example #3
0
def make_fake_llc(filename, targets):
  """Create a fake llc that only handles --version and writes target
  list to stdout.
  """
  print('make_fake_llc: %s' % filename)
  ensure_dir(os.path.dirname(filename))
  # The script unconditionally echoes a fake version banner plus the
  # requested registered-targets list.
  contents = ('#!/bin/sh\n'
              + 'echo "llc fake output\nRegistered Targets:\n%s"' % targets)
  with open(filename, 'w') as f:
    f.write(contents)
  make_executable(filename)
Example #4
0
def make_fake_tool(filename, version, report_name=None):
  """Write a fake shell-script tool that reports the given version string.

  report_name defaults to the basename of *filename* when not given.
  """
  report_name = report_name or os.path.basename(filename)
  print('make_fake_tool: %s' % filename)
  ensure_dir(os.path.dirname(filename))
  lines = ['#!/bin/sh\n',
           'echo "%s version %s"\n' % (report_name, version),
           'echo "..."\n',
           'exit 0\n']
  with open(filename, 'w') as f:
    f.writelines(lines)
  make_executable(filename)
Example #5
0
  def test_node(self):
    NODE_WARNING = 'node version appears too old'
    NODE_WARNING_2 = 'cannot check node version'

    restore_and_set_up()

    # Clang should report the version number we expect, and emcc should not warn
    assert shared.check_node_version()
    output = self.check_working(EMCC)
    self.assertNotContained(NODE_WARNING, output)

    # Fake a different node version
    restore_and_set_up()
    with open(EM_CONFIG, 'a') as f:
      f.write('NODE_JS = "' + self.in_dir('fake', 'nodejs') + '"')

    ensure_dir('fake')

    for version, succeed in [('v0.8.0', False),
                             ('v4.1.0', False),
                             ('v4.1.1', True),
                             ('v4.2.3-pre', True),
                             ('cheez', False)]:
      print(version, succeed)
      try_delete(SANITY_FILE)
      f = open(self.in_dir('fake', 'nodejs'), 'w')
      f.write('#!/bin/sh\n')
      f.write('''if [ $1 = "--version" ]; then
echo "%s"
else
%s $@
fi
''' % (version, ' '.join(config.NODE_JS)))
      f.close()
      make_executable(self.in_dir('fake', 'nodejs'))
      if not succeed:
        if version[0] == 'v':
          self.check_working(EMCC, NODE_WARNING)
        else:
          self.check_working(EMCC, NODE_WARNING_2)
      else:
        output = self.check_working(EMCC)
        self.assertNotContained(NODE_WARNING, output)
def main(event):
    """Entry point: write the thunks carried in *event* to the local gg blob
    store, execute them with gg-execute-static, and return their outputs.

    Expected event keys:
        storageBackend: gg storage URI, exported as GG_STORAGE_URI.
        thunks: list of {'hash', 'data' (base64), 'outputs'} items.
        timelog: optional flag adding --timelog to the execute command.

    Returns a dict with 'returnCode' and 'stdout'; on full success it also
    carries 'executedThunks' with per-output metadata.
    """
    startTime = datetime.now()
    print('startTime: %s' % startTime.strftime('[%Y-%m-%dT%H:%M:%S.%fZ]'))

    os.environ['GG_STORAGE_URI'] = event['storageBackend']
    thunks = event['thunks']
    timelog = event.get('timelog')

    # Remove old thunk-execute directories
    os.system("rm -rf /tmp/thunk-execute.*")

    # Write thunks to disk; if the disk is full, wipe the gg caches once and
    # retry the whole write pass.
    tried_once = False

    while True:
        try:
            for thunk_item in thunks:
                blob_path = GGPaths.blob_path(thunk_item['hash'])
                if os.path.exists(blob_path):
                    os.remove(blob_path)
                with open(blob_path, "wb") as fout:
                    fout.write(b64decode(thunk_item['data']))

            # Move executables from Lambda package to .gg directory
            executables_dir = os.path.join(curdir, 'executables')
            if os.path.exists(executables_dir):
                for exe in os.listdir(executables_dir):
                    blob_path = GGPaths.blob_path(exe)
                    exe_path = os.path.join(executables_dir, exe)

                    if not os.path.exists(blob_path):
                        shutil.copy(exe_path, blob_path)
                        make_executable(blob_path)

            break

        except OSError as ex:
            if not tried_once and ex.errno == errno.ENOSPC:
                # there's no space left; let's get rid of GG_DIR and try again
                tried_once = True
                os.system("rm -rf '{}'".format(GGPaths.blobs))
                os.system("rm -rf '{}'".format(GGPaths.reductions))
                make_gg_dirs()
                continue
            raise

    # Execute the thunks, and upload the results.
    command = "./gg-execute-static --get-dependencies --put-output --cleanup"

    if timelog:
        command += " --timelog"

    # Append every thunk hash in one pass instead of quadratic +=.
    command += "".join(" " + x['hash'] for x in thunks)

    print('command: ' + command)

    return_code, stdout = run_command(command)
    print('command output: ' + stdout)

    executed_thunks = []

    for thunk in thunks:
        outputs = []

        for output_tag in thunk['outputs']:
            output_hash = GGCache.check(thunk['hash'], output_tag)

            # A missing output means the execution failed; surface the raw
            # return code and stdout for diagnosis.
            if not output_hash:
                return {'returnCode': return_code, 'stdout': stdout}

            output_path = GGPaths.blob_path(output_hash)
            data = None
            if is_hash_for_thunk(output_hash):
                with open(output_path, 'rb') as tin:
                    data = b64encode(tin.read()).decode('ascii')

            outputs.append({
                'tag': output_tag,
                'hash': output_hash,
                'size': os.path.getsize(output_path),
                'executable': is_executable(output_path),
                'data': data,
            })

        executed_thunks.append({'thunkHash': thunk['hash'],
                                'outputs': outputs})

    return {'returnCode': 0, 'stdout': '', 'executedThunks': executed_thunks}
Example #7
0
File: function.py — Project: eorbay/gg
def handler(event, context):
    """Lambda handler: execute a single thunk and store its output in Redis.

    Expected event keys:
        thunk_hash, thunk_data (base64), s3_bucket, s3_region, infiles,
        timelog (optional, default True).

    Returns a dict describing the output, or {'errorType': ...} on failure.
    """
    gginfo = GGInfo()

    gginfo.thunk_hash = event['thunk_hash']
    gginfo.s3_bucket = event['s3_bucket']
    gginfo.s3_region = event['s3_region']
    gginfo.infiles = event['infiles']

    enable_timelog = event.get('timelog', True)
    timelogger = TimeLog(enabled=enable_timelog)

    thunk_data = b64decode(event['thunk_data'])

    with open(GGPaths.blob_path(gginfo.thunk_hash), "wb") as fout:
        fout.write(thunk_data)

    timelogger.add_point("write thunk to disk")

    # Move executables shipped in the Lambda package into the blob store.
    executables_dir = os.path.join(curdir, 'executables')

    if os.path.exists(executables_dir):
        for exe in os.listdir(executables_dir):
            blob_path = GGPaths.blob_path(exe)
            exe_path = os.path.join(executables_dir, exe)

            if not os.path.exists(blob_path):
                shutil.copy(exe_path, blob_path)
                make_executable(blob_path)

    timelogger.add_point("copy executables to ggdir")

    # only clean up the gg directory if running on Lambda.
    if not fetch_dependencies(gginfo, gginfo.infiles, not GG_RUNNER):
        return {'errorType': 'GG-FetchDependenciesFailed'}

    for infile in gginfo.infiles:
        if infile['executable']:
            print("exe %s" % infile['hash'])
            make_executable(GGPaths.blob_path(infile['hash']))

    timelogger.add_point("fetching the dependencies")

    return_code, output = run_command(["gg-execute-static", gginfo.thunk_hash])

    if return_code:
        return {
            'errorType': 'GG-ExecutionFailed',
        }

    timelogger.add_point("gg-execute")

    result = GGCache.check(gginfo.thunk_hash)

    if not result:
        return {'errorType': 'GG-ExecutionFailed'}

    executable = is_executable(GGPaths.blob_path(result))

    timelogger.add_point("check the outfile")

    s3_client = boto3.client('s3')

    print("writing %s" % GGPaths.blob_path(result))
    with open(GGPaths.blob_path(result), "rb") as bfile:
        blob = bfile.read()
        print("file read")
        # `r` is a module-level Redis client — presumably; TODO confirm
        # against the imports of this file.
        r.set(result, blob)
        print("redis key set")
    # FIX: removed the dead no-op triple-quoted string that contained the
    # commented-out S3 upload path (upload_file / put_object_acl /
    # put_object_tagging). Output now goes to Redis only.
    timelogger.add_point("upload outfile to s3")

    if enable_timelog:
        print("s3 timelog uploading..")
        s3_client.put_object(ACL='public-read',
                             Bucket=gginfo.s3_bucket,
                             Key="runlogs/{}".format(gginfo.thunk_hash),
                             Body=str({
                                 'output_hash': result,
                                 'started': timelogger.start,
                                 'timelog': timelogger.points
                             }).encode('utf-8'))
    print("s3 timelog uploaded!")
    return {
        'thunk_hash': gginfo.thunk_hash,
        'output_hash': result,
        'output_size': os.path.getsize(GGPaths.blob_path(result)),
        'executable_output': executable
    }
Example #8
0
def handler(event, context):
    """Lambda handler: execute a batch of thunks and report their outputs.

    Expected event keys: storageBackend, thunks (list of {'hash', 'data'
    (base64), 'outputs'}), and an optional timelog flag.
    """
    os.environ['GG_STORAGE_URI'] = event['storageBackend']
    thunks = event['thunks']
    timelog = event.get('timelog')

    # Remove old thunk-execute directories
    os.system("rm -rf /tmp/thunk-execute.*")

    # Write thunks to disk, replacing any stale copies.
    for item in thunks:
        path = GGPaths.blob_path(item['hash'])
        if os.path.exists(path):
            os.remove(path)
        with open(path, "wb") as fout:
            fout.write(b64decode(item['data']))

    # Move executables from Lambda package to .gg directory
    executables_dir = os.path.join(curdir, 'executables')
    if os.path.exists(executables_dir):
        for exe in os.listdir(executables_dir):
            dst = GGPaths.blob_path(exe)
            src = os.path.join(executables_dir, exe)
            if not os.path.exists(dst):
                shutil.copy(src, dst)
                make_executable(dst)

    # Execute the thunk, and upload the result
    command = [
        "gg-execute-static", "--get-dependencies", "--put-output", "--cleanup"
    ]
    if timelog:
        command.append("--timelog")

    return_code, stdout = run_command(command + [t['hash'] for t in thunks])

    executed_thunks = []

    for thunk in thunks:
        outputs = []

        for tag in thunk['outputs']:
            output_hash = GGCache.check(thunk['hash'], tag)

            # A missing output means execution failed; report it verbatim.
            if not output_hash:
                return {'returnCode': return_code, 'stdout': stdout}

            blob = GGPaths.blob_path(output_hash)
            data = None
            if is_hash_for_thunk(output_hash):
                with open(blob, 'rb') as tin:
                    data = b64encode(tin.read()).decode('ascii')

            outputs.append({
                'tag': tag,
                'hash': output_hash,
                'size': os.path.getsize(blob),
                'executable': is_executable(blob),
                'data': data,
            })

        executed_thunks.append({'thunkHash': thunk['hash'],
                                'outputs': outputs})

    return {'returnCode': 0, 'stdout': '', 'executedThunks': executed_thunks}
Example #9
0
 def make_new_executable(name):
   """Create an empty file *name* under temp_bin and mark it executable."""
   path = os.path.join(temp_bin, name)
   with open(path, 'w'):
     pass
   make_executable(path)