Example #1
0
def get_main():
  """Entry point: fetch one object from S3 and write it to the output file.

  Reads credentials from the file named by --creds, opens a connection to
  the configured host/port, and streams the requested bucket/key into the
  --output destination via safeopen.
  """
  args = get_parser.parse_args()
  access_id, secret_key = load_creds(args.creds)
  # Both resources are released in reverse order when the block exits.
  with Connection(access_id, secret_key, args.host, args.port) as s3, \
       safeopen(args.output) as outfile:
    payload = s3.get_object(args.bucket, args.key)
    outfile.write(payload)
Example #2
0
def get_main(argv=None):
    """Entry point: download <bucket>/<key> and copy it to --output.

    :param argv: optional argument list for docopt; defaults to sys.argv[1:].
    """
    opts = docopt(GET_USAGE, argv)
    access_id, secret_key = load_creds(opts.get('--creds'))
    host = opts.get('--host')
    port = opts.get('--port')
    with Connection(access_id, secret_key, host, port) as s3:
        # Output is opened in binary mode; the object body is copied
        # stream-to-stream rather than buffered in one write.
        with safeopen(opts.get('--output'), 'wb') as outfile:
            body = s3.get_object(opts.get('<bucket>'), opts.get('<key>'))
            copy(body, outfile)
Example #3
0
def ls_main():
  """Entry point: list keys of a bucket, or all buckets if none is given.

  One name is written per line to the --output destination.
  """
  args = ls_parser.parse_args()
  access_id, secret_key = load_creds(args.creds)
  with Connection(access_id, secret_key, args.host, args.port) as s3:
    with safeopen(args.output) as outfile:
      # Guard-style branch: no bucket argument means "list all buckets".
      if not args.bucket:
        for name in s3.list_buckets():
          outfile.write('%s\n' % (name,))
      else:
        listing = s3.list_bucket(args.bucket, args.mark, args.prefix,
                                 args.batch)
        for name in listing:
          outfile.write('%s\n' % (name,))
Example #4
0
 def output_stream(self):
     """Yield an in-memory buffer; on clean exit, flush its contents to
     the configured destination.

     Destination resolution (self.arg_output):
       * None, '-', or '/dev/stdout' -> written to sys.stdout
       * an object with a callable .write -> written directly to it
       * otherwise a string path -> written via safeopen
     If the managed block raises, nothing is written (the writes below
     are skipped because the exception propagates out of the generator).
     """
     o = self.arg_output
     buf = StringIO.StringIO()
     # NOTE: the original wrapped this yield in `try: ... except: raise`,
     # which is a no-op re-raise; the exception propagates the same way
     # without it.
     yield buf
     # BUG FIX: the original condition was
     #   `if o in ('-', '/dev/stdout') or None:`
     # where `or None` is always falsy, so an unset output (o is None)
     # never reached stdout and instead fell through to the isinstance
     # assert below and crashed.
     if o is None or o in ('-', '/dev/stdout'):
         sys.stdout.write(buf.getvalue())
         sys.stdout.flush()
         return
     if hasattr(o, 'write') and callable(o.write):
         o.write(buf.getvalue())
         return
     assert isinstance(o, basestring), type(o)
     with safeopen(o) as fd:
         fd.write(buf.getvalue())
Example #5
0
def ls_main(argv=None):
    """Entry point: list the keys of <bucket>, or every bucket when no
    bucket is supplied. One name per line goes to --output.

    :param argv: optional argument list for docopt; defaults to sys.argv[1:].
    """
    opts = docopt(LS_USAGE, argv)
    access_id, secret_key = load_creds(opts.get('--creds'))
    with Connection(access_id, secret_key, opts.get('--host'),
                    opts.get('--port')) as s3:
        with safeopen(opts.get('--output')) as outfile:
            target = opts.get('<bucket>')
            # No bucket argument means "enumerate all buckets".
            if not target:
                for name in s3.list_buckets():
                    print(name, file=outfile)
            else:
                listing = s3.list_bucket(target,
                                         start=opts.get('--mark'),
                                         prefix=opts.get('--prefix'),
                                         batch_size=opts.get('--batch'))
                for name in listing:
                    print(name, file=outfile)
Example #6
0
 def copy(self, dst):
     """Copy this file's bytes into *dst* (must be a Path)."""
     assert isinstance(dst, Path)
     # Source is read-only binary; destination goes through safeopen so
     # the write gets whatever atomicity/cleanup that helper provides.
     with open(self._path, 'rb') as reader, \
          safeopen(dst._path, 'wb') as writer:
         copyfileobj(reader, writer)
Example #7
0
def validate_main():
    """Entry point: build config from the given YAML files, validate it,
    and render the result in the requested format.

    Exit codes: -1 bad usage, 1 validation errors, 2 unknown format,
    3 --test content mismatch; 0 otherwise.
    """
    usage = "usage: %prog [options] yaml [yaml ...]"
    parser = OptionParser(usage=usage)
    parser.add_option("-i", "--ignore_requirements", dest="ignored",
                      default=None,
                      help="requirements to ignore for validation purposes.")
    # NOTE(review): 'pickle' is also accepted below but is not advertised
    # in this help string — confirm whether that is intentional.
    parser.add_option("-f", "--format", dest="format",
                      default="yaml",
                      help="Output format: yaml, json, sh, make are supported.")
    parser.add_option("-o", "--output", dest="output",
                      help="Output destination: path where to write output. If not provided, stdout is used.")
    parser.add_option("-t", "--test", dest="test",
                      help="Tests to see if the contents in the file match whats specified by the yamls.")

    (options, yamls) = parser.parse_args()
    # At least one YAML is required; --test and --output are mutually exclusive.
    if not yamls:
        parser.print_usage()
        sys.exit(-1)
    if options.test and options.output:
        parser.print_usage()
        sys.exit(-1)

    b, params = build(*yamls)
    errs = b.validate(params, ignored=options.ignored.split(",") if options.ignored else [])
    if errs:
        # Group errors by section before dumping them to stderr.
        d = defaultdict(dict)
        for (section, key), err in errs.iteritems():
            d[section][key] = str(err)
        unparse(sys.stderr, dict(d), default_flow_style=False)
        sys.exit(1)
        # (dead `return` after sys.exit removed)

    # Under --test, render into a temp file so it can be diffed against
    # the reference file; otherwise write straight to --output/stdout.
    if options.test:
      render_fd = NamedTemporaryFile()
    else:
      render_fd = safeopen(options.output)

    with render_fd as output:
        if options.format == 'yaml':
            unparse(output, dict(params.iteritems()), default_flow_style=False)
        elif options.format == 'pickle':
            pickle.dump(dict(params), output)
        elif options.format == 'json':
            json.dump(dict(params),
                      output,
                      sort_keys=True,
                      indent=2,
                      separators=(',', ': '))
        elif options.format == 'sh':
            # Emit each value as an exported shell variable via a heredoc.
            for section in params:
                for key, value in params[section].iteritems():
                    if value is None:
                        print >> output, "# %s__%s is unset" % (_norm_sh_key(section), _norm_sh_key(key))
                    else:
                        print >> output, "read -r -d '' %s__%s<<EOF\n%s\nEOF\n" % (_norm_sh_key(section), _norm_sh_key(key), str(value))
                        print >> output, "export %s__%s\n" % (_norm_sh_key(section), _norm_sh_key(key))
        elif options.format == 'make':
            # Emit each value as a GNU make `define ... endef` block.
            for section in params:
                for key, value in params[section].iteritems():
                    if value is None:
                        print >> output, "# %s__%s is unset" % (_norm_sh_key(section), _norm_sh_key(key))
                    else:
                        print >> output, "define %s__%s\n%s\nendef\n" % (_norm_sh_key(section), _norm_sh_key(key), str(value))
        else:
            print >> sys.stderr, "Invalid output format."
            sys.exit(2)


        if options.test:
          output.flush()
          same = _check_same(options.test, output.name)
          if not same:
            print >> sys.stderr, "Config mismatch!"
            sys.exit(3)