Example #1
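# Registers the current working directory as the local checkout of this
# repository: an entry keyed by REPOSITORY_BASE_URL is written to
# repositories.json in the 0install.net/0repo XDG config directory
# (typically ~/.config/0install.net/0repo/).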
def handle(args):
	cmd.find_config()
	config = cmd.load_config()

	path = join(basedir.save_config_path('0install.net', '0repo'), 'repositories.json')
	if os.path.exists(path):
		with open(path, 'rb') as stream:
			db = json.load(stream)
	else:
		db = {}
	
	existing = db.get(config.REPOSITORY_BASE_URL, None)

	entry = {'type': 'local', 'path': os.getcwd()}

	if existing and existing == entry:
		print("Already registered in {path} (no changes made):\n{base}: {json}".format(
			path = path,
			base = config.REPOSITORY_BASE_URL,
			json = json.dumps(entry)))
		return

	db[config.REPOSITORY_BASE_URL] = entry

	with open(path + '.new', 'wb') as stream:
		json.dump(db, stream)
	os.rename(path + '.new', path)
	
	if existing:
		print("Updated entry in {path} to:".format(path = path))
	else:
		print("Created new entry in {path}:".format(path = path))

	print("{base}: {json}".format(base = config.REPOSITORY_BASE_URL, json = json.dumps(entry)))
Example #2
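# Runs a local HTTP proxy (the printed hint suggests pointing 0install at it
# via http_proxy): requests whose URL starts with REPOSITORY_BASE_URL are
# served from the repository's own 'public' and 'archives' directories, while
# all other requests are fetched from the real network and relayed unchanged.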
def handle(args):
	cmd.find_config()
	config = cmd.load_config()

	# Only serve files under these prefixes
	public_prefix = os.path.realpath('public') + os.path.sep
	archives_prefix = os.path.realpath('archives') + os.path.sep
	DOCUMENT_ROOTS = [ public_prefix, archives_prefix ]

	os.chdir(public_prefix)

	class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
		pass

	class Proxy(SimpleHTTPRequestHandler):
		def do_GET(self):
			try:
				def send(src, headers):
					self.send_response(200)
					for name, val in headers:
						self.send_header(name, val)
					self.end_headers()
					try:
						self.copyfile(src, self.wfile)
					finally:
						src.close()

				if self.path.startswith(config.REPOSITORY_BASE_URL):
					rel_path = self.path[len(config.REPOSITORY_BASE_URL):]
					full_path = os.path.realpath(os.path.abspath(rel_path))
					if not any (full_path.startswith(prefix) for prefix in DOCUMENT_ROOTS):
						self.send_error(403, "Forbidden: %s" % rel_path)
						raise Exception("Attempt to fetch file outside of '%s': %s'" %
								(public_prefix, full_path))

					try:
						headers = [('Content-Length', os.stat(rel_path).st_size)]
						send(open(rel_path, 'rb'), headers)
					except OSError:
						traceback.print_exc()
						self.send_error(404, "GET Not Found: %s" % rel_path)
				else:
					stream = urllib2.urlopen(self.path)
					send(stream, stream.headers.items())
			except:
				traceback.print_exc()
				self.send_response(500)

	httpd = ThreadedHTTPServer(('127.0.0.1', args.port), Proxy)
	print("To use:\nenv http_proxy='http://localhost:%s/' 0install [...]" % (args.port,))
	try:
		httpd.serve_forever()
	except:
		httpd.socket.close()
		raise
Example #3
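# Imports one or more feed files into the repository. When run outside a
# repository checkout, the first feed's master URL is used to look up the
# matching local repository in the registry and switch into that checkout
# before processing.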
def handle(args):
	files = [abspath(f) for f in args.path]

	if not cmd.find_config(missing_ok = True):
		# Import into appropriate registry for this feed
		with open(files[0], 'rb') as stream:
			doc = qdom.parse(stream)
		master = incoming.get_feed_url(doc, files[0])

		from_registry = registry.lookup(master)

		assert from_registry['type'] == 'local', 'Unsupported registry type in %s' % from_registry
		os.chdir(from_registry['path'])

		print("Adding to registry '{path}'".format(path = from_registry['path']))

	config = cmd.load_config()

	messages = []
	for feed in files:
		print("Adding", feed)
		msg = incoming.process(config, feed, delete_on_success = False)
		if msg:
			messages.append(msg)
	update.do_update(config, messages = messages)
Example #4
def handle(args):
    files = [abspath(f) for f in args.path]

    if not cmd.find_config(missing_ok=True):
        # Import into appropriate registry for this feed
        with open(files[0], 'rb') as stream:
            doc = qdom.parse(stream)
        master = incoming.get_feed_url(doc, files[0])

        from_registry = registry.lookup(master)

        assert from_registry[
            'type'] == 'local', 'Unsupported registry type in %s' % from_registry
        os.chdir(from_registry['path'])

        print("Adding to registry '{path}'".format(path=from_registry['path']))

    config = cmd.load_config()

    messages = []
    for feed in files:
        print("Adding", feed)
        msg = incoming.process(config, feed, delete_on_success=False)
        if msg:
            messages.append(msg)
    update.do_update(config, messages=messages)
Example #5
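# Same registration logic as Example #1, but this variant writes
# repositories.json in text mode and swaps the new file into place with
# support.portable_rename() instead of os.rename().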
def handle(args):
    cmd.find_config()
    config = cmd.load_config()

    path = join(basedir.save_config_path('0install.net', '0repo'),
                'repositories.json')
    if os.path.exists(path):
        with open(path, 'rb') as stream:
            db = json.load(stream)
    else:
        db = {}

    existing = db.get(config.REPOSITORY_BASE_URL, None)

    entry = {'type': 'local', 'path': os.getcwd()}

    if existing and existing == entry:
        print(
            "Already registered in {path} (no changes made):\n{base}: {json}".
            format(path=path,
                   base=config.REPOSITORY_BASE_URL,
                   json=json.dumps(entry)))
        return

    db[config.REPOSITORY_BASE_URL] = entry

    with open(path + '.new', 'w') as stream:
        json.dump(db, stream)
    support.portable_rename(path + '.new', path)

    if existing:
        print("Updated entry in {path} to:".format(path=path))
    else:
        print("Created new entry in {path}:".format(path=path))

    print("{base}: {json}".format(base=config.REPOSITORY_BASE_URL,
                                  json=json.dumps(entry)))
Example #6
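# Sets the 'stability' attribute on feed implementations whose id or version
# matches args.id, commits the edited XML to git, and regenerates the public
# feeds; prints "No changes made." if nothing needed changing.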
def handle(args):
    if not cmd.find_config(missing_ok=True):
        from_registry = registry.lookup(args.uri)
        assert from_registry[
            'type'] == 'local', 'Unsupported registry type in %s' % from_registry
        os.chdir(from_registry['path'])

    config = cmd.load_config()

    rel_uri = args.uri[len(config.REPOSITORY_BASE_URL):]
    feed_path = join('feeds', config.get_feeds_rel_path(rel_uri))
    with open(feed_path, 'rb') as stream:
        doc = minidom.parse(stream)

    messages = []
    for impl in merge.find_impls(doc.documentElement):
        impl_id = impl.getAttribute("id")
        impl_version = impl.getAttribute("version")
        impl_stability = impl.getAttribute("stability")
        if impl_id == args.id or impl_version == args.id:
            if args.stability and impl_stability != args.stability:
                messages.append(
                    'Implementation {id} (version {version}) stability set to {stability}'
                    .format(id=impl_id,
                            version=impl_version,
                            stability=args.stability))
                impl.setAttribute("stability", args.stability)

    if len(messages) > 0:
        commit_msg = 'Modified {uri}\n\n{messages}'.format(
            uri=args.uri, messages='\n'.join(messages))
        new_xml = formatting.format_doc(doc)
        incoming.write_to_git(feed_path, new_xml, commit_msg, config)
        update.do_update(config)
    else:
        print("No changes made.")
Example #7
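# Processes any feeds waiting in the incoming directory, then runs do_update()
# to regenerate the repository.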
def handle(args):
    cmd.find_config()
    config = cmd.load_config()
    messages = incoming.process_incoming_dir(config)
    do_update(config, messages)
Example #8
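# Python 3 variant of the test proxy in Example #2: uses urllib.request and
# opens local files in binary mode via a context manager.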
def handle(args):
    cmd.find_config()
    config = cmd.load_config()

    # Only serve files under these prefixes
    public_prefix = os.path.realpath('public') + os.path.sep
    archives_prefix = os.path.realpath('archives') + os.path.sep
    DOCUMENT_ROOTS = [public_prefix, archives_prefix]

    os.chdir(public_prefix)

    class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
        pass

    class Proxy(SimpleHTTPRequestHandler):
        def do_GET(self):
            try:

                def send(src, headers):
                    self.send_response(200)
                    for name, val in headers:
                        self.send_header(name, val)
                    self.end_headers()
                    try:
                        self.copyfile(src, self.wfile)
                    finally:
                        src.close()

                if self.path.startswith(config.REPOSITORY_BASE_URL):
                    rel_path = self.path[len(config.REPOSITORY_BASE_URL):]
                    full_path = os.path.realpath(os.path.abspath(rel_path))
                    if not any(
                            full_path.startswith(prefix)
                            for prefix in DOCUMENT_ROOTS):
                        self.send_error(403, "Forbidden: %s" % rel_path)
                        raise Exception(
                            "Attempt to fetch file outside of '%s': %s'" %
                            (public_prefix, full_path))

                    try:
                        headers = [('Content-Length',
                                    os.stat(rel_path).st_size)]
                        with open(rel_path, 'rb') as stream:
                            send(stream, headers)
                    except OSError:
                        traceback.print_exc()
                        self.send_error(404, "GET Not Found: %s" % rel_path)
                else:
                    stream = urllib.request.urlopen(self.path)
                    send(stream, list(stream.headers.items()))
            except:
                traceback.print_exc()
                self.send_response(500)

    httpd = ThreadedHTTPServer(('127.0.0.1', args.port), Proxy)
    print("To use:\nenv http_proxy='http://localhost:%s/' 0install [...]" %
          (args.port, ))
    try:
        httpd.serve_forever()
    except:
        httpd.socket.close()
        raise
Example #9
def handle(args):
	cmd.find_config()
	config = cmd.load_config()
	messages = incoming.process_incoming_dir(config)
	do_update(config, messages)
Example #10
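# Rebuilds the archive database from LOCAL_ARCHIVES_BACKUP_DIR: recomputes the
# SHA1 of every archive found there, reports entries that changed or are
# missing (missing ones are kept in the database), and saves the updated
# database, backing up the old copy first when existing entries changed.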
def handle(args):
	cmd.find_config()
	config = cmd.load_config()

	assert config.LOCAL_ARCHIVES_BACKUP_DIR, "No LOCAL_ARCHIVES_BACKUP_DIR!"

	db  = config.archive_db
	old_dir = os.getcwd()
	os.chdir(config.LOCAL_ARCHIVES_BACKUP_DIR)

	missing = set(db.entries.keys())
	seen = set()

	changes = 0
	need_backup = False

	for root, dirs, files in os.walk('.'):
		for f in files:
			if f.startswith('.'): continue

			rel_path = relpath(join(root, f), '.')

			sha1 = archives.get_sha1(rel_path)
			new = archives.StoredArchive(url = config.ARCHIVES_BASE_URL + rel_path, sha1 = sha1)

			existing = db.entries.get(f, None)

			if f in seen:
				raise SafeException("{}: DUPLICATE basename - not allowed!\nFirst:{}\nSecord:{}".format(f, existing, new))
			seen.add(f)

			if existing:
				missing.remove(f)

				if existing != new:
					need_backup = True
					changes += 1

					print("{}:".format(rel_path))
					if existing.sha1 != new.sha1:
						print("  Old SHA1: {old}\n  New SHA1: {new}".format(file = rel_path, old = existing.sha1, new = new.sha1))
					if existing.url != new.url:
						print("  Old URL: {old}\n  New URL: {new}".format(file = rel_path, old = existing.url, new = new.url))
			else:
				changes += 1
				print("{}: added to database: {url}".format(rel_path, url = new.url))

			db.entries[f] = new
	
	if missing:
		print("These archives were missing (but were not removed from the database)")
		for m in sorted(missing):
			print("  " + m)
	
	os.chdir(old_dir)
	
	if need_backup:
		backup_path = db.path + '.old'
		print("Old database saved as {}".format(backup_path))
		shutil.copyfile(db.path, backup_path)
	
	if changes:
		db.save_all()
		print("Updated {} (changes: {})".format(db.path, changes))

		if need_backup:
			print("Run '0repo update' to update public feeds.")
	else:
		print("No changes found")