Example #1
def write_stratum_metafiles(defs, stratum):
    '''Write the .meta files for a stratum to the baserock dir

    The split rules divide the installed components into artifacts listed
    under 'products' in the stratum .meta file. Each artifact contains a
    list of chunk artifacts that match the stratum's splitting rules.

    '''

    app.log(stratum['name'], 'splitting stratum')
    rules, splits = compile_rules(defs, stratum)

    for item in stratum['contents']:
        chunk = defs.get(item)
        if chunk.get('build-mode', 'staging') == 'bootstrap':
            continue

        metadata = get_metadata(defs, chunk)
        split_metadata = {'ref': metadata['ref'],
                          'repo': metadata['repo'],
                          'products': []}

        chunk_artifacts = defs.get(chunk).get('artifacts', {})
        for artifact, target in chunk_artifacts.items():
            splits[target].append(artifact)

        for element in metadata['products']:
            for artifact, rule in rules:
                if rule.match(element['artifact']):
                    split_metadata['products'].append(element)
                    splits[artifact].append(element['artifact'])
                    break

        split_metafile = os.path.join(stratum['baserockdir'],
                                      chunk['name'] + '.meta')

        with open(split_metafile, "w") as f:
            yaml.safe_dump(split_metadata, f, default_flow_style=False)

    write_metafile(rules, splits, stratum)
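For reference, a minimal sketch of the per-chunk .meta payload this function writes. The ref/repo values and the product entry are illustrative; the real ones come from get_metadata(defs, chunk) and the split-rule matching above.

import yaml

# Illustrative values only; in write_stratum_metafiles these come from
# get_metadata(defs, chunk) and the matched 'products' entries.
split_metadata = {
    'ref': 'a1b2c3d4e5f6',
    'repo': 'upstream:linux',
    'products': [
        {'artifact': 'linux-doc', 'files': ['usr/share/doc/linux/README']},
    ],
}

# Same dump settings as the example: safe dumper, block-style YAML.
print(yaml.safe_dump(split_metadata, default_flow_style=False))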
Example #2
@property
def metadata(self):
    # Read-through cache keyed on the page title; the @property decorator is
    # inferred, since the parent's metadata is accessed as an attribute.
    value = cache.get_metadata(self.title)
    if value is None:
        value = super(WikiPage, self).metadata
        cache.set_metadata(self.title, value)
    return value
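The same read-through pattern can be exercised standalone. Below is a minimal sketch with a dict-backed stand-in for the cache module; WikiPage and its parent's metadata property are assumed, so a plain function stands in for the expensive computation.

_store = {}  # dict-backed stand-in for the cache module

def get_metadata(key):
    return _store.get(key)

def set_metadata(key, value):
    _store[key] = value

def expensive_metadata(title):
    # stands in for super(WikiPage, self).metadata
    return {'title': title, 'length': len(title)}

def cached_metadata(title):
    # read-through: try the cache first, compute and store on a miss
    value = get_metadata(title)
    if value is None:
        value = expensive_metadata(title)
        set_metadata(title, value)
    return value

print(cached_metadata('HomePage'))  # computed and cached
print(cached_metadata('HomePage'))  # served from the cache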
Example #3
def install_stratum_artifacts(defs, component, stratum, artifacts):
    '''Create the .meta files for a split stratum

    Given a stratum and a list of artifacts to split, writes new .meta files to
    the baserock dir inside the component's sandbox and copies the files
    from each chunk's .unpacked directory into the sandbox.

    '''
    if os.path.exists(os.path.join(component['sandbox'], 'baserock',
                                   stratum['name'] + '.meta')):
        return

    stratum_metadata = get_metadata(defs, stratum)
    split_stratum_metadata = {}
    split_stratum_metadata['products'] = []
    components = []
    for product in stratum_metadata['products']:
        for artifact in artifacts:
            if artifact == product['artifact']:
                components += product['components']
                split_stratum_metadata['products'].append(product)

    if app.config.get('log-verbose'):
        app.log(component, 'Installing artifacts: ' + str(artifacts) +
                ' components: ' + str(components))

    baserockpath = os.path.join(component['sandbox'], 'baserock')
    if not os.path.isdir(baserockpath):
        os.mkdir(baserockpath)
    split_stratum_metafile = os.path.join(baserockpath,
                                          stratum['name'] + '.meta')
    with open(split_stratum_metafile, "w") as f:
        yaml.safe_dump(split_stratum_metadata, f, default_flow_style=False)

    for path in stratum['contents']:
        chunk = defs.get(path)
        if chunk.get('build-mode', 'staging') == 'bootstrap':
            continue

        metafile = os.path.join(get_cache(defs, chunk) + '.unpacked',
                                'baserock', chunk['name'] + '.meta')
        try:
            with open(metafile, "r") as f:
                filelist = []
                metadata = yaml.safe_load(f)
                split_metadata = {'ref': metadata['ref'],
                                  'repo': metadata['repo'],
                                  'products': []}
                for element in metadata['products']:
                    if element['artifact'] in components:
                        filelist += element.get('files', [])
                        split_metadata['products'].append(element)

                if split_metadata['products'] != []:
                    split_metafile = os.path.join(baserockpath,
                                                  os.path.basename(metafile))
                    with open(split_metafile, "w") as f:
                        yaml.safe_dump(split_metadata, f,
                                       default_flow_style=False)

                    cachepath, cachedir = os.path.split(get_cache(defs, chunk))
                    path = os.path.join(cachepath, cachedir + '.unpacked')
                    utils.copy_file_list(path, component['sandbox'], filelist)
        except Exception:
            app.log(stratum, 'WARNING: problem loading', metafile)
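utils.copy_file_list is project-specific and not shown in the excerpt. A plausible minimal sketch of its contract, assuming it copies each listed path (relative to srcdir) into destdir, creating parent directories as needed:

import os
import shutil

def copy_file_list(srcdir, destdir, filelist):
    # Hypothetical sketch; the real utils.copy_file_list may also preserve
    # symlinks, ownership and permissions.
    for relpath in filelist:
        src = os.path.join(srcdir, relpath)
        dest = os.path.join(destdir, relpath)
        if os.path.isdir(src):
            os.makedirs(dest, exist_ok=True)
        else:
            os.makedirs(os.path.dirname(dest), exist_ok=True)
            shutil.copy2(src, dest)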
Example #4
def _get_metadata(self):
    # Lazily fetch the metadata for this path and memoize it on the instance.
    if self._metadata is None:
        self._metadata = cache.get_metadata(self.path)
    return self._metadata
Example #5
def process_request(client_sock, cfg):
    recvbuf = b""

    # read one byte at a time until we have the whole header
    while not recvbuf.endswith(TERMB):
        recvbuf += sock_recv(client_sock, 1)

    # parse the request header and decide whether to tunnel (CONNECT)

    request, headers = HTTPHeader.parse(recvbuf.decode(ENC))
    is_tunnel = request["method"] == "CONNECT"

    # access control
    if request["hostname"] in cfg["access_control"]["domains"]:
        eprint("ACCESS CONTROL: Blocked attempt to access {}".format(request["hostname"]))
        status_message = get_status_message(cfg["access_control"]["status"])
        client_sock.send(
            bytes(
                request["version"] + " " + status_message + ENDL
                + "Connection: close" + ENDL
                + f"Content-Length: {len(status_message)}" + ENDL
                + ENDL
                + status_message + ENDL,
                ENC
                )
            )
        return sock_close(client_sock)

    # resolve address
    try:
        dest_ip = gethostbyname(request["hostname"])
    except OSError:  # name resolution failed
        status_message = get_status_message(404)
        client_sock.send(
            bytes(
                request["version"] + " " + status_message + ENDL
                + "Connection: close" + ENDL
                + f"Content-Length: {len(status_message)}" + ENDL
                + ENDL
                + status_message + ENDL,
                ENC
                )
            )
        return sock_close(client_sock)

    # connect
    dest_port = PORT_HTTPS if is_tunnel else PORT_HTTP
    dest_sock = socket(AF_INET, SOCK_STREAM)
    dest_sock.connect((dest_ip, dest_port))
    if is_tunnel:
        print(f"Tunnel: connection established with {dest_ip}:{dest_port}")
        client_sock.send(bytes(request["version"] + " 200 Connection Established", ENC) + TERMB)
        eprint("TUNNEL MODE")

        # start process to relay data from destination to client
        Process(target=forward_responses, args=(dest_sock, client_sock)).start()

        recvbuf = b""
        while True:
            # print("wait for client")
            recvbuf = sock_recv(client_sock, RECV_SIZE)
            # print("recvbuf:", recvbuf)
            # print("done waiting for client")
            if not len(recvbuf):
                return sock_close(client_sock, dest_sock)
            size = len(recvbuf)
            sent = 0
            while sent < size:
                sent += dest_sock.send(recvbuf[sent:])
        # o tunnel mode, thine simplicity bewilst me unto joy
    else:
        eprint("HTTP MODE")
        is_header = True
        body_left = 0
        wait_for_dest = False  # True -> request complete, wait for response;
                               #         i.e. it is dest's turn

        resource_method = None
        resource_hostname = None
        resource_path = None  # keep track of the URL for caching
        cache_file = None
        is_cache_validate = False
        responded_from_cache = False

        while True:
            if not wait_for_dest:
                is_cache_validate = False
                responded_from_cache = False
                if cache_file is not None:
                    eprint(f"Closing (write) cache file '{cache_file.name}'...")
                    cache_file.close()
                    cache_file = None
                # eprint("client's turn")
                if is_header and recvbuf.endswith(TERMB):  # done collecting headers
                    request, headers = HTTPHeader.parse(recvbuf.decode(ENC))
                    eprint(request)

                    resource_hostname = request["hostname"]
                    resource_path = request["path"]
                    resource_method = request["method"]

                    # check if we have the requested resource in cache
                    request_is_conditional = bool(
                        dict_get_insensitive(headers, "If-Modified-Since")
                        or dict_get_insensitive(headers, "ETag")
                        )
                    # leave the request alone if client is already doing conditional
                    if resource_method in CACHEABLE_METHODS and not request_is_conditional:
                        resource_url = "http://" + resource_hostname + resource_path
                        cache_metadata = cache.get_metadata(resource_url)  # None or (<last modified>, <etag>)
                        if cache_metadata is not None and cache_metadata[0] is not None:
                            is_cache_validate = True
                            headers["If-Modified-Since"] = cache_metadata[0]
                            # modifies the client's request into a conditional
                            # based on our cached metadata

                    header_b = bytes(HTTPHeader.generate(request, headers), ENC) + TERMB
                    dest_sock.send(header_b)  # forward to dest
                    body_left = int(headers.get("Content-Length", 0))
                    if body_left > 0:
                        is_header = False
                    else:
                        wait_for_dest = True
                    recvbuf = b""
                elif not is_header:  # receiving body
                    dest_sock.send(recvbuf)  # forward to dest
                    body_left -= 1
                    recvbuf = b""
                    if body_left == 0:
                        is_header = True
                        wait_for_dest = True
                if not wait_for_dest:
                    recvbuf += sock_recv(client_sock, 1)
                    if not recvbuf:
                        return sock_close(client_sock, dest_sock)
            else:  # wait_for_dest == True
                # eprint("dest's turn")

                dest_d = sock_recv(dest_sock, 1)
                if not dest_d:
                    return sock_close(client_sock, dest_sock)
                recvbuf += dest_d
                if is_header and recvbuf.endswith(TERMB):  # done collecting headers
                    response, headers = HTTPHeader.parse(recvbuf.decode(ENC), is_response=True)
                    eprint(response)
                    resource_url = "http://" + resource_hostname + resource_path

                    if is_cacheable(resource_method, response, headers):
                        cache.create_entry(resource_url)
                        cache_file = cache.open_file(resource_url, "wb")

                        # insert a custom x-header at the bottom
                        mod_recvbuf = recvbuf[:-2] + b"X-zjguard-Cache: 1\r\n" + recvbuf[-2:]

                        cache_file.write(mod_recvbuf[:-1])  # chop off the last byte ('\n'), otherwise
                                                            # it will be double-written below

                    if is_cache_validate and response["status"]["code"] == 304:  # Not Modified
                        # just send client what we have in cache and go back to client's turn
                        # note that this block is mutually exclusive with the
                        # above block, since 304 is not cacheable
                        with cache.open_file(resource_url, "rb") as f:
                            print(f"RESPONDING FROM CACHE: {f.name}")
                            cbd = f.read(48)  # arbitrary size
                            while cbd:
                                # print("send from cache:", cbd)
                                client_sock.send(cbd)
                                cbd = f.read(48)
                        responded_from_cache = True
                        wait_for_dest = False
                        is_cache_validate = False
                    elif is_cache_validate:
                        client_sock.send(recvbuf)
                    # else, echo back to client and save to cache as usual...

                    body_left = int(headers.get("Content-Length", 0))
                    if body_left > 0:
                        is_header = False
                    else:
                        wait_for_dest = False
                    recvbuf = b""
                elif not is_header:  # receiving body
                    body_left -= 1
                    recvbuf = b""
                    if body_left == 0:
                        is_header = True
                        wait_for_dest = False
                try:
                    if not is_cache_validate and not responded_from_cache:
                        # print("send from dest:", dest_d, "; responded_from_cache =", responded_from_cache)
                        client_sock.send(dest_d)  # forward to client
                    if cache_file is not None:
                        cache_file.write(dest_d)  # write to cache file
                except OSError:
                    return sock_close(client_sock, dest_sock)
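dict_get_insensitive is not defined in the excerpt. A minimal sketch consistent with how it is called above (HTTP header names are case-insensitive, so the lookup must ignore case):

def dict_get_insensitive(d, key, default=None):
    # compare casefolded keys so 'Content-Length' matches 'content-length'
    lowered = key.lower()
    for k, v in d.items():
        if k.lower() == lowered:
            return v
    return default

headers = {'content-length': '42'}
print(dict_get_insensitive(headers, 'Content-Length'))  # '42'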
Example #6
def main():
    parser = OptionParser(
        usage="Usage: scene-info.py [--missing-to file.txt] scene.json",
        description="Prints information about a JSON scene file.")
    parser.add_option(
        "-m",
        "--missing-to",
        dest="missing_to",
        help="Write a list of paths missing progressive info to file",
        metavar="MISSING_TO")
    (options, args) = parser.parse_args()

    if len(args) != 1:
        parser.print_help()
        parser.exit(1, "Wrong number of arguments.\n")

    if not os.path.isfile(args[0]):
        parser.print_help()
        parser.exit(1, "Input file '%s' is not a valid file.\n" % args[0])

    missing_to = None
    if options.missing_to is not None:
        missing_to = open(options.missing_to, 'w')

    fname = args[0]
    with open(fname) as f:
        json_data = json.load(f)

    total_triangles = 0
    total_draw_calls = 0
    total_ram_cache = {}
    total_mesh_size = {}
    total_texture_size = {}

    total_base_tris = 0
    base_ram_cache = {}
    total_base_draw_calls = 0
    total_base_mesh_bytes = {}
    total_base_texture_bytes = {}
    total_base_stream_bytes = {}

    total_full_tris = 0
    full_ram_cache = {}
    total_full_draw_calls = 0
    total_texture_sum = {}

    missing_progressive = set()
    missing_metadata = set()
    too_big = set()

    for m in json_data:
        metadata = cache.get_metadata(m['path'])

        if 'progressive' not in metadata['metadata']['types']:
            missing_progressive.add(m['path'])
        else:
            progressive = metadata['metadata']['types']['progressive']
            if 'metadata' not in progressive:
                missing_metadata.add(m['path'])
            else:
                total_base_tris += min(
                    progressive['metadata']['num_triangles'], 40000)
                total_base_mesh_bytes[m['path']] = progressive['size_gzip']
                total_base_stream_bytes[m['path']] = progressive.get(
                    'progressive_stream_size_gzip', 0)
                total_full_tris += progressive['metadata']['num_triangles']

                if progressive['metadata']['num_triangles'] > 40000:
                    too_big.add(m['path'])

                for mapname, mapinfo in progressive['mipmaps'].items():
                    byte_ranges = mapinfo['byte_ranges']
                    ram_size = 0  # fallback if no mip level reaches 128px
                    byte_size = 0
                    for levelinfo in byte_ranges:
                        width, height = levelinfo['width'], levelinfo['height']
                        if width >= 128 or height >= 128:
                            ram_size = width * height * 4
                            byte_size = levelinfo['length']
                            break
                    base_ram_cache[m['path']] = ram_size
                    total_base_texture_bytes[m['path']] = byte_size

                    if len(byte_ranges) > 0:
                        full_res = byte_ranges[-1]
                        width, height = full_res['width'], full_res['height']
                        full_ram_cache[m['path']] = width * height * 4

                    total_texture_sum[m['path']] = sum(
                        [l['length'] for l in byte_ranges])

                total_base_draw_calls += progressive['metadata'][
                    'num_draw_calls']
                total_full_draw_calls += progressive['metadata'][
                    'num_draw_calls']

        optimized = metadata['metadata']['types']['optimized']
        total_triangles += optimized['metadata']['num_triangles']
        total_ram_cache[m['path']] = optimized['metadata']['texture_ram_usage']
        total_draw_calls += optimized['metadata']['num_draw_calls']
        total_mesh_size[m['path']] = optimized['size_gzip']
        total_texture_size[m['path']] = sum(
            optimized['subfile_sizes_gzip'].values())

    total_ram = sum(total_ram_cache.values())
    total_base_ram = sum(base_ram_cache.values())
    total_full_ram = sum(full_ram_cache.values())
    total_mesh_size = sum(total_mesh_size.values())
    total_texture_size = sum(total_texture_size.values())
    total_base_mesh_bytes = sum(total_base_mesh_bytes.values())
    total_base_texture_bytes = sum(total_base_texture_bytes.values())
    total_base_stream_bytes = sum(total_base_stream_bytes.values())
    total_texture_sum = sum(total_texture_sum.values())

    for m in missing_progressive:
        if missing_to is not None:
            puts_err(m, stream=missing_to.write)
        else:
            puts_err('Warning: missing progressive version for: "%s"' % m)
    if missing_to is None and len(missing_progressive) > 0:
        puts()

    for m in too_big:
        metadata = cache.get_metadata(m)
        puts_err("Warning '%s' too big at %s triangles %s" %
                 (m,
                  pretty(metadata['metadata']['types']['progressive']
                         ['metadata']['num_triangles']),
                  pretty(metadata['metadata']['types']['progressive']
                         ['progressive_stream_num_triangles'])))
    if len(too_big) > 0:
        puts()

    for m in missing_metadata:
        puts_err("Warning '%s' missing metadata" % m)
    if len(missing_metadata) > 0:
        puts()

    puts('Number of models in the scene: %s' % pretty(len(json_data)))
    puts('Number of unique models in the scene: %s' %
         pretty(len(set(m['path'] for m in json_data))))
    puts()

    puts("Type 'optimized'")
    with indent(4):
        puts('Triangles: %s' % pretty(total_triangles))
        puts('Texture RAM: %s' % humanize_bytes(total_ram))
        puts('Draw Calls: %s' % pretty(total_draw_calls))
        puts('Mesh Download Size: %s' % humanize_bytes(total_mesh_size))
        puts('Textures Download Size: %s' % humanize_bytes(total_texture_size))
        puts('Total Download Size: %s' %
             humanize_bytes(total_mesh_size + total_texture_size))

    puts()
    puts("Type 'progressive' base mesh")
    with indent(4):
        puts('Triangles: %s' % pretty(total_base_tris))
        puts('Texture RAM: %s' % humanize_bytes(total_base_ram))
        puts('Draw Calls: %s' % pretty(total_base_draw_calls))
        puts('Base Mesh Download Size: %s' %
             humanize_bytes(total_base_mesh_bytes))
        puts('Base Texture Download Size: %s' %
             humanize_bytes(total_base_texture_bytes))
        puts('Total Base Download Size: %s' %
             humanize_bytes(total_base_mesh_bytes + total_base_texture_bytes))

    puts()
    puts("Type 'progressive' full quality")
    with indent(4):
        puts('Triangles: %s' % pretty(total_full_tris))
        puts('Texture RAM: %s' % humanize_bytes(total_full_ram))
        puts('Draw Calls: %s' % pretty(total_full_draw_calls))
        puts('Progressive Stream Download Size: %s' %
             humanize_bytes(total_base_stream_bytes))
        puts('All Textures Download Size: %s' %
             humanize_bytes(total_texture_sum))
        puts('Total Download Size: %s' %
             humanize_bytes(total_base_mesh_bytes + total_base_stream_bytes +
                            total_texture_sum))
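humanize_bytes is imported from elsewhere in this project. A minimal sketch consistent with its use above, formatting a byte count with a binary-unit suffix:

def humanize_bytes(n, precision=1):
    # walk up the binary units until the value fits below 1024
    for unit in ('bytes', 'KiB', 'MiB', 'GiB', 'TiB'):
        if n < 1024 or unit == 'TiB':
            return '%.*f %s' % (precision, n, unit)
        n /= 1024.0

print(humanize_bytes(123456789))  # 117.7 MiB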