def get_data(exp_dir, mango=False):
    """Load per-run profiling JSON files from *exp_dir* and aggregate timings.

    Runs whose ``params['operation_level_profiling']`` flag is absent or
    ``'0'`` contribute their ``total_duration``; runs where the flag is
    absent or ``'1'`` contribute per-operation durations for kernel
    executions and buffer reads/writes.

    @exp_dir - directory containing one JSON file per run
    @mango - when True, also read HHAL-level profiling from
             ``{exp_dir}/hhal`` and return three extra lists

    @return - (total_durations, buffer_reads, buffer_writes, kernel_execs),
              extended with (buffer_reads_hhal, buffer_writes_hhal,
              kernel_execs_hhal) when *mango* is True
    """
    def _durations(records):
        # Keep only the 'duration' field of each profiling record.
        return [r['duration'] for r in records]

    total_durations = []
    buffer_reads = []
    buffer_writes = []
    kernel_execs = []
    if mango:
        buffer_reads_hhal = []
        buffer_writes_hhal = []
        kernel_execs_hhal = []
    for run in files_in_dir(exp_dir):
        with open(f'{exp_dir}/{run}', 'r') as f:
            run_data = json.load(f)
        # A missing flag means the run recorded both kinds of measurement.
        profiling = run_data['params'].get('operation_level_profiling')
        if profiling is None or profiling == '0':
            total_durations.append(run_data['total_duration'])
        if profiling is None or profiling == '1':
            kernel_execs.append(_durations(run_data['kernel_executions']))
            buffer_reads.append(_durations(run_data['buffer_reads']))
            buffer_writes.append(_durations(run_data['buffer_writes']))
    if mango:
        for run in files_in_dir(f'{exp_dir}/hhal'):
            with open(f'{exp_dir}/hhal/{run}', 'r') as f:
                run_data = json.load(f)
            kernel_execs_hhal.append(_durations(run_data['kernel_executions']))
            buffer_reads_hhal.append(_durations(run_data['buffer_reads']))
            buffer_writes_hhal.append(_durations(run_data['buffer_writes']))

    if mango:
        return (total_durations, buffer_reads, buffer_writes, kernel_execs,
                buffer_reads_hhal, buffer_writes_hhal, kernel_execs_hhal)
    else:
        return total_durations, buffer_reads, buffer_writes, kernel_execs
# Ejemplo n.º 2
# 0
def get_data(exp_dir):
    """Collect per-experiment total durations and resource (de)allocation
    timings from the run JSON files under *exp_dir*.

    @exp_dir - directory with one integer-named sub-directory per experiment

    @return - (exp_sizes, total_durations, resource_allocations,
              resource_deallocations), aligned position-for-position
    """
    total_durations = []
    resource_allocations = []
    resource_deallocations = []
    exp_sizes = []
    # Experiment folders are named by size; visit them in ascending order.
    experiments = sorted(files_in_dir(exp_dir, include_folders=True), key=to_int)
    for i, exp_name in enumerate(experiments):
        total_durations.append([])
        resource_allocations.append([])
        resource_deallocations.append([])
        exp_sizes.append(int(exp_name))
        for run_file in files_in_dir(f'{exp_dir}/{exp_name}'):
            with open(f'{exp_dir}/{exp_name}/{run_file}', 'r') as fh:
                data = json.load(fh)
            total_durations[i].append(data['total_duration'])
            # Only the first (de)allocation record of each run is kept.
            resource_allocations[i].append(data['resource_allocations'][0]['duration'])
            resource_deallocations[i].append(data['resource_deallocations'][0]['duration'])
    return exp_sizes, total_durations, resource_allocations, resource_deallocations
# Ejemplo n.º 3
# 0
def run_with_config(exec_path, configs, dest_dir, mango=False):
    """Run *exec_path* repeatedly for every config and archive its JSON output.

    @exec_path - path of the executable to benchmark
    @configs - iterable of dicts with keys 'params' (CLI argument string),
               'save_as' (result sub-directory name) and 'iterations'
    @dest_dir - directory under which per-config results are stored
    @mango - when True, pause between runs and also collect the
             ``hhal_profiling`` output into ``{config_dir}/hhal``
    """
    count = 0
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    try:
        for c in configs:
            config_str = c['params']
            save_as_str = c['save_as']
            iterations = c['iterations']
            config_dir = f"{dest_dir}/{save_as_str}"
            # A pre-existing result directory marks an already-finished config.
            if os.path.exists(config_dir):
                print(f"Config {config_str} already done, skipping...")
                continue
            print(f"Running {iterations} iterations for {config_str}")
            i = 0
            # Only successful runs count; failed ones are retried.
            # NOTE(review): a command that always fails loops forever here.
            while i < iterations:
                cmd = f"{exec_path} {config_str}"
                print(f"Running {cmd}...")
                count += 1
                res = os.system(cmd)
                if mango:
                    sleep(1)
                if res == 0:
                    i += 1
            print(f"Ran all {iterations} iterations for {config_str}")
            os.makedirs(config_dir)
            # The benchmark drops its JSON reports into the CWD; archive them.
            for f in files_in_dir('.'):
                if f.endswith('.json'):
                    os.rename(f, f"{config_dir}/{f}")
            if mango:
                os.makedirs(f"{config_dir}/hhal")
                for f in files_in_dir('hhal_profiling'):
                    if f.endswith('.json'):
                        os.rename(f"hhal_profiling/{f}", f"{config_dir}/hhal/{f}")

    except CommandFailed:
        print("Command Failed")

    print(f"Ran {count} commands")
# Ejemplo n.º 4
# 0
def generate_anything_from_source(pv, dirname, contentdir, callback):
    """
        Traverse a given directory and generate extra content from it:
        API definitions, file type definitions, test case count, etc.

        Results are committed to a git repository inside @contentdir and
        tagged with the (sanitized) package version, so a second run for
        the same version is skipped.

        @pv - PackageVersion object
        @dirname - directory of local git repo where this version is the
        *CURRENT* checkout

        @contentdir - string - where to store the generated files
        @callback - func - callback to generate content; called as
        callback(filename, contentdir, targetfilename), may return None
        to contribute nothing for a file

        @return - list - non-None results from callback(file)
    """
    results = []

    if not os.path.exists(contentdir):
        os.makedirs(contentdir)

    # NOTE: this function changes the process CWD several times and leaves
    # it at @dirname on success.
    os.chdir(contentdir)

    # empty directory => git init
    if not os.listdir(contentdir):
        if os.system('git init') != 0:
            raise Exception("FAILED: git init in %s" % contentdir)


    # check if the same tag already exists and skip the import.
    # Spaces become underscores (valid tag name) and dots are escaped so the
    # version string works as a grep regex below.
    version = pv.version.replace(" ", "_")
    version = version.replace(".", "\.")
    if os.system('git tag | grep "^%s$"' % version) == 0: # tag already exists
        return [] # we have no idea if content was generated in the previous run
                    # so return []

    # remove all content files from previous tag.
    # we're doing this because tags are not imported in sequence.
    for p in os.listdir(contentdir):
        if p == ".git": # skip git directory
            continue

        if os.path.isfile(p):
            os.remove(p)

        if os.path.isdir(p):
            shutil.rmtree(p, True) # ignore errors


    os.chdir(dirname)
    # MAKE SURE we're working on the tag for this version
    if os.system('git checkout "%s"' % pv.version.replace(" ", "_")) != 0:
        raise Exception("FAILED: git checkout - tag doesn't exist")


    # walk all files and generate the content
    # prefix length used to strip @dirname (plus '/') from absolute paths
    l = len(dirname)+1

    # create empty .gitignore so we can commit
    f = open(os.path.join(contentdir, ".gitignore"), 'w')
    f.close()

    for filename in files_in_dir(dirname):
        # skip hidden files, files in .git, .hg directories
        # .git files will also override local .git/ directory
        if filename.find('/.') > -1:
            continue

        targetfilename = os.path.join(contentdir, filename[l:]) # absolute path

        # create subdirectories
        base_dir_name = os.path.dirname(targetfilename)
        if not os.path.exists(base_dir_name):
            os.makedirs(base_dir_name)

        res = callback(filename, contentdir, targetfilename)
        if res is not None:
            results.append(res)

    # all done, now commit
    os.chdir(contentdir)
    if os.system("git add .") != 0:
        raise Exception("FAILED: git add %s/%s" % (contentdir, pv.__unicode__()))

    # this may fail if nothing has changed
    # use -a to commit removed files
    cmdline = "git commit -a -m 'Content gen %s' --author='Difio <*****@*****.**>'" % pv.__unicode__() # FALSE NEGATIVE
    # os.system returns a raw wait status, hence the extra accepted values
    ret_code = os.system(cmdline)
    if ret_code not in [0, 1, 256]:
        raise Exception("FAILED: git commit %s/%s - return value %d" % (contentdir, pv.__unicode__(), ret_code))

    if os.system("git tag '%s'" % pv.version.replace(" ", "_")) != 0:
        raise Exception("FAILED: git tag %s/%s" % (contentdir, pv.__unicode__()))

    # go back to the original source
    # so that other stuff doesn't break
    os.chdir(dirname)

    return results # to the caller
def get_data(exp_dir, mango=False):
    """Aggregate profiling JSON for a set of experiment sizes.

    *exp_dir* contains one sub-directory per experiment, named with an
    integer size; experiments are processed in ascending size order and
    each appends position-for-position to the returned lists.

    Runs whose ``params['operation_level_profiling']`` flag is absent or
    ``'0'`` contribute ``total_duration``; runs where it is absent or
    ``'1'`` contribute per-operation timings.  Buffer reads/writes keep
    both ``size`` and ``duration``; kernel executions keep ``duration``
    only.

    @mango - when True, also collect resource (de)allocation timings and
             HHAL-level profiling found in ``{exp}/hhal``

    @return - (exp_sizes, total_durations, buffer_reads, buffer_writes,
              kernel_execs), extended with (resource_allocations,
              buffer_reads_hhal, buffer_writes_hhal, kernel_execs_hhal)
              when *mango* is True
    """
    def _durations(records):
        # Keep only the 'duration' field of each profiling record.
        return [r['duration'] for r in records]

    def _sized_durations(records):
        # Keep the buffer size alongside its transfer duration.
        return [{'size': r['size'], 'duration': r['duration']} for r in records]

    total_durations = []
    buffer_reads = []
    buffer_writes = []
    kernel_execs = []
    exp_sizes = []
    if mango:
        resource_allocations = []
        buffer_reads_hhal = []
        buffer_writes_hhal = []
        kernel_execs_hhal = []
    for idx, exp in enumerate(
            sorted(files_in_dir(exp_dir, include_folders=True), key=to_int)):
        total_durations.append([])
        kernel_execs.append([])
        buffer_writes.append([])
        buffer_reads.append([])
        if mango:
            resource_allocations.append([])
        exp_sizes.append(int(exp))
        for run in files_in_dir(f'{exp_dir}/{exp}'):
            with open(f'{exp_dir}/{exp}/{run}', 'r') as f:
                run_data = json.load(f)
            # A missing flag means the run recorded both kinds of measurement.
            profiling = run_data['params'].get('operation_level_profiling')
            if profiling is None or profiling == '0':
                total_durations[idx].append(run_data['total_duration'])
            if profiling is None or profiling == '1':
                kernel_execs[idx].append(_durations(run_data['kernel_executions']))
                buffer_reads[idx].append(_sized_durations(run_data['buffer_reads']))
                buffer_writes[idx].append(_sized_durations(run_data['buffer_writes']))
                if mango:
                    # Allocation and deallocation timings are folded into one list.
                    resource_allocations[idx].append(
                        _durations(run_data['resource_allocations']) +
                        _durations(run_data['resource_deallocations']))
        if mango:
            kernel_execs_hhal.append([])
            buffer_reads_hhal.append([])
            buffer_writes_hhal.append([])
            for run in files_in_dir(f'{exp_dir}/{exp}/hhal'):
                with open(f'{exp_dir}/{exp}/hhal/{run}', 'r') as f:
                    run_data = json.load(f)
                kernel_execs_hhal[idx].append(_durations(run_data['kernel_executions']))
                buffer_reads_hhal[idx].append(_sized_durations(run_data['buffer_reads']))
                buffer_writes_hhal[idx].append(_sized_durations(run_data['buffer_writes']))

    if mango:
        return (exp_sizes, total_durations, buffer_reads, buffer_writes,
                kernel_execs, resource_allocations, buffer_reads_hhal,
                buffer_writes_hhal, kernel_execs_hhal)
    else:
        return exp_sizes, total_durations, buffer_reads, buffer_writes, kernel_execs
# Ejemplo n.º 6
# 0
def main():
    """Optionally capture a stereo image pair, load it, compute the left
    disparity map, and optionally show plots along the way."""
    parser = argparse.ArgumentParser()
    # All options are plain strings; boolean-ish ones compare against 'True'.
    for flag, default, help_text in (
            ("--img_dir", 'data/images/IMX219-83', "directory of images"),
            ("--take_img", 'False', "Take depth images"),
            ("--img_ext", '.jpg', "Images extention"),
            ("--show_plt", 'False', "Show plots")):
        parser.add_argument(flag, type=str, default=default, help=help_text)

    args = parser.parse_args()

    camera_data = utils.read_json('config/jetson/config.json')

    waveshare = camera_data['waveshare_camera']
    left_pipeline = waveshare['left']
    right_pipeline = waveshare['right']
    api = waveshare['apiEnum']

    utils.check_directory(args.img_dir)
    if args.take_img == 'True':
        # Grab one fresh frame from each camera of the stereo pair.
        for pipeline, name in ((left_pipeline, 'img_l'),
                               (right_pipeline, 'img_r')):
            captureImage(pipeline,
                         api=api,
                         save_dir=args.img_dir,
                         img_name=name,
                         show_img=False)

    # Read the stereo-pair of images
    images = utils.files_in_dir(args.img_dir, args.img_ext)
    assert len(images) == 2
    img_left = cv2.cvtColor(cv2.imread(images[0]), cv2.COLOR_RGB2BGR)
    img_right = cv2.cvtColor(cv2.imread(images[1]), cv2.COLOR_RGB2BGR)

    if args.show_plt == 'True':
        # Large plot of the left image
        plt.figure(figsize=(10, 10), dpi=100)
        plt.imshow(img_left)
        plt.show()

    disp_left = stereo.compute_left_disparity_map(img_left, img_right)

    if args.show_plt == 'True':
        # Show the left disparity map
        plt.figure(figsize=(10, 10))
        plt.imshow(disp_left)
        plt.show()
# Ejemplo n.º 7
# 0
import json
import statistics

from utils import files_in_dir

# Root directory holding one integer-named sub-directory per experiment size.
exp_dir = 'old/experiments-release'


def to_int(a_str):
    """Sort key: interpret a directory name as its integer value."""
    return int(a_str)


total_durations = []
resource_allocations = []
resource_deallocations = []
exp_sizes = []
# Experiments are folders named by size; walk them in ascending order and
# gather per-run timings into parallel, per-experiment lists.
for i, exp_name in enumerate(
        sorted(files_in_dir(exp_dir, include_folders=True), key=to_int)):
    print(exp_name)
    total_durations.append([])
    resource_allocations.append([])
    resource_deallocations.append([])
    exp_sizes.append(int(exp_name))
    for run_file in files_in_dir(f'{exp_dir}/{exp_name}'):
        with open(f'{exp_dir}/{exp_name}/{run_file}', 'r') as fh:
            data = json.load(fh)
        total_durations[i].append(data['total_duration'])
        # Only the first (de)allocation record of each run is kept.
        resource_allocations[i].append(
            data['resource_allocations'][0]['duration'])
        resource_deallocations[i].append(
            data['resource_deallocations'][0]['duration'])

x = exp_sizes
 def _list_of_files(self):
     """Return every file in the dataset directory plus the test file."""
     dataset_files = list(files_in_dir(self.dataset))
     return dataset_files + [self.testfile]
 def compress(self):
     """Batch-compress every file found under ``self.path`` in place."""
     # BatchCompressor takes the file list up front, then writes into path.
     compressor = BatchCompressor(files_in_dir(self.path))
     compressor.compress(self.path)