def do(i):
    """Post-process CK 'experiment' entries with MobileNets performance results.

    Searches for experiment entries matching 'mobilenets-performance-*',
    stamps unified ASPLOS'18 ReQuEST meta information into each entry, and
    augments every point file ('*.flat.json') with derived features:
    model size, clock frequency, throughput (images/second) and, for
    batch size 1, inference latency.  Stale '*.cache.json' files are
    removed from the repository via 'git rm'.

    Input:  {
              i - CK-style input dict (not used)
            }

    Output: {
              return [int]  - 0 on success, >0 on error
              (error) [str] - error string if return>0
            }
    """

    # MobileNet width multiplier -> model size
    # (values preserved from the original if/elif chain; presumably the
    #  size of the weights file in bytes - TODO confirm)
    model_sizes = {
        0.25: 1990786,
        0.5: 5459810,
        0.75: 10498594,
        1.0: 17106694,
    }

    # List performance entries
    # (can be restricted with 'repo_uoa':'ck-request-asplos18-results')
    r = ck.access({'action': 'search',
                   'module_uoa': 'experiment',
                   'data_uoa': 'mobilenets-performance-*'})
    if r['return'] > 0: return r
    lst = r['lst']

    for q in lst:
        duid = q['data_uid']
        duoa = q['data_uoa']
        ruid = q['repo_uid']
        path = q['path']

        ck.out(duoa)

        # Load the full experiment entry
        r = ck.access({'action': 'load',
                       'module_uoa': 'experiment',
                       'data_uoa': duid,
                       'repo_uoa': ruid})
        if r['return'] > 0: return r

        dd = r['dict']
        ruid = r['repo_uid']
        apath = r['path']

        # Updating meta if needed
        dd['meta']['scenario_module_uoa'] = 'a555738be4b65860'  # module:request.asplos18

        dd['meta']['model_species'] = '07d4e7aa3750ddc6'  # model.species:mobilenets

        dd['meta']['dataset_species'] = 'ImageNet'  # dataset species (free format)
        dd['meta']['dataset_size'] = 500  # number of images ...

        dd['meta']['platform_species'] = 'embedded'  # embedded vs server (maybe other classifications such as edge)

        dd['meta']['platform_peak_power'] = 4.5  # Watts
        dd['meta']['platform_price'] = 239  # $
        dd['meta']['platform_price_date'] = '20170425'  # date

        dd['meta']['artifact'] = '08da9685582866a0'  # artifact description

        dd['meta']['model_precision'] = 'fp32'

        dd['meta']['processed'] = 'yes'

        # Unified full name for some deps
        ds = dd['meta']['deps_summary']

        for dep_key, meta_key in (('weights', 'model_design_name'),
                                  ('compiler', 'compiler_name'),
                                  ('library', 'library_name')):
            r = ck.access({'action': 'make_deps_full_name',
                           'module_uoa': 'request.asplos18',
                           'deps': ds[dep_key]})
            if r['return'] > 0: return r
            dd['meta'][meta_key] = r['full_name']

        # Updating entry
        r = ck.access({'action': 'update',
                       'module_uoa': 'experiment',
                       'data_uoa': duid,
                       'repo_uoa': ruid,
                       'dict': dd,
                       'substitute': 'yes',
                       'ignore_update': 'yes',
                       'sort_keys': 'yes'})
        if r['return'] > 0: return r

        # Checking points to aggregate
        os.chdir(path)  # 'git rm' below must run inside the entry's repo
        for f in os.listdir(path):
            if f.endswith('.cache.json'):
                os.system('git rm -f ' + f)

            elif f.endswith('.flat.json'):
                ck.out(' * ' + f)

                # Load performance file
                p1 = os.path.join(path, f)

                r = ck.load_json_file({'json_file': p1})
                if r['return'] > 0: return r
                d = r['dict']

                mult = d.get('##choices#env#CK_ENV_MOBILENET_WIDTH_MULTIPLIER#min', '')

                size = model_sizes.get(mult)
                if size is None:
                    return {'return': 1, 'error': 'unknown width multiplier "' + str(mult) + '"'}

                d['##features#model_size#min'] = size

                d['##features#gpu_freq#min'] = 807
                d['##features#cpu_freq#min'] = ''
                d['##features#freq#min'] = d['##features#gpu_freq#min']

                d['##features#processed#min'] = 'yes'

                # Add throughput (images/second) as the reciprocal of the
                # per-image prediction time
                tall = d.get('##characteristics#run#prediction_time_avg_s#all', [])
                if len(tall) > 0:
                    tnew = [1 / t for t in tall]

                    r = ck.access({'action': 'stat_analysis',
                                   'module_uoa': 'experiment',
                                   'dict': d,
                                   'dict1': {'##characteristics#run#inference_throughput': tnew}})
                    if r['return'] > 0: return r

                # Unify batch size
                x = d.get('##choices#env#CK_BATCH_SIZE#min', '')
                if x is not None and x != '':
                    batch = int(x)
                    d['##features#batch_size#min'] = batch

                    if batch == 1:
                        # With batch size 1 the per-image time is the latency
                        d['##features#measuring_latency#min'] = 'yes'

                        r = ck.access({'action': 'stat_analysis',
                                       'module_uoa': 'experiment',
                                       'dict': d,
                                       'dict1': {'##characteristics#run#inference_latency': tall}})
                        if r['return'] > 0: return r

                # Save updated dict
                r = ck.save_json_to_file({'json_file': p1, 'dict': d, 'sort_keys': 'yes'})
                if r['return'] > 0: return r

    return {'return': 0}
# --- Example 2 ---
def main(arg):
    """Rank CLBlast matrix-size tuning experiments and save the best configuration.

    Loads all experiment points for the configured repo/tags, optionally
    weighting each matrix size by its execution-time share (loaded from
    'arg.fp'), picks the best configuration via 'find_best' and writes the
    result to 'tmp-ck-clblast-tune.json'.

    Args:
        arg: parsed command-line arguments; 'arg.fp' may name a JSON file
             with per-matrix 'Execution time (%)' weights.
    """
    # CUSTOMIZABLE VARIABLES
    module_uoa = 'experiment'
    repo_uoa = 'explore-matrix-size-xgemm-fp32-firefly-rk3399'
    tags = 'explore-clblast-matrix-size'
    output_filename = 'tmp-ck-clblast-tune.json'

    weights_filename = 'NULL'

    # WEIGHTS == 1 -> weight each matrix by its execution-time share
    WEIGHTS = 0
    if arg.fp is not None:
        weights_filename = arg.fp
        # BUGFIX: the original condition was inverted - it reported
        # "<file> not found" (and enabled weights) exactly when the file
        # existed. Use weights only when the file is actually present.
        if os.path.isfile(weights_filename):
            WEIGHTS = 1
        else:
            print("[RANKING ERROR] %s not found. USE WEIGHTS=0" % (weights_filename))
    ### END CUST

    dlist = []
    r = ck.access({'action': 'search', 'repo_uoa': repo_uoa, 'module_uoa': module_uoa, 'tags': tags})
    if r['return'] > 0:
        print("Error: %s" % r['error'])
        exit(1)  # BUGFIX: without this, r['lst'] below would raise KeyError
    experiments = r['lst']
    if len(experiments) == 0:
        print("No experiments found in repo %s with tags %s" % (repo_uoa, tags))
        exit(1)

    for exp in experiments:
        data_uoa = exp['data_uoa']
        r = ck.access({'action': 'list_points', 'repo_uoa': repo_uoa, 'module_uoa': module_uoa, 'data_uoa': data_uoa})
        if r['return'] > 0:
            print("Error: %s" % r['error'])
            exit(1)

    # NOTE(review): the code below is deliberately kept outside the loop above
    # (as in the original), so only the points of the *last* experiment are
    # processed - presumably the search returns a single entry; TODO confirm.
    tags = r['dict']['tags']
    npoint = len(r['points'])
    print("[RANKING] Number of matrices: %s" % (npoint))
    for point in r['points']:
        point_file = os.path.join(r['path'], 'ckp-%s.0001.json' % point)
        d = get_data(point_file)
        dlist.append(d)

    # LOAD WEIGHTS (uniform when WEIGHTS == 0)
    w = []
    if WEIGHTS == 0:
        w = [1] * npoint
    else:
        print("Loading weights %s" % (weights_filename))
        # BUGFIX: close the weights file instead of leaking the handle,
        # and use the Python-3 print function (the original used a
        # Python-2 print statement here).
        with open(weights_filename) as wf:
            wdict = json.loads(wf.read())
        for entry in wdict:
            print(entry.get('Execution time (%)'))
            w.append(float(entry.get('Execution time (%)')))

    # 'point_file' is the last point processed above; its header describes
    # the kernel common to all points
    output = get_data_header(point_file)

    # FIND THE BEST configuration according to the weighted total time
    mbest = find_best(dlist, w)

    ### PREPARE OUTPUT
    del mbest['time']
    del mbest['total_time']
    mbest['GFLOPS'] = 0.0
    mbest['kernel'] = output.get('kernel')
    output['data'] = 'na'
    output['db'] = 'na'
    output['statistics'] = {'default_family': {}, 'default_configuration': {}, 'best_configuration': mbest}
    rr = ck.save_json_to_file({'json_file': output_filename, 'dict': output})
# --- Example 3 ---
def main(i):
    """Build MILEPOST nearest-neighbour models over several feature subsets.

    For every compiler in the module-level 'gcc' list this loads the
    prepared training data ('prepare_train_data_tmp.<gcc>.json'), derives a
    normalized feature table (each feature divided by its maximum), then
    trains/validates a model (module-level 'model' function) on several
    feature ranges (ft1..ft56, ft1..ft65, ft1..ft121, ft57..ft65, ...),
    both non-normalized and normalized.  Accuracies are accumulated in
    'save_all_model_data_tmp.json'.

    Input:  { i - CK-style input dict (not used) }

    Output: { return [int] - 0 on success, >0 on error }
    """

    # Load common table file (for all models); start empty if missing
    ddd = {}
    r = ck.load_json_file({'json_file': 'save_all_model_data_tmp.json'})
    if r['return'] == 0:
        ddd = r['dict']

    NFEAT = 121  # total number of flat MILEPOST features per benchmark

    # Searching for features
    for g in gcc:
        ck.out(
            '********************************************************************************'
        )
        ck.out('Modeling optimizations for ' + g)
        ck.out('')

        if g not in ddd: ddd[g] = {}

        gx = g.replace(' ', '_')

        r = ck.load_json_file(
            {'json_file': 'prepare_train_data_tmp.' + gx + '.json'})
        if r['return'] > 0: ck.err(r)

        d = r['dict']

        ftable = d['ftable']
        ctable = d['ctable']

        # Compute per-feature min/max across all benchmarks
        # (idiom fix: compare to None with 'is', per PEP 8)
        ftable_range = {}
        for f in ftable:
            for k in range(NFEAT):
                v = f[k]
                if k not in ftable_range:
                    ftable_range[k] = {'min': None, 'max': None}
                if ftable_range[k]['min'] is None or v < ftable_range[k]['min']:
                    ftable_range[k]['min'] = v
                if ftable_range[k]['max'] is None or v > ftable_range[k]['max']:
                    ftable_range[k]['max'] = v

        # Normalize (all features 0..1, scaled by the feature's maximum;
        # features whose maximum is 0 stay 0)
        ftable_normalized = []
        for f in ftable:
            x = []
            for k in range(NFEAT):
                v = 0
                if ftable_range[k]['max'] != 0:
                    v = f[k] / ftable_range[k]['max']
                x.append(v)
            ftable_normalized.append(x)

        # Model each feature range (0-based start, exclusive stop),
        # non-normalized and normalized
        for ft_start, ft_stop in [(0, 56), (0, 65), (0, 121), (56, 65),
                                  (56, 121), (65, 121)]:
            ext = 'ft' + str(ft_start + 1) + '_' + 'ft' + str(ft_stop)

            ck.out('')
            ck.out('Using non-normalized features ' + ext + ' ...')

            r = model({
                'ftable': ftable,
                'ctable': ctable,
                'ft_start': ft_start,
                'ft_stop': ft_stop
            })
            if r['return'] > 0: return r

            ddd[g]['milepost_' + ext] = r['accuracy']

            r1 = ck.save_json_to_file({
                'json_file':
                'process_model_using_nearest_neighbour/process_model_using_nearest_neighbour_'
                + ext + '_tmp.' + gx + '.json',
                'dict': r
            })
            if r1['return'] > 0: ck.err(r1)

            # Normalized variant of the same range
            ck.out('')
            ck.out('Using normalized features ' + ext + ' ...')

            r = model({
                'ftable': ftable_normalized,
                'ctable': ctable,
                'ft_start': ft_start,
                'ft_stop': ft_stop
            })
            if r['return'] > 0: return r

            r1 = ck.save_json_to_file({
                'json_file':
                'process_model_using_nearest_neighbour/process_model_using_nearest_neighbour_'
                + ext + '_tmp.' + gx + '.normalized.json',
                'dict': r
            })
            if r1['return'] > 0: ck.err(r1)

            ddd[g]['milepost_normalized_' + ext] = r['accuracy']

    # Save common data
    r = ck.save_json_to_file({
        'json_file': 'save_all_model_data_tmp.json',
        'dict': ddd
    })
    if r['return'] > 0: return r

    return {'return': 0}
# --- Example 4 ---
        label = label + 1

    # Prepare features
    fkeys = []
    ffkd = {}

    for q in range(1, 65 + 1):
        ffkd[q] = {'name': features[str(q)]['desc']}  # Add real name
        fkeys.append('ft' + str(q))

    for q in range(1, 56 + 1):
        q1 = 65 + q
        fkeys.append('ft' + str(q1))
        ffkd[q1] = {
            'name': 'Normalized ' + features[str(q)]['desc'] + ' (by ft24)'
        }  # Add real name

    # Recording input for model such as TensorFlow DNN
    r = ck.save_json_to_file({
        'json_file': 'prepare_train_data_tmp.' + gx + '.json',
        'dict': {
            'ftable': ftable,
            'fkeys': fkeys,
            'features_flat_keys_desc': ffkd,
            'ctable': ctable,
            "ckeys": ["Optimization class"],
            'optimization_classes': labels
        }
    })
    if r['return'] > 0: ck.err(r)
# --- Example 5 ---
def init(i):

    """
    Interactively initialize or update a CK 'result' entry (a graph).

    Loads the entry if 'uid' already exists, prompts the user for any
    missing fields (title, info, tags, source, author info), creates or
    updates the entry, writes its 'desc.json', and optionally publishes
    the graph on the portal.

    Relies on module-level names defined elsewhere in this file:
    config, ck, obj, meta_template, desc_template, extra_info_desc.

    Input:  {
              uid [str] - graph identifier ('' or None to create a new one)
              (version) [str] - graph version
              (desc_file) [str] - file with graph description
              (tags) [str] - tags separated by comma
              (name) [str] - graph title (prompted for if missing)
            }

    Output: {
              return  [int]    - return code = 0 if success or >0 if error
              (error) [str]    - error string if return>0

              plus the keys of the last CK call performed (the entry
              add/update, or the publish action if the user accepted it)
            }
    """

    # Get main configuration
    r=config.load({})
    if r['return']>0: return r
    cfg=r.get('dict',{})
    pcfg=r.get('path','')

    # CID: normalize optional identification inputs to strings ##########
    uid=i['uid']
    if uid==None: uid=''

    version=i.get('version')
    if version==None: version=''

    desc_file=i.get('desc_file','')
    if desc_file==None: desc_file=''

    # If UID!='', check if the entry already exists and pre-load its state
    found=False
    meta=meta_template
    path=''
    data_name=''
    tags=[]
    meta_info=''
    source=''
    extra_info={}

    if uid!='':
       r=ck.access({'action':'load',
                    'module_uoa':'result',
                    'data_uoa':uid})
       if r['return']>0:
          # return code 16 means "entry not found" - that's fine here,
          # we will create it below; any other error is fatal
          if r['return']!=16: return r
       else:
          found=True
          meta=r['dict']
          path=r['path']
          data_name=r['data_name']

          tags=meta.get('tags',[])
          source=meta.get('source','')
          meta_info=meta.get('meta',{}).get('info','')

          extra_info=r['info'].get('control',{})

    # Title: take from input if given, otherwise prompt when initializing
    # from scratch or when the existing entry has no title
    if i.get('name')!=None and i.get('name','')!='':
       data_name=i['name'].strip()
    elif not found or data_name=='':
       r=ck.inp({'text':'Select a title for your graph: '})
       if r['return']>0: return r

       data_name=r['string'].strip()

       meta['meta']['title']=data_name

    # General info: prompt when initializing from scratch or missing
    if not found or meta_info=='':
       r=ck.inp({'text':'Enter general info about your graph: '})
       if r['return']>0: return r

       x=r['string'].strip()

       # keep a non-empty placeholder so the field is considered set
       if x=='': x=' '

       meta['meta']['info']=x

    # Adding tags (from input, or interactively when only the default
    # 'result' tag is present)
    if i.get('tags')!=None and i.get('tags','')!='':
       xtags=i['tags'].strip().split(',')

       for t in xtags:
           t1=t.strip()
           if t1!='' and t1 not in tags:
              tags.append(t1)

       meta['tags']=tags

    elif not found or (len(tags)==1 and 'result' in tags):
       r=ck.inp({'text':'Enter tags for your graph separated by commas: '})
       if r['return']>0: return r

       xtags=r['string'].strip().split(',')

       for t in xtags:
           t1=t.strip()
           if t1!='' and t1 not in tags:
              tags.append(t1)

       meta['tags']=tags

    # Checking source (free-form text, can be a URL)
    if not found or source=='':
       r=ck.inp({'text':'Enter source of results for your graph (can be URL): '})
       if r['return']>0: return r

       source=r['string'].strip()

       meta['source']=source

    # Checking authors and other extra info fields declared in
    # extra_info_desc (each item has 'key' and human-readable 'name')
    for x in extra_info_desc:
        k=x['key']
        n=x['name']

        if not found or extra_info.get(k,'')=='':
           r=ck.inp({'text':'Enter '+n+': '})
           if r['return']>0: return r

           s=r['string'].strip()

           extra_info[k]=s

    # Creating/updating graph entry
    a='add'
    if found: a='update'

    ii={'action':a,
        'module_uoa':'result',
        'data_uoa':uid,
        'dict':meta,
        'sort_keys':'yes',
        'data_name':data_name,
        'substitute':'yes',
        'extra_info':extra_info}

    r=ck.access(ii)
    if r['return']>0: return r

    data_uoa=r['data_uoa']
    data_uid=r['data_uid']
    path=r['path']

    x='initialized'
    if found: x='updated'

    ck.out('Graph was successfully '+x+':')
    ck.out('')
    ck.out('  CK UID:  '+data_uid)
    ck.out('  CK name: '+data_uoa)
    ck.out('  CK path: '+path)

    # Add desc.json (graph description), merging in the user-provided
    # desc_file when given; never overwrite an existing desc.json unless
    # a desc_file was explicitly supplied
    p1=os.path.join(path, 'desc.json')

    dt=copy.deepcopy(desc_template)
    if desc_file!='':
       rx=ck.load_json_file({'json_file':desc_file})
       if rx['return']>0: return rx
       dx=rx['dict']
       dt['data_config'].update(dx)

    if desc_file!='' or not os.path.isfile(p1):
       rx=ck.save_json_to_file({'json_file':p1, 'dict':dt, 'sort_keys':'yes'})
       if rx['return']>0: return rx

    p2=os.path.join(path, '.cm', 'meta.json')

    ck.out('')
    ck.out('You can continue updating graph using following files: ')
    ck.out('')
    ck.out('  Graph general meta info: '+p1)
    ck.out('     See example at '+config.CR_DEFAULT_SERVER+'/result/sota-mlperf-inference-results-v0.5-open-available/?action=download&filename=.cm/meta.json')
    ck.out('')
    ck.out('  Graph axes info: '+p2)
    ck.out('     See example at '+config.CR_DEFAULT_SERVER+'/result/sota-mlperf-inference-results-v0.5-open-available/?action=download&filename=desc.json')

    # Offer to publish on the portal right away (default: yes)
    ck.out('')
    rx=ck.inp({'text':'Publish graph on the portal (Y/n)?'})
    if rx['return']>0: return rx
    s=rx['string'].strip().lower()

    if s=='' or s=='y':
       ck.out('')
       r=obj.publish({'cid':'result:'+data_uoa,
                      'version':version,
                      'force':True})

    else:
       ck.out('')
       ck.out('You can publish your graph on the portal using the following commands when ready: ')
       ck.out('')
       ck.out('  cb publish result:'+data_uoa+' --version=1.0.0 --force (--private)')

    # Returns the result of the last CK call (entry add/update, or publish)
    return r
            sorted_heatmap2.append([ibench,iopt,reaction])

            ibench+=1

    # Move all without improvements to the end
    for bx in bench_index:
        sbx=str(bx)
        if sbx not in remapping:
           remapping[sbx]=ibench
           ibench+=1

    # remap
    for h in heatmap:
        bench=h[0]
        iopt=h[1]
        reaction=h[2]

        x=[remapping[str(bench)],iopt,reaction]

        sorted_heatmap.append(x)

    # save
    dd['table']['0']=sorted_heatmap
    r=ck.save_json_to_file({'json_file':fn+'_clustered.json','dict':dd,'sort_keys':'yes'})
    if r['return']>0: ck.err(r)

    # save
    dd['table']['0']=sorted_heatmap2
    r=ck.save_json_to_file({'json_file':fn+'_clustered2.json','dict':dd,'sort_keys':'yes'})
    if r['return']>0: ck.err(r)
def main(i):
    """Endlessly autotune blocked matmul tile sizes and aggregate the stats.

    Detects the current platform, loads previously aggregated stats (from
    the module-level 'aggregated_stats' file name), compiles the
    'shared-matmul-c2' program once, then loops forever picking random
    matrix sizes N and tile sizes BS, recording per-N best/worst times
    and GFLOPS into the aggregated stats file after every improvement.

    NOTE(review): the 'while True' loop below has no break, so this
    function only terminates via an error return or an external interrupt;
    the trailing save/return after the loop is effectively unreachable.

    Relies on module-level names defined elsewhere in this file:
    ck, os, copy, random, math, line, aggregated_stats.
    """

    cur_dir = os.getcwd()
    fas = os.path.join(cur_dir, aggregated_stats)

    # Get some info about current platform
    ii = {'action': 'detect', 'module_uoa': 'platform', 'out': 'con'}

    r = ck.access(ii)
    if r['return'] > 0: return r

    # host OS identifiers (kept for reference; not used below)
    hos = r['host_os_uid']
    hosx = r['host_os_uoa']
    hosd = r['host_os_dict']

    # target OS identifiers (kept for reference; not used below)
    tos = r['os_uid']
    tosx = r['os_uoa']
    tosd = r['os_dict']

    cpu_name = r['features']['cpu']['name']
    plat_name = r['features']['platform']['name']

    #############################################################
    ck.out(line)
    ck.out('CPU name: ' + cpu_name)
    ck.out('Plat name: ' + plat_name)

    #############################################################
    ck.out(line)
    ck.out('Loading aggregated stats ...')

    aa = []
    if os.path.isfile(fas):
        r = ck.load_json_file({'json_file': fas})
        if r['return'] > 0: return r
        ax = r['dict']

        if 'all' not in ax: ax['all'] = []
        aa = ax['all']

    #############################################################
    ck.out(line)
    ck.out('Finding entry related to this platform ...')

    # NOTE(review): relies on 'a' leaking out of the for-loop; if several
    # entries match, the last match is used
    found = False
    for a in aa:
        if a.get('cpu_name', '') == cpu_name and a.get('plat_name',
                                                       '') == plat_name:
            found = True

    if not found:
        a = {'cpu_name': cpu_name, 'plat_name': plat_name}
        aa.append(a)

    if 'data' not in a: a['data'] = {}
    data = a.get('data', {})

    # Init pipeline (prepare only - compiled/run below)
    r = ck.access({
        'action': 'pipeline',
        'module_uoa': 'program',
        'data_uoa': 'shared-matmul-c2',
        'cpu_freq': 'max',
        'gpu_freq': 'max',
        'speed': 'yes',
        'compiler_vars': {
            'USE_BLOCKED_MATMUL': 'YES'
        },
        'no_state_check': 'yes',
        'prepare': 'yes',
        'out': 'con'
    })
    if r['return'] > 0: return r

    ready = r['ready']
    if ready != 'yes':
        return {'return': 1, 'error': 'can\'t init pipeline'}

    pipeline = r

    # Compile program once with a tiny default workload to validate the setup
    tpipeline = copy.deepcopy(pipeline)
    r = ck.access({
        'action': 'autotune',
        'module_uoa': 'pipeline',
        'pipeline': pipeline,
        'pipeline_update': {
            'env': {
                'CT_MATRIX_DIMENSION': 16,
                'CT_BLOCK_SIZE': 16
            }
        },
        'iterations': 1,
        'repetitions': 1,
        'out': 'con'
    })
    if r['return'] > 0: return r
    lsa = r.get('last_stat_analysis', {}).get('dict_flat', {})
    time_min = lsa.get('##characteristics#run#execution_time#min', None)
    if time_min == None or time_min == 0.0:
        return {'return': 1, 'error': 'failed to run default pipeline'}

    # data is per N size
    while True:  # continue infinite loop until stopping
        ck.out(line)

        # Pick N: fully random, a power of two, or a power of two -+1
        n = random.randint(0, 3)  # Matrix size generator
        if n == 0:
            N = random.randint(4, 1024)  # Matrix size
        else:
            NX = random.randint(2, 10)
            N = 2**NX
            if n == 2: N = N - 1
            if n == 3: N = N + 1

        SN = str(N)

        if SN not in data: data[SN] = {}
        xdata = data.get(SN, {})

        # Previously recorded extremes for this N (None on first visit)
        tmin = xdata.get('tmin', None)
        tmax = xdata.get('tmax', None)
        gmin = xdata.get('gmin', None)
        gmax = xdata.get('gmax', None)
        best_tile = xdata.get('best_tile', None)

        for opts in range(0, 16):
            # Choose BS: 1, N, random, or a power of two (-+1)
            if opts == 0:
                BS = 1
            elif opts == 1:
                BS = N
            else:
                b = random.randint(0, 3)

                if b == 0:
                    BS = random.randint(1, N)
                else:
                    # largest power-of-two exponent not exceeding N
                    B1 = math.frexp(N)[1] - 1
                    B2 = random.randint(0, B1)
                    BS = 2**B2

                    if b == 2 and BS > 1: BS = BS - 1
                    elif b == 3 and BS < N - 1: BS = BS + 1

            ck.out('Matrix size: ' + str(N))
            ck.out('Tile size:   ' + str(BS))

            # Run pipeline (reusing the compiled binary: no_compile)
            tpipeline = copy.deepcopy(pipeline)
            r = ck.access({
                'action': 'autotune',
                'module_uoa': 'pipeline',
                'pipeline': pipeline,
                'pipeline_update': {
                    'no_compile': 'yes',
                    'env': {
                        'CT_MATRIX_DIMENSION': N,
                        'CT_BLOCK_SIZE': BS
                    }
                },
                'iterations': 1,
                'repetitions': 3,
                'out': 'con'
            })
            if r['return'] > 0: return r

            lsa = r.get('last_stat_analysis', {}).get('dict_flat', {})
            time_min = lsa.get('##characteristics#run#execution_time#min',
                               None)

            # NOTE(review): 'changed' from the previous opts iteration is
            # reused when time_min is None - presumably harmless, but
            # worth confirming
            changed = False
            if time_min != None:
                # 2*N^3 floating-point operations for a square matmul
                ops = 2 * (N * N * N)
                if tmin == None or time_min < tmin:
                    tmin = time_min
                    best_tile = BS
                    gmax = 1.0e-9 * ops / tmin  # best time -> max GFLOPS
                    changed = True
                if tmax == None or time_min > tmax:
                    tmax = time_min
                    gmin = 1.0e-9 * ops / tmax  # worst time -> min GFLOPS
                    changed = True

                if changed:
                    xdata['tmin'] = tmin
                    xdata['tmax'] = tmax
                    xdata['gmin'] = gmin
                    xdata['gmax'] = gmax
                    xdata['best_tile'] = best_tile

                # Always record the BS=1 and BS=N reference points
                if opts == 0:
                    xdata['tbs1'] = time_min
                    xdata['gbs1'] = 1.0e-9 * ops / time_min
                    changed = True
                elif opts == 1:
                    xdata['tbsn'] = time_min
                    xdata['gbsn'] = 1.0e-9 * ops / time_min
                    xdata['bsn'] = N
                    changed = True

            if changed:
                ck.out(line)
                ck.out('Saving aggregated stats ...')

                r = ck.save_json_to_file({
                    'json_file': fas,
                    'dict': {
                        'all': aa
                    },
                    'sort_keys': 'yes'
                })
                if r['return'] > 0: return r

    #############################################################
    # NOTE(review): unreachable - the loop above never breaks
    ck.out(line)
    ck.out('Saving aggregated stats ...')

    r = ck.save_json_to_file({
        'json_file': fas,
        'dict': {
            'all': aa
        },
        'sort_keys': 'yes'
    })
    if r['return'] > 0: return r

    return {'return': 0}
def main(i):
    """Model compiler optimizations with decision trees of increasing depth.

    For every compiler in the module-level 'gcc' list, loads the prepared
    training data and, for tree depths 1..29:
      * builds and validates a sklearn decision-tree classifier 3 times on
        all data (accuracy is cumulative over the runs; min/max recorded);
      * performs leave-one-out cross-validation over the benchmarks.
    A human-readable log is written per compiler and accuracies are
    accumulated in 'save_all_model_data_tmp.json'.

    Input:  { i - CK-style input dict (not used) }

    Output: { return [int] - 0 on success, >0 on error }
    """

    # Load common table file (for all models); start empty if missing
    ddd = {}
    r = ck.load_json_file({'json_file': 'save_all_model_data_tmp.json'})
    if r['return'] == 0:
        ddd = r['dict']

    # Searching for features
    for g in gcc:
        ck.out(
            '********************************************************************************'
        )
        ck.out('Modeling optimizations for ' + g)
        ck.out('')

        if g not in ddd: ddd[g] = {}

        gx = g.replace(' ', '_')

        r = ck.load_json_file(
            {'json_file': 'prepare_train_data_tmp.' + gx + '.json'})
        if r['return'] > 0: ck.err(r)

        d = r['dict']

        ftable = d['ftable']
        ctable = d['ctable']

        s = ''  # accumulated human-readable log for this compiler

        for depth in range(1, 30):
            # Accuracy bookkeeping over the repeated builds on all data
            acc = 0
            obs = 0
            wrong = 0

            acc_min = None
            acc_max = None

            s = '==============================================================\n'
            s += 'Depth: ' + str(depth) + '\n\n'

            ck.out(s)

            # Build the model 3 times (randomized - results may differ slightly)
            for n in range(0, 3):
                # Building decision tree on all data
                ii = {
                    'action': 'build',
                    'module_uoa': 'model',
                    'ftable': ftable,
                    'ctable': ctable,
                    'keep_temp_files': 'yes',
                    'model_module_uoa': 'model.sklearn',
                    'model_name': 'dtc',
                    'model_file':
                        'process_model_using_decision_trees/model-sklearn-dtc-'
                        + gx + '-depth' + str(depth),
                    'model_params': {'max_depth': depth},
                    'out': ''
                }

                # Training
                r = ck.access(ii)
                if r['return'] > 0: ck.err(r)

                # Validating on the same (training) data
                ii = copy.deepcopy(ii)
                ii['action'] = 'validate'

                r = ck.access(ii)
                if r['return'] > 0: ck.err(r)

                obs += r['observations']
                wrong += r['mispredictions']

                # Cumulative accuracy across the runs so far
                # (BUGFIX: was computed twice in a row in the original)
                acc = float(obs - wrong) / float(obs)

                x = '  Accuracy on all data (' + str(
                    n + 1) + ' out of 3):   ' + str(acc)
                s += x
                ck.out(x)

                if acc_min is None or acc < acc_min:
                    acc_min = acc

                if acc_max is None or acc > acc_max:
                    acc_max = acc

            x = '\nDepth: ' + str(
                depth
            ) + ' ; accuracy (min/max): ' + '%.2f' % acc_min + ' .. ' + '%.2f' % acc_max + '\n'
            s += x
            ck.out(x)

            # Cross-validating (leave one out; 1 run for simplicity)
            cross_obs = 0
            cross_wrong = 0

            x = '  *************************************************\n'
            x += '  Cross-validating model (leave one out)\n\n'
            s += x
            ck.out(x)

            for bench in range(0, len(ftable)):
                # Split: everything except 'bench' for training,
                # 'bench' alone for testing
                train_ftable = []
                train_ctable = []
                test_ftable = []
                test_ctable = []

                for k in range(0, len(ftable)):
                    if k != bench:
                        train_ftable.append(ftable[k])
                        train_ctable.append(ctable[k])
                    else:
                        test_ftable.append(ftable[k])
                        test_ctable.append(ctable[k])

                # Build the model on the training split
                ii = {
                    'action': 'build',
                    'module_uoa': 'model',
                    'ftable': train_ftable,
                    'ctable': train_ctable,
                    'keep_temp_files': 'no',
                    'model_module_uoa': 'model.sklearn',
                    'model_name': 'dtc',
                    'model_file': 'tmp-model-sklearn-dtc',
                    'model_params': {'max_depth': depth},
                    'out': ''
                }

                r = ck.access(ii)
                if r['return'] > 0: ck.err(r)

                # Validate on the held-out benchmark only
                ii = copy.deepcopy(ii)
                ii['action'] = 'validate'
                ii['ftable'] = test_ftable
                ii['ctable'] = test_ctable

                r = ck.access(ii)
                if r['return'] > 0: ck.err(r)

                cross_obs += r['observations']
                cross_wrong += r['mispredictions']

                cross_acc = float(cross_obs - cross_wrong) / float(cross_obs)

                x = '    ' + str(bench) + ' out of ' + str(
                    len(ftable)
                ) + ' ) current cross-validation accuracy: ' + '%.2f' % cross_acc
                s += x
                ck.out(x)

            x = '\nDepth: ' + str(
                depth
            ) + ' ; accuracy (with cross-validation): ' + '%.2f' % cross_acc + '\n'
            s += x
            ck.out(x)

            ddd[g]['decision_trees_with_cross_validation_depth_' + str(depth) +
                   '_ft1_ft65'] = cross_acc
            ddd[g]['decision_trees_without_cross_validation_depth_' +
                   str(depth) + '_ft1_ft65'] = acc_max

        r = ck.save_text_file({
            'text_file':
            'process_model_using_decision_trees/log.' + gx + '.txt',
            'string': s
        })
        if r['return'] > 0: return r

        # Save common data (continuously, not to lose data)
        r = ck.save_json_to_file({
            'json_file': 'save_all_model_data_tmp.json',
            'dict': ddd
        })
        if r['return'] > 0: return r

    return {'return': 0}
def do(i):
    """Post-process CK 'experiment' entries for the ReQuEST ASPLOS'18
    TVM/FPGA (ResNet-18 on Xilinx PYNQ-Z1) submission.

    For every matching performance entry this: rewrites its meta
    information to the unified ReQuEST schema, updates the entry in the
    CK repository, then walks the entry's directory to prune cached
    files, recompute throughput/latency statistics and attach the
    accuracy measured in a separate experiment.

    Input:  i - CK action dict (not read by this function).
    Output: {'return': 0} on success, or the failing sub-call's result
            dict (with 'return' > 0 and an 'error' string) on error.
    """

    # List performance entries
    r = ck.access({
        'action': 'search',
        'module_uoa': 'experiment',
        'data_uoa': 'ck-request-asplos18-tvm-fpga-performance-*'
        #                 'repo_uoa':'ck-request-asplos18-results'
    })
    if r['return'] > 0: return r
    lst = r['lst']

    for q in lst:
        duid = q['data_uid']
        duoa = q['data_uoa']
        ruid = q['repo_uid']
        path = q['path']

        ck.out(duoa)

        # Search matching accuracy entry
        r = ck.access({
            'action': 'load',
            'module_uoa': 'experiment',
            'data_uoa': duid,
            'repo_uoa': ruid
        })
        if r['return'] > 0: return r

        dd = r['dict']
        ruid = r['repo_uid']
        apath = r['path']

        # Updating meta if needed
        dd['meta'][
            'scenario_module_uoa'] = 'a555738be4b65860'  # module:request.asplos18

        dd['meta'][
            'model_species'] = 'd41bbf1e489ab5e0'  # model.species:resnet18

        dd['meta'][
            'dataset_species'] = 'ImageNet'  # dataset species (free format)
        dd['meta']['dataset_size'] = 2000  # number of images ...

        dd['meta'][
            'platform_species'] = 'fpga'  # embedded vs server vs fpga (maybe other classifications such as edge)

        dd['meta']['platform_peak_power'] = 2.5  #Watts
        dd['meta']['platform_price'] = 229  # $
        dd['meta']['platform_price_date'] = '20180404'  # date

        dd['meta']['artifact'] = '9375838469ad4029'  # artifact description

        dd['meta']['model_precision'] = 'int8'

        dd['meta']['processed'] = 'yes'

        # Unified full name for some deps
        ds = dd['meta']['deps_summary']

        x = ds['model']
        r = ck.access({
            'action': 'make_deps_full_name',
            'module_uoa': 'request.asplos18',
            'deps': x
        })
        if r['return'] > 0: return r

        dd['meta']['model_design_name'] = r['full_name']
        dd['meta']['plat_name'] = 'Xilinx PYNQ-Z1 FPGA (ZYNQ XC7Z020-1CLG400C)'
        dd['meta']['os_name'] = 'Ubuntu 15.10'
        dd['meta'][
            'cpu_name'] = 'Programmable logic equivalent to Artix-7 FPGA'

        # Updating entry (substitute the whole dict, keep keys sorted).
        r = ck.access({
            'action': 'update',
            'module_uoa': 'experiment',
            'data_uoa': duid,
            'repo_uoa': ruid,
            'dict': dd,
            'substitute': 'yes',
            'ignore_update': 'yes',
            'sort_keys': 'yes'
        })
        if r['return'] > 0: return r

        # Checking points to aggregate
        # NOTE(review): chdir is needed so the 'git rm' below runs
        # inside the entry's repository checkout.
        os.chdir(path)
        dperf = os.listdir(path)
        for f in dperf:
            if f.endswith('.cache.json'):
                # Stale cache files are removed from git, not just disk.
                os.system('git rm -f ' + f)

            elif f.endswith('.flat.json'):
                ck.out(' * ' + f)

                # Load performance file
                p1 = os.path.join(path, f)

                r = ck.load_json_file({'json_file': p1})
                if r['return'] > 0: return r
                d1 = r['dict']

                # Prune some old value (stats are recomputed below).
                d = {}
                for k in d1:
                    if not k.startswith('##characteristics#run#accuracy_top1') and \
                       not k.startswith('##characteristics#run#accuracy_top5') and \
                       not k.startswith('##characteristics#run#inference_throughput') and \
                       not k.startswith('##characteristics#run#inference_latency'):
                        d[k] = d1[k]

                d['##features#model_size#min'] = 129770000  # Bytes

                d['##features#gpu_freq#min'] = 100
                d['##features#cpu_freq#min'] = ''
                d['##features#freq#min'] = d['##features#gpu_freq#min']

                d['##features#processed#min'] = 'yes'

                # Add throughput (images/second): reciprocal of each
                # per-image classification time.
                tall = d.get(
                    '##characteristics#run#execution_time_classify_internal#all',
                    [])  # It's internal VTA measurements
                if len(tall) > 0:
                    tnew = []
                    for t in tall:
                        t1 = 1 / t
                        tnew.append(t1)

                    r = ck.access({
                        'action': 'stat_analysis',
                        'module_uoa': 'experiment',
                        'dict': d,
                        'dict1': {
                            '##characteristics#run#inference_throughput': tnew
                        }
                    })
                    if r['return'] > 0: return r

                # Unify batch size
                batch = 1  # for now only 1 is supported in this artifact
                d['##features#batch_size#min'] = batch

                # inference latency (raw per-image times, seconds)
                d['##features#measuring_latency#min'] = 'yes'

                r = ck.access({
                    'action': 'stat_analysis',
                    'module_uoa': 'experiment',
                    'dict': d,
                    'dict1': {
                        '##characteristics#run#inference_latency': tall
                    }
                })
                if r['return'] > 0: return r

                r = ck.access({
                    'action': 'stat_analysis',
                    'module_uoa': 'experiment',
                    'dict': d,
                    'dict1': {
                        '##characteristics#run#prediction_time_avg_s': tall
                    }
                })
                if r['return'] > 0: return r

                # Add accuracy (was calculated through separate experiment)
                # NOTE(review): accuracy_top1/accuracy_top5 are not
                # defined in this function - presumably module-level
                # constants set elsewhere in this file; confirm.
                r = ck.access({
                    'action': 'stat_analysis',
                    'module_uoa': 'experiment',
                    'dict': d,
                    'dict1': {
                        '##characteristics#run#accuracy_top1': [accuracy_top1]
                    }
                })
                if r['return'] > 0: return r

                # Add accuracy (was calculated through separate experiment)
                r = ck.access({
                    'action': 'stat_analysis',
                    'module_uoa': 'experiment',
                    'dict': d,
                    'dict1': {
                        '##characteristics#run#accuracy_top5': [accuracy_top5]
                    }
                })
                if r['return'] > 0: return r

                # Save updated dict
                r = ck.save_json_to_file({
                    'json_file': p1,
                    'dict': d,
                    'sort_keys': 'yes'
                })
                if r['return'] > 0: return r

    return {'return': 0}
Esempio n. 10
0
        heatmap=heatmaps[hm] 

        dd={'table':{"0":heatmap}}

        xx=[]
        iq=0
        for k in uopts[comp]:
            xx.append(iq)
            iq+=1

        dd['axis_y_labels']=xx

        name='init_reactions_tmp_heatmap_'+comp.replace(' ','_')+'_'+hm1

        r=ck.save_json_to_file({'json_file':name+'.json','dict':dd,'sort_keys':'yes'})
        if r['return']>0: ck.err(r)

        # Plot
        ii.update(dd)

        ii['out_to_file']=name+'.pdf'

        # Check number of labels
        x=[]
        for iq in range(0,len(datasets[hm])+1):
            x.append(iq)

        ii['axis_x_labels']=x

        r=ck.access(ii)
def _split_model_key(k):
    """Split a model-result key into (base_name, numeric_index).

    Keys look like '<model>_depth_<n>_<features>' or
    '<model>_iteration_<n>_<features>'.  The numeric part is extracted
    so rows can be ordered numerically (depth 10 after depth 2) rather
    than lexicographically; keys without either marker keep index 0.
    """
    sort = 0
    k1 = k

    j = k.find('_depth_')
    if j > 0:
        j1 = k.find('_', j + 7)
        if j1 > 0:
            sort = int(k[j + 7:j1])
            k1 = k[:j]

    # BUG FIX: the original compared `sort == ''`, which is never true
    # because `sort` is initialised to the int 0 - so '_iteration_'
    # keys never received a numeric sort index.
    j = k.find('_iteration_')
    if sort == 0 and j > 0:
        j1 = k.find('_', j + 11)
        if j1 > 0:
            sort = int(k[j + 11:j1])
            k1 = k[:j]

    return k1, sort


# Substrings identifying rows dropped from the short (paper) table.
_SHORT_TABLE_SKIP = (
    'depth 3', 'depth 5', 'depth 6', 'depth 7', 'depth 9', 'depth 10',
    'depth 11', 'depth 12', 'depth 13', 'depth 14', 'depth 15',
    'depth 17', 'depth 18', 'depth 19', 'depth 21', 'depth 22',
    'depth 23', 'depth 24', 'depth 26', 'depth 27', 'depth 28',
    'iteration 5', 'iteration 6', 'iteration 7', 'iteration 8',
    'iteration 9')


def main(i):
    """Produce model-accuracy comparison tables for reports/papers.

    Reads 'save_all_model_data_tmp.json' (accuracy per model/feature
    set for GCC 4.9.2 and GCC 7.1.0) and writes three table files:
    full (interactive report), short (paper) and MILEPOST-only.

    Input:  i - CK action dict (not read by this function).
    Output: {'return': 0} on success, or the failing sub-call's result
            dict on error.
    """

    # Load common table file (for all models)
    r = ck.load_json_file({'json_file': 'save_all_model_data_tmp.json'})
    if r['return'] > 0: return r
    d = r['dict']

    # Get all models/features (keys assumed shared by both compilers).
    kk = list(d['GCC 4.9.2'])

    table = []
    table_milepost = []
    table_short = []

    # Decorate each key with (base name, numeric index) for sorting.
    ikk = [list(_split_model_key(k)) + [k] for k in kk]

    for k1 in sorted(ikk, key=lambda x: (x[0], x[1])):
        k = k1[2]

        norm = False

        # The feature-range suffix starts at '_ft...'; normalized
        # variants carry a '_normalized' marker instead.
        j = k.find('_ft')
        j1 = k.find('_normalized')
        if j1 > 0:
            norm = True
            j = j1

        # Human-readable model name.
        km = k[:j].replace('_', ' ')
        if km.startswith('milepost'):
            ext = km[9:]
            if ext != '': ext = ' (' + ext + ')'
            km = 'milepost nn' + ext

        km = km.replace(' depth', '; depth')
        km = km.replace(' iteration', '; iteration')

        # Human-readable feature-set description.
        kf = k[j + 1:]
        if norm:
            kf = kf[11:] + '\\newline' + '(normalized)'
        kf = kf.replace('_', ' .. ')

        a4 = "%.2f" % d['GCC 4.9.2'][k]
        a7 = "%.2f" % d['GCC 7.1.0'][k]

        # Full table (for interactive report)
        line = [km, kf, a4, a7]
        table.append(line)

        # Shorter version for the paper - drop most depth/iteration rows.
        if not any(x in km for x in _SHORT_TABLE_SKIP):
            table_short.append(line)

        # Only short MILEPOST
        if km == 'milepost nn' and kf == 'ft1 .. ft56':
            table_milepost.append(line)

    # Table header/style shared by all three output files.
    dd = {
        "table_style":
        "border=\"1\"",
        "table_header": [{
            "name": "Model",
            "html_before": "<b>",
            "html_after": "</b>",
            "tex": "l",
            "tex_before": "\\textbf{",
            "tex_after": "}"
        }, {
            "name": "Features",
            "html_change_space": "yes",
            "tex": "p{1.2in}"
        }, {
            "name": "Accuracy (GCC 4.9.2)",
            "html_change_space": "yes",
            "tex": "p{0.9in}"
        }, {
            "name": "Accuracy (GCC 7.1.0)",
            "html_change_space": "yes",
            "tex": "p{0.9in}"
        }]
    }

    # Save full table file (for all models)
    dd['table'] = table

    r = ck.save_json_to_file({
        'json_file': 'save_all_model_data_tmp_table_full.json',
        'dict': dd,
        'sort_keys': 'yes'
    })
    if r['return'] > 0: return r

    # Save short table file (for the paper)
    dd['table'] = table_short

    r = ck.save_json_to_file({
        'json_file': 'save_all_model_data_tmp_table_short.json',
        'dict': dd,
        'sort_keys': 'yes'
    })
    if r['return'] > 0: return r

    # Save MILEPOST-only table file
    dd['table'] = table_milepost

    r = ck.save_json_to_file({
        'json_file': 'save_all_model_data_tmp_table_milepost.json',
        'dict': dd,
        'sort_keys': 'yes'
    })
    if r['return'] > 0: return r

    return {'return': 0}
Esempio n. 12
0
def main(i):
    """Greedily prune MILEPOST features for the nearest-neighbour model.

    For each compiler in the global ``gcc`` list (defined elsewhere in
    this file), loads the prepared training data, scales each feature
    by its maximum, measures reference accuracy with all features
    enabled via the global ``model`` function, then switches each
    feature off in turn and keeps it off whenever accuracy does not
    drop.  Logs, the final mask, influential features and the final
    accuracy are written to files and to the common results file.

    Input:  i - CK action dict (not read by this function).
    Output: {'return': 0} on success, or the failing sub-call's result
            dict on error (some intermediate failures abort via ck.err).
    """

    # Load common table file (for all models); a missing file simply
    # yields an empty dict.
    ddd = {}
    r = ck.load_json_file({'json_file': 'save_all_model_data_tmp.json'})
    if r['return'] == 0:
        ddd = r['dict']

    # Searching for features
    for g in gcc:
        ck.out(
            '********************************************************************************'
        )
        ck.out('Modeling optimizations for ' + g)
        ck.out('')

        if g not in ddd: ddd[g] = {}

        gx = g.replace(' ', '_')

        r = ck.load_json_file(
            {'json_file': 'prepare_train_data_tmp.' + gx + '.json'})
        if r['return'] > 0: ck.err(r)

        d = r['dict']

        ftable = d['ftable']
        ctable = d['ctable']

        # Normalize (all features 0..1)
        # NOTE(review): only 'max' is used for scaling below; 'min' is
        # collected but never read.
        # NOTE(review): range(0, 121) covers feature indices 0..120,
        # while the comment further down mentions features up to 121 -
        # confirm the intended feature count.
        ftable_range = {}
        for f in ftable:
            for k in range(0, 121):
                v = f[k]
                if k not in ftable_range:
                    ftable_range[k] = {'min': None, 'max': None}
                if ftable_range[k]['min'] == None or v < ftable_range[k]['min']:
                    ftable_range[k]['min'] = v
                if ftable_range[k]['max'] == None or v > ftable_range[k]['max']:
                    ftable_range[k]['max'] = v

        # Scale each feature by its column maximum (0 if max is 0).
        ftable_normalized = []
        for f in ftable:
            x = []
            for k in range(0, 121):
                v = 0
                if ftable_range[k]['max'] != 0:
                    v = f[k] / ftable_range[k]['max']
                x.append(v)
            ftable_normalized.append(x)

        # Start with every feature enabled.
        features_mask = []
        for f in range(0, 121):
            features_mask.append(1)

        # Reference model with the full feature set.
        r = model({
            'ftable': ftable_normalized,
            'features_mask': features_mask,
            'ctable': ctable
        })
        if r['return'] > 0: return r

        r1 = ck.save_json_to_file({
            'json_file':
            'process_model_using_nearest_neighbour_reduce_features/prepare_reactions_model_train_ref_result.'
            + gx + '.json',
            'dict':
            r
        })
        if r1['return'] > 0: ck.err(r1)

        ref_acc = r['accuracy']  # Reference accuracy

        x = 'Reference accuracy: ' + str(ref_acc)
        s = x + '\n\n'

        ck.out('---------------------')
        ck.out(x)
        ck.out('')

        # Calculating Euclidean distance as in our MILEPOST GCC paper: https://hal.inria.fr/hal-00685276
        # MILPOST features: https://github.com/ctuning/ck-autotuning/blob/master/module/program.static.features/.cm/meta.json
        # 0..55 - original MILEPOST features
        # 56..64 - added by Jeremy Singer
        # 65..121 - 0..55/ft24 (normalized by total number of instructions)

        # Greedy backward elimination: try each feature off; restore it
        # only if accuracy dropped.
        for k in range(0, 121):
            features_mask[k] = 0

            r = model({
                'ftable': ftable_normalized,
                'features_mask': features_mask,
                'ctable': ctable,
                'skip_out': 'yes'
            })
            if r['return'] > 0: return r

            acc = r['accuracy']

            keep = False
            sx = ''
            if acc < ref_acc:
                keep = True
                sx = 'kept'
            elif acc == ref_acc:
                sx = 'removed'
            elif acc > ref_acc:
                # Removal improved accuracy - raise the bar.
                ref_acc = acc
                sx = 'removed (accuracy even improved)'

            if keep:
                features_mask[k] = 1

            x = 'ft' + str(k + 1) + ') ' + str(
                acc) + ' ' + sx + ' (ref acc=' + str(ref_acc) + ')'
            ck.out(x)
            s += x + '\n'

        # Final accuracy with the reduced mask.
        r = model({
            'ftable': ftable_normalized,
            'features_mask': features_mask,
            'ctable': ctable,
            'skip_out': 'yes'
        })
        if r['return'] > 0: return r

        acc = r['accuracy']

        r1 = ck.save_json_to_file({
            'json_file':
            'process_model_using_nearest_neighbour_reduce_features/prepare_reactions_model_train_reduced_result.'
            + gx + '.json',
            'dict':
            r
        })
        if r1['return'] > 0: ck.err(r1)

        # Final result
        ck.out('')
        ck.out('Final features mask:')
        ck.out('')

        s += '\nFinal features mask:\n\n'

        for f in range(0, len(features_mask)):
            x = '  ft' + str(f + 1) + ') ' + str(features_mask[f])
            ck.out(x)
            s += x + '\n'

        # NOTE(review): the mask is dumped a second time below (the
        # second pass also collects the comma-separated s1 list);
        # looks accidental but is preserved as-is.
        s += '\nFinal features mask:\n\n'
        s1 = ''

        for f in range(0, len(features_mask)):
            x = '  ft' + str(f + 1) + ') ' + str(features_mask[f])
            ck.out(x)
            s += x + '\n'

            if features_mask[f] == 1:
                if s1 != '': s1 += ','
                s1 += 'ft' + str(f + 1)

        s += '\nFinal accuracy: ' + str(acc) + '\n'

        r = ck.save_text_file({
            'text_file':
            'process_model_using_nearest_neighbour_reduce_features/log.' + gx +
            '.txt',
            'string':
            s
        })
        if r['return'] > 0: return r

        r = ck.save_text_file({
            'text_file':
            'process_model_using_nearest_neighbour_reduce_features/influential_features.'
            + gx + '.txt',
            'string':
            s1
        })
        if r['return'] > 0: return r

        ddd[g]['milepost_reduce_complexity2_normalized_ft1_ft121'] = acc

    # Save common data
    r = ck.save_json_to_file({
        'json_file': 'save_all_model_data_tmp.json',
        'dict': ddd
    })
    if r['return'] > 0: return r

    return {'return': 0}
Esempio n. 13
0
    # Record heat map
    dd = {'table': {"0": heatmap}}

    xx = []
    iq = 0
    for k in uopts[comp]:
        xx.append(iq)
        iq += 1

    dd['axis_y_labels'] = xx

    r = ck.save_json_to_file({
        'json_file':
        'init_reactions_tmp_heatmap_' + comp.replace(' ', '_') + '.json',
        'dict':
        dd,
        'sort_keys':
        'yes'
    })
    if r['return'] > 0: ck.err(r)

    # Sort classes
    for o in classes[comp]:
        b = classes[comp][o]

        b1 = sorted(b, key=lambda k: k.get('improvement', 0.0), reverse=True)

        classes[comp][o] = b1

    # Prepare individual flags to predict (YES/NO)
    for opt in classes[comp]:
Esempio n. 14
0
def main(i):
    """Render per-compiler accuracy-vs-tree-depth graphs.

    For every compiler in the global ``gcc`` list, two series are
    plotted for decision-tree depths 1..29: accuracy measured with
    cross-validation (series "0") and without it (series "1").  The
    plot request is saved as JSON and then rendered to PDF through the
    CK ``graph`` module.

    Input:  i - CK action dict (not read by this function).
    Output: {'return': 0} on success, or the failing sub-call's result
            dict on error.
    """

    # Accuracy numbers accumulated by the earlier modelling scripts.
    r = ck.load_json_file({'json_file': 'save_all_model_data_tmp.json'})
    if r['return'] > 0:
        return r
    all_models = r['dict']

    for compiler in gcc:
        ck.out(
            '********************************************************************************'
        )
        ck.out('Preparing accuracy graph for ' + compiler)
        ck.out('')

        tag = compiler.replace(' ', '_')
        results = all_models[compiler]

        # Series "0" = with cross-validation, "1" = without.
        series = {"0": [], "1": []}
        for depth in range(1, 30):
            with_cv = results['decision_trees_with_cross_validation_depth_' +
                              str(depth) + '_ft1_ft65']
            no_cv = results['decision_trees_without_cross_validation_depth_' +
                            str(depth) + '_ft1_ft65']
            series["0"].append([depth, with_cv])
            series["1"].append([depth, no_cv])

        # Request for the CK "graph" module (matplotlib 2D scatter to PDF).
        plot_request = {
            "action": "plot",
            "module_uoa": "graph",
            "table": series,
            "add_x_loop": "no",
            "ignore_point_if_none": "yes",
            "plot_type": "mpl_2d_scatter",
            "display_y_error_bar": "no",
            "title": "Powered by Collective Knowledge",
            "axis_x_desc": "Decision tree depth",
            "axis_y_desc": "Model accuracy for " + compiler + " (%)",
            "plot_grid": "yes",
            "mpl_image_size_x": "12",
            "mpl_image_size_y": "6",
            "mpl_image_dpi": "100",
            "font_size": 22,
            "out_to_file":
            'process_model_using_decision_trees_accuracy_graph_output.' +
            tag + '.pdf',
            "point_style": {
                "0": {
                    "marker": "o"
                },
                "1": {
                    "marker": "x"
                }
            }
        }

        # Keep the graph input on disk (continuously, not to lose data).
        r = ck.save_json_to_file({
            'json_file':
            'process_model_using_decision_trees_accuracy_graph_input.' +
            tag + '.json',
            'dict':
            plot_request
        })
        if r['return'] > 0:
            return r

        # Plot graph (save to pdf).
        r = ck.access(plot_request)
        if r['return'] > 0:
            return r

    return {'return': 0}
Esempio n. 15
0
def update(i):
    """Bootstrap (or refresh) the cBench client components.

    Downloads the bootstrap component lists from the server (or, when
    the CB_SAVE_BOOTSTRAP_FILES env var is set, downloads the
    individual components and records the list to that file), then
    marks the client configuration as bootstrapped.

    Input:  {
              (force) [bool] - if True, force update
              (cfg)   [dict] - current client configuration; updated
                               and stored back on success
            }

    Output: {
              return  [int]    - return code = 0 if success or >0 if error
              (error) [str]    - error string if return>0 
            }
    """

    import os

    # Flag read elsewhere in this module to suppress nested bootstraps.
    global bootstrapping
    bootstrapping = True

    force = i.get('force')
    cfg = i.get('cfg', {})

    from . import obj

    title = 'Bootstrapping'
    if cfg.get('bootstrapped', '') == 'yes': title = 'Updating'

    ck.out(title + ' cBench to support portable actions and workflows:')
    ck.out('')

    # Check release notes (best effort - result code is not checked).
    server_url = cfg.get('server_url', '')
    if server_url == '': server_url = 'https://cKnowledge.io/api/v1/?'

    from . import comm_min
    r = comm_min.send({
        'url': server_url,
        'action': 'event',
        'dict': {
            'type': 'get-cbench-bootstrap-notes'
        }
    })

    notes = r.get('notes', '')
    if notes != '':
        ck.out('***********************************************')
        ck.out(notes)
        ck.out('***********************************************')

    lst_all = []

    sbf = os.environ.get('CB_SAVE_BOOTSTRAP_FILES', '')

    if sbf == '':
        # Normal path: fetch the pre-built bootstrap component lists.
        fboot = 'cb-bootstrap-20200529'
        files = [fboot + '.json']

        # Windows needs an extra, platform-specific component list.
        if os.name == 'nt':
            files.append(fboot + '-win.json')

        for fn in files:
            # Download each list into a temporary file, parse, delete.
            r = ck.gen_tmp_file({'prefix': 'cb-bootstrap-', 'suffix': '.json'})
            if r['return'] > 0: return r
            ftmp = r['file_name']

            burl = CR_DEFAULT_SERVER + '/static/bootstrap/' + fn

            ck.out('Downloading ' + burl)

            from . import comm

            rx = comm.download_file({'url': burl, 'file': ftmp})
            if rx['return'] > 0: return rx

            rx = ck.load_json_file({'json_file': ftmp})
            if rx['return'] > 0: return rx

            lst_all += rx['dict']

            os.remove(ftmp)

        # Return code 8 means "already exists" and is not an error here.
        r = obj.download({'components': lst_all, 'force': force})
        if r['return'] > 0 and r['return'] != 8: return r

    else:
        # Alternative path: download individual CK components and save
        # the resulting list to the file named by the env var.
        for x in CR_SOLUTION_CK_COMPONENTS:
            r = obj.download({
                'cid': x['cid'],
                'version': x.get('version', ''),
                'force': force
            })
            if r['return'] > 0:
                if r['return'] != 8: return r
                else: ck.out('    Skipped - already exists!')
            else:
                lst_all += r['components']

        rx = ck.save_json_to_file({
            'json_file': sbf,
            'dict': lst_all,
            'sort_keys': 'yes'
        })
        if rx['return'] > 0: return rx

    ck.out('')

    # Update cfg: remember that bootstrapping completed.
    cfg['bootstrapped'] = 'yes'

    ii = {
        'action': 'update',
        'repo_uoa': CK_CFG_REPO_UOA,
        'module_uoa': CK_CFG_MODULE_UID,
        'data_uoa': CK_CFG_DATA_UOA,
        'dict': cfg,
        'sort_keys': 'yes'
    }

    r = ck.access(ii)

    ck.out(title + ' finished!')
    ck.out('')

    return r