Example #1
0
def start(i):
    """
    Start the local CodeReef client web service.

    Input:  {
              (host)   [str] - host to bind (default 'localhost')
              (port)   [str] - port to bind (default '4444')
              (tunnel) [str] - if non-empty, tunnel all web requests
                               to this URL (stored in global tunnel_url)
            }

    Output: 0 on normal termination, 1 on interrupt or OS error
    """
    global tunnel_url

    # Check tunnel URL
    tunnel = i.get('tunnel', '')
    if tunnel is not None and tunnel != '':
        tunnel_url = tunnel

        ck.out('All CodeReef web requests will be tunneled to ' + tunnel_url)

    host = i.get('host')
    if host == '' or host is None: host = 'localhost'

    port = i.get('port')
    if port == '' or port is None: port = '4444'

    # Assemble URL.
    url = host + ':' + port

    ck.out('Starting web service for CodeReef client on ' + url + ' ...')
    ck.out('')

    sys.stdout.flush()

    # We do not need secure HTTPS connection here since the user
    # runs webbrowser on her/his machine and communicates with
    # CodeReef service on the same machine via 127.0.0.1
    # while avoiding Internet!

    # Still it's possible to start this service with SSL
    # but it will require a proper SSL certificate
    # otherwise the connection will not be validated
    # if it's purely local ...

    # Get certificates for SSL
    # ssl_certificate_file = {path to client.pem}

    # Generate it using "openssl req -new -x509 -keyout server.pem -out server.pem -days 365 -nodes"

    server = None
    try:
        # BUGFIX: allow_reuse_address must be set BEFORE the server is
        # constructed - TCPServer.__init__ binds the socket, so setting
        # the flag on the instance afterwards (as the old code did) had
        # no effect. Set it on the class so the constructor sees it.
        ThreadedHTTPServer.allow_reuse_address = True

        server = ThreadedHTTPServer((host, int(port)), server_handler)

        #       Needed for SSL connection (non-SSL connection will not work then)
        #       server.socket = ssl.wrap_socket (server.socket, server_side=True,
        #                                       certfile=ssl_certificate_file)

        server.serve_forever()
    except KeyboardInterrupt:
        ck.out('Keyboard interrupt, terminating CodeReef web service ...')
        # Guard: the interrupt may arrive before the server was created
        if server is not None:
            server.socket.close()
        return 1
    except OSError as e:
        ck.out('Internal CodeReef web service error (' + format(e) + ')')
        return 1

    return 0
Example #2
0
def process_web_request_post_via_tunnel(i):
    """
    Relay an incoming web request to the configured tunnel URL.

    Re-sends the request received by the local HTTP handler to
    'tunnel_url' + original path using the 'requests' library, then
    writes the remote status code, headers and body back to the client.

    Input:  {
              http         - HTTP request handler object for the incoming
                             request (BaseHTTPRequestHandler-style API)
              (post) [str] - 'yes' if this is a POST request
            }

    Output: None (the response is written directly to 'http')
    """

    http = i['http']
    post = (i.get('post', '') == 'yes')

    # Target = tunnel base URL + the path of the incoming request
    target_url = tunnel_url + http.path

    ck.out(
        '* Tunneling **************************************************************'
    )

    try:

        # For POST, read the request body; length comes from the first
        # Content-Length header value
        if post:
            post_body = http.rfile.read(
                int(http.headers.get_all('content-length', 0)[0]))

        # Copy incoming headers into a plain dict for 'requests'
        parsed_headers = {}
        for h in http.headers:
            parsed_headers[h] = http.headers[h]

        # NOTE(review): verify=False disables TLS certificate checks on
        # the tunneled connection - confirm this is intentional
        if post:
            receive = requests.post(
                target_url,
                headers=parsed_headers,
                verify=False,
                data=post_body,
            )
        else:
            receive = requests.get(target_url,
                                   headers=parsed_headers,
                                   verify=False)

        # Relay the remote status code
        http.send_response(receive.status_code)

        # Relay headers, skipping *-encoding and content-length:
        # 'requests' already decoded the body, so the length is
        # recomputed from the decoded content below
        received_headers = receive.headers
        for h in received_headers:
            h1 = h.lower()
            if '-encoding' not in h1 and h1 != 'content-length':
                http.send_header(h, received_headers[h])

        http.send_header('Content-Length', len(receive.content))
        http.end_headers()

        # Relay the (decoded) response body
        http.wfile.write(receive.content)

    except Exception as e:
        print('Error: ' + format(e))
        http.send_error(500, 'problem accessing remote host')

    return
Example #3
0
def login(i):
    """
    Test the CodeReef login / connection.

    Input:  {
            }

    Output: {
              return  [int]    - return code = 0 if success or >0 if error
              (error) [str]    - error string if return>0 
            }
    """

    # Try to load the current client configuration entry
    r = ck.access({
        'action': 'load',
        'repo_uoa': config.CK_CFG_REPO_UOA,
        'module_uoa': config.CK_CFG_MODULE_UID,
        'data_uoa': config.CK_CFG_DATA_UOA
    })

    rc = r['return']
    if rc > 0 and rc != 16:
        ck.err(r)

    # Return code 16 means "entry not found" - run first-time setup
    if rc == 16:
        setup(i)

    # Reload the (possibly freshly created) configuration
    r = config.load({})
    if r['return'] > 0: return r
    cfg = r.get('dict', {})

    # Send a test request to verify the connection
    r = comm.send({'config': cfg, 'action': 'login'})
    if r['return'] > 0:
        ck.err(r)

    # Success
    ck.out('CodeReef login tested successfully!')

    return 0
Example #4
0
def open_page(i):
    """
    Open the CodeReef portal page for a CK entry in a web browser.

    Input:  {
              cid [str] - CK CID of format (repo UOA:)module UOA:data UOA
            }

    Output: {
              return  [int]    - return code = 0 if success or >0 if error
              (error) [str]    - error string if return>0 
            }
    """

    import webbrowser

    # Get current configuration
    r = config.load({})
    if r['return'] > 0: return r
    cfg = r['dict']

    # Derive the portal base URL from the configured API server URL
    base = cfg.get('server_url', '')
    if base != '':
        pos = base.find('api/')
        base = base[:pos] if pos > 0 else ''

    if base == '':
        base = 'https://codereef.ai/portal/'

    # CID ###########################################################
    cid = i.get('cid')
    if cid is None or cid == '':
        return {'return': 1, 'error': 'CK entry (CID) is not defined'}

    # Parse CID
    r = ck.parse_cid({'cid': cid})
    if r['return'] > 0: return r

    # Form the final component URL
    page_url = base + 'c/' + r.get('module_uoa', '') + '/' + r.get('data_uoa', '')

    ck.out('Opening CodeReef page ' + page_url + ' ...')

    webbrowser.open(page_url)

    return {'return': 0}
Example #5
0
def versions(i):
    """
    List available versions of a CK component on the CodeReef server.

    Input:  {
              cid [str] - CK CID of format (repo UOA:)module UOA:data UOA
            }

    Output: {
              return  [int]    - return code = 0 if success or >0 if error
              (error) [str]    - error string if return>0 
            }
    """

    # Get current configuration
    r = config.load({})
    if r['return'] > 0: return r
    cfg = r['dict']

    # CID ###########################################################
    cid = i.get('cid')
    if cid is None or cid == '':
        return {'return': 1, 'error': 'CK entry (CID) is not defined'}

    # Parse CID
    r = ck.parse_cid({'cid': cid})
    if r['return'] > 0: return r

    duoa = r.get('data_uoa', '')
    muoa = r.get('module_uoa', '')

    # Call CodeReef API to fetch the version list
    r = comm.send({
        'config': cfg,
        'action': 'list_versions',
        'dict': {
            'module_uoa': muoa,
            'data_uoa': duoa
        }
    })
    if r['return'] > 0: return r

    # Print one "version (date time)" line per record
    for entry in r.get('versions', []):
        stamp = entry.get('iso_datetime', '').replace('T', ' ')
        ck.out(entry.get('version', '') + ' (' + stamp + ')')

    return r
Example #6
0
def select_program():
    """Find an image-classification-tf* program, asking the user to pick one when several match."""
    r = ck.access({
        'action': 'search',
        'module_uoa': 'program',
        'data_uoa': 'image-classification-tf*'
    })
    if r['return'] > 0:
        return r

    found = r.get('lst', [])
    if not found:
        return {'return': 1, 'error': 'No related programs found'}

    # Single match - no need to ask
    if len(found) == 1:
        return {'return': 0, 'program': found[0]['data_uoa']}

    # Several candidates - let the user choose
    ck.out('')
    ck.out('More than one program is found suitable for this script:')
    ck.out('')
    r = ck.access({
        'action': 'select_uoa',
        'module_uoa': 'choice',
        'choices': found
    })
    if r['return'] > 0:
        return r

    for entry in found:
        if entry['data_uid'] == r['choice']:
            return {'return': 0, 'program': entry['data_uoa']}

    return {'return': 1, 'error': 'No related programs found'}
def select_ImageNet():
    """Find an installed raw ImageNet validation dataset, asking the user to pick one when several match."""
    r = ck.access({
        'action': 'show',
        'module_uoa': 'env',
        'tags': 'dataset,imagenet,raw,val'
    })
    if r['return'] > 0:
        return r

    found = r.get('lst', [])
    if not found:
        return {'return': 1, 'error': 'No installed ImageNet dataset found'}

    # Single match - no need to ask
    if len(found) == 1:
        return {'return': 0, 'dataset': found[0]}

    # Several candidates - let the user choose by dataset path
    ck.out('')
    ck.out(
        'More than one ImageNet dataset is found suitable for this script:'
    )
    ck.out('')
    choices = [{
        'data_uid': d['data_uid'],
        'data_uoa': get_ImageNet_path(d)
    } for d in found]
    r = ck.access({
        'action': 'select_uoa',
        'module_uoa': 'choice',
        'choices': choices
    })
    if r['return'] > 0:
        return r

    for d in found:
        if d['data_uid'] == r['choice']:
            return {'return': 0, 'dataset': d}

    return {'return': 1, 'error': 'No installed ImageNet dataset found'}
Example #8
0
def do(i):
    """
    Merge accuracy results into matching performance experiment entries.

    For every local 'experiment' entry matching
    'ck-request-asplos18-mobilenets-tvm-arm-performance-*', finds the
    corresponding '-accuracy-' entry, matches points by their flattened
    feature files (ignoring the batch-count choice), and copies the
    '##characteristics#run#accuracy_top*' keys into the performance
    point file.

    Input:  i [dict] - not used here

    Output: {
              return  [int] - 0 on success, >0 on error
              (error) [str] - error string if return>0
            }
    """

    # List performance entries
    r=ck.access({'action':'search',
                 'module_uoa':'experiment',
                 'data_uoa':'ck-request-asplos18-mobilenets-tvm-arm-performance-*',
                 'repo_uoa':'local'})
    if r['return']>0: return r
    lst=r['lst']

    for q in lst:
        duid=q['data_uid']
        duoa=q['data_uoa']
        path=q['path']

        ck.out(duoa)

        # Search matching accuracy entry
        aduoa=duoa.replace('-performance-','-accuracy-')

        r=ck.access({'action':'find',
                     'module_uoa':'experiment',
                     'data_uoa':aduoa,
                     'repo_uoa':'local'})
        if r['return']>0: return r
        apath=r['path']             

        # Checking points to aggregate
        dperf=os.listdir(path)
        for f in dperf:
            if f.endswith('.flat.json'):
               ck.out(' * '+f)

               # Load performance file 
               p1=os.path.join(path, f)

               r=ck.load_json_file({'json_file':p1})
               if r['return']>0: return r
               d=r['dict']

               # f[:-10] strips '.flat.json' to locate the sibling features file
               p2=os.path.join(path, f[:-10]+'.features_flat.json') # Features

               r=ck.load_json_file({'json_file':p2})
               if r['return']>0: return r
               df=r['dict']

               # Remove batch (performance and accuracy runs may use
               # different batch counts, so ignore it when matching)
               del(df['##choices#env#CK_BATCH_COUNT'])
               
               # Find matching features file to merge
               dacc=os.listdir(apath)
               matched=False
               for af in dacc:
                   if af.endswith('.features_flat.json'):
                      r=ck.load_json_file({'json_file':os.path.join(apath,af)})
                      if r['return']>0: return r
                      adf=r['dict']

                      # Remove batch
                      del(adf['##choices#env#CK_BATCH_COUNT'])

                      # Compare dicts
                      r=ck.compare_dicts({'dict1':df, 'dict2':adf})
                      if r['return']>0: return r
                      if r['equal']=='yes':
                         matched=True

                         # Load accuracy data to merge
                         # af[:-19] strips '.features_flat.json'
                         px=os.path.join(apath,af[:-19]+'.flat.json')
                         r=ck.load_json_file({'json_file':px})
                         if r['return']>0: return r
                         dd=r['dict']

                         # Merge keys
                         for k in dd:
                             if k.startswith('##characteristics#run#accuracy_top'):
                                d[k]=dd[k]

                         break
               
               if not matched:
                  return {'return':1, 'error':'no match - strange'}

               # Save updated dict
               r=ck.save_json_to_file({'json_file':p1, 'dict':d, 'sort_keys':'yes'})
               if r['return']>0: return r

    return {'return':0}
Example #9
0
def do(i, arg):
    """
    Autotune CLBlast matrix sizes via the CK 'program' pipeline.

    Optionally loads (m, n, k) triples from a JSON file given by
    'arg.fp', detects the platform, resolves the lib-clblast dependency,
    prepares the 'clblast-tune' pipeline and runs an 'autotune'
    exploration over the matrix-size choices, recording results into
    the local repository.

    NOTE(review): relies on module-level globals defined elsewhere in
    this file (size_m/size_n/size_k, kernel, model, VERBOSE, DEBUG,
    DEBUG_STR, title, clock_resolution, precision, run) - confirm they
    are initialized before calling.

    Input:  i   [dict] - not used here
            arg        - parsed command-line arguments (fp, tos, did)

    Output: standard CK return dict {return [int], (error) [str]}
    """
    fp = arg.fp
    if fp is not None:
        fromfile = os.path.isfile(fp)
        if (fromfile):
            print("Loading triples %s" % (fp))
            triples = json.loads(open(fp).read())
            # Replace the global size lists in place with the file contents
            del size_m[:]
            del size_n[:]
            del size_k[:]
            # BUGFIX: loop variable renamed from 'i' to 't' - it was
            # shadowing (and clobbering) the function parameter 'i'
            for t in triples:
                size_m.append(str(t.get('bSizeM')))
                size_n.append(str(t.get('bSizeN')))
                size_k.append(str(t.get('bSizeK')))

    if VERBOSE or DEBUG:
        print('[Experiment] %s' % title)
        print('[Preparing pipeline] Clock resolution: %d' % clock_resolution)
        print('[Preparing pipeline] Matrix sizes: m=%s, k=%s, n=%s: ' %
              (size_m, size_k, size_n))
        print('[Preparing pipeline] Precision: %d' % precision)
        print('[Preparing pipeline] Run for configuration: %d' % run)
        print('[Preparing pipeline] More parms... ')
    ntrip = len(size_m)
    print('[Experiment] Number of triple(s) %s' % (ntrip))

    # Tag string: comma-separated m*n*k products, one per triple
    size_tag = ''
    for tp in range(0, ntrip):
        if (tp == ntrip - 1):
            size_tag += str(
                (int(size_m[tp]) * int(size_n[tp]) * int(size_k[tp])))
        else:
            size_tag += str(
                (int(size_m[tp]) * int(size_n[tp]) * int(size_k[tp]))) + ','

    # Detect basic platform info.
    ii = {'action': 'detect', 'module_uoa': 'platform', 'con': 'con'}
    r = ck.access(ii)
    if DEBUG: print("%s %s" % (DEBUG_STR, r))
    if r['return'] > 0: return r

    # Host and target OS params.
    hos = r['host_os_uoa']
    hosd = r['host_os_dict']
    tos = r['os_uoa']
    tosd = r['os_dict']
    tdid = r['device_id']

    if DEBUG: print("%s %s %s" % (DEBUG_STR, hos, hosd))
    if DEBUG: print("%s %s %s %s" % (DEBUG_STR, tos, tosd, tdid))

    # Load CLBLAST program meta and desc to check deps.
    ii = {
        'action': 'load',
        'module_uoa': 'program',
        'data_uoa': 'clblast-tune'
    }
    rx = ck.access(ii)
    if DEBUG: print("%s %s " % (DEBUG_STR, rx))
    if rx['return'] > 0: return rx
    meta = rx['dict']

    # Get compile-time and run-time deps.
    cdeps = meta.get('compile_deps', {})
    rdeps = meta.get('run_deps', {})

    # Merge rdeps with cdeps for setting up the pipeline (which uses
    # common deps), but tag them as "for_run_time".
    for k in rdeps:
        cdeps[k] = rdeps[k]
        cdeps[k]['for_run_time'] = 'yes'

    # CLblast libs.
    depl = copy.deepcopy(cdeps['lib-clblast'])

    # Command-line target overrides (ON LOCAL MACHINE)
    if ((arg.tos is not None) and (arg.did is not None)):
        tos = arg.tos
        tdid = arg.did

    ii = {
        'action': 'resolve',
        'module_uoa': 'env',
        'host_os': hos,
        'target_os': tos,
        'device_id': tdid,
        'out': 'con',
        'deps': {
            'lib-clblast': copy.deepcopy(depl)
        }
    }
    r = ck.access(ii)
    if r['return'] > 0: return r
    udepl = r['deps']['lib-clblast'].get('choices', [])
    if len(udepl) == 0:
        return {'return': 1, 'error': 'no installed CLBlast libs'}

    # Prepare pipeline.
    ii = {
        'action': 'pipeline',
        'module_uoa': 'program',
        'data_uoa': 'clblast-tune',
        'prepare': 'yes',
        'dependencies': cdeps,
        'no_compiler_description': 'yes',
        'cmd_key': kernel[0],
        "target_os": tos,
        "device_id": tdid,
        "out": 'con',
        "no_state_check": "yes",
        'flags': '-O3',
    }
    r = ck.access(ii)
    if r['return'] > 0: return r
    fail = r.get('fail', '')
    if fail == 'yes':
        return {
            'return': 10,
            'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'
        }

    ready = r.get('ready', '')
    if ready != 'yes': return {'return': 11, 'error': 'pipeline not ready'}

    state = r['state']
    tmp_dir = state['tmp_dir']
    xcdeps = r.get('dependencies', {})

    # Clean pipeline before reusing it as an autotuning template.
    if 'ready' in r: del (r['ready'])
    if 'fail' in r: del (r['fail'])
    if 'return' in r: del (r['return'])
    pipeline = copy.deepcopy(r)

    record_repo = 'local'
    record_uoa = 'explore-matrix-size-' + kernel[0]
    ck.out(
        '---------------------------------------------------------------------------------------'
    )
    ck.out('Experiment - %s:%s' % (record_repo, record_uoa))

    cpipeline = copy.deepcopy(pipeline)

    # Explore m/n/k choices in lockstep ('loop-with-next'), 3 repetitions
    # per point, recording every (including failed) point locally.
    ii = {
        'action': 'autotune',
        'module_uoa': 'pipeline',
        'data_uoa': 'program',
        'choices_order': [['##env#CK_CLBLAST_MSIZE'],
                          [
                              '##env#CK_CLBLAST_NSIZE',
                          ], ['##env#CK_CLBLAST_KSIZE']],
        'choices_selection': [{
            "type": "loop-with-next",
            "choice": size_m,
            "default": "256"
        }, {
            "type": "loop-with-next",
            "choice": size_n,
            "default": "256"
        }, {
            "type": "loop-with-next",
            "choice": size_k,
            "default": "256"
        }],
        'features_keys_to_process': ['##choices#*'],
        'iterations': -1,
        'repetitions': 3,
        'record': 'yes',
        'record_failed': 'yes',
        'record_params': {
            'search_point_by_features': 'yes'
        },
        'record_repo': record_repo,
        'record_uoa': record_uoa,
        'tags': ['explore-clblast-matrix-size', kernel[0], model, size_tag],
        'pipeline': cpipeline,
        'out': 'con'
    }
    r = ck.access(ii)
    if DEBUG > 0: print("%s %s" % (DEBUG_STR, r))
    if r['return'] > 0: return r
    fail = r.get('fail', '')
    if fail == 'yes':
        return {
            'return': 10,
            'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'
        }

    return {'return': 0}
Example #10
0
def download(i):
    """
    Input:  {
              cid [str] - CK CID of format (repo UOA:)module UOA:data UOA
                          (can use wildcards)
              (version) [str] - assign version
              (force) [bool] - if True, force download even if components already exists

              (tags) [str] - can search by tags (usually soft/package)

              (all) [bool] - if True, download dependencies (without force!)
            }

    Output: {
              return  [int]    - return code = 0 if success or >0 if error
              (error) [str]    - error string if return>0 
            }

    Downloads component pack(s) from the CodeReef server, verifies each
    pack's MD5, unpacks it into a local CK entry, and (when 'all' is
    set) recursively downloads dependencies. Recursion is also used to
    fetch a missing module before its components.
    """

    # CID ###########################################################
    cid = i.get('cid')

    if cid == '' or cid == None:
        return {'return': 1, 'error': 'CK entry (CID) is not defined'}

    version = i.get('version')
    if version == None: version = ''

    force = i.get('force')
    al = i.get('all')

    # Set internally by recursive calls to avoid re-checking modules
    skip_module_check = i.get('skip_module_check', False)

    # Parse CID
    r = ck.parse_cid({'cid': cid})
    if r['return'] > 0: return r

    repo_uoa = r.get('repo_uoa', '')
    data_uoa = r.get('data_uoa', '')
    module_uoa = r.get('module_uoa', '')

    tags = i.get('tags', '')

    # Indentation prefix for nested (dependency) log output
    spaces = i.get('spaces', '')

    # Get current configuration
    r = config.load({})
    if r['return'] > 0: return r
    cfg = r['dict']

    # Sending request to download
    r = comm.send({
        'config': cfg,
        'action': 'download',
        'dict': {
            'module_uoa': module_uoa,
            'data_uoa': data_uoa,
            'version': version,
            'tags': tags
        }
    })
    if r['return'] > 0:
        return r

    lst = r['components']

    for l in lst:

        # Base64-encoded pack and its expected MD5 from the server
        fpack64 = l['file_base64']
        fmd5 = l['file_md5']

        muoa = l['module_uoa']
        muid = l['module_uid']

        duoa = l['data_uoa']
        duid = l['data_uid']

        dependencies = l.get('dependencies', [])

        xcid = muoa + ':' + duoa

        # Check if module exists
        if not skip_module_check:
            r = ck.access({
                'action': 'find',
                'module_uoa': 'module',
                'data_uoa': muoa,
                'common_func': 'yes'
            })
            if r['return'] > 0:
                if r['return'] != 16: return r

                # Module not found (16) - download it first, recursively
                x = 'module:' + muoa
                if repo_uoa != '': x = repo_uoa + ':' + x

                # FGG: we should not add "version" for dependencies or related components since it's not the same!
                #              r=download({'cid':x, 'force':force, 'version':version, 'skip_module_check':True, 'all':al})
                r = download({
                    'cid': x,
                    'force': force,
                    'skip_module_check': True,
                    'all': al
                })
                if r['return'] > 0: return r

        # Check if entry already exists
        path = ''
        r = ck.access({
            'action': 'find',
            'common_func': 'yes',
            'repo_uoa': repo_uoa,
            'module_uoa': muid,
            'data_uoa': duoa
        })
        if r['return'] == 0:
            # Entry exists: only proceed (overwriting) when 'force' is set
            if not force:
                return {
                    'return': 8,
                    'error': 'local entry for "' + xcid + '" already exists'
                }
        else:
            if r['return'] != 16: return r

            # Entry not found (16) - create it with the server-side UID
            r = ck.access({
                'action': 'add',
                'common_func': 'yes',
                'repo_uoa': repo_uoa,
                'module_uoa': muid,
                'data_uoa': duoa,
                'data_uid': duid,
                'ignore_update': 'yes'
            })
            if r['return'] > 0: return r

        path = r['path']

        # Prepare pack
        ppz = os.path.join(path, config.PACK_FILE)

        if os.path.isfile(ppz):
            if not force:
                return {
                    'return': 1,
                    'error': 'pack file already exists (' + ppz + ')'
                }
            os.remove(ppz)

        # Save pack to file
        rx = ck.convert_upload_string_to_file({
            'file_content_base64': fpack64,
            'filename': ppz
        })
        if rx['return'] > 0: return rx

        # MD5 of the pack
        rx = ck.load_text_file({'text_file': ppz, 'keep_as_bin': 'yes'})
        if rx['return'] > 0: return rx
        bpack = rx['bin']

        import hashlib
        md5 = hashlib.md5(bpack).hexdigest()

        if md5 != fmd5:
            return {
                'return':
                1,
                'error':
                'MD5 of the newly created pack (' + md5 +
                ') did not match the one from CodeReef server (' + fmd5 + ')'
            }

        # Unpack to src subdirectory
        import zipfile

        f = open(ppz, 'rb')
        z = zipfile.ZipFile(f)
        # Skip '.', '..' and absolute paths to avoid writing outside 'path'
        for d in z.namelist():
            if d != '.' and d != '..' and not d.startswith(
                    '/') and not d.startswith('\\'):
                pp = os.path.join(path, d)
                if d.endswith('/'):
                    # create directory
                    if not os.path.exists(pp): os.makedirs(pp)
                else:
                    ppd = os.path.dirname(pp)
                    if not os.path.exists(ppd): os.makedirs(ppd)

                    # extract file
                    fo = open(pp, 'wb')
                    fo.write(z.read(d))
                    fo.close()

                    # Make shell scripts executable
                    if pp.endswith('.sh') or pp.endswith('.bash'):
                        import stat
                        st = os.stat(pp)
                        os.chmod(pp, st.st_mode | stat.S_IEXEC)

        f.close()

        # Remove pack file
        os.remove(ppz)

        # Note
        ck.out(spaces + 'Successfully downloaded "' + xcid + '" to ' + path)

        # Check deps
        if al:
            if len(dependencies) > 0:
                ck.out(spaces + '  Checking dependencies ...')

#           import json
#           print (json.dumps(dependencies))
#           input('xyz')

            for dep in dependencies:
                muoa = dep.get('module_uid', '')
                duoa = dep.get('data_uid', '')

                # Tagged deps are resolved as packages by tags, not by UID
                tags = dep.get('tags', [])
                xtags = ''
                if len(tags) > 0:
                    xtags = ','.join(tags)
                    muoa = 'package'
                    duoa = ''

                cid = muoa + ':' + duoa
                rx = download({
                    'cid': cid,
                    'all': al,
                    'tags': xtags,
                    'spaces': spaces + '    '
                })
                # 8 = already exists, 16 = not found: both are tolerated here
                if rx['return'] > 0 and rx['return'] != 8 and rx[
                        'return'] != 16:
                    return rx
                if rx['return'] == 16:
                    # Package not found - retry the same tags as 'soft:'
                    if xtags == '': return rx
                    rx = download({
                        'cid': 'soft:',
                        'all': al,
                        'tags': xtags,
                        'spaces': spaces + '    '
                    })
                    if rx['return'] > 0 and rx['return'] != 8: return rx

    return r
def main(i):
    """
    Train and cross-validate a TF DNN classifier per compiler.

    For every compiler in the module-level 'gcc' list: load the prepared
    feature/class tables, then run 9 iterations with (mostly random) DNN
    topologies. Each iteration builds a model on all data three times and
    tracks min/max accuracy, then performs leave-one-out cross-validation.
    Results are appended to 'save_all_model_data_tmp.json' and to a
    per-compiler text log.

    NOTE(review): relies on module-level 'gcc' and file-level imports of
    ck, copy, random and time.

    Input:  i [dict] - not used here

    Output: {return [int]} - standard CK return dict
    """

    # Load common table file (for all models)
    ddd = {}
    r = ck.load_json_file({'json_file': 'save_all_model_data_tmp.json'})
    if r['return'] == 0:
        ddd = r['dict']

    # Searching for features
    for g in gcc:
        # Fixed seed so topology generation is reproducible per compiler
        random.seed(12345)

        ck.out(
            '********************************************************************************'
        )
        ck.out('Modeling optimizations for ' + g)
        ck.out('')

        if g not in ddd: ddd[g] = {}

        gx = g.replace(' ', '_')

        r = ck.load_json_file(
            {'json_file': 'prepare_train_data_tmp.' + gx + '.json'})
        if r['return'] > 0: ck.err(r)

        d = r['dict']

        ftable = d['ftable']
        ctable = d['ctable']

        s = ''

        for iteration in range(1, 10):
            start_time = time.time()

            # Accumulators for the "all data" accuracy runs
            acc = 0
            obs = 0
            wrong = 0

            acc_min = None
            acc_max = None

            s = '==============================================================\n'
            s += 'Iteration: ' + str(iteration) + '\n\n'

            if iteration == 1:
                # Default params from TF example
                hu = [10, 20, 10]
                ts = 1000
            else:
                # Generate random DNN topology and params
                hu = []
                nhu = random.randint(1, 5)

                for k in range(0, nhu):
                    hu.append(random.randint(10, 30))

                ts = random.randint(1000, 3000)

            s += '  Hidden units:   ' + str(hu) + '\n'
            s += '  Training steps: ' + str(ts) + '\n\n'

            ck.out(s)

            # Trying to build model 3 times (random - sometimes slightly
            # different result)
            for n in range(0, 3):

                # Building the model on all data
                ii = {
                    'action': 'build',
                    'module_uoa': 'model',
                    'ftable': ftable,
                    'ctable': ctable,
                    'keep_temp_files': 'yes',
                    "model_module_uoa": "model.tf",
                    "model_name": "dnn_classifier",
                    "model_file": "process_model_using_dnn_tf/" + gx,
                    "model_params": {
                        "hidden_units": hu,
                        "training_steps": ts
                    },
                    "out": ""
                }

                # Training
                r = ck.access(ii)
                if r['return'] > 0: ck.err(r)

                # Validating on the same data
                ii = copy.deepcopy(ii)

                ii['action'] = 'validate'

                r = ck.access(ii)
                if r['return'] > 0: ck.err(r)

                obs += r['observations']
                wrong += r['mispredictions']

                acc = float(obs - wrong) / float(obs)

                x = '\n  Accuracy on all data (' + str(
                    n + 1) + ' out of 3):   ' + str(acc) + '\n'
                s += x
                ck.out(x)

                if acc_min == None or acc < acc_min:
                    acc_min = acc

                if acc_max == None or acc > acc_max:
                    acc_max = acc

            stop_time = time.time() - start_time

            x = '\n\nIteration: ' + str(
                iteration
            ) + ' ; accuracy (min/max): ' + '%.2f' % acc_min + ' .. ' + '%.2f' % acc_max + '\n'
            # BUGFIX: was 'x = ...', which overwrote the summary line above
            # so it never reached the log or the output; append instead.
            x += '\n  Elapsed time: ' + '%.1f' % stop_time + ' sec.\n'
            s += x
            ck.out(x)

            # Cross-validating (for simplicity 1 run)
            cross_obs = 0
            cross_wrong = 0

            x = '  *************************************************\n'
            x += '  Cross-validating model (leave one out)\n\n'
            s += x
            ck.out(x)

            for bench in range(0, len(ftable)):
                # Leave exactly one benchmark out for testing
                train_ftable = []
                train_ctable = []
                test_ftable = []
                test_ctable = []

                for k in range(0, len(ftable)):
                    if k != bench:
                        train_ftable.append(ftable[k])
                        train_ctable.append(ctable[k])
                    else:
                        test_ftable.append(ftable[k])
                        test_ctable.append(ctable[k])

                # Build the model on the training split
                ii = {
                    'action': 'build',
                    'module_uoa': 'model',
                    'ftable': train_ftable,
                    'ctable': train_ctable,
                    'keep_temp_files': 'no',
                    "model_module_uoa": "model.tf",
                    "model_name": "dnn_classifier",
                    "model_file":
                    "tmp-model-tf-dnn-classifier-" + gx + '_' + str(iteration),
                    "model_params": {
                        "hidden_units": hu,
                        "training_steps": ts
                    },
                    "out": ""
                }

                # Training
                r = ck.access(ii)
                if r['return'] > 0: ck.err(r)

                # Validating on the held-out benchmark
                ii = copy.deepcopy(ii)

                ii['action'] = 'validate'
                ii['ftable'] = test_ftable
                ii['ctable'] = test_ctable

                r = ck.access(ii)
                if r['return'] > 0: ck.err(r)

                cross_obs += r['observations']
                cross_wrong += r['mispredictions']

                cross_acc = float(cross_obs - cross_wrong) / float(cross_obs)

                x = '\n    ' + str(bench) + ' out of ' + str(
                    len(ftable)
                ) + ' ) current cross-validation accuracy: ' + '%.2f' % cross_acc + '\n'
                s += x
                ck.out(x)

            stop_time = time.time() - start_time

            x = '\nIteration: ' + str(
                iteration
            ) + ' ; accuracy (with cross-validation): ' + '%.2f' % cross_acc + '\n'
            # BUGFIX: was 'x = ...', discarding the cross-validation summary;
            # append instead.
            x += '\n  Elapsed time: ' + '%.1f' % stop_time + ' sec.\n'
            s += x

            ck.out(x)

            ddd[g]['dnn_tf_with_cross_validation_iteration_' + str(iteration) +
                   '_ft1_ft65'] = cross_acc
            ddd[g]['dnn_tf_without_cross_validation_iteration_' +
                   str(iteration) + '_ft1_ft65'] = acc_max

        r = ck.save_text_file({
            'text_file':
            'process_model_using_decision_trees/log.' + gx + '.txt',
            'string': s
        })
        if r['return'] > 0: return r

        # Save common data (continuously, not to lose data)
        r = ck.save_json_to_file({
            'json_file': 'save_all_model_data_tmp.json',
            'dict': ddd
        })
        if r['return'] > 0: return r

    return {'return': 0}
                                      key=lambda x: x.get('data_uoa', '')):
                    dduoa = dataset['data_uoa']
                    dduid = dataset['data_uid']

                    dd = dataset['meta']

                    dfiles = dd.get('dataset_files', [''])

                    # Iterate over data files
                    for dfile in dfiles:
                        idataset += 1

                        if idataset > max_dataset:
                            break

                        ck.out('-------------------')
                        ck.out('Program        ' + str(iq) + ' of ' + str(im))
                        ck.out('')
                        ck.out('Program:      ' + duoa)
                        ck.out('CMD:          ' + cmd)
                        ck.out('Dataset:      ' + dduoa)
                        ck.out('Dataset file: ' + dfile)
                        ck.out('Dataset No:   ' + str(idataset))
                        ck.out('Compiler:     ' + gcc)

                        ii = {
                            'action':
                            'autotune',
                            'module_uoa':
                            'program',
                            'data_uoa':
Example #13
0
def model(i):
    """Nearest-neighbour prediction of optimization classes from program features.

    Implements the MILEPOST-style 1-NN model (https://hal.inria.fr/hal-00685276):
    for each program, the most similar *other* program (by Euclidean distance
    over its feature vector) provides the predicted optimization class.

    Args:
      i (dict):
        'ftable'        - list of feature vectors (indices 0..120 are used:
                          0..55 original MILEPOST features, 56..64 added by
                          Jeremy Singer, 65..120 normalized by ft24)
        'ctable'        - list of [class, ...] entries aligned with ftable
        'features_mask' - optional 0/1 list; only features with mask==1 count
        'skip_out'      - 'yes' to suppress console output

    Returns:
      dict with 'return' (0), 'accuracy', 'accuracy_top3', 'observations',
      'wrong', 'wrong_top3' and 'info' holding per-program predictions
      ('pctable', 'pctable_all'), sorted distances ('edistance') and the
      index of the most similar program ('similarity').
    """
    so = i.get('skip_out', '')

    ftable = i['ftable']
    features_mask = i.get('features_mask', [])
    ctable = i['ctable']

    obs = 0
    good = 0
    good_top3 = 0

    pctable = []      # top-1 predicted class per program
    pctable_all = []  # classes of all neighbours, ordered by distance
    edistance = []    # Euclidean distances, ordered to match pctable_all
    similarity = []   # index of the most similar program

    for q in range(0, len(ftable)):
        obs += 1

        ft = ftable[q]
        c = ctable[q]

        # Euclidean distance to every other program over features 0..120
        # (optionally restricted by features_mask).
        distances = {}
        for k in range(0, len(ftable)):
            if k == q:
                continue

            ft2 = ftable[k]

            dist = 0.0
            for f in range(0, 121):
                if len(features_mask) == 0 or features_mask[f] == 1:
                    dist += pow((float(ft2[f]) - float(ft[f])), 2)

            distances[k] = math.sqrt(dist)

        # Neighbours sorted by increasing distance. dd[0] is the most
        # similar program (stable sort over insertion-ordered keys keeps
        # the lowest index on ties, matching the original min-scan).
        dd = sorted(distances, key=lambda x: distances[x])

        similar = dd[0]
        similarity.append(similar)

        # Top-3 candidate classes.
        c2a = ctable[dd[0]]
        c2b = ctable[dd[1]]
        c2c = ctable[dd[2]]

        pctable.append(c2a[0])
        pctable_all.append([ctable[k][0] for k in dd])
        edistance.append([distances[k] for k in dd])

        if int(c[0]) == int(c2a[0]):
            good += 1

        if int(c[0]) in (int(c2a[0]), int(c2b[0]), int(c2c[0])):
            good_top3 += 1

    wrong = obs - good
    wrong_top3 = obs - good_top3
    acc = good / obs
    acc_top3 = good_top3 / obs

    info = {
        'pctable': pctable,
        'pctable_all': pctable_all,
        'edistance': edistance,
        'similarity': similarity
    }

    if so != 'yes':
        ck.out('')
        ck.out('Observations:          ' + str(obs))
        ck.out('')
        ck.out('Mispredictions:        ' + str(wrong))
        ck.out('Accuracy:              ' + str(acc))
        ck.out('')
        ck.out('Mispredictions (top3): ' + str(wrong_top3))
        ck.out('Accuracy (top3):       ' + str(acc_top3))

    return {
        'return': 0,
        'accuracy': acc,
        'accuracy_top3': acc_top3,
        'observations': obs,
        'wrong': wrong,
        'wrong_top3': wrong_top3,
        'info': info
    }
def do(i, arg):
    """Autotune the CLBlast tuning client over (M, N, K) matrix-size triples.

    Uses module-level configuration (size_m/size_n/size_k, kernel, title,
    precision, ...): detects the platform, resolves the installed CLBlast
    dependency, prepares the 'clblast-tune' program pipeline and runs an
    exhaustive autotuning loop over the configured matrix sizes, recording
    results into the local repo.

    Args:
      i (dict): unused (kept for the standard CK action interface).
      arg: argparse-style namespace with optional fp (triples file),
           tos (target OS) and did (device id) overrides.

    Returns:
      dict with 'return' (0 on success, >0 on error with 'error' message).
    """
    # NOTE(review): this branch only reports the file; loading the triples
    # is not implemented in this variant (see the sibling do() that fills
    # size_m/size_n/size_k from the file).
    if arg.fp is not None:
        fin = arg.fp
        if os.path.isfile(fin):
            print("File loading %s " % (fin))
            # LOAD FILE and TRIPLES
        else:
            print("File %s not found " % (fin))

    if VERBOSE or DEBUG:
        print('[Experiment] %s' % title)
        print('[Preparing pipeline] Clock resolution: %d' % clock_resolution)
        print('[Preparing pipeline] Matrix sizes: m=%s, k=%s, n=%s: ' % (size_m, size_k, size_n))
        print('[Preparing pipeline] Precision: %d' % precision)
        print('[Preparing pipeline] Run for configuration: %d' % run)
        print('[Preparing pipeline] More parms... ')

    ntrip = len(size_m)
    print('[Experiment] Number of triple(s) %s' % (ntrip))

    # One tag per triple: the product M*N*K, comma-separated
    # (join replaces the original quadratic string concatenation).
    size_tag = ','.join(
        str(int(size_m[tp]) * int(size_n[tp]) * int(size_k[tp]))
        for tp in range(ntrip))

    # Detect basic platform info.
    ii = {'action': 'detect',
          'module_uoa': 'platform',
          'con': 'con'}
    r = ck.access(ii)
    if DEBUG: print("%s %s" % (DEBUG_STR, r))
    if r['return'] > 0: return r

    # Host and target OS params.
    hos = r['host_os_uoa']
    hosd = r['host_os_dict']
    tos = r['os_uoa']
    tosd = r['os_dict']
    tdid = r['device_id']

    if DEBUG: print("%s %s %s" % (DEBUG_STR, hos, hosd))
    if DEBUG: print("%s %s %s %s" % (DEBUG_STR, tos, tosd, tdid))

    # Load CLBLAST program meta and desc to check deps.
    ii = {'action': 'load',
          'module_uoa': 'program',
          'data_uoa': 'clblast-tune'}
    rx = ck.access(ii)
    if DEBUG: print("%s %s " % (DEBUG_STR, rx))
    if rx['return'] > 0: return rx
    meta = rx['dict']

    # Get compile-time and run-time deps.
    cdeps = meta.get('compile_deps', {})
    rdeps = meta.get('run_deps', {})

    # Merge rdeps with cdeps for setting up the pipeline (which uses
    # common deps), but tag them as "for_run_time".
    for k in rdeps:
        cdeps[k] = rdeps[k]
        cdeps[k]['for_run_time'] = 'yes'

    # CLBlast libs.
    depl = copy.deepcopy(cdeps['lib-clblast'])

    # Optional target override (e.g. remote device) from the command line.
    if (arg.tos is not None) and (arg.did is not None):
        tos = arg.tos
        tdid = arg.did

    ii = {'action': 'resolve',
          'module_uoa': 'env',
          'host_os': hos,
          'target_os': tos,
          'device_id': tdid,
          'out': 'con',
          'deps': {'lib-clblast': copy.deepcopy(depl)}
          }
    r = ck.access(ii)
    if r['return'] > 0: return r
    udepl = r['deps']['lib-clblast'].get('choices', [])
    if len(udepl) == 0:
        return {'return': 1, 'error': 'no installed CLBlast libs'}

    # Prepare pipeline.
    ii = {'action': 'pipeline',
          'module_uoa': 'program',
          'data_uoa': 'clblast-tune',
          'prepare': 'yes',
          'dependencies': cdeps,
          'no_compiler_description': 'yes',
          'cmd_key': kernel[0],
          'target_os': tos,
          'device_id': tdid,
          'out': 'con',
          'no_state_check': 'yes',
          'flags': '-O3',
          }
    r = ck.access(ii)
    if r['return'] > 0: return r
    fail = r.get('fail', '')
    if fail == 'yes':
        return {'return': 10, 'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'}

    ready = r.get('ready', '')
    if ready != 'yes':
        return {'return': 11, 'error': 'pipeline not ready'}

    state = r['state']
    tmp_dir = state['tmp_dir']
    xcdeps = r.get('dependencies', {})

    # Clean pipeline before reusing it as an autotuning template.
    if 'ready' in r: del (r['ready'])
    if 'fail' in r: del (r['fail'])
    if 'return' in r: del (r['return'])
    pipeline = copy.deepcopy(r)

    record_repo = 'local'
    record_uoa = 'explore-matrix-size-' + kernel[0]
    ck.out('---------------------------------------------------------------------------------------')
    ck.out('Experiment - %s:%s' % (record_repo, record_uoa))

    cpipeline = copy.deepcopy(pipeline)
    ii = {
        'action': 'autotune',
        'module_uoa': 'pipeline',
        'data_uoa': 'program',
        # Explore the M, N and K environment variables in lockstep.
        'choices_order': [
            ['##env#CK_CLBLAST_MSIZE'],
            ['##env#CK_CLBLAST_NSIZE'],
            ['##env#CK_CLBLAST_KSIZE']
        ],
        'choices_selection': [
            {"type": "loop-with-next", "choice": size_m, "default": "256"},
            {"type": "loop-with-next", "choice": size_n, "default": "256"},
            {"type": "loop-with-next", "choice": size_k, "default": "256"}
        ],
        'features_keys_to_process': ['##choices#*'],
        'iterations': -1,
        'repetitions': 3,
        'record': 'yes',
        'record_failed': 'yes',
        'record_params': {
            'search_point_by_features': 'yes'
        },
        'record_repo': record_repo,
        'record_uoa': record_uoa,
        'tags': ['explore-clblast-matrix-size-client', kernel[0], size_tag],
        'pipeline': cpipeline,
        'out': 'con'
    }
    r = ck.access(ii)
    if DEBUG > 0: print("%s %s" % (DEBUG_STR, r))
    if r['return'] > 0: return r
    fail = r.get('fail', '')
    if fail == 'yes':
        return {'return': 10, 'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'}

    return {'return': 0}
Exemple #15
0
def do(i):
    """Post-process ReQuEST@ASPLOS'18 MXNet/Arm performance experiments.

    Searches for all 'experiment' entries matching
    'ck-request-asplos18-mxnet-arm-performance*', unifies their meta
    (scenario, platform, model and library descriptions) and augments every
    point file (*.flat.json) with model size, frequencies, throughput,
    latency and accuracy statistics.  Stale *.cache.json files are removed
    via git.

    Args:
      i (dict): unused (kept for the standard CK action interface).

    Returns:
      dict with 'return' (0 on success, >0 on error with 'error' message).
    """
    # List performance entries.
    r = ck.access({'action': 'search',
                   'module_uoa': 'experiment',
                   'data_uoa': 'ck-request-asplos18-mxnet-arm-performance*'
                   #                 'repo_uoa':'ck-request-asplos18-results'
                   })
    if r['return'] > 0: return r
    lst = r['lst']

    for q in lst:
        duid = q['data_uid']
        duoa = q['data_uoa']
        ruid = q['repo_uid']
        path = q['path']

        ck.out(duoa)

        # Load the full experiment entry.
        r = ck.access({'action': 'load',
                       'module_uoa': 'experiment',
                       'data_uoa': duid,
                       'repo_uoa': ruid})
        if r['return'] > 0: return r

        dd = r['dict']
        ruid = r['repo_uid']
        apath = r['path']

        # Updating meta if needed.
        dd['meta']['scenario_module_uoa'] = 'a555738be4b65860'  # module:request.asplos18

        dd['meta']['dataset_species'] = 'ImageNet'  # dataset species (free format)

        dd['meta']['platform_species'] = 'embedded'  # embedded vs server (maybe other classifications such as edge)

        dd['meta']['platform_peak_power'] = 6.05  # Watts http://opensource.rock-chips.com/images/6/60/Rockchip_RK3399_Datasheet_V1.6-20170301.pdf last page
        dd['meta']['platform_price'] = 149  # $, http://shop.t-firefly.com/goods.php?id=45
        dd['meta']['platform_price_date'] = '20180416'  # date

        dd['meta']['artifact'] = '08da9685582866a0'  # artifact description

        dd['meta']['model_precision'] = 'fp32'

        dd['meta']['processed'] = 'yes'

        # Unified full name for some deps.
        ds = dd['meta']['deps_summary']

        x = ds['mxnet-model']
        r = ck.access({'action': 'make_deps_full_name', 'module_uoa': 'request.asplos18', 'deps': x})
        if r['return'] > 0: return r
        full_model_name = r['full_name']

        dd['meta']['model_design_name'] = full_model_name

        # Per-model constants, added manually for simplicity
        # (can later be automated as in other artifacts).
        if 'mobilenet' in full_model_name:
            dd['meta']['model_species'] = '07d4e7aa3750ddc6'  # model.species UID
            dd['meta']['dataset_size'] = 50000  # number of images ...
            accuracy_top1 = 0.66694
            accuracy_top5 = 0.87734
        elif 'resnet' in full_model_name:
            dd['meta']['model_species'] = 'd41bbf1e489ab5e0'  # model.species:resnet18
            dd['meta']['dataset_size'] = 25000  # number of images ...
            accuracy_top1 = 0.61318
            accuracy_top5 = 0.83702
        elif 'vgg16' in full_model_name:
            dd['meta']['model_species'] = 'a3fcac86d42bdbc4'  # model.species UID
            dd['meta']['dataset_size'] = 5000  # number of images ...
            accuracy_top1 = 0.63120
            accuracy_top5 = 0.84951
        else:
            # FIX: was undefined name 'y' (NameError); report the model name.
            return {'return': 1, 'error': 'unknown model (' + full_model_name + ')'}

        x = ds['lib-mxnet']
        r = ck.access({'action': 'make_deps_full_name', 'module_uoa': 'request.asplos18', 'deps': x})
        if r['return'] > 0: return r
        dd['meta']['library_name'] = r['full_name']

        x = x['deps']['compiler']
        r = ck.access({'action': 'make_deps_full_name', 'module_uoa': 'request.asplos18', 'deps': x})
        if r['return'] > 0: return r
        dd['meta']['compiler_name'] = r['full_name']

        # Updating entry.
        r = ck.access({'action': 'update',
                       'module_uoa': 'experiment',
                       'data_uoa': duid,
                       'repo_uoa': ruid,
                       'dict': dd,
                       'substitute': 'yes',
                       'ignore_update': 'yes',
                       'sort_keys': 'yes'
                       })
        if r['return'] > 0: return r

        # Checking points to aggregate.
        os.chdir(path)
        dperf = os.listdir(path)
        for f in dperf:
            if f.endswith('.cache.json'):
                os.system('git rm -f ' + f)

            elif f.endswith('.flat.json'):
                ck.out(' * ' + f)

                # Load performance file.
                p1 = os.path.join(path, f)

                r = ck.load_json_file({'json_file': p1})
                if r['return'] > 0: return r
                d1 = r['dict']

                # Prune old derived values (they are recomputed below).
                d = {}
                for k in d1:
                    if not k.startswith('##characteristics#run#accuracy_top1') and \
                       not k.startswith('##characteristics#run#accuracy_top5') and \
                       not k.startswith('##characteristics#run#inference_throughput') and \
                       not k.startswith('##characteristics#run#inference_latency'):
                        d[k] = d1[k]

                # Per-model binary size, added manually for simplicity.
                if 'mobilenet' in full_model_name:
                    model_size = 17024109
                elif 'resnet' in full_model_name:
                    model_size = 46803089
                elif 'vgg16' in full_model_name:
                    model_size = 553432060
                else:
                    # FIX: was undefined name 'y' (NameError).
                    return {'return': 1, 'error': 'unknown model (' + full_model_name + ')'}

                d['##features#model_size#min'] = model_size  # Bytes

                d['##features#gpu_freq#min'] = ''
                d['##features#cpu_freq#min'] = 1416
                d['##features#freq#min'] = d['##features#cpu_freq#min']

                d['##features#processed#min'] = 'yes'

                # Add throughput (images/second) from per-run classify times.
                tall = d.get('##characteristics#run#execution_time_classify#all', [])
                if len(tall) > 0:
                    tnew = [1 / t for t in tall]

                    r = ck.access({'action': 'stat_analysis',
                                   'module_uoa': 'experiment',
                                   'dict': d,
                                   'dict1': {'##characteristics#run#inference_throughput': tnew}
                                   })
                    if r['return'] > 0: return r

                # Unify batch size (only 1 is supported in this artifact).
                batch = 1
                d['##features#batch_size#min'] = batch

                # Inference latency.
                d['##features#measuring_latency#min'] = 'yes'

                r = ck.access({'action': 'stat_analysis',
                               'module_uoa': 'experiment',
                               'dict': d,
                               'dict1': {'##characteristics#run#inference_latency': tall}
                               })
                if r['return'] > 0: return r

                r = ck.access({'action': 'stat_analysis',
                               'module_uoa': 'experiment',
                               'dict': d,
                               'dict1': {'##characteristics#run#prediction_time_avg_s': tall}
                               })
                if r['return'] > 0: return r

                # Add accuracy (calculated through a separate experiment).
                r = ck.access({'action': 'stat_analysis',
                               'module_uoa': 'experiment',
                               'dict': d,
                               'dict1': {'##characteristics#run#accuracy_top1': [accuracy_top1]}
                               })
                if r['return'] > 0: return r

                r = ck.access({'action': 'stat_analysis',
                               'module_uoa': 'experiment',
                               'dict': d,
                               'dict1': {'##characteristics#run#accuracy_top5': [accuracy_top5]}
                               })
                if r['return'] > 0: return r

                # Save updated dict.
                r = ck.save_json_to_file({'json_file': p1, 'dict': d, 'sort_keys': 'yes'})
                if r['return'] > 0: return r

    return {'return': 0}
Exemple #16
0
# Smoke test: ck.test() must return the magic value 123,
# otherwise abort the whole self-test with exit code 1.
print('')

r=ck.test()

if r.get('return',0)!=123:
   print('')
   print('Test FAILED (output!=123) !')
   sys.exit(1)

##############################################
# Check that the CK kernel can report its own version
# (return code 0 expected; 'ver_str' holds the printable version).
print('Getting CK version ...')
print('')

r=ck.get_version()
if r.get('return',-1)!=0:
   ck.out('')
   ck.out('Test FAILED!')
   sys.exit(1)

ck.out('')
ck.out('CK version: '+r.get('ver_str',''))

##############################################
# Exercise CK's command-line parser: feed it a representative argv
# (positional CIDs, key=value pairs, -/-- flags, @json files and an
# inline @@dict) and check that list2dict() accepts it.
ck.out('Parsing CK command line ...')
ck.out('')
cmd="mv data cid1 cid2 key1=value1 key2=value2 key3 key4=value4 -key10 -key11=value11 --key12 --key13=value13 @test1.json @test2.json @test2unicode.json @@{'a':['b','c']} -- abc"
i=cmd.split(' ')

r=ck.list2dict(i)
if r['return']>0:
   ck.out('Test FAILED ('+r['error']+')!')
def do(i, arg):
    """Autotune all 'sgemm,opencl' programs over (M, N, K) matrix-size triples.

    Optionally reloads the global size_m/size_n/size_k lists from a JSON
    triples file (arg.fp), detects the platform, then for every program
    tagged 'sgemm,opencl' prepares a dvdt-prof profiling pipeline and
    autotunes it over the configured matrix sizes, recording one experiment
    per program in the local repo.

    Fixes vs. original: Python-2 'print cmd' statement (syntax error under
    Python 3), mixed tab/space indentation, a leaked file handle and a loop
    variable shadowing the 'i' parameter.

    Args:
      i (dict): unused (kept for the standard CK action interface).
      arg: argparse-style namespace with optional fp (triples JSON file).

    Returns:
      dict with 'return' (0 on success, >0 on error with 'error' message).
    """
    fp = arg.fp
    if fp is not None:
        if os.path.isfile(fp):
            print("Loading triples %s" % (fp))
            # Replace the global size lists with the triples from the file.
            # 'with' closes the handle (the original open(fp).read() leaked it).
            with open(fp) as fh:
                triples = json.loads(fh.read())
            del size_m[:]
            del size_n[:]
            del size_k[:]
            for t in triples:  # renamed from 'i' (shadowed the parameter)
                size_m.append(str(t.get('bSizeM')))
                size_n.append(str(t.get('bSizeN')))
                size_k.append(str(t.get('bSizeK')))

    if VERBOSE or DEBUG:
        print('[Experiment] %s' % title)
        print('[Preparing pipeline] Clock resolution: %d' % clock_resolution)
        print('[Preparing pipeline] Matrix sizes: m=%s, k=%s, n=%s: ' % (size_m, size_k, size_n))
        print('[Preparing pipeline] Precision: %d' % precision)
        print('[Preparing pipeline] Run for configuration: %d' % run)
        print('[Preparing pipeline] More parms... ')

    ntrip = len(size_m)
    print('[Experiment] Number of triple(s) %s' % (ntrip))

    # One tag per triple: the product M*N*K, comma-separated.
    size_tag = ','.join(
        str(int(size_m[tp]) * int(size_n[tp]) * int(size_k[tp]))
        for tp in range(ntrip))

    # Detect basic platform info.
    ii = {'action': 'detect',
          'module_uoa': 'platform',
          'con': 'con'}
    r = ck.access(ii)
    if DEBUG: print("%s %s" % (DEBUG_STR, r))
    if r['return'] > 0: return r

    # Host and target OS params.
    hos = r['host_os_uoa']
    hosd = r['host_os_dict']
    tos = r['os_uoa']
    tosd = r['os_dict']
    tdid = r['device_id']

    if DEBUG: print("%s %s %s" % (DEBUG_STR, hos, hosd))
    if DEBUG: print("%s %s %s %s" % (DEBUG_STR, tos, tosd, tdid))

    # Find all programs implementing SGEMM in OpenCL.
    program_tags = 'sgemm,opencl'
    ii = {'action': 'search',
          'module_uoa': 'program',
          'tags': program_tags}
    rx = ck.access(ii)
    if rx['return'] > 0: return rx
    programs_list = rx['lst']

    for p in programs_list:
        # Load program meta and desc to check deps.
        ii = {'action': 'load',
              'module_uoa': 'program',
              'data_uoa': p['data_uoa']}
        rx = ck.access(ii)
        if DEBUG: print("%s %s " % (DEBUG_STR, rx))
        if rx['return'] > 0: return rx
        meta = rx['dict']

        # Get compile-time and run-time deps.
        cdeps = meta.get('compile_deps', {})
        rdeps = meta.get('run_deps', {})
        print(cdeps)

        # Merge rdeps with cdeps for setting up the pipeline (which uses
        # common deps), but tag them as "for_run_time".
        for k in rdeps:
            cdeps[k] = rdeps[k]
            cdeps[k]['for_run_time'] = 'yes'

        # CLBlast programs need an explicit kernel command key.
        cmd = ""
        if 'clblast' in p['data_uoa']:
            cmd = kernel[0]
            print(cmd)  # was a Python-2 'print cmd' statement

        ii = {'action': 'pipeline',
              'module_uoa': 'program',
              'data_uoa': p['data_uoa'],
              'prepare': 'yes',
              'dependencies': cdeps,
              'no_compiler_description': 'yes',
              'cmd_key': cmd,
              'target_os': tos,
              'device_id': tdid,
              'dvdt_prof': 'yes',
              'out': 'con',
              'no_state_check': 'yes',
              'flags': '-O3',
              }
        r = ck.access(ii)
        if r['return'] > 0: return r
        fail = r.get('fail', '')
        if fail == 'yes':
            return {'return': 10, 'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'}

        ready = r.get('ready', '')
        if ready != 'yes':
            return {'return': 11, 'error': 'pipeline not ready'}

        state = r['state']
        tmp_dir = state['tmp_dir']
        xcdeps = r.get('dependencies', {})

        # Clean pipeline before reusing it as an autotuning template.
        if 'ready' in r: del (r['ready'])
        if 'fail' in r: del (r['fail'])
        if 'return' in r: del (r['return'])
        pipeline = copy.deepcopy(r)

        record_repo = 'local'
        record_uoa = 'explore-matrix-size-gemm-libs-' + p['data_uoa']
        ck.out('---------------------------------------------------------------------------------------')
        ck.out('Experiment - %s:%s' % (record_repo, record_uoa))

        cpipeline = copy.deepcopy(pipeline)
        ii = {
            'action': 'autotune',
            'module_uoa': 'pipeline',
            'data_uoa': 'program',
            # Explore the M, N and K environment variables in lockstep.
            'choices_order': [
                ['##env#CK_CLBLAST_MSIZE'],
                ['##env#CK_CLBLAST_NSIZE'],
                ['##env#CK_CLBLAST_KSIZE']
            ],
            'choices_selection': [
                {"type": "loop-with-next", "choice": size_m, "default": "256"},
                {"type": "loop-with-next", "choice": size_n, "default": "256"},
                {"type": "loop-with-next", "choice": size_k, "default": "256"}
            ],
            'features_keys_to_process': ['##choices#*'],
            'iterations': -1,
            'repetitions': 3,
            'record': 'yes',
            'record_failed': 'yes',
            'record_params': {
                'search_point_by_features': 'yes'
            },
            'record_repo': record_repo,
            'record_uoa': record_uoa,
            'tags': ['dvdt-prof', 'explore-matrix-size-libs-sgemm', p['data_uoa']],
            'pipeline': cpipeline,
            'out': 'con'
        }
        r = ck.access(ii)
        if DEBUG > 0: print("%s %s" % (DEBUG_STR, r))
        if r['return'] > 0: return r
        fail = r.get('fail', '')
        if fail == 'yes':
            return {'return': 10, 'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'}

    return {'return': 0}
def do(i, arg):
    """Autotune CLBlast over per-model GEMM matrix sizes for each installed lib.

    Loads the (M, N, K) matrices of AlexNet, GoogleNet and SqueezeNet 1.1,
    then for every installed CLBlast environment prepares the 'clblast-tune'
    pipeline (CPU/GPU frequencies pinned to max) and autotunes kernel[0]
    over each model's matrix sizes, recording one experiment per
    (library, model) pair in the local repo.

    Fixes vs. original: Python-2 'print udepl' / 'print lib_uoa' statements
    (syntax errors under Python 3); removed dead commented-out code.

    Args:
      i (dict): unused (kept for the standard CK action interface).
      arg: argparse-style namespace with optional fp, tos and did.

    Returns:
      dict with 'return' (0 on success, >0 on error with 'error' message).
    """
    # NOTE(review): this branch only reports the file; loading the triples
    # is not implemented in this variant.
    if arg.fp is not None:
        fin = arg.fp
        if os.path.isfile(fin):
            print("File loading %s " % (fin))
            # LOAD FILE and TRIPLES
        else:
            print("File %s not found " % (fin))

    # Load model matrices (per-layer GEMM sizes from JSON descriptions).
    alex_net = loadMatrixFromJson(alex_net_json)
    google_net = loadMatrixFromJson(google_net_json)
    squeeze11_net = loadMatrixFromJson(squeeze11_net_json)

    dataset = [
        {'Model_name': 'AlexNet', 'Batch_size': 1, 'matrix': alex_net},
        {'Model_name': 'GoogleNet', 'Batch_size': 1, 'matrix': google_net},
        {'Model_name': 'SqueezeNet1.1', 'Batch_size': 1, 'matrix': squeeze11_net},
    ]

    if VERBOSE or DEBUG:
        print('[Experiment] %s' % title)
        print('[Preparing pipeline] Clock resolution: %d' % clock_resolution)
        print('[Preparing pipeline] Precision: %d' % precision)
        print('[Preparing pipeline] Run for configuration: %d' % run)
        print('[Preparing pipeline] More parms... ')

    # Detect basic platform info.
    ii = {'action': 'detect',
          'module_uoa': 'platform',
          'con': 'con'}
    r = ck.access(ii)
    if DEBUG: print("%s %s" % (DEBUG_STR, r))
    if r['return'] > 0: return r

    # Host and target OS params.
    hos = r['host_os_uoa']
    hosd = r['host_os_dict']
    tos = r['os_uoa']
    tosd = r['os_dict']
    tdid = r['device_id']

    if DEBUG: print("%s %s %s" % (DEBUG_STR, hos, hosd))
    if DEBUG: print("%s %s %s %s" % (DEBUG_STR, tos, tosd, tdid))

    # Load CLBLAST program meta and desc to check deps.
    ii = {'action': 'load',
          'module_uoa': 'program',
          'data_uoa': 'clblast-tune'}
    rx = ck.access(ii)
    if DEBUG: print("%s %s " % (DEBUG_STR, rx))
    if rx['return'] > 0: return rx
    meta = rx['dict']

    # Get compile-time and run-time deps.
    cdeps = meta.get('compile_deps', {})
    rdeps = meta.get('run_deps', {})

    # Merge rdeps with cdeps for setting up the pipeline (which uses
    # common deps), but tag them as "for_run_time".
    for k in rdeps:
        cdeps[k] = rdeps[k]
        cdeps[k]['for_run_time'] = 'yes'

    # CLBlast libs.
    depl = copy.deepcopy(cdeps['lib-clblast'])

    # Optional target override (e.g. remote device) from the command line.
    if (arg.tos is not None) and (arg.did is not None):
        tos = arg.tos
        tdid = arg.did

    ii = {'action': 'resolve',
          'module_uoa': 'env',
          'host_os': hos,
          'target_os': tos,
          'device_id': tdid,
          'out': 'con',
          'deps': {'lib-clblast': copy.deepcopy(depl)}
          }
    r = ck.access(ii)
    if r['return'] > 0: return r
    udepl = r['deps']['lib-clblast'].get('choices', [])
    if len(udepl) == 0:
        return {'return': 1, 'error': 'no installed CLBlast libs'}

    cdeps['lib-clblast']['uoa'] = udepl[0]

    # Prepare pipeline.
    ii = {'action': 'pipeline',
          'module_uoa': 'program',
          'data_uoa': 'clblast-tune',
          'prepare': 'yes',
          'dependencies': cdeps,
          'no_compiler_description': 'yes',
          'cmd_key': kernel[0],
          'target_os': tos,
          'device_id': tdid,
          'out': 'con',
          'no_state_check': 'yes',
          'flags': '-O3',
          'cpu_freq': 'max',
          'gpu_freq': 'max',
          }
    r = ck.access(ii)
    if r['return'] > 0: return r
    fail = r.get('fail', '')
    if fail == 'yes':
        return {'return': 10, 'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'}

    ready = r.get('ready', '')
    if ready != 'yes':
        return {'return': 11, 'error': 'pipeline not ready'}

    state = r['state']
    tmp_dir = state['tmp_dir']
    xcdeps = r.get('dependencies', {})

    # Clean pipeline before reusing it as an autotuning template.
    if 'ready' in r: del (r['ready'])
    if 'fail' in r: del (r['fail'])
    if 'return' in r: del (r['return'])
    pipeline = copy.deepcopy(r)

    print(udepl)  # was a Python-2 'print udepl' statement

    # For each CLBlast lib ***********************************
    for lib_uoa in udepl:
        print(lib_uoa)  # was a Python-2 'print lib_uoa' statement
        ii = {'action': 'load',
              'module_uoa': 'env',
              'data_uoa': lib_uoa}
        r = ck.access(ii)
        if r['return'] > 0: return r
        lib_name = r['data_name']

        # Build a tag from the package UOA, skipping generic tokens.
        lib_tags = r['dict']['customize']['used_package_uoa'].split('-')
        tags = ''
        skip_tags = ['lib', 'master', 'universal']
        for t in lib_tags:
            if t not in skip_tags:
                tags += t + '-'

        # Append the library version.
        lib_tags = r['dict']['setup']['version']
        tags += lib_tags

        # For each model in dataset.
        for model in dataset:

            record_repo = 'local'
            record_uoa = 'explore-matrix-size-' + tags + '-' + model['Model_name']
            ck.out('---------------------------------------------------------------------------------------')
            ck.out('Experiment - %s:%s' % (record_repo, record_uoa))

            cpipeline = copy.deepcopy(pipeline)
            ii = {
                'action': 'autotune',
                'module_uoa': 'pipeline',
                'data_uoa': 'program',
                # Explore the M, N and K environment variables in lockstep.
                'choices_order': [
                    ['##env#CK_CLBLAST_MSIZE'],
                    ['##env#CK_CLBLAST_NSIZE'],
                    ['##env#CK_CLBLAST_KSIZE']
                ],
                'choices_selection': [
                    {"type": "loop-with-next", "choice": model['matrix']['M'], "default": "256"},
                    {"type": "loop-with-next", "choice": model['matrix']['N'], "default": "256"},
                    {"type": "loop-with-next", "choice": model['matrix']['K'], "default": "256"}
                ],
                'features_keys_to_process': ['##choices#*'],
                'iterations': -1,
                'repetitions': 3,
                'record': 'yes',
                'record_failed': 'yes',
                'record_params': {
                    'search_point_by_features': 'yes'
                },
                'record_repo': record_repo,
                'record_uoa': record_uoa,
                'tags': ['explore-clblast-matrix-size-client', tags, model['Model_name']],
                'pipeline': cpipeline,
                'out': 'con'
            }
            r = ck.access(ii)
            if DEBUG > 0: print("%s %s" % (DEBUG_STR, r))
            if r['return'] > 0: return r
            fail = r.get('fail', '')
            if fail == 'yes':
                return {'return': 10, 'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'}

    return {'return': 0}
def do(i, arg):
    """Autotune 'image-classification-tf-py' over GPU memory fraction and
    batch size for every installed TensorFlow library / model pair.

    i   -- CK input dict (kept for CK action convention; not read directly).
    arg -- namespace whose optional .tos / .did override the detected
           target OS and device id.

    Returns a CK dict: {'return': 0} on success, or {'return': >0, 'error': ...}.
    Relies on module-level globals: num_batches, num_repetitions,
    platform_tags, bs (batch-size exploration range).
    """
    # Detect basic platform info.
    ii = {'action': 'detect', 'module_uoa': 'platform', 'out': 'out'}
    r = ck.access(ii)
    if r['return'] > 0: return r

    # Host and target OS params.
    hos = r['host_os_uoa']
    hosd = r['host_os_dict']

    tos = r['os_uoa']
    tosd = r['os_dict']
    tdid = r['device_id']

    # Program and command.
    program = 'image-classification-tf-py'
    cmd_key = 'default'

    # Load program meta and description to check deps.
    ii = {'action': 'load', 'module_uoa': 'program', 'data_uoa': program}
    rx = ck.access(ii)
    if rx['return'] > 0: return rx
    mm = rx['dict']

    # Get compile-time and run-time deps.
    cdeps = mm.get('compile_deps', {})
    rdeps = mm.get('run_deps', {})

    # Merge rdeps with cdeps for setting up the pipeline (which uses
    # common deps), but tag them as "for_run_time".
    for k in rdeps:
        cdeps[k] = rdeps[k]
        cdeps[k]['for_run_time'] = 'yes'

    # Tensorflow libs.
    depl = copy.deepcopy(cdeps['lib-tensorflow'])
    if (arg.tos is not None) and (arg.did is not None):
        tos = arg.tos
        tdid = arg.did
    ii = {
        'action': 'resolve',
        'module_uoa': 'env',
        'host_os': hos,
        'target_os': tos,
        'device_id': tdid,
        'out': 'con',
        'deps': {
            'lib-tensorflow': copy.deepcopy(depl)
        },
        'quiet': 'yes'
    }
    r = ck.access(ii)
    if r['return'] > 0: return r

    # All UOAs of env for TensorFlow libs.
    udepl = r['deps']['lib-tensorflow'].get('choices', [])
    if len(udepl) == 0:
        return {'return': 1, 'error': 'no installed Tensorflow libs'}

    # Tensorflow models.
    depm = copy.deepcopy(cdeps['model-and-weights'])
    ii = {
        'action': 'resolve',
        'module_uoa': 'env',
        'host_os': hos,
        'target_os': tos,
        'device_id': tdid,
        'out': 'con',
        'deps': {
            'tensorflow_model': copy.deepcopy(depm)
        },
        'quiet': 'yes'
    }
    r = ck.access(ii)
    if r['return'] > 0: return r

    # All UOAs of env for TensorFlow models.
    udepm = r['deps']['tensorflow_model'].get('choices', [])
    if len(udepm) == 0:
        return {'return': 1, 'error': 'no installed TensorFlow models'}

    # Load dataset path.
    # FIXME: Does not have to be ImageNet val.
    ii = {
        'action': 'show',
        'module_uoa': 'env',
        'tags': 'dataset,imagenet,val,raw'
    }
    rx = ck.access(ii)
    if rx['return'] > 0: return rx
    # BUG FIX: an empty result list used to be returned as-is; since rx
    # carries 'return': 0 in that case, callers saw success. Report a
    # real error instead.
    if len(rx['lst']) == 0:
        return {'return': 1, 'error': 'no installed ImageNet val dataset'}
    # FIXME: Can also be 'CK_ENV_DATASET_IMAGE_DIR'.
    img_dir = rx['lst'][0]['meta']['env']['CK_ENV_DATASET_IMAGENET_VAL']

    # Prepare pipeline.
    cdeps['lib-tensorflow']['uoa'] = udepl[0]
    cdeps['model-and-weights']['uoa'] = udepm[0]
    ii = {
        'action': 'pipeline',
        'prepare': 'yes',
        'dependencies': cdeps,
        'module_uoa': 'program',
        'data_uoa': program,
        'cmd_key': cmd_key,
        'target_os': tos,
        'device_id': tdid,
        'no_state_check': 'yes',
        'no_compiler_description': 'yes',
        'skip_calibration': 'yes',
        'env': {
            'CK_ENV_DATASET_IMAGE_DIR': img_dir,
            'CK_BATCH_COUNT': num_batches
        },
        'cpu_freq': 'max',
        'gpu_freq': 'max',
        'flags': '-O3',
        'speed': 'no',
        'energy': 'no',
        'skip_print_timers': 'yes',
        'out': 'con'
    }

    r = ck.access(ii)
    if r['return'] > 0: return r
    if r.get('fail', '') == 'yes':
        return {
            'return': 10,
            'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'
        }
    if r.get('ready', '') != 'yes':
        return {'return': 11, 'error': 'pipeline not ready'}

    state = r['state']
    tmp_dir = state['tmp_dir']  # NOTE: kept for parity with sibling scripts

    # Remember resolved deps for this benchmarking session.
    xcdeps = r.get('dependencies', {})

    # Clean pipeline of status keys before reusing it as a template.
    for key in ('ready', 'fail', 'return'):
        if key in r: del r[key]

    pipeline = copy.deepcopy(r)

    # For each TensorFlow lib.*******************************************
    for lib_uoa in udepl:
        # Load TensorFlow lib.
        ii = {'action': 'load', 'module_uoa': 'env', 'data_uoa': lib_uoa}
        r = ck.access(ii)
        if r['return'] > 0: return r
        # Get the tags from e.g. 'TensorFlow library (from sources, cuda)'
        lib_name = r['data_name']
        m = re.match(r'TensorFlow library \((?P<tags>.*)\)', lib_name)
        # ROBUSTNESS FIX: skip entries whose name does not follow the
        # expected pattern (previously: AttributeError on None).
        if m is None: continue
        lib_tags = m.group('tags').replace(' ', '').replace(',', '-')
        # Skip some libs with "in [..]" or "not in [..]".
        if lib_tags not in [
                'prebuilt-cuda', 'fromsources-cuda', 'fromsources-cuda-xla',
                'prebuilt-cpu', 'fromsources-cpu', 'fromsources-cpu-xla'
        ]:
            continue
        cmd_keys = ['default']
        gpu_memory_pc = [33]
        # For each cmd key.**********************************************
        for cmd_key in cmd_keys:
            # For each TensorFlow model.*********************************
            for model_uoa in udepm:
                # Load TensorFlow model.
                ii = {
                    'action': 'load',
                    'module_uoa': 'env',
                    'data_uoa': model_uoa
                }
                r = ck.access(ii)
                if r['return'] > 0: return r
                # Get the tags from e.g.
                # 'TensorFlow python model and weights (squeezenet)'.
                model_name = r['data_name']
                m = re.match(
                    r'TensorFlow python model and weights \((?P<tags>.*)\)',
                    model_name)
                if m is None: continue  # same robustness guard as for libs
                model_tags = m.group('tags').replace(' ', '').replace(
                    ',', '-').lower()
                # Skip some models with "in [..]" or "not in [..]".
                if model_tags not in [
                        'squeezenet', 'googlenet', 'mobilenet-1.0-224'
                ]:
                    continue  # 'alexnet'

                record_repo = 'local'
                record_uoa = model_tags + '-' + lib_tags

                # Prepare pipeline.
                ck.out(
                    '---------------------------------------------------------------------------------------'
                )
                ck.out('%s - %s' % (lib_name, lib_uoa))
                ck.out('%s - %s' % (model_name, model_uoa))
                ck.out('Experiment - %s:%s' % (record_repo, record_uoa))

                # Prepare autotuning input.
                cpipeline = copy.deepcopy(pipeline)

                # Reset deps and change UOA.
                new_deps = {
                    'lib-tensorflow': copy.deepcopy(depl),
                    'squeezedet': copy.deepcopy(depm)
                }
                new_deps['lib-tensorflow']['uoa'] = lib_uoa
                new_deps['squeezedet']['uoa'] = model_uoa

                jj = {
                    'action': 'resolve',
                    'module_uoa': 'env',
                    'host_os': hos,
                    'target_os': tos,
                    'device_id': tdid,
                    'deps': new_deps
                }
                r = ck.access(jj)
                if r['return'] > 0: return r

                cpipeline['dependencies'].update(new_deps)
                cpipeline['cmd_key'] = cmd_key

                ii = {
                    'action': 'autotune',
                    'module_uoa': 'pipeline',
                    'data_uoa': 'program',
                    # Explore GPU memory fraction x batch size.
                    'choices_order': [
                        ['##choices#env#CK_TF_GPU_MEMORY_PERCENT'],
                        ['##choices#env#CK_BATCH_SIZE']
                    ],
                    'choices_selection': [
                        {'type': 'loop', 'choice': gpu_memory_pc},
                        {'type': 'loop',
                         'start': bs['start'],
                         'stop': bs['stop'],
                         'step': bs['step'],
                         'default': bs['default']},
                    ],
                    'features_keys_to_process': ['##choices#*'],
                    'iterations': -1,
                    'repetitions': num_repetitions,
                    'record': 'yes',
                    'record_failed': 'yes',
                    'record_params': {
                        'search_point_by_features': 'yes'
                    },
                    'record_repo': record_repo,
                    'record_uoa': record_uoa,
                    'tags': [
                        'explore-batch-size-libs-models', model_tags,
                        lib_tags, platform_tags
                    ],
                    'pipeline': cpipeline,
                    'out': 'con'
                }

                r = ck.access(ii)
                if r['return'] > 0: return r
                if r.get('fail', '') == 'yes':
                    return {
                        'return': 10,
                        'error':
                        'pipeline failed (' + r.get('fail_reason', '') + ')'
                    }

    return {'return': 0}
def main(i):
    """Build and validate a decision-tree ('dtc', sklearn) model per compiler
    in the module-level 'gcc' list and record its accuracy on all data.

    i -- CK input dict (kept for CK convention; not read directly).

    Relies on module-level globals: gcc (compiler names), best_depth
    (tree depth per compiler, parallel to gcc).
    Returns {'return': 0} on success; ck.err() aborts on CK errors.
    """

    # Load common table file (for all models)
    ddd = {}
    r = ck.load_json_file({'json_file': 'save_all_model_data_tmp.json'})
    if r['return'] == 0:
        ddd = r['dict']

    # Searching for features
    for gi in range(0, len(gcc)):
        g = gcc[gi]
        depth = best_depth[gi]

        ck.out(
            '********************************************************************************'
        )
        ck.out('Modeling optimizations for ' + g)
        ck.out('')

        if g not in ddd: ddd[g] = {}

        gx = g.replace(' ', '_')
        gx2 = g.replace(' ', '-')

        r = ck.load_json_file(
            {'json_file': 'prepare_train_data_tmp.' + gx + '.json'})
        if r['return'] > 0: ck.err(r)

        d = r['dict']

        ftable = d['ftable']
        ctable = d['ctable']

        s = '==============================================================\n'
        s += 'Depth: ' + str(depth) + '\n\n'

        ck.out(s)

        # Building decision tree on all data
        ck_model_entry_name = "rpi3-milepost-model-" + gx2.lower()

        ii = {
            'action': 'build',
            'module_uoa': 'model',
            'ftable': ftable,
            'ctable': ctable,
            'keep_temp_files': 'yes',
            "model_module_uoa": "model.sklearn",
            "model_name": "dtc",
            "model_file": "tmp-model-sklearn-dtc",
            "model_params": {
                "max_depth": depth
            },
            "model_repo_uoa": "ck-rpi-optimization-results",
            "model_data_uoa": ck_model_entry_name,
            "out": ""
        }

        # Keep a pristine copy of the build request: ck.access() may mutate
        # its input, and validation below must start from the same request.
        cii = copy.deepcopy(ii)

        # Training
        r = ck.access(ii)
        if r['return'] > 0: ck.err(r)

        # Validating
        # BUG FIX: this used to do 'ii = copy.deepcopy(ii)' (a self-copy),
        # leaving the saved pristine copy 'cii' unused; restore from 'cii'
        # as clearly intended.
        ii = copy.deepcopy(cii)
        ii['action'] = 'validate'

        r = ck.access(ii)
        if r['return'] > 0: ck.err(r)

        obs = r['observations']
        wrong = r['mispredictions']

        # Fraction of correctly predicted observations.
        acc = float(obs - wrong) / float(obs)

        x = '  Accuracy on all data: ' + str(acc)
        s += x
        ck.out(x)

        # Record example of features to demo predictions (to be integrated
        # with compiler optimization prediction (web)services)
        d = {
            "action": "use",
            "module_uoa": "model",
            "features": ftable[123],  # features of some random benchmark
            "model_module_uoa": "model.sklearn",
            "model_name": "dtc",
            "model_file": "tmp-model-sklearn-dtc",
            "model_data_uoa": ck_model_entry_name
        }

        r = ck.save_json_to_file({
            'json_file':
            'process_model_using_decision_trees_and_record_to_ck_use.' + gx +
            '.json',
            'dict':
            d
        })
        if r['return'] > 0: return r

    return {'return': 0}
Exemple #21
0
def main(i):
    """Reduce MILEPOST model complexity per compiler: normalize features to
    0..1, then greedily drop features whose removal does not hurt
    nearest-neighbour accuracy, logging the final mask and accuracy.

    i -- CK input dict (kept for CK convention; not read directly).

    Relies on module-level globals: gcc (compiler names) and model
    (the nearest-neighbour modelling function).
    Returns {'return': 0} or the first CK error dict; ck.err() aborts
    on data-loading errors.
    """

    NUM_FT = 121  # number of MILEPOST features ft1..ft121

    # Load common table file (for all models)
    ddd = {}
    r = ck.load_json_file({'json_file': 'save_all_model_data_tmp.json'})
    if r['return'] == 0:
        ddd = r['dict']

    # Searching for features
    for g in gcc:
        ck.out(
            '********************************************************************************'
        )
        ck.out('Reducing model complexity for ' + g)
        ck.out('')

        if g not in ddd: ddd[g] = {}

        gx = g.replace(' ', '_')

        r = ck.load_json_file(
            {'json_file': 'prepare_train_data_tmp.' + gx + '.json'})
        if r['return'] > 0: ck.err(r)

        d = r['dict']

        ftable = d['ftable']
        ctable = d['ctable']

        # Record per-feature min/max across all rows (min is tracked for
        # completeness; only max is used for normalization below).
        ftable_range = {}
        for f in ftable:
            for k in range(NUM_FT):
                v = f[k]
                if k not in ftable_range:
                    ftable_range[k] = {'min': None, 'max': None}
                if ftable_range[k]['min'] is None or v < ftable_range[k]['min']:
                    ftable_range[k]['min'] = v
                if ftable_range[k]['max'] is None or v > ftable_range[k]['max']:
                    ftable_range[k]['max'] = v

        # Normalize (all features 0..1; 0 when the whole column is zero).
        ftable_normalized = []
        for f in ftable:
            x = []
            for k in range(NUM_FT):
                v = 0
                if ftable_range[k]['max'] != 0:
                    v = f[k] / ftable_range[k]['max']
                x.append(v)
            ftable_normalized.append(x)

        # Mask semantics: 1 = feature excluded by the greedy search below.
        features_mask = [0] * NUM_FT

        r = model({
            'ftable': ftable_normalized,
            'features_mask': features_mask,
            'ctable': ctable
        })
        if r['return'] > 0: return r

        r1 = ck.save_json_to_file({
            'json_file':
            'process_model_using_nearest_neighbour_assemble_features/prepare_reactions_model_train_ref_result.'
            + gx + '.json',
            'dict':
            r
        })
        if r1['return'] > 0: ck.err(r1)

        ref_acc = r['accuracy']  # Reference accuracy

        x = 'Reference accuracy: ' + str(ref_acc)
        s = x + '\n\n'

        ck.out('---------------------')
        ck.out(x)
        ck.out('')

        # Assembling features (adding features one by one) 0:121 - normalized features!
        for k in range(NUM_FT):
            features_mask[k] = 1

            r = model({
                'ftable': ftable_normalized,
                'features_mask': features_mask,
                'ctable': ctable,
                'skip_out': 'yes'
            })
            if r['return'] > 0: return r

            acc = r['accuracy']

            # Keep feature k masked only if removing it did not lower
            # accuracy; otherwise restore it.
            keep = False
            sx = ''
            if acc < ref_acc:
                keep = True
                sx = 'removed'
            elif acc == ref_acc:
                sx = 'kept'
            elif acc > ref_acc:
                ref_acc = acc
                sx = 'kept (accuracy even improved)'

            if keep:
                features_mask[k] = 0

            x = 'ft' + str(k + 1) + ') ' + str(
                acc) + ' ' + sx + ' (ref acc=' + str(ref_acc) + ')'
            ck.out(x)
            s += x + '\n'

        # Final accuracy with the reduced feature set
        r = model({
            'ftable': ftable_normalized,
            'features_mask': features_mask,
            'ctable': ctable,
            'skip_out': 'yes'
        })
        if r['return'] > 0: return r

        acc = r['accuracy']

        r1 = ck.save_json_to_file({
            'json_file':
            'process_model_using_nearest_neighbour_assemble_features/prepare_reactions_model_train_reduced_result.'
            + gx + '.json',
            'dict':
            r
        })
        if r1['return'] > 0: ck.err(r1)

        # Final result
        ck.out('')
        ck.out('Final features mask:')
        ck.out('')

        s += '\nFinal features mask:\n\n'
        s1 = ''

        for f in range(len(features_mask)):
            x = '  ft' + str(f + 1) + ') ' + str(features_mask[f])
            ck.out(x)
            s += x + '\n'

            if features_mask[f] == 1:
                if s1 != '': s1 += ','
                s1 += 'ft' + str(f + 1)

        s += '\nFinal accuracy: ' + str(acc) + '\n'

        r = ck.save_text_file({
            'text_file':
            'process_model_using_nearest_neighbour_assemble_features/log.' +
            gx + '.txt',
            'string':
            s
        })
        if r['return'] > 0: return r

        r = ck.save_text_file({
            'text_file':
            'process_model_using_nearest_neighbour_assemble_features/influential_features.'
            + gx + '.txt',
            'string':
            s1
        })
        if r['return'] > 0: return r

        ddd[g]['milepost_reduce_complexity1_normalized_ft1_ft121'] = acc

    # Save common data
    r = ck.save_json_to_file({
        'json_file': 'save_all_model_data_tmp.json',
        'dict': ddd
    })
    if r['return'] > 0: return r

    return {'return': 0}
    duoa = q['data_uoa']

    meta = q['meta']

    cmds = meta['run_cmds']

    for cmd in cmds:

        # 7e149c8504752933 - gcc 4.9.2
        # 4f11abfefd3cc031 - gcc 7.1.0
        for xgcc in ['gcc4', 'gcc7']:

            if xgcc == 'gcc4': gcc = '7e149c8504752933'
            elif xgcc == 'gcc7': gcc = '4f11abfefd3cc031'

            ck.out('-------------------')
            ck.out('Program   ' + str(iq) + ' of ' + str(im))
            ck.out('Program:  ' + duoa)
            ck.out('CMD:      ' + cmd)
            ck.out('Compiler: ' + gcc)

            ii = {
                'action': 'autotune',
                'module_uoa': 'program',
                'data_uoa': duoa,
                'iterations': 1,
                'repetitions': 2,
                'scenario': '9d88674c45b94971',
                'compiler_env_uoa': gcc,
                'seed': 12345,
                'solution_module_uoa': '8289e0cf24346aa7',
def run(i):
    """Prune experiment entry 'euoa2': delete every point whose feature
    set has no exact counterpart in experiment entry 'euoa1'.

    Returns {'return': 0} or the first CK error dict encountered.
    """

    # Resolve the on-disk path of the reference entry (euoa1).
    r = ck.access({'action': 'load',
                   'module_uoa': 'experiment',
                   'data_uoa': euoa1})
    if r['return'] > 0: return r
    ref_path = r['path']

    # Resolve the on-disk path of the entry being cleaned (euoa2).
    r = ck.access({'action': 'load',
                   'module_uoa': 'experiment',
                   'data_uoa': euoa2})
    if r['return'] > 0: return r
    clean_path = r['path']

    ref_files = os.listdir(ref_path)
    clean_files = os.listdir(clean_path)

    # Walk the points of the entry being cleaned.
    for cand in clean_files:
        if not cand.endswith('.features.json'):
            continue

        ck.out('Loading point ' + cand + ' ...')

        r = ck.load_json_file({'json_file': os.path.join(clean_path, cand)})
        if r['return'] > 0: return r
        cand_features = r['dict'].get('features', {})

        # Look for an identical feature set among the reference points.
        matched = False
        for ref in ref_files:
            if not ref.endswith('.features.json'):
                continue

            r = ck.load_json_file({'json_file': os.path.join(ref_path, ref)})
            if r['return'] > 0: return r

            rx = ck.compare_dicts({'dict1': r['dict'].get('features', {}),
                                   'dict2': cand_features})
            if rx['return'] > 0: return rx

            if rx['equal'] == 'yes':
                matched = True
                break

        if matched:
            ck.out('  Found!')
        else:
            # No counterpart: drop every file belonging to this point
            # (all files share the same prefix before '.features.json').
            ck.out('    Removing point ...')

            prefix = cand[:-14]  # strip the '.features.json' suffix

            for victim in clean_files:
                if victim.startswith(prefix):
                    os.remove(os.path.join(clean_path, victim))

    return {'return': 0}
def main(i):
    """Benchmark blocked matmul ('shared-matmul-c2') over matrix sizes near
    powers of two and tile sizes, aggregating GFLOPS stats per platform
    into the module-level 'aggregated_stats' JSON file.

    i -- CK input dict (kept for CK convention; not read directly).

    Relies on module-level globals: aggregated_stats (file name) and
    line (separator string for output).
    Returns {'return': 0} or a CK error dict.
    """

    cur_dir = os.getcwd()
    fas = os.path.join(cur_dir, aggregated_stats)

    # Get some info about current platform
    ii = {'action': 'detect', 'module_uoa': 'platform', 'out': 'con'}

    r = ck.access(ii)
    if r['return'] > 0: return r

    hos = r['host_os_uid']
    hosx = r['host_os_uoa']
    hosd = r['host_os_dict']

    tos = r['os_uid']
    tosx = r['os_uoa']
    tosd = r['os_dict']

    cpu_name = r['features']['cpu']['name']
    plat_name = r['features']['platform']['name']

    #############################################################
    ck.out(line)
    ck.out('CPU name: ' + cpu_name)
    ck.out('Plat name: ' + plat_name)

    #############################################################
    ck.out(line)
    ck.out('Loading aggregated stats ...')

    aa = []
    if os.path.isfile(fas):
        r = ck.load_json_file({'json_file': fas})
        if r['return'] > 0: return r
        ax = r['dict']

        if 'all' not in ax: ax['all'] = []
        aa = ax['all']

    #############################################################
    ck.out(line)
    ck.out('Finding entry related to this platform ...')

    found = False
    for a in aa:
        if a.get('cpu_name', '') == cpu_name and a.get('plat_name',
                                                       '') == plat_name:
            found = True
            # BUG FIX: stop at the matching entry; without this break,
            # 'a' was left bound to the LAST list element after the loop
            # and results were written into the wrong platform's record.
            break

    if not found:
        a = {'cpu_name': cpu_name, 'plat_name': plat_name}
        aa.append(a)

    if 'data' not in a: a['data'] = {}
    data = a.get('data', {})

    # Init pipeline
    r = ck.access({
        'action': 'pipeline',
        'module_uoa': 'program',
        'data_uoa': 'shared-matmul-c2',
        'cpu_freq': 'max',
        'gpu_freq': 'max',
        'speed': 'yes',
        'compiler_vars': {
            'USE_BLOCKED_MATMUL': 'YES'
        },
        'no_state_check': 'yes',
        'prepare': 'yes',
        'out': 'con'
    })
    if r['return'] > 0: return r

    if r['ready'] != 'yes':
        return {'return': 1, 'error': 'can\'t init pipeline'}

    pipeline = r

    # Compile program once (small fixed size) to validate the pipeline.
    tpipeline = copy.deepcopy(pipeline)
    r = ck.access({
        'action': 'autotune',
        'module_uoa': 'pipeline',
        'pipeline': pipeline,
        'pipeline_update': {
            'env': {
                'CT_MATRIX_DIMENSION': 32,
                'CT_BLOCK_SIZE': 32
            }
        },
        'iterations': 1,
        'repetitions': 1,
        'out': 'con'
    })
    if r['return'] > 0: return r
    lsa = r.get('last_stat_analysis', {}).get('dict_flat', {})
    time_min = lsa.get('##characteristics#run#execution_time#min', None)
    if time_min is None or time_min == 0.0:
        return {'return': 1, 'error': 'failed to run default pipeline'}

    # data is per N size: explore N = 2^n - 1, 2^n, 2^n + 1.
    for nn in range(-1, 2):
        for n in range(3, 32):
            N = (2**n) + nn

            SN = str(N)

            if SN not in data: data[SN] = {}
            xdata = data.get(SN, {})

            for opts in range(0, 16):
                # Choose tile size: 1, full matrix, or a power of two <= N.
                if opts == 0:
                    BS = 1
                elif opts == 1:
                    BS = N
                else:
                    BS = 2**(opts - 1)
                    if BS > N: continue

                ck.out('Matrix size: ' + str(N))
                ck.out('Tile size:   ' + str(BS))

                # Run pipeline (already compiled; only env changes).
                tpipeline = copy.deepcopy(pipeline)
                r = ck.access({
                    'action': 'autotune',
                    'module_uoa': 'pipeline',
                    'pipeline': pipeline,
                    'pipeline_update': {
                        'no_compile': 'yes',
                        'env': {
                            'CT_MATRIX_DIMENSION': N,
                            'CT_BLOCK_SIZE': BS
                        }
                    },
                    'iterations': 1,
                    'repetitions': 3,
                    'out': 'con'
                })
                if r['return'] > 0: return r

                lsa = r.get('last_stat_analysis', {}).get('dict_flat', {})
                tmin = lsa.get('##characteristics#run#execution_time#min',
                               None)

                if tmin is not None:
                    # 2*N^3 flops for a classic matmul.
                    ops = 2 * (N * N * N)
                    gflops = 1.0e-9 * ops / tmin
                    SBS = str(BS)

                    xdata[SBS] = {'tmin': tmin, 'gflops': gflops}

                    # Persist after every successful run so a crash or
                    # interrupt loses at most one measurement.
                    ck.out(line)
                    ck.out('Saving aggregated stats ...')

                    r = ck.save_json_to_file({
                        'json_file': fas,
                        'dict': {
                            'all': aa
                        },
                        'sort_keys': 'yes'
                    })
                    if r['return'] > 0: return r

    #############################################################
    ck.out(line)
    ck.out('Saving aggregated stats ...')

    r = ck.save_json_to_file({
        'json_file': fas,
        'dict': {
            'all': aa
        },
        'sort_keys': 'yes'
    })
    if r['return'] > 0: return r

    return {'return': 0}
def do(i):
    """Post-process 'ck-request-asplos18-caffe-intel-performance-*'
    experiment entries: normalize their meta, then recompute latency,
    throughput and usage-cost statistics in each *.flat.json point file.

    i -- CK input dict (kept for CK convention; not read directly).

    Relies on module-level global: cost (usage cost per hour, $).
    Returns {'return': 0} or a CK error dict.
    """

    # List performance entries
    r = ck.access({
        'action': 'search',
        'module_uoa': 'experiment',
        'repo_uoa': 'local',
        #                 'repo_uoa':'ck-request-asplos18-results'})
        'data_uoa': '*ck-request-asplos18-caffe-intel-performance-*'
    })
    if r['return'] > 0: return r
    lst = r['lst']

    for q in lst:
        duid = q['data_uid']
        duoa = q['data_uoa']
        ruid = q['repo_uid']
        path = q['path']

        ck.out(duoa)

        # Load entry
        r = ck.access({
            'action': 'load',
            'module_uoa': 'experiment',
            'data_uoa': duid,
            'repo_uoa': ruid
        })
        if r['return'] > 0: return r

        dd = r['dict']
        ruid = r['repo_uid']
        apath = r['path']

        # Check extra info.
        # BUG FIX: these were previously uninitialized, so an entry name
        # matching neither model raised NameError instead of reaching the
        # intended error return below.
        model = ''
        model_species = ''
        model_size = 0
        if 'inception-v3' in duoa:
            model = 'inception-v3'
            model_species = '1b339ddb13408f8f'
            model_size = 95533753
        elif 'resnet50' in duoa:
            model = 'resnet50'
            model_species = 'd777f6335496db61'
            model_size = 102462397

        if model == '':
            return {'return': 1, 'error': 'model is not recognized'}

        prec = ''
        if '-fp32' in duoa:
            prec = 'fp32'
        elif '-int8' in duoa:
            prec = 'int8'
            model_size = model_size / 4  # Guess

        if prec == '':
            return {'return': 1, 'error': 'model precision is not recognized'}

        # Updating meta if needed
        dd['meta'][
            'scenario_module_uoa'] = 'a555738be4b65860'  # module:request.asplos18

        dd['meta']['model_species'] = model_species  # model.species:mobilenets

        dd['meta'][
            'dataset_species'] = 'ImageNet'  # dataset species (free format)
        dd['meta']['dataset_size'] = 50000  # number of images ...

        dd['meta'][
            'platform_species'] = 'server'  # embedded vs server (maybe other classifications such as edge)

        # Unified full name for some deps
        ds = dd['meta']['deps_summary']

        x = ds['caffemodel']
        r = ck.access({
            'action': 'make_deps_full_name',
            'module_uoa': 'request.asplos18',
            'deps': x
        })
        if r['return'] > 0: return r
        dd['meta']['model_design_name'] = r['full_name']

        x = ds['lib-caffe']
        r = ck.access({
            'action': 'make_deps_full_name',
            'module_uoa': 'request.asplos18',
            'deps': x
        })
        if r['return'] > 0: return r
        dd['meta']['library_name'] = r['full_name']

        x = x['deps']['compiler']
        r = ck.access({
            'action': 'make_deps_full_name',
            'module_uoa': 'request.asplos18',
            'deps': x
        })
        if r['return'] > 0: return r
        dd['meta']['compiler_name'] = r['full_name']

        # GPGPU was used iff a CUDA compiler dependency was resolved.
        used_gpgpu = False
        if ds.get('lib-caffe', {}).get('deps', {}).get(
                'compiler-cuda', {}).get('data_name', '') != '':
            used_gpgpu = True

        if used_gpgpu:
            # GPU used
            dd['meta']['cpu_name'] = ''
            dd['meta']['cpu_vendor'] = ''
            dd['meta']['platform_peak_power'] = 180  #Watts
            dd['meta']['platform_price'] = 700  # $
            dd['meta']['platform_price_date'] = '20180101'  # date

        else:
            dd['meta']['gpgpu_name'] = ''
            dd['meta']['gpgpu_vendor'] = ''
            dd['meta']['platform_peak_power'] = 105  #Watts
            dd['meta']['platform_price'] = 1166  # $
            dd['meta']['platform_price_date'] = '20141212'  # date

        dd['meta']['artifact'] = 'e7cc77d72f13441e'  # artifact description

        dd['meta']['model_precision'] = prec

        dd['meta']['processed'] = 'yes'

        # Updating entry
        r = ck.access({
            'action': 'update',
            'module_uoa': 'experiment',
            'data_uoa': duid,
            'repo_uoa': ruid,
            'dict': dd,
            'substitute': 'yes',
            'ignore_update': 'yes',
            'sort_keys': 'yes'
        })
        if r['return'] > 0: return r

        # Checking points to aggregate
        os.chdir(path)
        dperf = os.listdir(path)
        for f in dperf:
            if f.endswith('.cache.json'):
                # NOTE(review): shell string built from a local file name;
                # fine for trusted repo content, but subprocess.run with a
                # list would be safer if names can contain spaces.
                os.system('git rm -f ' + f)

            elif f.endswith('.flat.json'):
                ck.out(' * ' + f)

                # Load performance file
                p1 = os.path.join(path, f)

                r = ck.load_json_file({'json_file': p1})
                if r['return'] > 0: return r
                d = r['dict']

                d['##features#processed#min'] = 'yes'

                # Drop previously computed stats; they are recomputed below.
                d1 = {}
                for k in d:
                    v = d[k]
                    if not k.startswith('##characteristics#run#inference_latency') and \
                       not k.startswith('##characteristics#run#prediction_time_avg_s') and \
                       not k.startswith('##characteristics#run#inference_throughput') and \
                       not k.startswith('##characteristics#run#usage_cost'):
                        d1[k] = v
                d = d1

                # Unify execution time + batch size
                x = d.get(
                    '##characteristics#run#REAL_ENV_CK_CAFFE_BATCH_SIZE#min',
                    '')
                if x is not None and x != '':
                    batch = int(x)
                    d['##features#batch_size#min'] = batch

                    tall = d.get('##characteristics#run#time_fw_s#all', [])

                    if batch == 1:
                        # Batch of 1: forward time IS the inference latency.
                        d['##features#measuring_latency#min'] = 'yes'

                        r = ck.access({
                            'action': 'stat_analysis',
                            'module_uoa': 'experiment',
                            'dict': d,
                            'dict1': {
                                '##characteristics#run#inference_latency': tall
                            }
                        })
                        if r['return'] > 0: return r

                    # Per-image time and per-image cost (cost is $/hour).
                    tnew = []
                    cnew = []
                    for t in tall:
                        t1 = t / batch
                        tnew.append(t1)

                        c1 = t1 * cost / (60 * 60)
                        if c1 != 0:
                            cnew.append(c1)

                    r = ck.access({
                        'action': 'stat_analysis',
                        'module_uoa': 'experiment',
                        'dict': d,
                        'dict1': {
                            '##characteristics#run#prediction_time_avg_s': tnew
                        }
                    })
                    if r['return'] > 0: return r

                    if len(cnew) > 0:
                        r = ck.access({
                            'action': 'stat_analysis',
                            'module_uoa': 'experiment',
                            'dict': d,
                            'dict1': {
                                '##characteristics#run#usage_cost': cnew
                            }
                        })
                        if r['return'] > 0: return r

                        d['##characteristics#run#usage_cost_per_hour#min'] = cost
                        d['##characteristics#run#usage_cost_date'] = '20180403'

                    # Throughput for all batches (inverse of per-image time).
                    if len(tnew) > 0:
                        tall = tnew  # from previous calculation

                        tnew = []
                        for t in tall:
                            t1 = 1 / t
                            tnew.append(t1)

                        r = ck.access({
                            'action': 'stat_analysis',
                            'module_uoa': 'experiment',
                            'dict': d,
                            'dict1': {
                                '##characteristics#run#inference_throughput':
                                tnew
                            }
                        })
                        if r['return'] > 0: return r

                d['##features#model_size#min'] = model_size

                if not used_gpgpu:
                    d['##features#cpu_freq#min'] = 2000
                    d['##features#freq#min'] = d['##features#cpu_freq#min']
                else:
                    d['##features#gpu_freq#min'] = 1600
                    d['##features#freq#min'] = d['##features#gpu_freq#min']

                # Save updated dict
                r = ck.save_json_to_file({
                    'json_file': p1,
                    'dict': d,
                    'sort_keys': 'yes'
                })
                if r['return'] > 0: return r

    return {'return': 0}
def do(i, arg):
    """Autotune CK_CAFFE_BATCH_SIZE over all installed Caffe libs and models.

    Detects the platform, resolves 'lib-caffe' and 'caffemodel' dependencies
    via the CK 'env' module, prepares a 'program' pipeline, and then runs the
    CK autotuner over batch sizes for every (lib, model) pair, recording each
    experiment into the 'local' repo under '<model_tags>-<lib_tags>'.

    Args:
        i:   CK input dict (not used directly here; kept for the CK action
             signature).
        arg: parsed command-line args; arg.tos / arg.did, when both set,
             override the detected target OS and device id.

    Returns:
        CK-style dict: {'return': 0} on success, or {'return': >0,
        'error': ...} on failure.

    NOTE(review): relies on module-level names 'ck', 'copy', 're', 'bs',
    'num_repetitions' and 'platform_tags' defined elsewhere in this file.
    """
    # Detect basic platform info.
    ii={'action':'detect',
        'module_uoa':'platform',
        'out':'out'}
    r=ck.access(ii)
    if r['return']>0: return r

    # Host and target OS params.
    hos=r['host_os_uoa']
    hosd=r['host_os_dict']

    tos=r['os_uoa']
    tosd=r['os_dict']
    tdid=r['device_id']
    # Program and command.
    program='caffe'
    cmd_key='test_gpu'

    # Load Caffe program meta and desc to check deps.
    ii={'action':'load',
        'module_uoa':'program',
        'data_uoa':program}
    rx=ck.access(ii)
    if rx['return']>0: return rx
    mm=rx['dict']

    # Get compile-time and run-time deps.
    cdeps=mm.get('compile_deps',{})
    rdeps=mm.get('run_deps',{})
    # Merge rdeps with cdeps for setting up the pipeline (which uses
    # common deps), but tag them as "for_run_time".
    for k in rdeps:
        cdeps[k]=rdeps[k]
        cdeps[k]['for_run_time']='yes'
    # Caffe libs.
    depl=copy.deepcopy(cdeps['lib-caffe'])
    if (arg.tos is not None) and (arg.did is not None):
        tos=arg.tos
        tdid=arg.did

    ii={'action':'resolve',
        'module_uoa':'env',
        'host_os':hos,
        'target_os':tos,
        'device_id':tdid,
        'out':'con',
        'deps':{'lib-caffe':copy.deepcopy(depl)}
    }
    r=ck.access(ii)
    if r['return']>0: return r

    udepl=r['deps']['lib-caffe'].get('choices',[]) # All UOAs of env for Caffe libs.
    if len(udepl)==0:
        return {'return':1, 'error':'no installed Caffe libs'}

    # Caffe models.
    depm=copy.deepcopy(cdeps['caffemodel'])

    ii={'action':'resolve',
        'module_uoa':'env',
        'host_os':hos,
        'target_os':tos,
        'device_id':tdid,
        'out':'con',
        'deps':{'caffemodel':copy.deepcopy(depm)}
    }
    r=ck.access(ii)
    if r['return']>0: return r

    udepm=r['deps']['caffemodel'].get('choices',[]) # All UOAs of env for Caffe models.
    if len(udepm)==0:
        return {'return':1, 'error':'no installed Caffe models'}

    # Prepare pipeline.
    cdeps['lib-caffe']['uoa']=udepl[0]
    cdeps['caffemodel']['uoa']=udepm[0]

    ii={'action':'pipeline',
        'prepare':'yes',
        'dependencies':cdeps,

        'module_uoa':'program',
        'data_uoa':program,
        'cmd_key':cmd_key,

        'target_os':tos,
        'device_id':tdid,

        'no_state_check':'yes',
        'no_compiler_description':'yes',
        'skip_calibration':'yes',

        'env':{
          'CK_CAFFE_SKIP_BACKWARD':1,
          'OPENBLAS_NUM_THREADS':4
        },

        'cpu_freq':'max',
        'gpu_freq':'max',

        'flags':'-O3',
        'speed':'no',
        'energy':'no',

        'skip_print_timers':'yes',
        'out':'con'
    }

    r=ck.access(ii)
    if r['return']>0: return r

    fail=r.get('fail','')
    if fail=='yes':
        return {'return':10, 'error':'pipeline failed ('+r.get('fail_reason','')+')'}

    ready=r.get('ready','')
    if ready!='yes':
        return {'return':11, 'error':'pipeline not ready'}

    state=r['state']
    tmp_dir=state['tmp_dir']

    # Remember resolved deps for this benchmarking session.
    xcdeps=r.get('dependencies',{})

    # Clean pipeline.
    if 'ready' in r: del(r['ready'])
    if 'fail' in r: del(r['fail'])
    if 'return' in r: del(r['return'])

    pipeline=copy.deepcopy(r)

    # For each Caffe lib.*******************************************************
    for lib_uoa in udepl:
        # Load Caffe lib.
        ii={'action':'load',
            'module_uoa':'env',
            'data_uoa':lib_uoa}
        r=ck.access(ii)
        if r['return']>0: return r
        # Get the tags from e.g. 'BVLC Caffe framework (libdnn,viennacl)'
        lib_name=r['data_name']
        # Raw string: '\(' in a non-raw literal is an invalid escape sequence.
        lib_tags=re.match(r'BVLC Caffe framework \((?P<tags>.*)\)', lib_name)
        lib_tags=lib_tags.group('tags').replace(' ', '').replace(',', '-')
        # Skip some libs with "in [..]" or "not in [..]".

#        if lib_tags in ['opencl-clblast-tune']: continue

        if lib_tags not in ['opencl-libdnn-clblast-tune']: continue

#        if lib_tags not in ['opencl-clblast-tune', 'opencl-clblast']: continue
#        if lib_tags not in ['opencl-libdnn-clblast-tune', 'opencl-libdnn-clblast']: continue
#        if lib_tags not in ['opencl-libdnn-viennacl', 'opencl-viennacl']: continue

        skip_compile='no'

        # Switch to the fp16 GPU timing command for the actual experiments.
        cmd_key='time_gpu_fp16'

        # For each Caffe model.*************************************************
        for model_uoa in udepm:
            # Load Caffe model.
            ii={'action':'load',
                'module_uoa':'env',
                'data_uoa':model_uoa}
            r=ck.access(ii)
            if r['return']>0: return r
            # Get the tags from e.g. 'Caffe model (net and weights) (deepscale, squeezenet, 1.1)'
            model_name=r['data_name']
            model_tags = re.match(r'Caffe model \(net and weights\) \((?P<tags>.*)\)', model_name)
            model_tags = model_tags.group('tags').replace(' ', '').replace(',', '-')
            # Skip some models with "in [..]" or "not in [..]".
            if model_tags in []: continue

            record_repo='local'
            record_uoa=model_tags+'-'+lib_tags

            # Prepare pipeline.
            ck.out('---------------------------------------------------------------------------------------')
            ck.out('%s - %s' % (lib_name, lib_uoa))
            ck.out('%s - %s' % (model_name, model_uoa))
            ck.out('Experiment - %s:%s' % (record_repo, record_uoa))

            # Prepare autotuning input.
            cpipeline=copy.deepcopy(pipeline)

            # Reset deps and change UOA.
            new_deps={'lib-caffe':copy.deepcopy(depl),
                      'caffemodel':copy.deepcopy(depm)}

            new_deps['lib-caffe']['uoa']=lib_uoa
            new_deps['caffemodel']['uoa']=model_uoa

            jj={'action':'resolve',
                'module_uoa':'env',
                'host_os':hos,
                'target_os':tos,
                'device_id':tdid,
                'deps':new_deps}
            r=ck.access(jj)
            if r['return']>0: return r

            cpipeline['dependencies'].update(new_deps)

            cpipeline['no_clean']=skip_compile
            cpipeline['no_compile']=skip_compile

            cpipeline['cmd_key']=cmd_key

            ii={'action':'autotune',

                'module_uoa':'pipeline',
                'data_uoa':'program',

                'choices_order':[
                    [
                        '##choices#env#CK_CAFFE_BATCH_SIZE'
                    ]
                ],
                'choices_selection':[
                    {'type':'loop', 'start':bs['start'], 'stop':bs['stop'], 'step':bs['step'], 'default':bs['default']}
                ],

                'features_keys_to_process':['##choices#*'],

                'iterations':-1,
                'repetitions':num_repetitions,

                'record':'yes',
                'record_failed':'yes',
                'record_params':{
                    'search_point_by_features':'yes'
                },
                'record_repo':record_repo,
                'record_uoa':record_uoa,

                'tags':[ 'explore-batch-size-libs-models', cmd_key, model_tags, lib_tags, platform_tags ],

                'pipeline':cpipeline,
                'out':'con'}

            r=ck.access(ii)
            if r['return']>0: return r

            fail=r.get('fail','')
            if fail=='yes':
                return {'return':10, 'error':'pipeline failed ('+r.get('fail_reason','')+')'}

            # Compile only once per library; reuse the binary for other models.
            skip_compile='yes'

    return {'return':0}
Exemple #27
0
def publish(i):
    """Publish CK components (found by CID/wildcards/tags) to CodeReef.

    Input:  {
              cid [str] - CK CID of format (repo UOA:)module UOA:data UOA
                          (can use wildcards)
              (tags) [str] - search multiple CK components by these tags separated by comma
              (version) [str] - assign version (required; error if empty)
              (username) [str] - CodeReef username (overrides config)
              (api_key) [str] - CodeReef API key (overrides config)
              (author) [str] - author name
              (author_codereef_id) [str] - author CodeReef ID
              (copyright) [str] - copyright string (fallback if component meta has none)
              (license) [str] - license string (fallback if component meta has none)
              (source) [str] - source string (fallback if component meta has none)
              (quiet) [bool] - if True, silently skip problematic components
              (force) [bool] - if True, publish even problematic/local components
              (private) [bool] - if True, mark published components as private
              (workspaces) [str] - comma-separated list of workspaces
            }

    Output: {
              return  [int]    - return code = 0 if success or >0 if error
              (error) [str]    - error string if return>0
            }

    NOTE(review): the function chdir's into each component path to query Git
    info and only restores the original directory at the very end; an early
    'return r' after the first processed component leaves the process in a
    changed working directory — confirm whether callers depend on cwd.
    """

    # Get current directory (since will be changing it to get info about Git repo)
    cur_dir = os.getcwd()

    # Get current configuration
    r = config.load({})
    if r['return'] > 0: return r
    cfg = r['dict']

    # Check commands
    # Username ##########################################################
    username = cfg.get('username', '')
    if i.get('username') is not None: username = i['username']

    if username == '' or username is None:
        return {'return': 1, 'error': 'Username is not defined'}

    cfg['username'] = username

    # API key ###########################################################
    api_key = cfg.get('api_key', '')

    if i.get('api_key') is not None: api_key = i['api_key']

    if api_key == '' or api_key is None:
        return {'return': 1, 'error': 'API key is not defined'}

    cfg['api_key'] = api_key

    # CID ###########################################################
    cid = i.get('cid')

    if cid == '' or cid is None:
        return {'return': 1, 'error': 'CK entry (CID) is not defined'}

    tags = i.get('tags', '')

    # Check if no module and use "cr-solution" by default
    if cid.find(':') < 0:
        cid = 'cr-solution:' + cid

    # Version ###########################################################
    version = i.get('version')
    if version == '' or version is None:
        return {'return': 1, 'error': 'Version is not defined'}

    # Extra info about authors
    author = i.get('author', '')
    if author is None: author = ''

    author_codereef_id = i.get('author_codereef_id', '')
    if author_codereef_id is None: author_codereef_id = ''

    copyright = i.get('copyright', '')
    if copyright is None: copyright = ''

    license = i.get('license', '')
    if license is None: license = ''

    source = i.get('source', '')
    if source is None: source = ''

    quiet = i.get('quiet', False)
    force = i.get('force', False)

    # List CK components
    r = ck.access({
        'action': 'search',
        'cid': cid,
        'tags': tags,
        'add_info': 'yes',
        'add_meta': 'yes',
        'common_func': 'yes'
    })
    if r['return'] > 0: return r

    lst = r['lst']
    llst = len(lst)

    if llst == 0:
        ck.out('No CK objects found')

    num = 0

    # Sort lst by modules and then data
    lst1 = sorted(lst,
                  key=lambda x: (x.get('repo_uoa', ''), x.get(
                      'module_uoa', ''), x.get('data_uoa', '')))

    for obj in lst1:
        num += 1

        # Basic info about CK object
        repo_uoa = obj['repo_uoa']
        repo_uid = obj['repo_uid']

        module_uoa = obj['module_uoa']
        module_uid = obj['module_uid']

        data_uoa = obj['data_uoa']
        data_uid = obj['data_uid']

        # Print info
        ck.out(
            str(num) + ' out of ' + str(llst) + ') ' + repo_uoa + ':' +
            module_uoa + ':' + data_uoa)

        # Check name and date
        data_name = obj.get('info', {}).get('data_name', '')
        if data_name == data_uoa: data_name = ''

        data_meta = obj['meta']
        if data_name == '':
            if data_meta.get('misc', {}).get('title', '') != '':
                data_name = data_meta['misc']['title']

        data_date = ''
        if data_meta.get('misc', {}).get('date', '') != '':
            data_date = data_meta['misc']['date']

        # Component meta wins over the function-level fallbacks
        source2 = data_meta.get('source', '')
        if source2 == '': source2 = source

        license2 = data_meta.get('license', '')
        if license2 == '': license2 = license

        copyright2 = data_meta.get('copyright', '')
        if copyright2 == '': copyright2 = copyright

        # Specialize per specific modules
        not_digital_component = False
        extra_dict = {}
        extra_tags = {}

        if module_uoa == 'module':
            extra_dict['last_module_actions'] = []
            actions = data_meta.get('actions', {})
            for a in actions:
                extra_dict['last_module_actions'].append(a + ' ' + data_uoa)

        elif module_uoa == 'cr-lib':
            not_digital_component = True
            extra_tags = ['codereef-library']

            if 'reproduced-papers' in data_meta.get('tags', []):
                extra_tags.append('reproduced-papers')

            data_meta2 = data_meta.get('meta', {})

            if data_name == '':
                data_name = data_meta2.get('title', '')

            all_authors = data_meta2.get('authors', '')
            if all_authors != '':
                extra_dict['all_authors'] = []
                for aa in all_authors.split(','):
                    if aa != '': aa = aa.strip()
                    if aa != '':
                        extra_dict['all_authors'].append(aa)

            for k in [
                    'badge_acm_artifact_available',
                    'badge_acm_artifact_functional',
                    'badge_acm_artifact_reusable',
                    'badge_acm_results_replicated',
                    'badge_acm_results_reproduced'
            ]:
                if data_meta2.get(k, '') == 'yes':
                    extra_tags.append(k)

        elif module_uoa == 'cr-event' or module_uoa == 'repo':
            not_digital_component = True

        # Get info of the first creation
        first_creation = obj['info'].get('control', {})

        # Load info about repo
        repo_dict = {}

        if not force and repo_uoa == 'local' and module_uoa != 'repo':  # Normally skip everything from local unless we publish repos themselves
            ck.out('     SKIPPED')
            continue

        if module_uoa == 'repo':
            if not force and data_uoa == 'local':
                ck.out('     SKIPPED')
                continue

            repo_dict = obj['meta']

        elif repo_uoa != 'default' and repo_uoa != 'local':
            r = ck.access({
                'action': 'load',
                'repo_uoa': config.CK_CFG_REPO_UOA,
                'module_uoa': config.CK_CFG_MODULE_REPO_UOA,
                'data_uoa': repo_uid,
                'common_func': 'yes'
            })
            if r['return'] > 0: return r
            repo_dict = r['dict']
            # Local path is machine-specific; do not publish it
            if 'path' in repo_dict:
                del (repo_dict['path'])

        # Generate temp file to pack
        r = ck.gen_tmp_file({'prefix': 'cr-obj-', 'suffix': '.zip'})
        if r['return'] > 0: return r

        fn = r['file_name']

        # Pack component
        p = obj['path']

        zip_method = zipfile.ZIP_DEFLATED

        ii = {'path': p, 'all': 'yes'}

        # Prune files for cr-solution
        if module_uoa == 'cr-solution':
            ii['ignore_names'] = ['CK', 'venv']

        r = ck.list_all_files(ii)
        if r['return'] > 0: return r

        fl = r['list']

        # Write archive
        try:
            f = open(fn, 'wb')
            z = zipfile.ZipFile(f, 'w', zip_method)
            for fx in fl:
                add = True
                for k in skip_words_in_files:
                    if k in fx:
                        add = False
                        break

                if add:
                    p1 = os.path.join(p, fx)
                    z.write(p1, fx, zip_method)
            z.close()
            f.close()

        except Exception as e:
            return {
                'return': 1,
                'error': 'failed to prepare archive (' + format(e) + ')'
            }

        # Check size
        statinfo = os.stat(fn)
        pack_size = statinfo.st_size

        # Check problems with repository or components
        x = ''
        if repo_dict.get('remote', '') == 'yes':
            x += 'remote repo;'
        if repo_dict.get('private', '') == 'yes':
            x += 'private repo;'
        if repo_dict.get('url', '') == '' and repo_uoa != 'default':
            x += 'repo not shared;'
        if pack_size > config.PACK_SIZE_WARNING:
            x += 'pack size (' + str(pack_size) + ') > ' + str(
                config.PACK_SIZE_WARNING) + ';'

        skip_component = False
        if not force and x != '':
            if quiet:
                skip_component = True
            else:
                r = ck.inp({
                    'text':
                    '  This component has potential issues (' + x +
                    '). Skip processing (Y/n)? '
                })
                if r['return'] > 0: return r
                s = r['string'].strip()
                if s == '' or s == 'Y' or s == 'y':
                    skip_component = True

        if skip_component:
            ck.out('    SKIPPED (' + x + ')')

            if os.path.isfile(fn):
                os.remove(fn)

            continue

        # Convert to MIME to send over internet
        r = ck.convert_file_to_upload_string({'filename': fn})
        if r['return'] > 0: return r

        pack64 = r['file_content_base64']

        if os.path.isfile(fn):
            os.remove(fn)

        # Check workspaces
        lworkspaces = []
        workspaces = i.get('workspaces', '')
        if workspaces is not None:
            lworkspaces = workspaces.strip().split(',')

        # Get extra info about repo
        os.chdir(p)

        repo_info = {
            'publish_repo_uoa': repo_uoa,
            'publish_repo_uid': repo_uid
        }

        # Get current Git URL
        r = ck.run_and_get_stdout(
            {'cmd': ['git', 'config', '--get', 'remote.origin.url']})
        if r['return'] == 0 and r['return_code'] == 0:
            x = r['stdout'].strip()
            if x != '': repo_info['remote_git_url'] = x

        # Get current Git branch
        r = ck.run_and_get_stdout(
            {'cmd': ['git', 'rev-parse', '--abbrev-ref', 'HEAD']})
        if r['return'] == 0 and r['return_code'] == 0:
            x = r['stdout'].strip()
            if x != '': repo_info['remote_git_branch'] = x

        # Get current Git checkout
        r = ck.run_and_get_stdout(
            {'cmd': ['git', 'rev-parse', '--short', 'HEAD']})
        if r['return'] == 0 and r['return_code'] == 0:
            x = r['stdout'].strip()
            if x != '': repo_info['remote_git_checkout'] = x

        repo_info['dict'] = repo_dict

        #TBD: owner, version, info about repo
        # Sending request
        r = comm.send({
            'config': cfg,
            'action': 'publish',
            'ownership': {
                'private': i.get('private', False),
                'workspaces': lworkspaces
            },
            'dict': {
                'publish_module_uoa': module_uoa,
                'publish_module_uid': module_uid,
                'publish_data_uoa': data_uoa,
                'publish_data_uid': data_uid,
                'publish_data_name': data_name,
                'publish_data_date': data_date,
                'publish_pack': pack64,
                'publish_pack_size': pack_size,
                'repo_info': repo_info,
                'first_creation': first_creation,
                'version': version,
                'author': author,
                'author_codereef_id': author_codereef_id,
                'copyright': copyright2,
                'license': license2,
                'source': source2,
                'not_digital_component': not_digital_component,
                'extra_dict': extra_dict,
                'extra_tags': extra_tags,
            }
        })
        # A failed upload only warns; remaining components are still processed
        if r['return'] > 0:
            ck.out('    WARNING: CodeReef API returned error: ' + r['error'])
        else:
            data_uid = r['data_uid']
            ck.out('    CodeReef component ID: ' + data_uid)

    os.chdir(cur_dir)

    return {'return': 0}
          dd['##characteristics#gpu_copy_is_much_better_cpu#min']=False

       if (fcpu/fgpu_only)>1.07:
          dd['##characteristics#gpu_only_is_much_better_cpu#min']=True
       else:
          dd['##characteristics#gpu_only_is_much_better_cpu#min']=False

    return {'return':0, 'changed':changed, 'dict':dd}

########################################################
# Script entry: register 'filter_data' (defined earlier in this file) as a
# CK experiment filter and apply it to experiment entries.
ff=getattr(sys.modules[__name__], 'filter_data')

ii={'action':'filter',
    'module_uoa':'experiment',
    'out':'con',
    'filter_func_addr':ff}

# NOTE(review): a '.py' filename is passed to ck.load_json_file - presumably
# that file contains JSON input for the filter despite its extension; verify.
r=ck.load_json_file({'json_file':'filter-add-characteristic-gpu-copy-is-much-better-cpu.py'})
if r['return']>0:
   ck.out('Error:'+r['error'])
   # Use sys.exit: the bare 'exit' builtin is injected by 'site' and is not
   # guaranteed to exist in all interpreter configurations (e.g. python -S).
   sys.exit(1)

ii.update(r['dict'])

r=ck.access(ii)
if r['return']>0:
   ck.out('Error:'+r['error'])
   sys.exit(1)

sys.exit(0)
Exemple #29
0
def do(i, arg):
    """Run one-shot Caffe accuracy experiments over libs/models/LMDB datasets.

    Detects the platform, resolves 'lib-caffe', 'caffemodel' and
    'dataset-imagenet-lmdb' dependencies via the CK 'env' module, prepares a
    'program' pipeline, and then runs a single autotuner iteration per
    compatible (lib, model, LMDB) triple, recording results into the 'local'
    repo. Only the 'cudnn' library and a fixed model whitelist are processed.

    Args:
        i:   CK input dict (not used directly here; kept for the CK action
             signature).
        arg: parsed command-line args; arg.tos / arg.did, when both set,
             override the detected target OS and device id.

    Returns:
        CK-style dict: {'return': 0} on success, or {'return': >0,
        'error': ...} on failure.

    NOTE(review): relies on module-level names 'ck', 'copy', 're' and
    'platform_tags' defined elsewhere in this file.
    """
    # Detect basic platform info.
    ii={'action':'detect',
        'module_uoa':'platform',
        'out':'out'}
    r=ck.access(ii)
    if r['return']>0: return r

    # Host and target OS params.
    hos=r['host_os_uoa']
    hosd=r['host_os_dict']

    tos=r['os_uoa']
    tosd=r['os_dict']
    tdid=r['device_id']
    # Program and command.
    program='caffe'
    cmd_key='test_gpu'

    # Load Caffe program meta and desc to check deps.
    ii={'action':'load',
        'module_uoa':'program',
        'data_uoa':program}
    rx=ck.access(ii)
    if rx['return']>0: return rx
    mm=rx['dict']

    # Get compile-time and run-time deps.
    cdeps=mm.get('compile_deps',{})
    rdeps=mm.get('run_deps',{})
    # Merge rdeps with cdeps for setting up the pipeline (which uses
    # common deps), but tag them as "for_run_time".
    for k in rdeps:
        cdeps[k]=rdeps[k]
        cdeps[k]['for_run_time']='yes'
    # Caffe libs.
    depl=copy.deepcopy(cdeps['lib-caffe'])
    if (arg.tos is not None) and (arg.did is not None):
        tos=arg.tos
        tdid=arg.did

    ii={'action':'resolve',
        'module_uoa':'env',
        'host_os':hos,
        'target_os':tos,
        'device_id':tdid,
        'out':'con',
        'deps':{'lib-caffe':copy.deepcopy(depl)}
    }
    r=ck.access(ii)
    if r['return']>0: return r

    udepl=r['deps']['lib-caffe'].get('choices',[]) # All UOAs of env for Caffe libs.
    if len(udepl)==0:
        return {'return':1, 'error':'no installed Caffe libs'}

    # Caffe models.
    depm=copy.deepcopy(cdeps['caffemodel'])

    ii={'action':'resolve',
        'module_uoa':'env',
        'host_os':hos,
        'target_os':tos,
        'device_id':tdid,
        'out':'con',
        'deps':{'caffemodel':copy.deepcopy(depm)}
    }
    r=ck.access(ii)
    if r['return']>0: return r

    udepm=r['deps']['caffemodel'].get('choices',[]) # All UOAs of env for Caffe models.
    if len(udepm)==0:
        return {'return':1, 'error':'no installed Caffe models'}

    # ImageNet LMDBs.
    depimg=copy.deepcopy(cdeps['dataset-imagenet-lmdb'])
    ii={'action':'resolve',
        'module_uoa':'env',
        'host_os':hos,
        'target_os':tos,
        'device_id':tdid,
        'out':'con',
        'deps':{'dataset-imagenet-lmdb':copy.deepcopy(depimg)}
    }
    r=ck.access(ii)
    if r['return']>0: return r
    c = r['deps']
    udepi=r['deps']['dataset-imagenet-lmdb'].get('choices',[]) # All UOAs of env for ImageNet LMDBs.
    if len(udepi)==0:
       return {'return':1, 'error':'no installed ImageNet LMDB'}

    # Prepare pipeline.
    cdeps['lib-caffe']['uoa']=udepl[0]
    cdeps['caffemodel']['uoa']=udepm[0]
    cdeps['dataset-imagenet-lmdb']['uoa']=udepi[0]


    ii={'action':'pipeline',
        'prepare':'yes',
        'dependencies':cdeps,

        'module_uoa':'program',
        'data_uoa':program,
        'cmd_key':cmd_key,

        'target_os':tos,
        'device_id':tdid,

        'no_state_check':'yes',
        'no_compiler_description':'yes',
        'skip_calibration':'yes',

        'env':{
          'CK_CAFFE_BATCH_SIZE':2,
          'CK_CAFFE_ITERATIONS':1,
          'OPENBLAS_NUM_THREADS':4
        },

        'cpu_freq':'max',
        'gpu_freq':'max',

        'flags':'-O3',
        'speed':'no',
        'energy':'no',

        'skip_print_timers':'yes',
        'out':'con'
    }

    r=ck.access(ii)
    if r['return']>0: return r

    fail=r.get('fail','')
    if fail=='yes':
        return {'return':10, 'error':'pipeline failed ('+r.get('fail_reason','')+')'}

    ready=r.get('ready','')
    if ready!='yes':
        return {'return':11, 'error':'pipeline not ready'}

    state=r['state']
    tmp_dir=state['tmp_dir']

    # Remember resolved deps for this benchmarking session.
    xcdeps=r.get('dependencies',{})

    # Clean pipeline.
    if 'ready' in r: del(r['ready'])
    if 'fail' in r: del(r['fail'])
    if 'return' in r: del(r['return'])

    pipeline=copy.deepcopy(r)

    # For each Caffe lib.*******************************************************
    for lib_uoa in udepl:
        # Load Caffe lib.
        ii={'action':'load',
            'module_uoa':'env',
            'data_uoa':lib_uoa}
        r=ck.access(ii)
        if r['return']>0: return r
        # Get the tags from e.g. 'BVLC Caffe framework (libdnn,viennacl)'
        lib_name=r['data_name']
        # Raw string: '\(' in a non-raw literal is an invalid escape sequence.
        lib_tags=re.match(r'BVLC Caffe framework \((?P<tags>.*)\)', lib_name)
        lib_tags=lib_tags.group('tags').replace(' ', '').replace(',', '-')
        # Skip some libs with "in [..]" or "not in [..]".
        if lib_tags not in ['cudnn']: continue

        skip_compile='no'

        # For each Caffe model.*************************************************
        for model_uoa in udepm:
            # Load Caffe model.
            ii={'action':'load',
                'module_uoa':'env',
                'data_uoa':model_uoa}
            r=ck.access(ii)
            if r['return']>0: return r
            # Get the tags from e.g. 'Caffe model (net and weights) (deepscale, squeezenet, 1.1)'
            model_name=r['data_name']
            model_tags=re.match(r'Caffe model \(net and weights\) \((?P<tags>.*)\)', model_name)
            model_tags=model_tags.group('tags').replace(' ', '').replace(',', '-')
            # Skip some models with "in [..]" or "not in [..]".
            if model_tags not in ['bvlc-alexnet', 'bvlc-googlenet', 'deepscale-squeezenet-1.1']: continue

            # For each ImageNet LMDB.*******************************************
            # Pick the first LMDB whose tags contain this model's tag string.
            for img_uoa in udepi:
                ii={'action':'load',
                'module_uoa':'env',
                'data_uoa':img_uoa}
                r=ck.access(ii)
                if r['return']>0: return r
                img_name=r['data_name']
                img_tags=r.get('dict',{}).get('tags',[])
                if model_tags in img_tags: break
            # Skip models having no compatible LMDBs.
            if model_tags not in img_tags: continue
            resize_tags = [ tag for tag in img_tags if tag.find('resize-')!=-1 ]
            resize_tag = resize_tags[0] if resize_tags else 'resize-unknown'
            img_tags = 'imagenet-val-lmdb-'+resize_tag

            record_repo='local'
            record_uoa=model_tags+'-'+lib_tags+'-'+img_tags
            # Prepare pipeline.
            ck.out('---------------------------------------------------------------------------------------')
            ck.out('%s - %s' % (lib_name, lib_uoa))
            ck.out('%s - %s' % (model_name, model_uoa))
            ck.out('%s - %s' % (img_name, img_uoa))
            ck.out('Experiment - %s:%s' % (record_repo, record_uoa))

            # Prepare autotuning input.
            cpipeline=copy.deepcopy(pipeline)

            # Reset deps and change UOA.
            new_deps={'lib-caffe':copy.deepcopy(depl),
                      'caffemodel':copy.deepcopy(depm),
                      'dataset-imagenet-lmdb':copy.deepcopy(depimg)}

### maybe here    exit(1)
            new_deps['lib-caffe']['uoa']=lib_uoa
            new_deps['caffemodel']['uoa']=model_uoa
            new_deps['dataset-imagenet-lmdb']['uoa']=img_uoa
            jj={'action':'resolve',
                'module_uoa':'env',
                'host_os':hos,
                'target_os':tos,
                'device_id':tdid,
                'deps':new_deps}
            r=ck.access(jj)
            if r['return']>0: return r

            cpipeline['dependencies'].update(new_deps)

            cpipeline['no_clean']=skip_compile
            cpipeline['no_compile']=skip_compile

            cpipeline['cmd_key']=cmd_key

            ii={'action':'autotune',

                'module_uoa':'pipeline',
                'data_uoa':'program',

                'iterations':1,
                'repetitions':1,

                'record':'yes',
                'record_failed':'yes',
                'record_params':{
                    'search_point_by_features':'yes'
                },
                'record_repo':record_repo,
                'record_uoa':record_uoa,

                'tags':['test-accuracy', cmd_key, model_tags, lib_tags, img_tags, platform_tags ],

                'pipeline':cpipeline,
                'out':'con'}

            r=ck.access(ii)
            if r['return']>0: return r

            fail=r.get('fail','')
            if fail=='yes':
                return {'return':10, 'error':'pipeline failed ('+r.get('fail_reason','')+')'}

            # Compile only once per library; reuse the binary for other models.
            skip_compile='yes'

    return {'return':0}
Exemple #30
0
def do(i, arg):
    """Autotune CLBlast matrix sizes for every installed CLBlast lib and
    every model in the built-in dataset (AlexNet, GoogleNet, SqueezeNet 1.1).

    i   - CK input dictionary (unused here; kept for CK API compatibility).
    arg - parsed command-line arguments; uses arg.fp (optional triples file,
          loading not implemented yet) and arg.tos / arg.did (optional
          target-OS / device-id override).

    Returns a CK-style dictionary: {'return': 0} on success, or
    {'return': >0, 'error': ...} on failure.
    """
    # Optional input file with triples (loading is not implemented yet).
    if arg.fp is not None:
        fin = arg.fp
        if os.path.isfile(fin):
            print("File loading %s " % (fin))
            # TODO: load file and triples.
        else:
            print("File %s not found " % (fin))

    # Load per-model matrix size descriptions (M/N/K lists) from JSON.
    alex_net = loadMatrixFromJson(alex_net_json)
    google_net = loadMatrixFromJson(google_net_json)
    squeeze11_net = loadMatrixFromJson(squeeze11_net_json)

    dataset = [
        {'Model_name': 'AlexNet', 'Batch_size': 1, 'matrix': alex_net},
        {'Model_name': 'GoogleNet', 'Batch_size': 1, 'matrix': google_net},
        {'Model_name': 'SqueezeNet1.1', 'Batch_size': 1, 'matrix': squeeze11_net},
    ]

    if VERBOSE or DEBUG:
        print('[Experiment] %s' % title)
        print('[Preparing pipeline] Clock resolution: %d' % clock_resolution)
        print('[Preparing pipeline] Precision: %d' % precision)
        print('[Preparing pipeline] Run for configuration: %d' % run)
        print('[Preparing pipeline] More parms... ')

    # Detect basic platform info.
    ii = {'action': 'detect', 'module_uoa': 'platform', 'con': 'con'}
    r = ck.access(ii)
    if DEBUG: print("%s %s" % (DEBUG_STR, r))
    if r['return'] > 0: return r

    # Host and target OS params.
    hos = r['host_os_uoa']
    hosd = r['host_os_dict']
    tos = r['os_uoa']
    tosd = r['os_dict']
    tdid = r['device_id']

    if DEBUG: print("%s %s %s" % (DEBUG_STR, hos, hosd))
    if DEBUG: print("%s %s %s %s" % (DEBUG_STR, tos, tosd, tdid))

    # Load CLBLAST program meta and desc to check deps.
    ii = {'action': 'load', 'module_uoa': 'program', 'data_uoa': 'clblast-tune'}
    rx = ck.access(ii)
    if DEBUG: print("%s %s " % (DEBUG_STR, rx))
    if rx['return'] > 0: return rx
    meta = rx['dict']

    # Get compile-time and run-time deps.
    cdeps = meta.get('compile_deps', {})
    rdeps = meta.get('run_deps', {})

    # Merge rdeps with cdeps for setting up the pipeline (which uses
    # common deps), but tag them as "for_run_time".
    for k in rdeps:
        cdeps[k] = rdeps[k]
        cdeps[k]['for_run_time'] = 'yes'

    # CLBlast libs.
    depl = copy.deepcopy(cdeps['lib-clblast'])

    # Optional target-OS / device override (e.g. for a remote device).
    if (arg.tos is not None) and (arg.did is not None):
        tos = arg.tos
        tdid = arg.did

    ii = {
        'action': 'resolve',
        'module_uoa': 'env',
        'host_os': hos,
        'target_os': tos,
        'device_id': tdid,
        'out': 'con',
        'deps': {'lib-clblast': copy.deepcopy(depl)}
    }
    r = ck.access(ii)
    if r['return'] > 0: return r
    udepl = r['deps']['lib-clblast'].get('choices', [])
    if len(udepl) == 0:
        return {'return': 1, 'error': 'no installed CLBlast libs'}

    cdeps['lib-clblast']['uoa'] = udepl[0]

    # Prepare the common experiment pipeline once; it is deep-copied per run.
    ii = {
        'action': 'pipeline',
        'module_uoa': 'program',
        'data_uoa': 'clblast-tune',
        'prepare': 'yes',
        'dependencies': cdeps,
        'no_compiler_description': 'yes',
        'cmd_key': kernel[0],
        "target_os": tos,
        "device_id": tdid,
        "out": 'con',
        "no_state_check": "yes",
        'flags': '-O3',
        'cpu_freq': 'max',
        'gpu_freq': 'max',
    }
    r = ck.access(ii)
    if r['return'] > 0: return r
    fail = r.get('fail', '')
    if fail == 'yes':
        return {'return': 10,
                'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'}

    ready = r.get('ready', '')
    if ready != 'yes': return {'return': 11, 'error': 'pipeline not ready'}

    state = r['state']
    tmp_dir = state['tmp_dir']
    xcdeps = r.get('dependencies', {})

    # Clean pipeline before reusing it as a template.
    if 'ready' in r: del (r['ready'])
    if 'fail' in r: del (r['fail'])
    if 'return' in r: del (r['return'])
    pipeline = copy.deepcopy(r)

    # NOTE: converted Python 2 'print x' statements to print() calls so this
    # runs under Python 3 like the rest of the file (bug fix).
    print(udepl)
    # For each CLBlast lib ***********************************
    for lib_uoa in udepl:
        print(lib_uoa)
        ii = {'action': 'load', 'module_uoa': 'env', 'data_uoa': lib_uoa}
        r = ck.access(ii)
        if r['return'] > 0: return r
        lib_name = r['data_name']

        # Build a tag string from the package UOA, skipping generic parts.
        lib_tags = r['dict']['customize']['used_package_uoa'].split('-')
        tags = ''
        skip_tags = ['lib', 'master', 'universal']
        for t in lib_tags:
            if t not in skip_tags:
                tags += t + '-'

        # Append the library version.
        lib_tags = r['dict']['setup']['version']
        tags += lib_tags

        # For each model in dataset.
        for model in dataset:
            record_repo = 'local'
            record_uoa = 'explore-matrix-size-' + tags + '-' + model['Model_name']
            ck.out('---------------------------------------------------------------------------------------')
            ck.out('Experiment - %s:%s' % (record_repo, record_uoa))

            cpipeline = copy.deepcopy(pipeline)

            # Autotune over the model's M/N/K matrix-size lists in lockstep
            # ('loop-with-next' iterates the three lists together).
            ii = {
                'action': 'autotune',
                'module_uoa': 'pipeline',
                'data_uoa': 'program',
                'choices_order': [
                    ['##env#CK_CLBLAST_MSIZE'],
                    ['##env#CK_CLBLAST_NSIZE'],
                    ['##env#CK_CLBLAST_KSIZE']
                ],
                'choices_selection': [
                    {"type": "loop-with-next", "choice": model['matrix']['M'], "default": "256"},
                    {"type": "loop-with-next", "choice": model['matrix']['N'], "default": "256"},
                    {"type": "loop-with-next", "choice": model['matrix']['K'], "default": "256"}
                ],
                'features_keys_to_process': ['##choices#*'],
                'iterations': -1,
                'repetitions': 3,
                'record': 'yes',
                'record_failed': 'yes',
                'record_params': {'search_point_by_features': 'yes'},
                'record_repo': record_repo,
                'record_uoa': record_uoa,
                'tags': ['explore-clblast-matrix-size-client', tags, model['Model_name']],
                'pipeline': cpipeline,
                'out': 'con'
            }
            r = ck.access(ii)
            if DEBUG > 0: print("%s %s" % (DEBUG_STR, r))
            if r['return'] > 0: return r
            fail = r.get('fail', '')
            if fail == 'yes':
                return {'return': 10,
                        'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'}

    return {'return': 0}
Exemple #31
0
def do(i):
    """Benchmark 'tensorrt-time' for every installed TensorRT engine and
    selected Caffe models, autotuning over fp16 mode and batch size.

    Relies on module-level configuration: fp and bs (loop specs with
    'start'/'stop'/'step'/'default' for CK_TENSORRT_ENABLE_FP16 and
    CK_CAFFE_BATCH_SIZE), num_repetitions and platform_tags.

    i - CK input dictionary (unused here; kept for CK API compatibility).

    Returns a CK-style dictionary: {'return': 0} on success, or
    {'return': >0, 'error': ...} on failure.
    """
    # Detect basic platform info.
    ii = {'action': 'detect', 'module_uoa': 'platform', 'out': 'out'}
    r = ck.access(ii)
    if r['return'] > 0: return r

    # Host and target OS params.
    hos = r['host_os_uoa']
    hosd = r['host_os_dict']

    tos = r['os_uoa']
    tosd = r['os_dict']
    tdid = r['device_id']

    # Fix cmd key here since it may be used to get extra run-time deps.
    cmd_key = 'default'

    # Load TensorRT-time program meta and desc to check deps.
    ii = {'action': 'load', 'module_uoa': 'program', 'data_uoa': 'tensorrt-time'}
    rx = ck.access(ii)
    if rx['return'] > 0: return rx
    mm = rx['dict']

    # Get compile-time and run-time deps.
    cdeps = mm.get('compile_deps', {})
    rdeps = mm.get('run_deps', {})

    # Merge rdeps with cdeps for setting up the pipeline (which uses
    # common deps), but tag them as "for_run_time".
    for k in rdeps:
        cdeps[k] = rdeps[k]
        cdeps[k]['for_run_time'] = 'yes'

    # TensorRT engines.
    depl = copy.deepcopy(cdeps['lib-tensorrt'])

    ii = {
        'action': 'resolve',
        'module_uoa': 'env',
        'host_os': hos,
        'target_os': tos,
        'device_id': tdid,
        'deps': {'lib-tensorrt': copy.deepcopy(depl)}
    }
    r = ck.access(ii)
    if r['return'] > 0: return r

    # All UOAs of env for TensorRT engines.
    udepl = r['deps']['lib-tensorrt'].get('choices', [])
    if len(udepl) == 0:
        return {'return': 1, 'error': 'no registered TensorRT engines'}

    # Caffe models.
    depm = copy.deepcopy(rdeps['caffemodel'])

    ii = {
        'action': 'resolve',
        'module_uoa': 'env',
        'host_os': hos,
        'target_os': tos,
        'device_id': tdid,
        'deps': {'caffemodel': copy.deepcopy(depm)}
    }
    r = ck.access(ii)
    if r['return'] > 0: return r

    # All UOAs of env for Caffe models.
    udepm = r['deps']['caffemodel'].get('choices', [])
    if len(udepm) == 0:
        return {'return': 1, 'error': 'no registered Caffe models'}

    # Prepare pipeline (template) with the first engine/model resolved.
    cdeps['lib-tensorrt']['uoa'] = udepl[0]
    cdeps['caffemodel']['uoa'] = udepm[0]

    ii = {
        'action': 'pipeline',
        'prepare': 'yes',
        'repo_uoa': 'ck-tensorrt',
        'module_uoa': 'program',
        'data_uoa': 'tensorrt-time',
        'cmd_key': cmd_key,
        'dependencies': cdeps,
        'no_compiler_description': 'yes',
        'compile_only_once': 'yes',
        'cpu_freq': 'max',
        'gpu_freq': 'max',
        'flags': '-O3',
        'speed': 'no',
        'energy': 'no',
        'no_state_check': 'yes',
        'skip_calibration': 'yes',
        'skip_print_timers': 'yes',
        'out': 'con',
    }

    r = ck.access(ii)
    if r['return'] > 0: return r

    fail = r.get('fail', '')
    if fail == 'yes':
        return {'return': 10,
                'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'}

    ready = r.get('ready', '')
    if ready != 'yes':
        return {'return': 11, 'error': 'pipeline not ready'}

    state = r['state']
    tmp_dir = state['tmp_dir']

    # Remember resolved deps for this benchmarking session.
    xcdeps = r.get('dependencies', {})

    # Clean pipeline before reusing it as a template.
    if 'ready' in r: del (r['ready'])
    if 'fail' in r: del (r['fail'])
    if 'return' in r: del (r['return'])

    pipeline = copy.deepcopy(r)

    # For each TensorRT engine.
    for lib_uoa in udepl:
        # Load TensorRT engine.
        ii = {'action': 'load', 'module_uoa': 'env', 'data_uoa': lib_uoa}
        r = ck.access(ii)
        if r['return'] > 0: return r
        # Get the lib name e.g. 'tensorrt-3.0.4'.
        lib_version = r['dict']['customize']['version']
        lib_name = 'tensorrt-%s' % lib_version
        lib_tags = lib_name
        # Skip some libs with "in [..]" or "not in [..]".
        if lib_name in []: continue

        # For each Caffe model.
        for model_uoa in udepm:
            # Load Caffe model.
            ii = {'action': 'load', 'module_uoa': 'env', 'data_uoa': model_uoa}
            r = ck.access(ii)
            if r['return'] > 0: return r
            # Get the tags from e.g. 'Caffe model (net and weights) (deepscale, squeezenet, 1.1)'
            model_name = r['data_name']
            # Raw string avoids invalid '\(' escape sequences (bug fix).
            model_tags = re.match(
                r'Caffe model \(net and weights\) \((?P<tags>.*)\)', model_name)
            if model_tags:
                model_tags = model_tags.group('tags').replace(' ', '').replace(',', '-')
            else:
                # Fall back to joining the entry's tags with '-'.
                model_tags = ''
                for tag in r['dict']['tags']:
                    if model_tags != '': model_tags += '-'
                    model_tags += tag

            # Skip some models with "in [..]" or "not in [..]".
            if model_tags not in ['bvlc-alexnet', 'bvlc-googlenet',
                                  'deepscale-squeezenet-1.1']:
                continue

            record_repo = 'local'
            record_uoa = model_tags + '-' + lib_tags

            # Prepare pipeline.
            ck.out('---------------------------------------------------------------------------------------')
            ck.out('%s - %s' % (lib_name, lib_uoa))
            ck.out('%s - %s' % (model_name, model_uoa))
            ck.out('Experiment - %s:%s' % (record_repo, record_uoa))

            # Prepare autotuning input.
            cpipeline = copy.deepcopy(pipeline)

            # Reset deps and change UOA to the current engine/model.
            new_deps = {'lib-tensorrt': copy.deepcopy(depl),
                        'caffemodel': copy.deepcopy(depm)}
            new_deps['lib-tensorrt']['uoa'] = lib_uoa
            new_deps['caffemodel']['uoa'] = model_uoa

            jj = {'action': 'resolve',
                  'module_uoa': 'env',
                  'host_os': hos,
                  'target_os': tos,
                  'device_id': tdid,
                  'deps': new_deps}
            r = ck.access(jj)
            if r['return'] > 0: return r

            cpipeline['dependencies'].update(new_deps)

            # Autotune over fp16 and batch size (module-level fp / bs specs).
            ii = {
                'action': 'autotune',
                'module_uoa': 'pipeline',
                'data_uoa': 'program',
                'choices_order': [['##choices#env#CK_TENSORRT_ENABLE_FP16'],
                                  ['##choices#env#CK_CAFFE_BATCH_SIZE']],
                'choices_selection': [
                    {'type': 'loop', 'start': fp['start'], 'stop': fp['stop'],
                     'step': fp['step'], 'default': fp['default']},
                    {'type': 'loop', 'start': bs['start'], 'stop': bs['stop'],
                     'step': bs['step'], 'default': bs['default']}
                ],
                'features_keys_to_process': ['##choices#env#CK_TENSORRT_ENABLE_FP16',
                                             '##choices#env#CK_CAFFE_BATCH_SIZE'],
                'iterations': -1,
                'repetitions': num_repetitions,
                'record': 'yes',
                'record_failed': 'yes',
                'record_params': {'search_point_by_features': 'yes'},
                'record_repo': record_repo,
                'record_uoa': record_uoa,
                'tags': ['explore-batch-size-libs-models', platform_tags,
                         model_tags, lib_tags],
                'pipeline': cpipeline,
                'out': 'con'
            }

            r = ck.access(ii)
            if r['return'] > 0: return r

            fail = r.get('fail', '')
            if fail == 'yes':
                return {'return': 10,
                        'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'}

    return {'return': 0}
def do(i):
    """Merge Caffe-Intel accuracy results into performance experiment points.

    Searches local 'experiment' entries matching
    'ck-request-asplos18-caffe-intel-performance-*', finds the unique
    matching accuracy entry for each (model, precision) pair, and copies
    the accuracy keys (converted to the common
    '##characteristics#run#accuracy_top...' format) into every performance
    point file ('*.flat.json') on disk.

    i - CK input dictionary (unused here; kept for CK API compatibility).

    Returns a CK-style dictionary: {'return': 0} on success, or
    {'return': >0, 'error': ...} on failure.
    """
    # List performance entries.
    r = ck.access({'action': 'search',
                   'module_uoa': 'experiment',
                   'data_uoa': 'ck-request-asplos18-caffe-intel-performance-*',
                   'repo_uoa': 'local',
                   'add_meta': 'yes'})
    if r['return'] > 0: return r
    lst = r['lst']

    for q in lst:
        duoa = q['data_uoa']
        path = q['path']

        # Detect model from the entry name. Initialize first so an
        # unrecognized entry hits the explicit error below instead of
        # raising NameError (bug fix).
        model = ''
        model_species = ''
        if 'inception-v3' in duoa:
            model = 'inception-v3'
            model_species = '1b339ddb13408f8f'
        elif 'resnet50' in duoa:
            model = 'resnet50'
            model_species = 'd777f6335496db61'

        if model == '':
            return {'return': 1, 'error': 'model is not recognized'}

        # Detect precision from the entry name.
        prec = ''
        if '-fp32' in duoa:
            prec = 'fp32'
        elif '-int8' in duoa:
            prec = 'int8'

        if prec == '':
            return {'return': 1, 'error': 'model precision is not recognized'}

        ck.out('* ' + duoa + ' / ' + model + ' / ' + prec)

        # Search matching accuracy entry (if intel-request)
        x = 'ck-request-asplos18-caffe-intel-accuracy.*.' + model + '-' + prec
        r = ck.access({'action': 'search',
                       'module_uoa': 'experiment',
                       'data_uoa': x,
                       'repo_uoa': 'local'})
        if r['return'] > 0: return r
        alst = r['lst']
        if len(alst) != 1:
            return {'return': 1, 'error': 'ambiguity when search for accuracy'}

        a = alst[0]
        apath = a['path']

        # There is only one point normally (no model tuning) - take the
        # first '*.flat.json' file found.
        dacc = {}
        xacc = os.listdir(apath)

        for f in xacc:
            if f.endswith('.flat.json'):
                r = ck.load_json_file({'json_file': os.path.join(apath, f)})
                if r['return'] > 0: return r

                dx = r['dict']

                # Get only accuracy keys (convert to common format).
                for k in dx:
                    if k.startswith('##characteristics#run#acc/top-'):
                        k1 = '##characteristics#run#accuracy_top' + k[30:]
                        dacc[k1] = dx[k]
                    elif k.startswith('##characteristics#run#accuracy/top-'):
                        k1 = '##characteristics#run#accuracy_top' + k[35:]
                        dacc[k1] = dx[k]

                break

        if len(dacc) == 0:
            return {'return': 1,
                    'error': 'strange - no match for accuracy entries'}

        # Iterate over performance points and merge in the accuracy keys.
        dperf = os.listdir(path)
        for f in dperf:
            if f.endswith('.flat.json'):
                ck.out(' * ' + f)

                # Load performance file.
                p1 = os.path.join(path, f)
                r = ck.load_json_file({'json_file': p1})
                if r['return'] > 0: return r
                d = r['dict']

                # Merge accuracy.
                for k in dacc:
                    d[k] = dacc[k]

                # Save updated dict.
                r = ck.save_json_to_file({'json_file': p1,
                                          'dict': d,
                                          'sort_keys': 'yes'})
                if r['return'] > 0: return r

    return {'return': 0}
def do(i, arg):
    """Explore CLBlast matrix sizes with PSO search-strategy parameters.

    For every combination of PSO global/local/random inertia values
    (module-level pso_inf_g / pso_inf_l / pso_inf_r), prepares a fresh
    'clblast-tune' pipeline and runs an autotuning experiment over the
    module-level size_m / size_n / size_k triples.

    i   - CK input dictionary (unused here; kept for CK API compatibility).
    arg - parsed command-line arguments; arg.fp may point to a JSON file
          of triples that replaces the module-level size lists; arg.tos /
          arg.did optionally override target OS / device id.

    Returns a CK-style dictionary: {'return': 0} on success, or
    {'return': >0, 'error': ...} on failure.

    NOTE: the original had mixed tab/space indentation (a TabError under
    Python 3); whitespace is normalized to spaces, behavior unchanged.
    """
    fp = arg.fp
    if fp is not None:
        if os.path.isfile(fp):
            print("Loading triples %s" % (fp))
            triples = json.loads(open(fp).read())
            # Replace the module-level size lists in place.
            del size_m[:]
            del size_n[:]
            del size_k[:]
            for t in triples:
                size_m.append(str(t.get('bSizeM')))
                size_n.append(str(t.get('bSizeN')))
                size_k.append(str(t.get('bSizeK')))

    if VERBOSE or DEBUG:
        print('[Experiment] %s' % title)
        print('[Preparing pipeline] Clock resolution: %d' % clock_resolution)
        print('[Preparing pipeline] Matrix sizes: m=%s, k=%s, n=%s: ' % (size_m, size_k, size_n))
        print('[Preparing pipeline] Precision: %d' % precision)
        print('[Preparing pipeline] Run for configuration: %d' % run)
        print('[Preparing pipeline] More parms... ')

    ntrip = len(size_m)
    print('[Experiment] Number of triple(s) %s' % (ntrip))

    # Build a tag from the product M*N*K of each triple, comma-separated.
    size_tag = ''
    for tp in range(0, ntrip):
        if tp == ntrip - 1:
            size_tag += str((int(size_m[tp]) * int(size_n[tp]) * int(size_k[tp])))
        else:
            size_tag += str((int(size_m[tp]) * int(size_n[tp]) * int(size_k[tp]))) + ','

    # NOTE(review): these inertia values are appended to size_tag; the
    # original also built an unused 'INF_G:' prefix, possibly intended as a
    # separate tag. Behavior kept as-is - confirm intent.
    for inf in pso_inf_g:
        size_tag += str(float(inf)) + ','

    # Detect basic platform info.
    ii = {'action': 'detect',
          'module_uoa': 'platform',
          'con': 'con'}
    r = ck.access(ii)
    if DEBUG: print("%s %s" % (DEBUG_STR, r))
    if r['return'] > 0: return r

    # Host and target OS params.
    hos = r['host_os_uoa']
    hosd = r['host_os_dict']
    tos = r['os_uoa']
    tosd = r['os_dict']
    tdid = r['device_id']

    if DEBUG: print("%s %s %s" % (DEBUG_STR, hos, hosd))
    if DEBUG: print("%s %s %s %s" % (DEBUG_STR, tos, tosd, tdid))

    # Load CLBLAST program meta and desc to check deps.
    ii = {'action': 'load',
          'module_uoa': 'program',
          'data_uoa': 'clblast-tune'}
    rx = ck.access(ii)
    if DEBUG: print("%s %s " % (DEBUG_STR, rx))
    if rx['return'] > 0: return rx
    meta = rx['dict']

    # Get compile-time and run-time deps.
    cdeps = meta.get('compile_deps', {})
    rdeps = meta.get('run_deps', {})

    # Merge rdeps with cdeps for setting up the pipeline (which uses
    # common deps), but tag them as "for_run_time".
    for k in rdeps:
        cdeps[k] = rdeps[k]
        cdeps[k]['for_run_time'] = 'yes'

    # CLBlast libs.
    depl = copy.deepcopy(cdeps['lib-clblast'])

    # Optional target-OS / device override (e.g. for a remote device).
    if (arg.tos is not None) and (arg.did is not None):
        tos = arg.tos
        tdid = arg.did

    ii = {'action': 'resolve',
          'module_uoa': 'env',
          'host_os': hos,
          'target_os': tos,
          'device_id': tdid,
          'out': 'con',
          'deps': {'lib-clblast': copy.deepcopy(depl)}}
    r = ck.access(ii)
    if r['return'] > 0: return r
    udepl = r['deps']['lib-clblast'].get('choices', [])
    if len(udepl) == 0:
        return {'return': 1, 'error': 'no installed CLBlast libs'}

    # One experiment per (inf_g, inf_l, inf_r) PSO configuration.
    for curr_pso_inf_g in pso_inf_g:
        for curr_pso_inf_l in pso_inf_l:
            for curr_pso_inf_r in pso_inf_r:
                # Prepare a fresh pipeline template for this configuration.
                ii = {'action': 'pipeline',
                      'module_uoa': 'program',
                      'data_uoa': 'clblast-tune',
                      'prepare': 'yes',
                      'dependencies': cdeps,
                      'no_compiler_description': 'yes',
                      'cmd_key': kernel[0],
                      "target_os": tos,
                      "device_id": tdid,
                      "out": 'con',
                      "no_state_check": "yes",
                      'flags': '-O3',
                      }
                r = ck.access(ii)
                if r['return'] > 0: return r
                fail = r.get('fail', '')
                if fail == 'yes':
                    return {'return': 10, 'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'}

                ready = r.get('ready', '')
                if ready != 'yes':
                    return {'return': 11, 'error': 'pipeline not ready'}

                state = r['state']
                tmp_dir = state['tmp_dir']
                xcdeps = r.get('dependencies', {})

                # Clean pipeline before reusing it as a template.
                if 'ready' in r: del (r['ready'])
                if 'fail' in r: del (r['fail'])
                if 'return' in r: del (r['return'])
                pipeline = copy.deepcopy(r)

                ck.out('PSO_INF_G : ' + curr_pso_inf_g)
                curr_pso_inf_g_tag = "inf_g" + curr_pso_inf_g
                ck.out('PSO_INF_L : ' + curr_pso_inf_l)
                curr_pso_inf_l_tag = "inf_l" + curr_pso_inf_l
                ck.out('PSO_INF_R : ' + curr_pso_inf_r)
                curr_pso_inf_r_tag = "inf_r" + curr_pso_inf_r

                record_repo = 'local'
                record_uoa = 'explore-matrix-size-' + kernel[0] + '-pso-' + curr_pso_inf_g_tag + curr_pso_inf_l_tag + curr_pso_inf_r_tag
                ck.out('---------------------------------------------------------------------------------------')
                ck.out('Experiment - %s:%s' % (record_repo, record_uoa))

                # Single-element lists: the tuner loops over exactly the
                # current inertia value.
                local_pso_inf_g = [curr_pso_inf_g]
                local_pso_inf_l = [curr_pso_inf_l]
                local_pso_inf_r = [curr_pso_inf_r]

                cpipeline = copy.deepcopy(pipeline)
                ii = {
                    'action': 'autotune',
                    'module_uoa': 'pipeline',
                    'data_uoa': 'program',
                    'choices_order': [
                        ['##env#CK_CLBLAST_MSIZE'],
                        ['##env#CK_CLBLAST_NSIZE'],
                        ['##env#CK_CLBLAST_KSIZE'],
                        ['##env#CK_TUNER_NUM_OF_STRATEGIES'],
                        ['##env#CK_SEARCH_STRATEGY'],
                        ['##env#CK_PSO_SWARM_SIZE'],
                        ['##env#CK_PSO_INF_G'],
                        ['##env#CK_PSO_INF_L'],
                        ['##env#CK_PSO_INF_R']
                    ],
                    'choices_selection': [
                        {"type": "loop-with-next", "choice": size_m, "default": "256"},
                        {"type": "loop-with-next", "choice": size_n, "default": "256"},
                        {"type": "loop-with-next", "choice": size_k, "default": "256"},
                        {"type": "loop", "choice": num_strategy, "default": "1"},
                        {"type": "loop", "choice": strategy, "default": "2"},
                        {"type": "loop", "choice": pso_swarm_size, "default": "4"},
                        {"type": "loop", "choice": local_pso_inf_g, "default": "0.4"},
                        {"type": "loop", "choice": local_pso_inf_l, "default": "0.0"},
                        {"type": "loop", "choice": local_pso_inf_r, "default": "0.4"},
                    ],
                    'features_keys_to_process': ['##choices#*'],
                    'iterations': -1,
                    'repetitions': 1,
                    'record': 'yes',
                    'record_failed': 'yes',
                    'record_params': {'search_point_by_features': 'yes'},
                    'record_repo': record_repo,
                    'record_uoa': record_uoa,
                    'tags': ['explore-clblast-matrix-size', kernel[0], model, size_tag, curr_pso_inf_g_tag, curr_pso_inf_l_tag, curr_pso_inf_r_tag],
                    'pipeline': cpipeline,
                    'out': 'con'
                }
                r = ck.access(ii)
                if DEBUG > 0: print("%s %s" % (DEBUG_STR, r))
                if r['return'] > 0: return r
                fail = r.get('fail', '')
                if fail == 'yes':
                    return {'return': 10, 'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'}

    return {'return': 0}
Exemple #34
0
def do(i, arg):
    # Detect basic platform info.
    ii = {'action': 'detect', 'module_uoa': 'platform', 'out': 'out'}
    r = ck.access(ii)
    if r['return'] > 0: return r

    # Host and target OS params.
    hos = r['host_os_uoa']
    hosd = r['host_os_dict']

    tos = r['os_uoa']
    tosd = r['os_dict']
    tdid = r['device_id']
    # Program and command.
    program = 'caffe-time-opencl'
    cmd_key = 'default'
    tp = 'opencl'

    # Load Caffe program meta and desc to check deps.
    ii = {'action': 'load', 'module_uoa': 'program', 'data_uoa': program}
    rx = ck.access(ii)
    if rx['return'] > 0: return rx
    mm = rx['dict']

    # Get compile-time and run-time deps.
    cdeps = mm.get('compile_deps', {})
    rdeps = mm.get('run_deps', {})

    # Merge rdeps with cdeps for setting up the pipeline (which uses
    # common deps), but tag them as "for_run_time".
    for k in rdeps:
        cdeps[k] = rdeps[k]
        cdeps[k]['for_run_time'] = 'yes'

    # Caffe libs.
    depl = copy.deepcopy(cdeps['lib-caffe'])
    if (arg.tos is not None) and (arg.did is not None):
        tos = arg.tos
        tdid = arg.did

    ii = {
        'action': 'resolve',
        'module_uoa': 'env',
        'host_os': hos,
        'target_os': tos,
        'device_id': tdid,
        'out': 'con',
        'deps': {
            'lib-caffe': copy.deepcopy(depl)
        }
    }
    r = ck.access(ii)
    if r['return'] > 0: return r

    udepl = r['deps']['lib-caffe'].get('choices',
                                       [])  # All UOAs of env for Caffe libs.
    if len(udepl) == 0:
        return {'return': 1, 'error': 'no installed Caffe libs'}

    # Caffe models.
    depm = copy.deepcopy(cdeps['caffemodel'])

    ii = {
        'action': 'resolve',
        'module_uoa': 'env',
        'host_os': hos,
        'target_os': tos,
        'device_id': tdid,
        'out': 'con',
        'deps': {
            'caffemodel': copy.deepcopy(depm)
        }
    }
    r = ck.access(ii)
    if r['return'] > 0: return r

    udepm = r['deps']['caffemodel'].get(
        'choices', [])  # All UOAs of env for Caffe models.
    if len(udepm) == 0:
        return {'return': 1, 'error': 'no installed Caffe models'}

    # Prepare pipeline.
    cdeps['lib-caffe']['uoa'] = udepl[0]
    cdeps['caffemodel']['uoa'] = udepm[0]

    ii = {
        'action': 'pipeline',
        'prepare': 'yes',
        'dependencies': cdeps,
        'module_uoa': 'program',
        'data_uoa': program,
        'cmd_key': cmd_key,
        'target_os': tos,
        'device_id': tdid,
        'no_state_check': 'yes',
        'no_compiler_description': 'yes',
        'skip_calibration': 'yes',
        'env': {
            'CK_CAFFE_SKIP_BACKWARD': 1,
            'OPENBLAS_NUM_THREADS': 4
        },
        'cpu_freq': 'max',
        'gpu_freq': 'max',
        'flags': '-O3',
        'speed': 'no',
        'energy': 'no',
        'skip_print_timers': 'yes',
        'out': 'con'
    }

    r = ck.access(ii)
    if r['return'] > 0: return r

    fail = r.get('fail', '')
    if fail == 'yes':
        return {
            'return': 10,
            'error': 'pipeline failed (' + r.get('fail_reason', '') + ')'
        }

    ready = r.get('ready', '')
    if ready != 'yes':
        return {'return': 11, 'error': 'pipeline not ready'}

    state = r['state']
    tmp_dir = state['tmp_dir']

    # Remember resolved deps for this benchmarking session.
    xcdeps = r.get('dependencies', {})

    # Clean pipeline.
    if 'ready' in r: del (r['ready'])
    if 'fail' in r: del (r['fail'])
    if 'return' in r: del (r['return'])

    pipeline = copy.deepcopy(r)

    # For each Caffe lib.*******************************************************
    for lib_uoa in udepl:
        # Load Caffe lib.
        ii = {'action': 'load', 'module_uoa': 'env', 'data_uoa': lib_uoa}
        r = ck.access(ii)
        if r['return'] > 0: return r
        # Get the tags from e.g. 'BVLC Caffe framework (libdnn,viennacl)'
        lib_name = r['data_name']
        lib_tags = re.match('BVLC Caffe framework \((?P<tags>.*)\)', lib_name)
        lib_tags = lib_tags.group('tags').replace(' ', '').replace(',', '-')
        # Skip some libs with "in [..]" or "not in [..]".

        if lib_tags in ['opencl-clblast-tune']: continue

        #        if lib_tags not in ['opencl-clblast']: continue

        #        if lib_tags not in ['opencl-clblast-tune', 'opencl-clblast']: continue
        #        if lib_tags not in ['opencl-libdnn-clblast-tune', 'opencl-libdnn-clblast']: continue
        #        if lib_tags not in ['opencl-libdnn-viennacl', 'opencl-viennacl']: continue

        skip_compile = 'no'

        # Use the 'time_cpu' command for the CPU only lib, 'time_gpu' for all the rest.
        #        if r['dict']['customize']['params']['cpu_only']==1:
        #            cmd_key='time_cpu'
        #        else:
        #            cmd_key='time_gpu'
        #        # FIXME: Customise cmd for NVIDIA's experimental fp16 branch.
        #        if lib_tags in [ 'nvidia-fp16-cuda', 'nvidia-fp16-cudnn' ]:
        #            cmd_key='time_gpu_fp16'

        # For each Caffe model.*************************************************
        for model_uoa in udepm:
            # Load Caffe model.
            ii = {'action': 'load', 'module_uoa': 'env', 'data_uoa': model_uoa}
            r = ck.access(ii)
            if r['return'] > 0: return r
            # Get the tags from e.g. 'Caffe model (net and weights) (deepscale, squeezenet, 1.1)'
            model_name = r['data_name']
            model_tags = re.match(
                'Caffe model \(net and weights\) \((?P<tags>.*)\)', model_name)
            model_tags = model_tags.group('tags').replace(' ', '').replace(
                ',', '-')
            # Skip some models with "in [..]" or "not in [..]".
            if model_tags not in ['bvlc-alexnet']: continue

            record_repo = 'local'
            record_uoa = model_tags + '-' + lib_tags

            # Prepare pipeline.
            ck.out(
                '---------------------------------------------------------------------------------------'
            )
            ck.out('%s - %s' % (lib_name, lib_uoa))
            ck.out('%s - %s' % (model_name, model_uoa))
            ck.out('Experiment - %s:%s' % (record_repo, record_uoa))

            # Prepare autotuning input.
            cpipeline = copy.deepcopy(pipeline)

            # Reset deps and change UOA.
            new_deps = {
                'lib-caffe': copy.deepcopy(depl),
                'caffemodel': copy.deepcopy(depm)
            }

            new_deps['lib-caffe']['uoa'] = lib_uoa
            new_deps['caffemodel']['uoa'] = model_uoa

            jj = {
                'action': 'resolve',
                'module_uoa': 'env',
                'host_os': hos,
                'target_os': tos,
                'device_id': tdid,
                'deps': new_deps
            }
            r = ck.access(jj)
            if r['return'] > 0: return r

            cpipeline['dependencies'].update(new_deps)

            cpipeline['no_clean'] = skip_compile
            cpipeline['no_compile'] = skip_compile

            cpipeline['cmd_key'] = cmd_key

            ii = {
                'action':
                'autotune',
                'module_uoa':
                'pipeline',
                'data_uoa':
                'program',
                'choices_order': [['##choices#env#CK_CAFFE_BATCH_SIZE']],
                'choices_selection': [{
                    'type': 'loop',
                    'start': bs['start'],
                    'stop': bs['stop'],
                    'step': bs['step'],
                    'default': bs['default']
                }],
                'features_keys_to_process': ['##choices#*'],
                'iterations':
                -1,
                'repetitions':
                num_repetitions,
                'record':
                'yes',
                'record_failed':
                'yes',
                'record_params': {
                    'search_point_by_features': 'yes'
                },
                'record_repo':
                record_repo,
                'record_uoa':
                record_uoa,
                'tags': [
                    'explore-batch-size-libs-models', cmd_key, model_tags,
                    lib_tags, platform_tags
                ],
                'pipeline':
                cpipeline,
                'out':
                'con'
            }

            r = ck.access(ii)
            if r['return'] > 0: return r

            fail = r.get('fail', '')
            if fail == 'yes':
                return {
                    'return': 10,
                    'error':
                    'pipeline failed (' + r.get('fail_reason', '') + ')'
                }

            skip_compile = 'yes'

    return {'return': 0}
Exemple #35
0
def do(i, arg):
    """Explore MobileNets batch-size/convolution-method choices across all
    installed TensorFlow libraries and models via CK pipeline autotuning.

    i   - CK-style input dict (unused directly; kept for CK convention).
    arg - parsed command-line args; fields used here: accuracy,
          repetitions, random_name, share_platform, tos, did.

    Relies on module-level globals: ck, copy, os, bs, ch,
    platform_tags, request_dict.

    Returns a CK-style dict: {'return': 0} on success or
    {'return': >0, 'error': ...} on failure.
    """
    # Process arguments.
    if (arg.accuracy):
        experiment_type = 'accuracy'
        num_repetitions = 1
    else:
        experiment_type = 'performance'
        num_repetitions = arg.repetitions
    random_name = arg.random_name
    share_platform = arg.share_platform

    # Detect basic platform info.
    ii={'action':'detect',
        'module_uoa':'platform',
        'out':'con'}
    if share_platform: ii['exchange']='yes'
    r=ck.access(ii)
    if r['return']>0: return r

    # Keep to prepare ReQuEST meta.
    platform_dict=copy.deepcopy(r)

    # Host and target OS params.
    hos=r['host_os_uoa']
    hosd=r['host_os_dict']

    tos=r['os_uoa']
    tosd=r['os_dict']
    tdid=r['device_id']

#    program='mobilenets-armcl-opencl'
    program='image-classification-tf-py'

    # Find the installed ImageNet validation dataset.
    ii={'action':'show',
        'module_uoa':'env',
        'tags':'dataset,imagenet,raw,val'}

    rx=ck.access(ii)
    if rx['return']>0: return rx
    # FIX: an empty result previously returned the *success* dict 'rx'
    # (return code 0), which callers would treat as OK; report an error.
    if len(rx['lst']) == 0:
        return {'return':1, 'error':'no installed ImageNet validation dataset'}
    # FIXME: It's probably better to use CK_ENV_DATASET_IMAGE_DIR.
    img_dir_val = rx['lst'][0]['meta']['env']['CK_CAFFE_IMAGENET_VAL']

    # In accuracy mode process every .JPEG image in the validation set;
    # in performance mode a single batch is enough.
    if (arg.accuracy):
        batch_count = len([f for f in os.listdir(img_dir_val)
           if f.endswith('.JPEG') and os.path.isfile(os.path.join(img_dir_val, f))])
    else:
        batch_count = 1

    # Find the installed ImageNet auxiliary dataset (labels etc.).
    ii={'action':'show',
        'module_uoa':'env',
        'tags':'dataset,imagenet,aux'}
    rx=ck.access(ii)
    if rx['return']>0: return rx
    # FIX: same error-path fix as for the validation dataset above.
    if len(rx['lst']) == 0:
        return {'return':1, 'error':'no installed ImageNet aux dataset'}
    img_dir_aux = rx['lst'][0]['meta']['env']['CK_ENV_DATASET_IMAGENET_AUX']

    # Load the program meta to obtain its dependencies.
    ii={'action':'load',
        'module_uoa':'program',
        'data_uoa':program}
    rx=ck.access(ii)
    if rx['return']>0: return rx
    mm=rx['dict']
    # Get compile-time and run-time deps.
    cdeps=mm.get('compile_deps',{})
    rdeps=mm.get('run_deps',{})

    # Merge rdeps with cdeps for setting up the pipeline (which uses
    # common deps), but tag them as "for_run_time".
    for k in rdeps:
        cdeps[k]=rdeps[k]
        cdeps[k]['for_run_time']='yes'
    # FIX: 'print cdeps' was a Python 2 print statement (SyntaxError
    # under Python 3); the function form is valid in both.
    print(cdeps)
    depl=copy.deepcopy(cdeps['lib-tensorflow'])
    if (arg.tos is not None) and (arg.did is not None):
        tos=arg.tos
        tdid=arg.did

    # Resolve all installed TensorFlow libraries.
    ii={'action':'resolve',
        'module_uoa':'env',
        'host_os':hos,
        'target_os':tos,
        'device_id':tdid,
        'out':'con',
        'deps':{'library':copy.deepcopy(depl)},
        'quiet':'yes'
    }
    r=ck.access(ii)
    if r['return']>0: return r

    udepl=r['deps']['library'].get('choices',[]) # All UOAs of env for TF lib
    if len(udepl)==0:
        return {'return':1, 'error':'no installed TensorFlow'}
    cdeps['lib-tensorflow']['uoa']=udepl[0]
    depm=copy.deepcopy(cdeps['model-and-weights'])

    # Resolve all installed model weights.
    ii={'action':'resolve',
        'module_uoa':'env',
        'host_os':hos,
        'target_os':tos,
        'device_id':tdid,
        'out':'con',
        'deps':{'weights':copy.deepcopy(depm)},
        'quiet':'yes'
    }
    r=ck.access(ii)
    if r['return']>0: return r

    udepm=r['deps']['weights'].get('choices',[])
    if len(udepm)==0:
        return {'return':1, 'error':'no installed Weights'}
    cdeps['lib-tensorflow']['uoa']=udepl[0]
    cdeps['model-and-weights']['uoa']=udepm[0]

    # Prepare the reference pipeline once; it is deep-copied per
    # (lib, model) combination below.
    ii={'action':'pipeline',
        'prepare':'yes',
        'dependencies':cdeps,

        'module_uoa':'program',
        'data_uoa':program,

        'target_os':tos,
        'device_id':tdid,

        'no_state_check':'yes',
        'no_compiler_description':'yes',
        'skip_calibration':'yes',

        'env':{
          'CK_ENV_DATASET_IMAGENET_VAL':img_dir_val,
          'CK_BATCH_COUNT':batch_count,
          'CK_BATCHES_DIR':'../batches',
          'CK_BATCH_LIST':'../batches',
          'CK_IMAGE_LIST':'../images',
          'CK_RESULTS_DIR':'predictions',
          'CK_SKIP_IMAGES':0
        },

        'cpu_freq':'max',
        'gpu_freq':'max',

        'flags':'-O3',
        'speed':'no',
        'energy':'no',

        'skip_print_timers':'yes',
        'out':'con'
    }

    r=ck.access(ii)
    if r['return']>0: return r
    fail=r.get('fail','')
    if fail=='yes':
        return {'return':10, 'error':'pipeline failed ('+r.get('fail_reason','')+')'}

    ready=r.get('ready','')
    if ready!='yes':
        return {'return':11, 'error':'pipeline not ready'}

    state=r['state']
    tmp_dir=state['tmp_dir']

    # Remember resolved deps for this benchmarking session.
    xcdeps=r.get('dependencies',{})
    # Clean pipeline status keys before reusing it as a template.
    if 'ready' in r: del(r['ready'])
    if 'fail' in r: del(r['fail'])
    if 'return' in r: del(r['return'])

    pipeline=copy.deepcopy(r)

    # For each TensorFlow lib.*************************************************
    for lib_uoa in udepl:
        # Load lib env entry.
        ii={'action':'load',
            'module_uoa':'env',
            'data_uoa':lib_uoa}
        r=ck.access(ii)
        if r['return']>0: return r
        lib_name=r['data_name']
        lib_tags=r['dict']['customize']['version']
        # Skip some libs with "in [..]" or "not in [..]".
        if arg.accuracy and lib_tags in [ ]: continue
        # Compile only once per lib; subsequent models reuse the build.
        skip_compile='no'
        # For each MobileNets model.*************************************************
        for model_uoa in udepm:
            # Load model env entry.
            ii={'action':'load',
                'module_uoa':'env',
                'data_uoa':model_uoa}
            r=ck.access(ii)
            if r['return']>0: return r
            model_name=r['data_name']
            if 'mobilenet' not in r['dict']['tags']:
                continue
            # MobileNet hyper-parameters: width multiplier and input resolution.
            alpha = float(r['dict']['env']['CK_ENV_TENSORFLOW_MODEL_MOBILENET_MULTIPLIER'])
            rho = int(r['dict']['env']['CK_ENV_TENSORFLOW_MODEL_MOBILENET_RESOLUTION'])

            record_repo='local'
            record_uoa='mobilenets-'+experiment_type+'-'+str(rho)+'-'+str(alpha)+'-tensorflow-'+lib_tags

            # Prepare pipeline.
            ck.out('---------------------------------------------------------------------------------------')
            ck.out('%s - %s' % (lib_name, lib_uoa))
            ck.out('%s - %s' % (model_name, model_uoa))
            ck.out('Experiment - %s:%s' % (record_repo, record_uoa))

            # Prepare autotuning input.
            cpipeline=copy.deepcopy(pipeline)
            # Reset deps and change UOA.
            new_deps={'library':copy.deepcopy(depl),
                      'weights':copy.deepcopy(depm)}

            new_deps['library']['uoa']=lib_uoa
            new_deps['weights']['uoa']=model_uoa
            jj={'action':'resolve',
                'module_uoa':'env',
                'host_os':hos,
                'target_os':tos,
                'device_id':tdid,
                'deps':new_deps}
            r=ck.access(jj)
            if r['return']>0: return r

            cpipeline['dependencies'].update(new_deps)

            cpipeline['no_clean']=skip_compile
            cpipeline['no_compile']=skip_compile

            # Prepare common meta for ReQuEST tournament.
            features=copy.deepcopy(cpipeline['features'])
            platform_dict['features'].update(features)

            r=ck.access({'action':'prepare_common_meta',
                         'module_uoa':'request.asplos18',
                         'platform_dict':platform_dict,
                         'deps':cpipeline['dependencies'],
                         'request_dict':request_dict})
            if r['return']>0: return r

            record_dict=r['record_dict']

            meta=r['meta']

            # Optionally anonymize the record UOA.
            if random_name:
               rx=ck.gen_uid({})
               if rx['return']>0: return rx
               record_uoa=rx['data_uid']

            tags=r['tags']

            tags.append(experiment_type)

            tags.append('explore-mobilenets-'+experiment_type)
            tags.append(lib_tags)
            tags.append(platform_tags)
            tags.append(str(rho))
            tags.append(str(alpha))

            ii={'action':'autotune',
               'module_uoa':'pipeline',
               'data_uoa':'program',
               'choices_order':[
                   [
                       '##choices#env#CK_BATCH_SIZE'
                   ],
                   [
                       '##choices#env#CK_CONVOLUTION_METHOD_HINT'
                   ],
                   [
                       '##choices#env#CK_ENV_MOBILENET_RESOLUTION'
                   ],
                   [
                       '##choices#env#CK_ENV_MOBILENET_WIDTH_MULTIPLIER'
                   ]
               ],
               'choices_selection':[
                   {'type':'loop', 'start':bs['start'], 'stop':bs['stop'], 'step':bs['step'], 'default':bs['default']},
                   {'type':'loop', 'start':ch['start'], 'stop':ch['stop'], 'step':ch['step'], 'default':ch['default']},
                   {'type':'loop', 'choice': [rho], 'default': 224},
                   {'type':'loop', 'choice': [alpha], 'default': 1.0},
               ],

               'features_keys_to_process':['##choices#*'],

               'iterations':-1,
               'repetitions': num_repetitions,

               'record':'yes',
               'record_failed':'yes',

               'record_params':{
                   'search_point_by_features':'yes'
               },

               'tags':tags,
               'meta':meta,

               'record_dict':record_dict,

               'record_repo':record_repo,
               'record_uoa':record_uoa,

               'pipeline':cpipeline,
               'out':'con'
            }
            r=ck.access(ii)
            if r['return']>0: return r

            fail=r.get('fail','')
            if fail=='yes':
                return {'return':10, 'error':'pipeline failed ('+r.get('fail_reason','')+')'}

### end pipeline
    return {'return':0}
Exemple #36
0
def login(i):

    """
    Test the cK login using the stored configuration, optionally
    overridden by the given input keys. Runs interactive setup first
    if no configuration entry exists yet.

    Input:  {
              (username) [str]
              (api_key) [str]
              (server_url) [str]
              (server_user) [str]
              (server_pass) [str]
              (server_skip_validation) [str]
            }

    Output: {
              return  [int]    - return code = 0 if success or >0 if error
              (error) [str]    - error string if return>0 
            }
    """

    # Try to load the current client configuration entry.
    cfg={}

    ii={'action':'load',
        'repo_uoa':config.CK_CFG_REPO_UOA,
        'module_uoa':config.CK_CFG_MODULE_UID,
        'data_uoa':config.CK_CFG_DATA_UOA}

    r=ck.access(ii)
    # Return code 16 means "entry not found"; anything else >0 is fatal.
    if r['return']>0 and r['return']!=16: ck.err(r)

    # If not found, set up the client interactively first.
    if r['return']==16:
       setup(i)

    # (Re)load the configuration through the config module.
    r=config.load({})
    if r['return']>0: return r
    cfg=r.get('dict',{})

    # Override stored values with any non-empty input values.
    for k in ['username', 'api_key', 'server_url', 'server_user', 'server_pass', 'server_skip_validation']:
        v=i.get(k,'')
        if v is None: v=''   # PEP 8: compare to None with 'is'
        if v!='': cfg[k]=v

    # Send a 'login' request to test the connection and credentials.
    r=comm.send({'config':cfg,
                 'action':'login'
                })
    if r['return']>0: ck.err(r)

    # Success
    ck.out('cK login tested successfully!')

    return 0
Exemple #37
0
            z = z.replace(' codelet.data', ' $#src_path#$codelet.data')

        ix = z.find(' $#src_path#$codelet.data')
        if ix > 0:
            z = z.replace(' $#src_path#$codelet.data',
                          ' \"$#src_path#$codelet.data\"')

        y['run_cmd_main'] = z

        dt = x.get('dataset_tags', [])
        dcu = x.get('dataset_classes_uoa', [])

        if len(dcu) > 0:
            for k in dcu:
                if k not in dtags:
                    ck.out('')
                    ra = ck.inp({'text': 'Enter dataset tag for ' + k + ': '})
                    kk = ra['string'].strip().lower()
                    dtags[k] = kk
                kk = dtags[k]
                if kk not in dt: dt.append(kk)

            del (x['dataset_classes_uoa'])

        dt.append('dataset')
        x['dataset_tags'] = list(set(dt))

        if 'codelet' in tags:
            del (x['dataset_tags'])

    r = ck.access({
Exemple #38
0
def setup(i):

    """
    Interactively configure the cK client (username, API key, server
    settings), merge with any existing configuration, and save it.

    Input:  {
              (username) [str]
              (api_key) [str]
              (server_url) [str]
              (server_user) [str]
              (server_pass) [str]
              (server_skip_validation) [str]
            }

    Output: {
              return  [int]    - return code = 0 if success or >0 if error
              (error) [str]    - error string if return>0 
            }
    """


    # Get current configuration (a missing entry is fine; start empty).
    cfg={}

    ii={'action':'load',
        'repo_uoa':config.CK_CFG_REPO_UOA,
        'module_uoa':config.CK_CFG_MODULE_UID,
        'data_uoa':config.CK_CFG_DATA_UOA}

    r=ck.access(ii)
    # Return code 16 means "entry not found"; anything else >0 is fatal.
    if r['return']>0 and r['return']!=16: ck.err(r)

    if r['return']==0: cfg=r['dict']

    # Check commands

    # Username ##########################################################
    username=cfg.get('username','')

    if i.get('username') is not None: username=i['username']

    if username=='' or username is None:
       # FIX: this prompt line was garbled in the original source;
       # restored to match the api_key prompt pattern below.
       r=ck.inp({'text':'Enter cK username: '})
       if r['return']>0: ck.err(r)

       username=r['string'].strip()

    if username is None: username=''

    cfg['username']=username

    # API key ###########################################################
    api_key=cfg.get('api_key','')

    if i.get('api_key') is not None: api_key=i['api_key']

    if api_key=='' or api_key is None:
       r=ck.inp({'text':'Enter your cK API key: '})
       if r['return']>0: ck.err(r)

       api_key=r['string'].strip()

    if api_key is None: api_key=''

    cfg['api_key']=api_key

    # Server URL (falls back to the built-in default) ###################
    server_url=cfg.get('server_url','')

    if i.get('server_url') is not None and i.get('server_url')!='': server_url=i['server_url']

    if server_url is None or server_url=='': server_url=config.CR_DEFAULT_SERVER_URL

    cfg['server_url']=server_url

    # Server user (optional; stored only when non-empty) ################
    server_user=cfg.get('server_user','')

    if i.get('server_user') is not None and i.get('server_user')!='': server_user=i['server_user']

    if server_user is not None and server_user!='': cfg['server_user']=server_user

    # Server password (optional; stored only when non-empty) ############
    server_pass=cfg.get('server_pass','')

    if i.get('server_pass') is not None and i.get('server_pass')!='': server_pass=i['server_pass']

    if server_pass is not None and server_pass!='': cfg['server_pass']=server_pass

    # Skip SSL certificate validation (honored only when exactly 'yes') #
    server_skip_validation=cfg.get('server_skip_validation','')

    if i.get('server_skip_validation') is not None and i.get('server_skip_validation')!='': server_skip_validation=i['server_skip_validation']

    if server_skip_validation=='yes': cfg['server_skip_validation']=server_skip_validation

    # Save configuration.
    # FIX: the original passed the misspelled key 'repo_uyoa', so the
    # repo UOA was silently ignored by ck.access on update (the load
    # above correctly uses 'repo_uoa').
    r=ck.access({'action':'update',
                 'repo_uoa':config.CK_CFG_REPO_UOA,
                 'module_uoa':config.CK_CFG_MODULE_UID,
                 'data_uoa':config.CK_CFG_DATA_UOA,
                 'dict':cfg,
                 'sort_keys':'yes'})
    if r['return']>0: ck.err(r)

    # Print (new/updated) configuration
    ck.out('')
    ck.out('Current cBench configuration:')

    ck.out('')
    ck.out(json.dumps(cfg, indent=2, sort_keys=True))

    return 0
Exemple #39
0
    name = 'shape-' + '-'.join(
        map(str, [in_c, in_h, in_w, out_c, out_h, out_w]))
    desc = '{}x{}x{} -> {}x{}x{}'.format(in_c, in_h, in_w, out_c, out_h, out_w)
    data = OrderedDict([('CK_IN_SHAPE_C', in_c), ('CK_IN_SHAPE_H', in_h),
                        ('CK_IN_SHAPE_W', in_w), ('CK_OUT_SHAPE_C', out_c),
                        ('CK_OUT_SHAPE_H', out_h), ('CK_OUT_SHAPE_W', out_w)])

    return name, desc, data


########################################################################

if __name__ == '__main__':
    dataset_dir = os.getenv('CK_NNTEST_DATASET_PATH')
    ck.out('Processing dataset in {} ...'.format(dataset_dir))

    # Load meta
    meta_file = os.path.join(dataset_dir, '.cm', 'meta.json')
    if not os.path.isfile(meta_file):
        raise Exception('Dataset meta not found')
    with open(meta_file) as f:
        meta = json.load(f)

    # Select appropriate preparation mode
    prepare_func = None
    tags = meta.get('tags')
    if 'tensor-avgpool' in tags:
        prepare_func = prepare_avgpool
    elif 'tensor-conv' in tags:
        prepare_func = prepare_conv
      "##characteristics#run#return_code",
      "##characteristics#run#run_time_state#positive_results",
      "##characteristics#run#run_time_state#negative_results"
    ],
    "flat_keys_list_ext":"#min",
    "ignore_graph_separation":"yes"}

r=ck.access(ii)
if r['return']>0: ck.err(r)

table=r['table'].get('0',[])
real_keys=r.get('real_keys',[])

# Print failures
for q in table:
    ck.out('')
    ck.out('Dataset  UOA:    '+str(q[1]))
    ck.out('Dataset file:    '+str(q[2]))
    ck.out('Target OS:       '+str(q[4]))
    ck.out('OpenCL platform: '+str(q[5]))
    ck.out('OpenCL device:   '+str(q[6]))
    ck.out('Compiler:        '+str(q[8]))

# Convert to csv
ii={"action":"convert_table_to_csv",
    "module_uoa":"experiment",
    "table":table,
    "keys":real_keys,
    "file_name":"start_analysis_from_remote_tmp.csv"}

r=ck.access(ii)