Code Example #1
def LogCaller():
    frame = currentframe().f_back          # frame of the function that called LogCaller
    fcaller = getframeinfo(frame.f_back)   # its caller, i.e. "called from"
    fcallee = getframeinfo(frame)
    msg = '[{}] {}:{} called from: {}:{}'.format(g.__plugin__,
                                                 opb(fcallee.filename),
                                                 fcallee.lineno,
                                                 opb(fcaller.filename),
                                                 fcaller.lineno)
    xbmc.log(msg, Log.INFO)
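A note on the abbreviations: these snippets alias the os.path helpers heavily. Code Example #7 below spells two imports out explicitly (currentframe/getframeinfo and basename as opb); by the same pattern, the remaining aliases used throughout appear to be the following (an inference from the call sites, not confirmed against every project):

import os.path as op                      # op.join, op.isfile, ...
from inspect import currentframe, getframeinfo
from os.path import (basename as opb, dirname as opd, join as opj,
                     normpath as opn, abspath as opab, relpath as oprel,
                     exists as opex)
# opa and opabs (Code Examples #8 and #21) also look like abspath aliases;
# the Popen snippets presumably rely on: from subprocess import PIPE

g, xbmc and Log here are Kodi add-on globals supplied by the surrounding module.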
Code Example #2
def _subprocPYC(strCmd, cmpfile, dotExt='.dll'):

    clr = None  # shadow any global clr; only set if the import below succeeds
    try:
        import clr
        sys.path.append(oprel(compiler.pycpath))
        import pyc
    except Exception:
        pass  # not running under IronPython; fall back to the subprocess path

    args = None
    if clr:
        args = strCmd[2:]
        try:
            pyc.Main(args)
        except Exception as ex:
            if ex:
                print('type', type(ex))
                try:
                    print(ex)
                    print(ex.message)
                except Exception:
                    pass

                log.warn('pyc.Main err:\n' + ex.ToString())

            else:
                log.warn('pyc.Main err:\n')

            return False

    else:

        po = subprocess.Popen(strCmd, stdout=PIPE, stderr=PIPE)
        stdout, stderr = po.communicate()
        po.wait()

        if stderr:
            log.warn('ERR\n {}:\n\t compiling with {}'.format(stderr, cmpfile))
            po.kill()
            return False

        else:
            if gsBuild.Verbose or not gsBuild.INFO:
                log.info('\n - STDOUT - \n{}'.format(stdout))

        po.kill()

    if dotExt and opex(
            opj(opd(compiler.mainoutdir),
                opb(cmpfile).replace('.py', dotExt))):
        log.FILE('Build Created: {}'.format(cmpfile.replace('.py', dotExt)))

    elif opex(cmpfile):
        log.FILE('Build Created: {}'.format(cmpfile))

    else:
        return False

    return True
Code Example #3
def getTrueFilePath(fnpath):
    '''
       Find the absolute file path, if it exists.

       :param: fnpath [str] - file path to check.

       :return: absolute path to the existing file [str].

       :raise: [IOError] if the file can't be found or read.

    '''
    assert isinstance(fnpath, (str, unicode)), \
        'Error type not str: send str file path'
    fpath = fnpath.strip()
    resolvedPath = None

    if os.getcwd() not in fpath:
        fpath = opab(fpath)

    dfn = opb(opn(fpath))
    dfdir = opd(opn(opab(fpath)))

    dirfs = os.listdir(dfdir)
    dfpath = opn(op.join(dfdir, dfn))
    if dfn in dirfs and op.isfile(dfpath):
        # log.warn('\n  dfpath {}'.format(dfpath))
        resolvedPath = dfpath
        return resolvedPath

    elif _validateFileIsPath(dfpath):
        # log.warn('\n single filepath return {}'.format(dfpath))
        return dfpath

    raise IOError('file | filepath not resolved: {}'.format(fpath))
Code Example #4
 def _proc_folders_by_grps(self):
     for f in self.folders:
         grp_num = opb(f).split('_')[1]
         if grp_num in self.folder_dict:
             self.folder_dict[grp_num].append(f)
         else:
             self.folder_dict[grp_num] = [f]
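The same grouping reads more naturally with collections.defaultdict, which removes the key-membership check. A minimal standalone sketch (group_by_grps is a hypothetical name; folders is assumed to be a list of paths whose basenames look like prefix_GROUPNUM_suffix):

from collections import defaultdict
from os.path import basename as opb

def group_by_grps(folders):
    folder_dict = defaultdict(list)
    for f in folders:
        # the group number is the second '_'-separated field of the basename
        folder_dict[opb(f).split('_')[1]].append(f)
    return folder_dict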
Code Example #5
def proc_folder(folder_name):
    persons = glob(opj(folder_name, '*'))
    for person in persons:
        print('Processing {} ...'.format(person))
        match = re.search(r'(\d+)_(\d+)_(\d+)', person)
        try:
            compressed_person = person.replace(match.group(0), '{:02d}_{:02d}_{:02d}'.format(int(match.group(1)),
                                                                                         int(match.group(2)),
                                                                                         int(match.group(3))))\
                                  .replace('npy', 'npy_compressed')
        except:
            import ipdb; ipdb.set_trace()
        os.makedirs(compressed_person, exist_ok=True)
        npys = sorted(glob(opj(person, '*.npy')))
        with tqdm(total=len(npys), leave=False) as pbar:
            for cnt, npy in enumerate(npys):
                pbar.update(1)

                # slice off '.npy'; str.strip('.npy') would strip characters, not the suffix
                base_no = int(opb(npy)[:-4].split('_')[-1])
                # try:
                    # assert base_no == cnt + 1
                # except AssertionError:
                    # import ipdb; ipdb.set_trace()
                data = np.load(npy)
                # np.save(opj(compressed_person, 'normal_person.npy'), data)
                np.savez_compressed(opj(compressed_person, 'im_{:03d}.npz'.format(base_no)), data)
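A pitfall worth flagging for the base_no line: str.strip('.npy') removes any of the characters '.', 'n', 'p', 'y' from both ends rather than the '.npy' suffix, which is why slicing (or os.path.splitext) is the safe way to drop the extension:

>>> 'py_012.npy'.strip('.npy')          # strips characters, not the suffix
'_012'
>>> import os.path
>>> os.path.splitext('py_012.npy')[0]   # removes only the extension
'py_012'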
Code Example #6
def Log(msg, level=def_loglevel):
    if level == xbmc.LOGDEBUG and s.verbLog:
        level = def_loglevel
    fi = getframeinfo(currentframe().f_back)
    msg = '[{0}]{2} {1}'.format(
        g.__plugin__, msg,
        '' if not s.verbLog else ' {}:{}'.format(opb(fi.filename), fi.lineno))
    xbmc.log(py2_encode(msg), level)
Code Example #7
File: common.py Project: dokho33/xbmc
def Log(msg, level=xbmc.LOGNOTICE):
    from inspect import currentframe, getframeinfo
    from os.path import basename as opb
    if level == xbmc.LOGDEBUG and var.verbLog:
        level = xbmc.LOGNOTICE
    fi = getframeinfo(currentframe().f_back)
    msg = '[{0}]{2} {1}'.format(
        pluginname, msg, '' if not var.verbLog else ' {}:{}'.format(
            opb(fi.filename), fi.lineno))
    xbmc.log(py2_encode(msg), level)
Code Example #8
        def do_POST(self):
            content_length = int(self.headers['Content-Length'])
            post_data = json.loads(self.rfile.read(content_length))

            mlog(fnc="do_POST()", msg="POST req data: Last request - {}, Last quality - {}, Rebuffer Time - {}".format(
                post_data['lastRequest'], post_data['lastquality'], float(post_data['RebufferTime'] - self.input_dict['last_total_rebuf'])))
            send_data = ""

            if ( 'lastquality' in post_data ):
                rebuffer_time = float(post_data['RebufferTime'] - self.input_dict['last_total_rebuf'])
                reward = \
                   VIDEO_BIT_RATE[post_data['lastquality']] / M_IN_K \
                   - REBUF_PENALTY * rebuffer_time / M_IN_K \
                   - SMOOTH_PENALTY * np.abs(VIDEO_BIT_RATE[post_data['lastquality']] -
                                             self.input_dict['last_bit_rate']) / M_IN_K
                # reward = BITRATE_REWARD[post_data['lastquality']] \
                #         - 8 * rebuffer_time / M_IN_K - np.abs(BITRATE_REWARD[post_data['lastquality']] - BITRATE_REWARD_MAP[self.input_dict['last_bit_rate']])

                video_chunk_fetch_time = post_data['lastChunkFinishTime'] - post_data['lastChunkStartTime']
                video_chunk_size = post_data['lastChunkSize']
                
                # log wall_time, bit_rate, buffer_size, rebuffer_time, video_chunk_size, download_time, reward
                self.log_file.write(str(time.time()) + '\t' +
                                    str(VIDEO_BIT_RATE[post_data['lastquality']]) + '\t' +
                                    str(post_data['buffer']) + '\t' +
                                    str(rebuffer_time / M_IN_K) + '\t' +
                                    str(video_chunk_size) + '\t' +
                                    str(video_chunk_fetch_time) + '\t' +
                                    str(reward) + '\n')
                self.log_file.flush()

                self.input_dict['last_total_rebuf'] = post_data['RebufferTime']
                self.input_dict['last_bit_rate'] = VIDEO_BIT_RATE[post_data['lastquality']]

                if ( post_data['lastRequest'] == TOTAL_VIDEO_CHUNKS ):
                    send_data = "REFRESH"
                    self.input_dict['last_total_rebuf'] = 0
                    self.input_dict['last_bit_rate'] = DEFAULT_QUALITY
                    self.log_file.write('\n')  # so that in the log we know where video ends
                    lock_path = "./locks/video_" + opb(self.input_dict['log_file_path']) + ".lock"
                    with open(lock_path, "w"):
                        pass  # just create the lock file
                    mlog(fnc="do_POST()", msg="Created lock file: {}".format(opa(lock_path)))

            self.send_response(200)
            self.send_header('Content-Type', 'text/plain')
            self.send_header('Content-Length', len(send_data))
            self.send_header('Access-Control-Allow-Origin', "*")
            self.end_headers()
            self.wfile.write(send_data)
            if len(send_data) > 0:
                mlog(fnc="do_POST()", msg="Response to POST req: {}".format(send_data))
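The reward computed above is the linear QoE metric from the SIGCOMM MPC paper: bitrate utility minus a rebuffering penalty minus a quality-switch (smoothness) penalty. Factored out as a standalone function for clarity (a sketch; the penalty constants are passed in rather than assumed):

def linear_qoe(bitrate_kbps, last_bitrate_kbps, rebuffer_ms,
               rebuf_penalty, smooth_penalty, m_in_k=1000.0):
    # utility of the chosen bitrate, in Mbps
    utility = bitrate_kbps / m_in_k
    # penalty for time spent rebuffering
    rebuf = rebuf_penalty * rebuffer_ms / m_in_k
    # penalty for switching quality between consecutive chunks
    smooth = smooth_penalty * abs(bitrate_kbps - last_bitrate_kbps) / m_in_k
    return utility - rebuf - smooth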
Code Example #9
def _getRelativeZipName(zips, zfile):
    roots = dict(zipDirGCA(zips))

    zfiledrv = os.path.splitdrive(zfile)[0]
    RelDirsFilePath = None
    reldir = roots[zfiledrv]['GCA']

    if roots[zfiledrv]['baseroot']:
        RelDirsFilePath = oprel(zfile, reldir)
    else:
        RelDirsFilePath = opn(opj(reldir, opb(zfile)))

    if RelDirsFilePath:
        return RelDirsFilePath

    return
Code Example #10
File: filecontrol.py Project: PESwim/ipybuilder
def checkRequired(path):
    '''
       Read a file listing required files and check that
       each one exists; raise a partial error otherwise.

       :param: path [str] - file containing the list to read

       :return: list of files **not** found - should be empty [list]

       :raise: partialError - sinks errors for stdout in PartialErrors [list]

    '''
    tag = 'Exists'
    reqpath = getTrueFilePath(path)
    amsg = 'need {} file'.format(reqpath)
    assert os.path.isfile(reqpath), amsg

    with open(reqpath, 'r') as txtr:
        reqfiles = txtr.readlines()
        log.FILE('{} {}'.format(tag, path))

    reqfs = list(reqfiles)

    if gsBuild.Verbose: log.info('\ngsBuild.IPATH:\n {}'.format(gsBuild.IPATH))

    if gsBuild.IPATH != 'clr':
        for f in reqfiles:
            if gsBuild.IPATH and gsBuild.IPATH not in f:
                fr = opn(opj(gsBuild.IPATH, opb(f)))
            else:
                fr = f
            try:
                fp = getTrueFilePath(fr.strip())
                log.FILE('{} {}'.format(tag, fp))
                reqfs.remove(f)
            except IOError as ex:
                log.debug('raise partial')
                partialError(ex, ex.message)
                continue
        return reqfs

    elif gsBuild.IPATH == 'clr':
        return []
Code Example #11
File: thePlotFMRI.py Project: mychan24/pyThePlot
 def plot(self, output_file='', title='', psc_min_max=PSC_MIN_MAX):
     """
     Generates a figure for the user using the CarpetFigureCreator()
     """
     cfc = CarpetFigureCreator()
     cfc.setFunctionalFileToDisplay(self.functional_fn)
     cfc.setCarpetMatrix(self.getCarpetData())
     cfc.setFramewiseDisplacementVector(self.getFramewiseDisplacement())
     cfc.setSliceIndex(self.sliceIndex)
     cfc.setNVoxelsPerTissue(len(self.i_gm[0]), len(self.i_wm[0]),
                             len(self.i_csf[0]))
     cfc.setBinarySegmentations(self.gm_bin, self.wm_bin, self.csf_bin)
     if title:
         cfc.setTitle(title=title)
     if len(self.vols_to_highlight):
         cfc.setVolumesToHighlight(self.vols_to_highlight)
     cfc.plot(output_file=output_file, psc_min_max=psc_min_max)
     print '\t- SingleRun Plot : ' + opb(output_file) + ' saved'
Code Example #12
File: makedefault.py Project: PESwim/ipybuilder
def SlashArgs(config, name, jsn, out):
    '''
       Runs SlashContrl and BasePathDir for each user path arg.

       :params: (configPath, mainName, jsonp, outDir)

       :return: (argc, argm, argj, argo) [tuple]

    '''
    argc = argm = argj = argo = None

    if config == ' ':
        config = None
    if config:
        config = SlashContrl(config)
        argc = BasePathDir(config)
    if name:
        name = SlashContrl(name)
        argm = BasePathDir(name)
    if jsn:
        jsn = SlashContrl(jsn)
        argj = BasePathDir(jsn)
    if not out or out == '' or out == ' ':
        if argm and argm[0]:
            out = argm[0] + '\\release'
        elif 'builder' == opb(opd(os.getcwd())):
            out = os.getcwd() + '\\UserDefaulted\\release'
        else:
            out = os.getcwd() + '\\release'

    out = SlashContrl(out)
    if not op.isdir(out):
        if not op.isdir(opd(out)):
            os.mkdir(opd(out))
            log.FILE('{}'.format(opd(out)))

        if not op.isdir(out) and not op.isfile(out):
            os.mkdir(out)
            log.FILE('{}'.format(out))

    argo = BasePathDir(out)
    return (argc, argm, argj, argo)
Code Example #13
File: thePlotFMRI.py Project: mychan24/pyThePlot
 def plot(self, output_file='', title='', sliceIndex=''):
     """
     Generates a figure for the user using the CarpetFigureCreator()
     :param output_file: string, path to output file (.png preferred), if empty, a window pops up to view the graph
     :param title: string, Supra Title for the figure, if empty, no supra title
     """
     # Go get the data from each instance of the Plot
     self.setDataForPlot()
     if sliceIndex:
         self.setSliceIndex(sliceIndex)
     else:
         self.setDefaultSliceIndex()
     # Call CarpetFigureCreator to generate the figure
     cpc = CarpetFigureCreator()
     # Use the first functional file to display the functional image
     cpc.setFunctionalFileToDisplay(self.list_fn_files[0])
     # Horizontally concatenated carpet matrices from each run
     cpc.setCarpetMatrix(self.cp_data)
     # Horizontally concatenated FD
     cpc.setFramewiseDisplacementVector(self.fd_vec)
     # Slice index used is the middle slice from the first functional run
     cpc.setSliceIndex(self.sliceIndex)
     # N voxels per tissue
     cpc.setNVoxelsPerTissue(self.n_GM, self.n_WM, self.n_CSF)
     # Binary segmentation maps
     cpc.setBinarySegmentations(self.bin_gm, self.wm_bin, self.csf_bin)
     # Sets the vector to display on the graph when a new run has started
     cpc.addRunIndices(self.Nt)
     if len(self.volsToHighlight) > 0:
         self.prepareVolumesToHighlightForPlot()
         cpc.setVolumesToHighlight(self.volsForPlot)
     cpc.setTitle(title=title)  # Set supra title
     cpc.plot(output_file=output_file)  # Plot and save the figure
     print '\t- MultiRuns Plot : ' + opb(output_file) + ' saved'
Code Example #14
def plotGlassbrainSlices(niftipath, mnipath, ortho='z', nRows=2, nCuts=6,
                         threshpos=0, threshneg=0, figLayout='Both',
                         showLRannot=True, findOptimalCut=True,
                         imageType='svg'):
    """
    Creates nice glassbrain slice figures in the direction x, y and z
    """

    # Initiation of relevant parameters
    img = nb.load(niftipath)
    lineW = 2. / (nRows + int((figLayout == 'Brain' or figLayout == 'Both')))

    # Reduce 4D volume to 3D
    if len(img.shape) == 4:
        data4D = img.get_data()
        data4D = data4D.reshape(data4D.shape[:-1])
        img = Nifti1Image(data4D, img.get_affine())

    # Get voxel extent in all directions
    dirMin = np.dot(img.get_affine(), [0, 0, 0, 1])[:3]
    dirMax = np.dot(img.get_affine(),
                    np.array(img.shape).tolist() + [1])[:3]

    if findOptimalCut:
        # Find cuts automatically
        cut_coords = find_cut_slices(img, direction=ortho, n_cuts=nCuts)
    else:
        # Split the orientation into nCuts equally spaced parts
        cut_coords = getEqualSpacing(dirMin, dirMax, ortho, nCuts)

    # Split cuts across the nRows rows
    cut_coords = [cut_coords[int(i * len(cut_coords) / np.float(nRows)):
                             int((i + 1) * len(cut_coords) / np.float(nRows))]
                  for i in range(nRows)]

    # Create Slices
    for i in range(nRows):

        # Create axes for plotting
        ax = plt.subplot(nRows + int((figLayout == 'Brain' or
                                      figLayout == 'Both')),
                         1, i + 1)

        # Plot the white background for all slices as a zeros value brain
        # (without it, the view focuses around the first area plotted)
        zerobrain = Nifti1Image(img.get_data() * 0, img.get_affine())
        brain = plot_roi(
            zerobrain, zerobrain, colorbar=False, cut_coords=cut_coords[i],
            display_mode=ortho, alpha=1, draw_cross=False, cmap=plt.cm.gray,
            black_bg=False, axes=ax, annotate=False)

        # Plot positive values
        posdata = np.copy(img.get_data())
        posdata[posdata <= threshpos] = 0.001  # = 0 crashes contour function
        posbrain = Nifti1Image(posdata, img.get_affine())
        brain.add_contours(
            posbrain, filled=False, cmap=plt.cm.hot, alpha=1, linewidths=lineW)

        # Plot negative values
        negdata = np.copy(img.get_data())
        negdata[negdata >= -threshneg] = 0.001  # = 0 crashes contour function
        negbrain = Nifti1Image(negdata, img.get_affine())
        brain.add_contours(
            negbrain, filled=False, cmap=plt.cm.winter, alpha=1,
            linewidths=lineW)

        # Plot outer MNI contours
        brain.add_contours(
            smooth_img(mnipath, 4), alpha=1, filled=False,
            levels=[100], linewidths=lineW, cmap=plt.cm.gray)

        # Plot inner MNI contours
        brain.add_contours(
            nb.load(mnipath), alpha=0.8, levels=[5000], linewidths=lineW,
            cmap=plt.cm.gray)

        # Add annotation if requested
        if figLayout == 'Both' or figLayout == 'Number':
            brain.annotate(left_right=showLRannot, size=int(12 * lineW))

    # Plot overview Brain at the bottom
    if figLayout == 'Brain' or figLayout == 'Both':

        # Create axes for overview brain
        ax = plt.subplot(nRows + 1, 1, nRows + 1)

        # Find overview view direction
        if ortho == 'z':
            direction = 'x'
        elif ortho == 'x':
            direction = 'z'
        elif ortho == 'y':
            direction = 'z'

        # Plot the white background as a zeros value brain
        brain = plot_roi(
            zerobrain, zerobrain, colorbar=False, cut_coords=[0],
            display_mode=direction, alpha=1, draw_cross=False,
            cmap=plt.cm.gray, black_bg=False, axes=ax, annotate=False)

        # Plot positive values
        brain.add_contours(
            posbrain, filled=False, cmap=plt.cm.hot, alpha=1, linewidths=lineW)

        # Plot negative values
        brain.add_contours(
            negbrain, filled=False, cmap=plt.cm.winter, alpha=1,
            linewidths=lineW)

        # Plot outer MNI contours
        brain.add_contours(
            smooth_img(mnipath, 4), alpha=1, filled=False,
            levels=[100], linewidths=lineW, cmap=plt.cm.gray)

        # Plot inner MNI contours
        brain.add_contours(
            nb.load(mnipath), alpha=0.8, levels=[5000], linewidths=lineW,
            cmap=plt.cm.gray)

        # Plot the line indicating the cut
        for i in np.array(cut_coords).flatten():
            if ortho == 'z' or ortho == 'y':
                ax.plot([-100, 100], [i, i], 'k-', lw=lineW)
            elif ortho == 'x':
                ax.plot([i, i], [-100, 100], 'k-', lw=lineW)

        if ortho == 'z':
            ax.axis((-300.0, 300.0, dirMin[2], dirMax[2]))
        elif ortho == 'y':
            ax.axis((-300.0, 300.0, dirMin[1], dirMax[1]))
        elif ortho == 'x':
            stretcher = (nRows + 1) / 2.
            ax.axis((-300.0 * stretcher, 300.0 * stretcher, -100.0, 100.0))

        # Add annotation if requested
        if figLayout == 'Both' or figLayout == 'Number':
            brain.annotate(left_right=showLRannot, size=int(12 * lineW))

    # Get file prefix (drop the NIfTI extension)
    if niftipath.endswith('.nii'):
        filename = opb(niftipath)[:-4]
    elif niftipath.endswith('.nii.gz'):
        filename = opb(niftipath)[:-7]
    else:
        filename = opb(niftipath)  # fallback: keep the name as-is

    # Create output folder
    path2Figure = opj(os.path.split(os.path.realpath(niftipath))[0], 'figures')
    if not os.path.exists(opj(path2Figure)):
        os.makedirs(opj(path2Figure))

    # Save figure
    figname = '_'.join([filename, '%s-cut' % ortho])
    plt.savefig(opj(path2Figure, '%s.%s' % (figname, imageType)))
    plt.clf()
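A call might look like the following (the file names are hypothetical; the function expects a statistical NIfTI map plus an MNI template on disk, and saves the figure into a figures/ folder next to niftipath):

plotGlassbrainSlices('stats_tmap.nii.gz', 'MNI152_T1_2mm.nii.gz',
                     ortho='z', nRows=2, nCuts=6,
                     threshpos=2.3, threshneg=2.3,
                     figLayout='Both', imageType='png')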
Code Example #15
def AssignMakeZip(uconfig, args):

    config = ndcDict(uconfig)
    outzipPath = None
    cfz = config['ZIPPATH']

    if args['listzip']:
        zfiles = []  # missing zipfiles in listzip.txt or input
        zips = loadRes(getTrueFilePath(args['listzip']))

        uzips = None  # unique zips
        nuzips = []  # full-path uniques

        # a zip with the current name already exists if we have zipped before
        isautozip = False

        try:
            import zlib
            mode = zipfile.ZIP_DEFLATED
        except ImportError:
            mode = zipfile.ZIP_STORED

        config['LISTFILES']['zip'] = []
        if isinstance(zips, list):
            config['LISTFILES']['zip'].extend(zips)
        else:
            config['LISTFILES']['zip'].append(zips)

        # set outzip path
        manf = 'default.zip'
        if config['MAINFILE']:
            manf = ('.').join(opb(config['MAINFILE']).split('.')[:-1]) + '.zip'

        outzipPath = opj(config['OUTDIR'], manf)
        # stop trying to overwrite same file
        # current zip path
        cfzp = None
        if cfz:

            try:
                cfzp = getTrueFilePath(cfz)
                if opex(cfzp):
                    isautozip = True
            except IOError:
                pass
        # auto zip path
        elif outzipPath:
            try:
                cfzp = getTrueFilePath(outzipPath)
                if opex(cfzp):
                    isautozip = True
            except IOError:
                pass
        # update zip path
        config['ZIPPATH'] = cfzp
        cfz = cfzp

        # -- zipping ---
        # uzips
        if isautozip:
            infoExistZip(cfz, outzipPath)
            log.FILE('Confirmed: {}'.format(cfzp))
            zipsRelative = []

            for zipname in zips:
                relname = None
                relname = _getRelativeZipName(zips, zipname)
                if relname:
                    zipsRelative.append(relname)
                else:
                    zipsRelative.append(zipname)

            with zipfile.ZipFile(cfzp, 'a', mode) as ziprd:
                uzips = list(uniqueZips(zipsRelative, list(ziprd.namelist())))

                # getting back full path from relative
                for zfile in uzips:
                    if 'from' in zfile:
                        drv = zfile[5]
                        zfile = opn(zfile.replace('from_' + drv, drv + ':'))
                    else:
                        cwdlst = os.getcwd().split(os.sep)

                        cnt = 0
                        while cwdlst[-1] and not opex(
                                opab(opj((os.sep).join(cwdlst), zfile))):
                            cnt += 1
                            if cnt > 25: break
                            cwdlst.pop()

                        zfile = opab(opj((os.sep).join(cwdlst), zfile))

                    nuzips.append(zfile)

                    if not os.path.isfile(zfile):
                        log.error("can't find {}".format(zfile))
                        zfiles.append(zfile)
                        continue

                    arcname = _getRelativeZipName(nuzips, zfile)
                    if not arcname:
                        log.error("can't find arcname using {}".format(
                            oprel(zfile)))
                        zfiles.append(zfile)
                        continue
                    ziprd.write(zfile, arcname)

            # changed if uzips
            if nuzips:
                if gsBuild.Verbose or not gsBuild.INFO:
                    log.info(('\nSome Files already zipped in:\n{}\n\t' + \
                              '- delete to replace existing' + \
                              '\nadding zip files to existing archive:\n' + \
                              '{}\n'*len(nuzips)) \
                            .format(cfz, *nuzips))

        # Need new zip with ZIPPATH/outzipPath as name
        elif not isautozip:
            warnZip(outzipPath)

            if isinstance(zips, list):

                with zipfile.ZipFile(cfz, 'a', mode) as zipr:
                    for zfile in list(set(zips)):
                        if not os.path.isfile(zfile):
                            zfiles.append(zfile)
                            continue
                        arcname = _getRelativeZipName(zips, zfile)
                        if not arcname:
                            arcname = oprel(zfile)
                        zipr.write(zfile, arcname)

                log.FILE('{}'.format(config['ZIPPATH']))

            if isinstance(zips, (str, unicode)):
                with zipfile.ZipFile(cfz, 'w', mode) as zipr:
                    arcname = oprel(zips)
                    zipr.write(zips, arcname)

                log.FILE('{}'.format(cfz))

        if zfiles:
            if gsBuild.Verbose or not gsBuild.INFO:
                log.warn(('\nFile | path does not exist - ' +\
                          'skipped adding zip files:\n\t' + \
                          '{} \n\t'*len(zfiles)).format(*zfiles))

            log.FILE(('*Missing zip: {}\n' * len(zfiles)).format(*zfiles))

            partialError('ValueError',
                         ('*Missing zip: {}\n' * len(zfiles)).format(*zfiles))

    return ndcDict(config)
Code Example #16
File: makedefault.py Project: PESwim/ipybuilder
def BasePathDir(dp):
    '''
       Parse a file path and return a tuple of info about the path.

       :param: dp [str] - user arg path

       :return:
           - (main, base, basetype, isFile, isdefault) [tuple]
           - main [str] - fullpath parent
           - base [str] - base of path dir or file path
           - basetype [str] - one of: python, json, config, None
           - isFile [bool]
           - isdefault [bool] - True if output is going to UserDefaulted/

    '''

    dp = dp.strip()
    dpex = op.exists(opab(dp))
    main = opd(dp)
    mainex = op.exists(main)
    base = opb(dp)
    hasdot = '.' in base
    basetype = None
    isFile = False
    isdefault = False

    if dpex:
        isFile = op.isfile(opab(dp))

    if hasdot:
        if '.py' in base:
            basetype = 'python'
        elif '.json' in base:
            basetype = 'json'
        elif '.config' in base:
            basetype = 'config'

    if (opb(main)) == 'builder':
        main = opn(opj(main, 'UserDefaulted'))
        isdefault = True

    if not hasdot and base == opd(main):
        return (main, None, None, isFile, isdefault)

    elif not hasdot and base in os.getcwd():
        if base == 'builder':
            base = opn(opj(base, 'UserDefaulted'))
            isdefault = True
        return (opn(opj(opab(main), base)), None, None, isFile, isdefault)

    elif not mainex:
        if op.exists(opn(opab(opd(main)))):
            isdefault = True
            return (opn(opj(opab(opd(main)), 'UserDefaulted')), base, basetype,
                    isFile, isdefault)
        isdefault = True
        return (opn(opab(opj(opd(main), 'UserDefaulted'))), base, basetype,
                isFile, isdefault)

    return (main.strip(), base.strip(), basetype, isFile, isdefault)
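Unpacking the return value might look like this (the path is hypothetical, and the exact tuple contents depend on the current working directory):

main, base, basetype, isFile, isdefault = BasePathDir('project/main.py')
# basetype would be 'python' here; isFile reflects whether the file exists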
Code Example #17
File: buildmake.py Project: PESwim/ipybuilder
def _subprocPYC(strCmd, cmpfile, dotExt='.dll'):

    clr = None  # shadow any global clr; only set if the import below succeeds
    try:
        import clr
    except Exception:
        pass  # not running under IronPython; fall back to the subprocess path

    if clr:
        try:
            sys.path.append(oprel(compiler.pycpath))
            import pyc
        except Exception:
            pass

        try:
            clr.AddReference("StdLib")
        except System.IO.IOException as ex:
            print('StdLib.dll reference error:\n\t' + \
                  'check file | filepath')
        try:
            clr.AddReference("IronPython")
        except System.IO.IOException as ex:
            print('IronPython reference error:\n\t' + \
                  'check file | filepath')
            
        f_ipy = False
        try:
            import ipyver
            rs = ipyver.ReferenceStatus()
            f_ipy = rs.RefStatusIPMS()['ipy']['isLocal']
        except System.IO.IOException as ex:
            pass
        
        try:
            clr.AddReference("ipybuild")
        except System.IO.IOException as ex:
            try:
                clr.AddReference("ipybuilder")
            except System.IO.IOException as ex:
                if f_ipy:
                    print('IF .exe: ipybuild(er) reference error:\n\t' + \
                          'check file | filepath')

        rslt = None
        args = strCmd[2:]

        try:
            rslt = pyc.Main(args)
        except Exception as ex:
            errtyp = ''
            gsBuild.OK = False
            try:
                errtyp = ex.GetType().ToString()
            except Exception as exf:
                pass
            if ex:
                log.warn('pyc.Main err:\n' + ex.ToString())
                log.warn('type {} or System Type {}'.format(type(ex), errtyp))
                log.warn('Error: {}'.format(ex))
                print 'ex-err'

            return False
         
        if rslt:
            return True

    else:
        
        po = subprocess.Popen(strCmd, stdout=PIPE, stderr=PIPE)
        stdout, stderr = po.communicate()
        po.wait()

        if stderr:
            log.warn('ERR\n {}:\n\t compiling with {}'.format(stderr, cmpfile))
            po.kill()
            gsBuild.OK = False
            return False
     
        else:
            if gsBuild.Verbose or not gsBuild.INFO:
                log.info('\n - STDOUT - \n{}'.format(stdout))

        po.kill()
    
    if dotExt and opex(opj(opd(compiler.mainoutdir), 
                           opb(cmpfile).replace('.py', dotExt))):   
        log.FILE('Build Created: {}'.format(cmpfile.replace('.py', dotExt)))
        return True
    elif opex(cmpfile):
        log.FILE('Build Created: {}'.format(cmpfile))   
        return True
    else:
        gsBuild.OK = False
        return False
Code Example #18
 def _collect_grps(self):
     for folder in self.folders:
         self.grps.add(opb(folder).split('_')[1])
     self.grps = sorted(self.grps)
Code Example #19
File: makedefault.py Project: PESwim/ipybuilder
}

if not os.path.exists(defaultjson):
    with open(defaultjson, 'w') as jassw:
        json.dump(assembly_json, jassw, indent=4)

    log.FILE('{}'.format(defaultjson))
    log.info('\nExisting resource not found; wrote default assembly:\n {}' \
             .format(defaultjson))

DefaultReq = ipyreq
DefaultReqList = []
for txtPath in DefaultReq:
    if gsBuild.IPATH and gsBuild.IPATH != 'clr':
        DefaultReqList.append(
            opn(opj(gsBuild.IPATH, opb(opn(txtPath.strip())))))
    elif gsBuild.IPATH == 'clr':
        pass
    else:
        DefaultReqList.append(os.path.normpath(txtPath.strip()))

DefaultReqPath = opn(opj(os.getcwd(), 'requirements.txt'))

with open(DefaultReqPath, 'w') as tw:
    tw.writelines(('\n').join(DefaultReqList))

if opex(DefaultReqPath):
    log.FILE('Exists {}'.format(DefaultReqPath))

#else:
#    with open(DefaultReqPath, 'w') as tw:
Code Example #20
        def do_POST(self):
            content_length = int(self.headers['Content-Length'])
            post_data = json.loads(self.rfile.read(content_length))

            mlog(
                fnc="do_POST()",
                msg=
                "POST req data: Last request - {}, Last quality - {}, Rebuffer Time - {}"
                .format(
                    post_data['lastRequest'], post_data['lastquality'],
                    float(post_data['RebufferTime'] -
                          self.input_dict['last_total_rebuf'])))
            send_data = ""

            if ('pastThroughput' in post_data):
                # @Hongzi: this is just the summary of throughput/quality at the end of the load
                # so we don't want to use this information to send back a new quality
                mlog(fnc="do_POST()",
                     msg="Past throughput is present in post_data, "
                         "not using this information to send back quality")
            else:
                # option 1. reward for just quality
                # reward = post_data['lastquality']
                # option 2. combine reward for quality and rebuffer time
                #           tune up the knob on rebuf to prevent it more
                # reward = post_data['lastquality'] - 0.1 * (post_data['RebufferTime'] - self.input_dict['last_total_rebuf'])
                # option 3. give a fixed penalty if video is stalled
                #           this can reduce the variance in reward signal
                # reward = post_data['lastquality'] - 10 * ((post_data['RebufferTime'] - self.input_dict['last_total_rebuf']) > 0)

                # option 4. use the metric in SIGCOMM MPC paper
                rebuffer_time = float(post_data['RebufferTime'] -
                                      self.input_dict['last_total_rebuf'])

                # --linear reward--
                reward = VIDEO_BIT_RATE[post_data['lastquality']] / M_IN_K \
                        - REBUF_PENALTY * rebuffer_time / M_IN_K \
                        - SMOOTH_PENALTY * np.abs(VIDEO_BIT_RATE[post_data['lastquality']] -
                                                  self.input_dict['last_bit_rate']) / M_IN_K

                # --log reward--
                # log_bit_rate = np.log(VIDEO_BIT_RATE[post_data['lastquality']] / float(VIDEO_BIT_RATE[0]))
                # log_last_bit_rate = np.log(self.input_dict['last_bit_rate'] / float(VIDEO_BIT_RATE[0]))

                # reward = log_bit_rate \
                #          - 4.3 * rebuffer_time / M_IN_K \
                #          - SMOOTH_PENALTY * np.abs(log_bit_rate - log_last_bit_rate)

                # --hd reward--
                # reward = BITRATE_REWARD[post_data['lastquality']] \
                #         - 8 * rebuffer_time / M_IN_K - np.abs(BITRATE_REWARD[post_data['lastquality']] - BITRATE_REWARD_MAP[self.input_dict['last_bit_rate']])

                self.input_dict['last_bit_rate'] = VIDEO_BIT_RATE[
                    post_data['lastquality']]
                self.input_dict['last_total_rebuf'] = post_data['RebufferTime']

                # retrieve previous state
                if len(self.s_batch) == 0:
                    state = [np.zeros((S_INFO, S_LEN))]
                else:
                    state = np.array(self.s_batch[-1], copy=True)

                # compute bandwidth measurement
                video_chunk_fetch_time = post_data[
                    'lastChunkFinishTime'] - post_data['lastChunkStartTime']
                video_chunk_size = post_data['lastChunkSize']

                # compute number of video chunks left
                video_chunk_remain = TOTAL_VIDEO_CHUNKS - self.input_dict[
                    'video_chunk_coount']
                self.input_dict['video_chunk_coount'] += 1

                # dequeue history record
                state = np.roll(state, -1, axis=1)

                # this should be S_INFO number of terms
                try:
                    state[
                        0,
                        -1] = VIDEO_BIT_RATE[post_data['lastquality']] / float(
                            np.max(VIDEO_BIT_RATE))
                    state[1, -1] = post_data['buffer'] / BUFFER_NORM_FACTOR
                    state[2, -1] = rebuffer_time / M_IN_K
                    state[3, -1] = float(video_chunk_size) / float(
                        video_chunk_fetch_time) / M_IN_K  # kilo byte / ms
                    state[4, -1] = np.minimum(video_chunk_remain,
                                              CHUNK_TIL_VIDEO_END_CAP) / float(
                                                  CHUNK_TIL_VIDEO_END_CAP)
                except ZeroDivisionError:
                    # this should occur VERY rarely (1 out of 3000), should be a dash issue
                    # in this case we ignore the observation and roll back to an earlier one
                    if len(self.s_batch) == 0:
                        state = [np.zeros((S_INFO, S_LEN))]
                    else:
                        state = np.array(self.s_batch[-1], copy=True)

                # log wall_time, bit_rate, buffer_size, rebuffer_time, video_chunk_size, download_time, reward
                self.log_file.write(
                    str(time.time()) + '\t' +
                    str(VIDEO_BIT_RATE[post_data['lastquality']]) + '\t' +
                    str(post_data['buffer']) + '\t' +
                    str(rebuffer_time / M_IN_K) + '\t' +
                    str(video_chunk_size) + '\t' +
                    str(video_chunk_fetch_time) + '\t' + str(reward) + '\n')
                self.log_file.flush()

                # pick bitrate according to MPC
                # first get harmonic mean of last 5 bandwidths
                past_bandwidths = state[3, -5:]
                while past_bandwidths[0] == 0.0:
                    past_bandwidths = past_bandwidths[1:]
                #if ( len(state) < 5 ):
                #    past_bandwidths = state[3,-len(state):]
                #else:
                #    past_bandwidths = state[3,-5:]
                bandwidth_sum = 0
                for past_val in past_bandwidths:
                    bandwidth_sum += (1 / float(past_val))
                future_bandwidth = 1.0 / (bandwidth_sum / len(past_bandwidths))

                # future chunks length (try 4 if that many remaining)
                last_index = int(post_data['lastRequest'])
                future_chunk_length = MPC_FUTURE_CHUNK_COUNT
                if (TOTAL_VIDEO_CHUNKS - last_index < 4):
                    future_chunk_length = TOTAL_VIDEO_CHUNKS - last_index

                # all possible combinations of 5 chunk bitrates (9^5 options)
                # iterate over list and for each, compute reward and store max reward combination
                max_reward = -100000000
                best_combo = ()
                start_buffer = float(post_data['buffer'])
                #start = time.time()
                for full_combo in CHUNK_COMBO_OPTIONS:
                    combo = full_combo[0:future_chunk_length]
                    # calculate total rebuffer time for this combination (start with start_buffer and subtract
                    # each download time and add the 4-second chunk duration in that order)
                    curr_rebuffer_time = 0
                    curr_buffer = start_buffer
                    bitrate_sum = 0
                    smoothness_diffs = 0
                    last_quality = int(post_data['lastquality'])
                    for position in range(0, len(combo)):
                        chunk_quality = combo[position]
                        index = last_index + position + 1  # e.g., if last chunk is 3, then first iter is 3+0+1=4
                        download_time = (
                            get_chunk_size(chunk_quality, index) / 1000000.
                        ) / future_bandwidth  # this is MB/MB/s --> seconds
                        if (curr_buffer < download_time):
                            curr_rebuffer_time += (download_time - curr_buffer)
                            curr_buffer = 0
                        else:
                            curr_buffer -= download_time
                        curr_buffer += 4

                        # linear reward
                        #bitrate_sum += VIDEO_BIT_RATE[chunk_quality]
                        #smoothness_diffs += abs(VIDEO_BIT_RATE[chunk_quality] - VIDEO_BIT_RATE[last_quality])

                        # log reward
                        # log_bit_rate = np.log(VIDEO_BIT_RATE[chunk_quality] / float(VIDEO_BIT_RATE[0]))
                        # log_last_bit_rate = np.log(VIDEO_BIT_RATE[last_quality] / float(VIDEO_BIT_RATE[0]))
                        # bitrate_sum += log_bit_rate
                        # smoothness_diffs += abs(log_bit_rate - log_last_bit_rate)

                        # hd reward
                        bitrate_sum += BITRATE_REWARD[chunk_quality]
                        smoothness_diffs += abs(BITRATE_REWARD[chunk_quality] -
                                                BITRATE_REWARD[last_quality])

                        last_quality = chunk_quality
                    # compute reward for this combination (one reward per 5-chunk combo)
                    # bitrates are in Mbits/s, rebuffer in seconds, and smoothness_diffs in Mbits/s

                    # linear reward
                    #reward = (bitrate_sum/1000.) - (4.3*curr_rebuffer_time) - (smoothness_diffs/1000.)

                    # log reward
                    # reward = (bitrate_sum) - (4.3*curr_rebuffer_time) - (smoothness_diffs)

                    # hd reward
                    reward = bitrate_sum - (8 * curr_rebuffer_time) - (
                        smoothness_diffs)

                    if (reward > max_reward):
                        max_reward = reward
                        best_combo = combo
                # send data to html side (first chunk of best combo)
                send_data = '0'  # no combo had reward better than -1000000 (ERROR) so send 0
                if (best_combo != ()):  # some combo was good
                    send_data = str(best_combo[0])

                end = time.time()
                #print "TOOK: " + str(end-start)

                end_of_video = False
                if (post_data['lastRequest'] == TOTAL_VIDEO_CHUNKS):
                    send_data = "REFRESH"
                    end_of_video = True
                    self.input_dict['last_total_rebuf'] = 0
                    self.input_dict['last_bit_rate'] = DEFAULT_QUALITY
                    self.input_dict['video_chunk_coount'] = 0
                    self.log_file.write(
                        '\n')  # so that in the log we know where video ends
                    lock_path = "./locks/video_" + opb(
                        self.input_dict['log_file_path']) + ".lock"
                    with open(lock_path, "w"):
                        pass  # just create the lock file
                    mlog(fnc="do_POST()",
                         msg="Created lock file: {}".format(opa(lock_path)))

                self.send_response(200)
                self.send_header('Content-Type', 'text/plain')
                self.send_header('Content-Length', len(send_data))
                self.send_header('Access-Control-Allow-Origin', "*")
                self.end_headers()
                self.wfile.write(send_data)
                if len(send_data) > 0:
                    mlog(fnc="do_POST()",
                         msg="Response to POST req: {}".format(send_data))

                # record [state, action, reward]
                # put it here after training, notice there is a shift in reward storage

                if end_of_video:
                    self.s_batch = [np.zeros((S_INFO, S_LEN))]
                else:
                    self.s_batch.append(state)
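The bandwidth predictor in the MPC block above is the harmonic mean of up to the last five throughput samples, which damps one-off spikes better than an arithmetic mean would. The loop is equivalent to this numpy form (a sketch, assuming past_bandwidths is a 1-D array with the leading zeros already removed):

import numpy as np

def harmonic_mean_bandwidth(past_bandwidths):
    past_bandwidths = np.asarray(past_bandwidths, dtype=float)
    # 1 / mean(1 / x): small samples dominate, so the estimate is conservative
    return len(past_bandwidths) / np.sum(1.0 / past_bandwidths)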
Code Example #21
def _setCompilerClass(rconfig):

    config = ndcDict(rconfig)
    f_standalone = None
    f_embed = None
    f_libembed = None
    f_parerr = None
    # f_parerr - Stop build on partialError that is Fatal to Compile
    # Let makeBuild finish so that user can fix partialErrs

    with open(config['CONFIGPATH'], 'r') as jbcr:
        config = ndcDict(json.load(jbcr))

    if gsBuild.Verbose or not gsBuild.INFO:
        log.info('\n Building from CONFIG:\n {}\n'.format(
            json.dumps(config, indent=4)))

    if not opex(config['MAINFILE']):
        try:
            raise IOError
        except IOError as ex:
            msg = 'File | filepath does not exist:\n "{}"' \
                    .format(config['MAINFILE'])
            partialError(ex, msg)
            f_parerr = True

    if not f_parerr:
        log.FILE('Build Loaded: {}'.format(config['MAINFILE']))

    assemInfo = config['ASSEMBLY']
    if isinstance(assemInfo['standalone'], bool) or \
        str(assemInfo['standalone']).upper() in ['TRUE', 'FALSE']:
        f_standalone = str(assemInfo['standalone']).upper() == 'TRUE'

    if isinstance(assemInfo['embed'], bool) or \
        str(assemInfo['embed']).upper() in ['TRUE', 'FALSE']:
        f_embed = str(assemInfo['embed']).upper() == 'TRUE'

    if isinstance(assemInfo['libembed'], bool) or \
        str(assemInfo['libembed']).upper() in ['TRUE', 'FALSE']:
        f_libembed = str(assemInfo['libembed']).upper() == 'TRUE'

    ext = '.dll'
    if config['MAKEEXE'] == True or \
        str(config['MAKEEXE']).upper() == 'TRUE':
        ext = '.exe'

    if f_standalone and not config['MAKEEXE']:
        log.warn('\n** Switching to exe /standalone == true in Assembly:' + \
                 '\n {}\n   Overrides default or makeEXE input arg == False' \
                 .format(config['JSONPATH']))

    MAINOUT = opn(opj(config['OUTDIR'], ('.').join(opb(config['MAINFILE']) \
                      .split('.')[:-1])) + ext)
    IPATH = gsBuild.IPATH
    STDLIBSOURCE = opabs(opj(IPATH, 'StdLib.dll'))
    LIBPATH = opabs(opj(IPATH, 'Lib'))
    compiler.pycpath = opn(opj(opd(opabs(gsBuild.IPYBLDPATH)), 'pyc.py'))
    compiler.stdlibsource = STDLIBSOURCE
    compiler.ipath = IPATH
    compiler.libpath = LIBPATH

    if not op.isfile(STDLIBSOURCE):
        _createStdLib()

    MAINOUTDIR = ('.').join(MAINOUT.split('.')[:-1])
    PYCDIR = opn(opj(os.getcwd(), opb(MAINOUTDIR)) + ext)
    STDLIBRELEASE = opj(opd(MAINOUTDIR), 'StdLib.dll')
    MAINFILE = config['MAINFILE']
    isLib = opex(LIBPATH)
    isStdLib = op.isfile(STDLIBSOURCE)
    haveStdLib = op.isfile(opj(os.getcwd(), 'StdLib.dll'))
    isReleasedStdLib = op.isfile(STDLIBRELEASE)

    lstdll = []
    if config['LISTFILES']['dll']:
        if isinstance(config['LISTFILES']['dll'], list):
            for lfile in config['LISTFILES']['dll']:
                if lfile and '__init__' not in lfile:
                    lstdll.append(lfile)
        else:
            lstdll.append(config['LISTFILES']['dll'])

    lstexe = []
    if config['LISTFILES']['exe']:
        if isinstance(config['LISTFILES']['exe'], list):
            for xfile in config['LISTFILES']['exe']:
                if xfile and '__init__' not in xfile:
                    lstexe.append(xfile)
        else:
            lstexe.append(config['LISTFILES']['exe'])

        lstexe = nullList(lstexe)

    compiler.f_standalone = f_standalone
    compiler.f_embed = f_embed
    compiler.f_libembed = f_libembed
    compiler.f_parerr = f_parerr
    compiler.mainout = MAINOUT
    compiler.ipath = IPATH
    compiler.mainoutdir = MAINOUTDIR
    compiler.pycdir = PYCDIR
    compiler.stdlibrelease = STDLIBRELEASE
    compiler.stdlibsource = STDLIBSOURCE
    compiler.libpath = LIBPATH
    compiler.mainfile = MAINFILE
    compiler.isLib = isLib
    compiler.isStdLib = isStdLib
    compiler.haveStdLib = haveStdLib
    compiler.isReleasedStdLib = isReleasedStdLib
    compiler.lstdll = lstdll
    compiler.lstexe = lstexe
    compiler.ext = ext
    compiler.lstexedlls = None

    if not opex(opd(compiler.pycdir)):
        raise IOError('FilePath {}:\t Use absolute or relative to:\n\t {}' \
                      .format(opd(compiler.pycdir), os.getcwd()))
    if compiler.f_standalone:
        if gsBuild.Verbose or not gsBuild.INFO:
            log.info('\nNew {} compile standalone from:\n {}' \
                            .format(ext.upper().replace('.', ''),
                                    config['MAINFILE']))
    else:
        mfn = 'application/lib'
        if config['MAINFILE']:
            mfn = opb(config['MAINFILE'])
        if gsBuild.Verbose or not gsBuild.INFO:
            log.info(("\nNew {} compile from: \n {}" + \
                      "\n\tAs Required: add your {}, project, and ironpython"+ \
                      "\n\tdll(s) to path:\n\t{}\n\n")
                     .format(ext.upper().replace('.', ''),
                             config['MAINFILE'], mfn,
                             config['OUTDIR']))

    if gsBuild.Verbose or not gsBuild.INFO:
        log.info('\n Lib source path {}'.format(LIBPATH))
        log.info('\n If set True, f_libembed adds ~23mb to the file; ' + \
                 'now set as {}'.format(compiler.f_libembed))

    if compiler.f_libembed and compiler.isStdLib:
        if gsBuild.Verbose or not gsBuild.INFO:
            if compiler.isReleasedStdLib:
                log.info('\nOK - "StdLib.dll" exists; delete' + \
                         ' or move to update:\n{}'.format(STDLIBRELEASE))
            else:
                log.info('\nOK - "StdLib.dll" exists; delete' + \
                         ' or move to update:\n{}'.format(STDLIBSOURCE))

    elif not compiler.isStdLib and compiler.f_libembed and \
        not compiler.isReleasedStdLib and compiler.isLib:

        _createStdLib()

    if not compiler.isStdLib:
        raise NotImplementedError('StdLib: Need ironpython2.7 distribution' + \
                                  ' in something like ../ironpython path')
Code Example #22
def createUserLibs(config):
    '''
       Loads files from user arg "listdll".
       For a .py file, creates a .dll, then adds the created
       (or listed) .dll file to compiler config.dlls.

       If assembly f_embed or f_standalone is true:
           agglomerates all dll libraries into one dll
           that is embedded and then removed from
           user arg "outDir" or the auto-named outdir.
       Else:
           adds each lib .dll file to "outDir" or outdir.

    '''

    dllNames = []
    if not compiler.lstdll:
        return []

    dllName = None
    gb = []

    if isinstance(compiler.lstdll, list):
        for resfile in compiler.lstdll:
            if '.py' not in resfile:
                #skip compile
                if '.dll' in resfile:
                    dllNames.append(resfile)
                continue

            if resfile and '.py' in resfile:
                dllName = opj(opd(compiler.mainoutdir),
                              opb(resfile).replace('.py', ''))

                dllNames.append(dllName + '.dll')

                gb.append(resfile)

            if not compiler.f_embed and not compiler.f_standalone:

                gb.extend(_getAssembly(config))
                gb.append("/out:" + dllName)
                gb.append("/target:dll")
                gb = nullList(gb)

                ipystr = [compiler.ipath + '/ipy'] + [compiler.pycpath] + gb
                #                for gbs in gb:
                #                    log.warn('\ndll gbs {}'.format(gbs))

                _subprocPYC(ipystr, dllName, '.dll')
                gb = []
                continue

        # log.error('\n compiled dllNames:\n {}'.format(dllNames))

        # make one lib dll to embed
        if compiler.f_embed or compiler.f_standalone:

            dllNames = []
            gb.extend(_getAssembly(config))

            dllName = opj(opd(compiler.mainoutdir),
                          ('.').join(compiler.mainout.split('.')[:-1]) + 'DLL')

            dllNames.append(dllName + '.dll')

            gb.append("/out:" + dllName)
            gb.append("/target:dll")
            gb = nullList(gb)

            ipystr = [compiler.ipath + '/ipy'] + [compiler.pycpath] + gb
            _subprocPYC(ipystr, dllName, '.dll')


        # if not dllName:
        #     dllName = opj(opd(compiler.mainoutdir),
        #                   ('.').join(compiler.mainout.split('.')[:-1])) + 'DLL.dll'
        # log.error('\n returning dllnames {}'.format(dllNames))
        return dllNames
    return None
Code Example #23
File: logging.py Project: wabbo69/xbmc
def LogCaller():
    fi = getframeinfo(currentframe().f_back.f_back)
    msg = '[{}] Called from: {}:{}'.format(g.__plugin__, opb(fi.filename),
                                           fi.lineno)
    xbmc.log(msg, Log.INFO)
Code Example #24
        def do_POST(self):
            content_length = int(self.headers['Content-Length'])
            post_data = json.loads(self.rfile.read(content_length))
            
            mlog(fnc="do_POST()", msg="POST req data: Last request - {}, Last quality - {}, Rebuffer Time - {}".format(
                post_data['lastRequest'], post_data['lastquality'], float(post_data['RebufferTime'] - self.input_dict['last_total_rebuf'])))
            send_data = ""

            if ( 'pastThroughput' in post_data ):
                # @Hongzi: this is just the summary of throughput/quality at the end of the load
                # so we don't want to use this information to send back a new quality
                mlog(fnc="do_POST()",
                     msg="Past throughput is present in post_data, "
                         "not using this information to send back quality")
            else:
                # option 1. reward for just quality
                # reward = post_data['lastquality']
                # option 2. combine reward for quality and rebuffer time
                #           tune up the knob on rebuf to prevent it more
                # reward = post_data['lastquality'] - 0.1 * (post_data['RebufferTime'] - self.input_dict['last_total_rebuf'])
                # option 3. give a fixed penalty if video is stalled
                #           this can reduce the variance in reward signal
                # reward = post_data['lastquality'] - 10 * ((post_data['RebufferTime'] - self.input_dict['last_total_rebuf']) > 0)

                # option 4. use the metric in SIGCOMM MPC paper
                rebuffer_time = float(post_data['RebufferTime'] -self.input_dict['last_total_rebuf'])

                # --linear reward--
                reward = VIDEO_BIT_RATE[post_data['lastquality']] / M_IN_K \
                        - REBUF_PENALTY * rebuffer_time / M_IN_K \
                        - SMOOTH_PENALTY * np.abs(VIDEO_BIT_RATE[post_data['lastquality']] -
                                                  self.input_dict['last_bit_rate']) / M_IN_K

                # --log reward--
                # log_bit_rate = np.log(VIDEO_BIT_RATE[post_data['lastquality']] / float(VIDEO_BIT_RATE[0]))   
                # log_last_bit_rate = np.log(self.input_dict['last_bit_rate'] / float(VIDEO_BIT_RATE[0]))

                # reward = log_bit_rate \
                #          - 4.3 * rebuffer_time / M_IN_K \
                #          - SMOOTH_PENALTY * np.abs(log_bit_rate - log_last_bit_rate)

                # --hd reward--
                # reward = BITRATE_REWARD[post_data['lastquality']] \
                #         - 8 * rebuffer_time / M_IN_K - np.abs(BITRATE_REWARD[post_data['lastquality']] - BITRATE_REWARD_MAP[self.input_dict['last_bit_rate']])

                self.input_dict['last_bit_rate'] = VIDEO_BIT_RATE[post_data['lastquality']]
                self.input_dict['last_total_rebuf'] = post_data['RebufferTime']

                # retrieve previous state
                if len(self.s_batch) == 0:
                    state = [np.zeros((S_INFO, S_LEN))]
                else:
                    state = np.array(self.s_batch[-1], copy=True)

                # compute bandwidth measurement
                video_chunk_fetch_time = post_data['lastChunkFinishTime'] - post_data['lastChunkStartTime']
                video_chunk_size = post_data['lastChunkSize']

                # compute number of video chunks left
                video_chunk_remain = TOTAL_VIDEO_CHUNKS - self.input_dict['video_chunk_coount']
                self.input_dict['video_chunk_coount'] += 1

                # dequeue history record
                state = np.roll(state, -1, axis=1)

                next_video_chunk_sizes = []
                for i in xrange(A_DIM):
                    next_video_chunk_sizes.append(get_chunk_size(i, self.input_dict['video_chunk_coount']))

                # this should be S_INFO number of terms
                try:
                    state[0, -1] = VIDEO_BIT_RATE[post_data['lastquality']] / float(np.max(VIDEO_BIT_RATE))
                    state[1, -1] = post_data['buffer'] / BUFFER_NORM_FACTOR
                    state[2, -1] = float(video_chunk_size) / float(video_chunk_fetch_time) / M_IN_K  # kilo byte / ms
                    state[3, -1] = float(video_chunk_fetch_time) / M_IN_K / BUFFER_NORM_FACTOR  # 10 sec
                    state[4, :A_DIM] = np.array(next_video_chunk_sizes) / M_IN_K / M_IN_K  # mega byte
                    state[5, -1] = np.minimum(video_chunk_remain, CHUNK_TIL_VIDEO_END_CAP) / float(CHUNK_TIL_VIDEO_END_CAP)
                except ZeroDivisionError:
                    # this should occur VERY rarely (1 out of 3000), should be a dash issue
                    # in this case we ignore the observation and roll back to an earlier one
                    if len(self.s_batch) == 0:
                        state = [np.zeros((S_INFO, S_LEN))]
                    else:
                        state = np.array(self.s_batch[-1], copy=True)

                # log wall_time, bit_rate, buffer_size, rebuffer_time, video_chunk_size, download_time, reward
                self.log_file.write(str(time.time()) + '\t' +
                                    str(VIDEO_BIT_RATE[post_data['lastquality']]) + '\t' +
                                    str(post_data['buffer']) + '\t' +
                                    str(rebuffer_time / M_IN_K) + '\t' +
                                    str(video_chunk_size) + '\t' +
                                    str(video_chunk_fetch_time) + '\t' +
                                    str(reward) + '\n')
                self.log_file.flush()

                action_prob = self.actor.predict(np.reshape(state, (1, S_INFO, S_LEN)))
                action_cumsum = np.cumsum(action_prob)
                bit_rate = (action_cumsum > np.random.randint(1, RAND_RANGE) / float(RAND_RANGE)).argmax()
                # Note: we need to discretize the probability into 1/RAND_RANGE steps,
                # because there is an intrinsic discrepancy in passing single state and batch states

                # send data to html side
                send_data = str(bit_rate)

                end_of_video = False
                if ( post_data['lastRequest'] == TOTAL_VIDEO_CHUNKS ):
                    send_data = "REFRESH"
                    end_of_video = True
                    self.input_dict['last_total_rebuf'] = 0
                    self.input_dict['last_bit_rate'] = DEFAULT_QUALITY
                    self.input_dict['video_chunk_coount'] = 0
                    self.log_file.write('\n')  # so that in the log we know where video ends
                    lock_path = "./locks/video_" + opb(self.input_dict['log_file_path']) + ".lock"
                    with open(lock_path, "w"):
                        pass  # just create the lock file
                    mlog(fnc="do_POST()", msg="Created lock file: {}".format(opa(lock_path)))

                self.send_response(200)
                self.send_header('Content-Type', 'text/plain')
                self.send_header('Content-Length', len(send_data))
                self.send_header('Access-Control-Allow-Origin', "*")
                self.end_headers()
                self.wfile.write(send_data)
                if len(send_data) > 0:
                    mlog(fnc="do_POST()", msg="Response to POST req: {}".format(send_data))

                # record [state, action, reward]
                # put it here after training, notice there is a shift in reward storage

                if end_of_video:
                    self.s_batch = [np.zeros((S_INFO, S_LEN))]
                else:
                    self.s_batch.append(state)
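The bit_rate line above is inverse-CDF sampling: action_cumsum is the cumulative distribution of the actor's output, and comparing it with a uniform draw quantized to 1/RAND_RANGE steps selects action i with probability action_prob[i] (the quantization being the discretization the comment mentions). A plainer equivalent without the quantization (a sketch):

import numpy as np

def sample_bitrate(action_prob):
    p = np.asarray(action_prob, dtype=float).ravel()
    # draw one action index from the categorical distribution p
    return np.random.choice(len(p), p=p / p.sum())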
Code Example #25
 def _make_classes(self, folder):
     return opb(folder).split('_')[-1]
Code Example #26
def LogCaller():
    fi = getframeinfo(currentframe().f_back.f_back)
    msg = '[{}] Called from: {}:{}'.format(g.__plugin__, opb(fi.filename), fi.lineno)
    xbmc.log(py2_encode(msg), xbmc.LOGNOTICE)