Example #1
def simulate(cache, trace):
    global misses
    global hits
    for line in trace:
        splitLine = line.split()
        if len(splitLine) == 3:
            trash, op, address = splitLine
            if op == 'R':
                result = cache.read(address)
                if result == 0:
                    misses += 1
                    cache.load(address)
                    cache.read(address)
                else:
                    hits += 1

            else:
                result = cache.write(address)
                if result == 0:
                    misses += 1
                    cache.load(address)
                    cache.write(address)
                else:
                    hits += 1
    print_results(misses, hits)
    def do_POST(self):
        # Check password and auth Header
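        # A request is allowed when any of these hold: 'secure-api' is
        # disabled in the config, the path is /http_upload (which does its
        # own password check), or the X-API-Auth header matches the stored
        # password or master_key; otherwise it is rejected below.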
        if not ((not cache.read(['config'])['secure-api'])
                or self.path == '/http_upload' or
                (self.headers['X-API-Auth'] in [
                    cache.read(['config'])['password'],
                    cache.read(['config'])['master_key']
                ] if 'X-API-Auth' in self.headers.keys() else False)):
            self.send_response(500)
            self.end_headers()
            self.wfile.write(bytes('Not Authenticated.', 'utf-8'))
            return
        status = 200
        body = self.rfile.read(int(self.headers['Content-Length']))
        out = b'OK'
        try:  # try to execute a function from post_routes.py that has the same name as the first sub-directory
            function = getattr(post_routes, self.path[1:])
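            # Handlers are tried with two call conventions: first with the
            # decoded form fields, then (if that call raises) with the raw
            # headers and body, which upload-style handlers expect.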
            try:
                out = function(multidecode(body.decode()))
            except Exception:
                out = function(self.headers, body)

        except Exception as e:  # the loaded function fails:
            status = 500  # set the correct status code
            out = traceback.format_exc()  # return the traceback
            print(out)
        self.send_response(status)
        self.end_headers()
        self.wfile.write(bytes(str(out), 'utf-8'))
def http_upload(*args):
    try:
        if args[0] == {}:
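            # 'crash' appears to be deliberately undefined: the resulting
            # exception makes do_POST retry this handler with the raw
            # headers and body instead of the decoded form fields.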
            crash()

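        # 'correction' skips the subtype and blank line after 'image/' in the
        # multipart header: len('png') + len('\r\n\r\n') == 7,
        # len('jpeg') + len('\r\n\r\n') == 8.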
        correction = 8
        if 'png' in str(args[1][:400]).lower():
            correction = 7
        elif 'JFIF' in str(args[1][:400]):
            correction = 8
        img = args[1]

        password = str(args[1]).split(
            '''name="password"\\r\\n\\r\\n''')[1].split('\\r\\n------')[0]

        filename = str(
            img)[str(img).index('filename="'):str(img).index('filename="') +
                 50]
        filename = filename[filename.index('"') + 1:]
        filename = filename[:filename.index('"')]
        img = img[str(img).index('image/') + correction:]
        img = img[:str(img).index('------')]
        if not cache.read(['config'])['secure-api'] or password == cache.read(
            ['config'])['password']:
            with open('../Frontend/uploads/' + filename, 'wb') as f:
                f.write(img)
            return ''
        else:
            return '<script>alert("Authentication error")</script>'
    except ValueError:
        return '<script>alert("Generic error")</script>'
def grid_settings(*args):
    try:
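        # Double quotes in the template are swapped for single quotes so it
        # can be embedded in the double-quoted, JSON-style array string
        # returned below.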
        node_template = """<div class="node"><div class="node-content"></div><div class="node-content hidden"></div></div>""".replace(
            '"', '\'')
        return ("[" + str(cache.read(['grid', 'rows'])) + "," +
                str(cache.read(['grid', 'columns'])) + "," +
                str(len(cache.read(['update_delay']))) + ",\"" +
                node_template + "\"]")
    except Exception as e:
        print(traceback.format_exc())
    def do_GET(self):
        # self.send_header("Access-Control-Allow-Origin", "*")
        out = bytes('Generic Error/ Not authenticated', 'utf-16')
        # auto register first UUID
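        # Three cases: 'secure-gets' disabled means no auth check; a valid
        # X-API-Auth header also passes; otherwise, if no auth UUID has been
        # registered yet, the first 'auth' query parameter seen is stored as
        # the trusted UUID.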
        if not cache.read(['config', 'secure-gets']):
            pass
        elif 'X-API-Auth' in self.headers.keys(
        ) and self.headers['X-API-Auth'] in [
                cache.read(['config'])['password'],
                cache.read(['config'])['master_key']
        ]:
            pass
        elif not cache.read(['auth_uuid']):
            args = dictify(self.path.split('?')[1:])
            if 'auth' in args.keys():
                cache.write(['auth_uuid'], args['auth'])

        status = 200
        try:  # try to execute a function from get_routes.py that has the same name as the first sub-directory
            if not self.path[1:].split('?')[0].startswith('_'):
                function = getattr(
                    get_routes,
                    self.path[1:].split('?')[0])  # gets function from module
                data = {}
                if len(self.path.split(
                        '?')) > 1:  # if there are GET parameters:
                    data = dictify(
                        self.path.split('?')[1:])  # convert GETs to dict
                if not cache.read(['config', 'secure-gets']) or (
                        'auth' in data.keys()
                        and cache.read(['auth_uuid']) == data['auth']) or (
                            'X-API-Auth' in self.headers.keys()
                            and self.headers['X-API-Auth'] in [
                                cache.read(['config'])['password'],
                                cache.read(['config'])['master_key']
                            ]):
                    out = function(data)
                    out = bytes(
                        str(out),
                        'utf-16')  # convert string to returnable format
            else:
                raise AttributeError('cannot call function starting with _')
        except AttributeError:  # No dynamic route found, try loading a static file
            try:
                with open('../Frontend/' + self.path[1:],
                          'rb') as f:  # open the file
                    out = f.read()  # and read the content
            except Exception:  # the static file does not exist or cannot be read
                status = 404  # set the correct status code
                out = bytes(
                    "No dynamic or static route is linked to this path",
                    'utf-16')  # Notify the user
        except Exception as e:  # the loaded function fails:
            status = 500  # set the correct status code
            out = bytes(traceback.format_exc(),
                        'utf-16')  # return the traceback

        self.send_response(status)
        self.end_headers()
        self.wfile.write(out)
Example #6
def lookup(searchDate):
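    # One cache entry per release date; the '-v11' suffix presumably versions
    # the key so stale entries can be invalidated by bumping it.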
    slug = 'giantbomb-'+searchDate+'-v11'
    cacheContent = cache.read(slug)
    if cacheContent is not None:
        return cacheContent
    print("Hitting the GiantBomb API for "+searchDate)
    query = "http://www.giantbomb.com/api/releases/?api_key={}&format=json&filter=release_date:{},region:1".format(
        config.get().GiantBombApiKey,
        searchDate)
    headers = {
        'User-Agent': 'Olava Data Grabber',
        'From': '*****@*****.**'
    }
    # rate limit to 1 query per second
    time.sleep(1)
    response = requests.get(
        query,
        headers=headers
    )
    if response is None:
        print("Response wasn't populated for [{}].".format(query))
        import pprint
        pprint.pprint(vars(response))
        return {}
    entries = {}
    try:
        entries = response.json()
    except ValueError as e:
        print("An error occurred while grabbing JSON.")
        import pprint
        pprint.pprint(vars(response))
        return {}
    cache.write(slug, entries)
    return entries
Example #7
def buildMarkdown():
    log.log('\n> Retrieving community metrics...')
    startTime = time()

    settings = getSettings()
    markdown = ''

    if settings['dev_mode']:
        log.log('\t... done! (using a cached copy)')
        return cache.read('community_metrics.txt')
    

    if settings['sidebar']['social']['ts_enabled']:
        teamspeakMd = getTeamspeakUsersMarkdown(settings)
    else:
        log.log('\t\REDTeamspeak metrics disabled.')
        teamspeakMd = None
    
    if settings['sidebar']['social']['irc_enabled']:
        ircMd = getIrcUsersMarkdown(settings)
    else:
        log.log('\t\REDIRC metrics disabled.')
        ircMd = None

    if settings['sidebar']['social']['discord_enabled']:
        disMd = getDiscordUsersMarkdown(settings)
    else:
        log.log('\t\REDDiscord metrics disabled.')
        disMd = None


    if teamspeakMd is not None:
        markdown += teamspeakMd + '\n'
    if ircMd is not None:
        markdown += ircMd + '\n'
    if disMd is not None:
        markdown += disMd

    # markdown = teamspeakMd + '\n' + ircMd + '\n' + disMd
    # markdown = teamspeakMd + '\n' + ircMd + '\n' + ' '
    cache.save('community_metrics.txt', markdown)

    elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
    log.log('\GREENDone retrieving community metrics. %s \n' % elapsedTime)

    return markdown
Example #8
def call_api_with_cache(user_id, method, method_name, parameters={}):
    # first check the cache
    if cache.has(method_name, user_id):
        logger.debug("%s %s exists in cache." % (method_name, user_id))
        return cache.read(method_name, user_id)
    else:
        # if not in cache, call the API
        logger.debug("%s %s does not exist in cache. Will retrieve it from the web." % (method_name, user_id))
        try:
            data = call_api(method,
                            dict([(id_or_sn(user_id), user_id)]
                                 + parameters.items()))
            cache.write(method_name, user_id, data)
            return data
        except TwitterHTTPError as e:
            logger.error(e)
            #hack to prevent crawling this
            return {'error': e}
Example #9
def call_api_with_cache(user_id, method, method_name, parameters={}):
    # first check the cache
    if cache.has(method_name, user_id):
        logger.debug("%s %s exists in cache." % (method_name, user_id))
        return cache.read(method_name, user_id)
    else:
        # if not in cache call the API
        logger.debug(
            "%s %s does not exist in cache. Will retrieve it from the web." %
            (method_name, user_id))
        try:
            data = call_api(
                method,
                dict([(id_or_sn(user_id), user_id)] + parameters.items()))
            cache.write(method_name, user_id, data)
            return data
        except TwitterHTTPError as e:
            logger.error(e)
            #hack to prevent crawling this
            return {'error': e}
Example #10
def game_info(gameApiUrl):
    slug = gameApiUrl.replace("http://www.", '')
    slug = slug.replace('/', '-')
    slug += 'v11'

    cacheContent = cache.read(slug)
    if cacheContent is not None:
        return cacheContent

    print("Hitting GiantBomb API for "+slug)
    query = gameApiUrl+"?api_key={}&format=json".format(
        config.get().GiantBombApiKey)
    headers = {
        'User-Agent': 'Olava Data Grabber',
        'From': '*****@*****.**'
    }
    # rate limit to 1 query per second
    time.sleep(1)
    response = requests.get(
        query,
        headers=headers
    )
    if response is None:
        print("Response wasn't populated for [{}].".format(query))
        import pprint
        pprint.pprint(vars(response))
        return {}
    game = {}
    try:
        game = response.json()
    except ValueError as e:
        print("An error occurred while grabbing JSON.")
        import pprint
        pprint.pprint(vars(response))
        return {}
    cache.write(slug, game)
    return game
Example #11
    startTime = time()
    log.log('> Uploading livestreams spritesheet...')
    try:
        r.subreddit(settings['subreddit']).stylesheet.upload(
            settings['sidebar']['livestreams']['spritesheet_name'],
            livestreams['spritesheet_path'])
    except praw.exceptions.APIException as e:
        print(e)
    log.log('\t\GREEN...done! \BLUE(%s s)\n' %
            str(round(time() - startTime, 3)))

# Get the PRAW subreddit object
subreddit = r.subreddit(settings['subreddit'])

# Upload the new sidebar markdown if it's any different
if cache.read('sidebar_markdown.txt') != sidebar:
    startTime = time()
    log.log('> Uploading sidebar markdown...')
    subreddit.mod.update(description=sidebar)
    cache.save('sidebar_markdown.txt', sidebar)
    log.log('\t\GREEN...done! \BLUE(%s s)\n' %
            str(round(time() - startTime, 3)))
else:
    log.log('Not uploading sidebar -- it hasn\'t changed!')

# Upload the new stylesheet
# (ALWAYS! Any image changes rely on this being uploaded)
if stylesheet is not None:
    startTime = time()
    log.log('> Uploading stylesheet...')
    subreddit.stylesheet.update(stylesheet=stylesheet)
Example #12
def read_from_cache(the_id):
    global CACHE_NAME
    lines = cache.read(CACHE_NAME)
    if lines is not None and lines[0].strip() == the_id:
        return lines[1]
    return None
Example #13
def read_from_cache(the_id):
    global CACHE_NAME
    lines = cache.read(CACHE_NAME)
    if lines is not None and lines[0].strip() == the_id:
        return lines[1]
    return None
def windowtitle(*args):
    return cache.read(['title'])
Example #15
File: light_loss.py  Project: ntim/famous
    theta = gps.angleThetaMax
    phi = gps.anglePhiMax
    #
    return n, theta, phi, pos

n = np.zeros(len(files))
theta = np.zeros(len(files))
phi = np.zeros(len(files))
n_pix = np.zeros(len(files))
n_tot = np.zeros(len(files))
direction = np.zeros((len(files), 2))
center_id = [None] * len(files)

# Cache
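# cache.read() here appears to take a tuple of defaults to return when the
# cache file is missing; if the cached file list matches the current one,
# 'files' is emptied so the per-file read loop below is skipped.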
if os.path.exists(path + "/cache_light_loss.npy"):
    cached_files, n, theta, phi, n_pix, n_tot, direction, center_id = cache.read(path + "/cache_light_loss.npy", (files, n, theta, phi, n_pix, n_tot, direction, center_id))
    if np.array_equal(cached_files, files):
        print("Using cache ...")
        files = []

for i, filename in enumerate(files):
    print(i)
    n[i], theta[i], phi[i], points = read(filename)
    n_tot[i] = len(points)
    # Determine center pixel from angular coordinates
    direction[i] = famousModel.ang_to_cart(theta[i], phi[i])
    center_id[i] = find(direction[i], pixels)
    # If the simulation did not fall into a pixel, skip it
    if center_id[i] is None:
        n_tot[i] = np.nan
        continue
Example #16
def buildMarkdown():
    log.log('> Beginning to build the matchticker...')
    startTime = time()

    settings = getSettings()

    if settings['dev_mode']:
        log.log('\t...done! (using a cached copy)')
        return cache.read('matchticker.txt')

    if 'api_key' not in settings or 'gosugamers' not in settings['api_key']:
        log.error('No GosuGamers API key -- cannot build matchticker.')
        return ''

    # Get the stream information
    try:
        api_url = ''
        req = requests.get(api_url % settings['api_key']['gosugamers'])
    except requests.exceptions.RequestException as e:
        elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
        log.error('From GosuGamers API: %s %s' % (str(e), elapsedTime), 1)
        return ''
    if req.status_code == 403 or not req.ok or 'IP Address Not Allowed' in str(
            req.content):
        elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
        log.error('GosuGamers rejected our IP ' + elapsedTime, 1)
        return blankTicker(startTime)
    try:
        upcomingMatches = req.json()['matches']
    except Exception as e:
        elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
        log.error(
            'Issue with GosuGamers API JSON: %s %s' % (str(e), elapsedTime), 1)
        return ''

    # Matches to display
    matches = []
    gamesToGrab = 0

    if len(upcomingMatches) == 0:
        return blankTicker(startTime)

    if len(upcomingMatches) < settings['sidebar']['matchticker']['max_shown']:
        gamesToGrab = len(upcomingMatches)
    else:
        gamesToGrab = settings['sidebar']['matchticker']['max_shown']
    for i in range(0, gamesToGrab):
        matches.append({
            'tourny':
            prepareTournyTitle(upcomingMatches[i]['tournament']['name']),
            'team1': {
                'name':
                str(upcomingMatches[i]['firstOpponent']['shortName']),
                'cc':
                str(upcomingMatches[i]['firstOpponent']['country']
                    ['countryCode']).lower()
            },
            'team2': {
                'name':
                str(upcomingMatches[i]['secondOpponent']['shortName']),
                'cc':
                str(upcomingMatches[i]['secondOpponent']['country']
                    ['countryCode']).lower()
            },
            'time':
            getMatchTime(upcomingMatches[i]['datetime']),
            'url':
            upcomingMatches[i]['pageUrl'],
            'is_live':
            bool(upcomingMatches[i]["isLive"])
        })
    # Build the markdown
    matchtickerMd = ''
    matchMdTemplate = ('>>>\n'
                       '[~~__TOURNY__~~\n'
                       '~~__TIME__~~\n'
                       '~~__TEAM1__~~\n'
                       '~~__TEAM2__~~](__URL__#info)\n'
                       '[ ](#lang-__LANG1__)\n'
                       '[ ](#lang-__LANG2__)\n\n'
                       '>>[](#separator)\n\n')
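    # The __PLACEHOLDER__ tokens above are filled in per match below; the
    # #info, #lang-* and #separator fragments are presumably styling hooks in
    # the subreddit stylesheet.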
    matchtickerMd = '[*Match Ticker*](#heading)\n\n'
    i = 0
    for match in matches:
        matchMd = matchMdTemplate
        matchMd = (matchMd
                   .replace('__TOURNY__', match['tourny'])
                   .replace('__TIME__', match['time'])
                   .replace('__TEAM1__', match['team1']['name'])
                   .replace('__TEAM2__', match['team2']['name'])
                   .replace('__LANG1__', match['team1']['cc'])
                   .replace('__LANG2__', match['team2']['cc'])
                   .replace('__URL__', match['url']))
        matchtickerMd += matchMd
        i += 1
    matchtickerMd += '>>**[See all](http://bit.ly/1xGEuiJ#button#slim)**'

    cache.save('matchticker.txt', matchtickerMd)

    characters = '\YELLOW(%d characters)' % len(matchtickerMd)
    elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
    log.log('\t\GREEN...done! %s %s \n' % (characters, elapsedTime))

    return matchtickerMd