Example No. 1
    def test_save_cache(self):
        self.news_article_2.pub_date = 'Fri, 23 Nov 2019 15:47:25 -0500'
        news_articles = [self.news_article_1, self.news_article_2]
        with patch('cache.read_from_file') as mocked_read:
            save_cache(news_articles, self.url)
            # check each expected call separately; `assert a and b in calls`
            # would only actually test the second membership
            assert call('20191122') in mocked_read.mock_calls
            assert call('20191123') in mocked_read.mock_calls
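The test above only pins down the observable behaviour of save_cache: it looks up existing cache entries through cache.read_from_file keyed by the article's publication date in YYYYMMDD form. A minimal sketch consistent with that contract follows; the names write_to_file and as_dict are assumptions for illustration, not the project's actual helpers.

from datetime import datetime

import cache  # assumed helper module exposing read_from_file / write_to_file


def save_cache(news_articles, source_url):
    """Hypothetical sketch only: group articles by YYYYMMDD publication date and
    merge them into whatever cache.read_from_file returns for that date."""
    for article in news_articles:
        pub = datetime.strptime(article.pub_date, '%a, %d %b %Y %H:%M:%S %z')
        date_key = pub.strftime('%Y%m%d')                 # '20191122', '20191123', ...
        cached = cache.read_from_file(date_key) or {}     # the call the test patches and inspects
        cached.setdefault(source_url, []).append(article.as_dict())  # as_dict() is assumed
        cache.write_to_file(date_key, cached)             # assumed counterpart writer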
Example No. 2
def main(argv):
    global CACHE
    dest_dir = '/home/killbots/killbots.net/random/'
    if len(argv) > 1:
        dest_dir = argv[1]
    CACHE = os.path.join(dest_dir, CACHE)

    data = cache.load_cache(CACHE)
    latest_update = get_latest_update()
    url_suffix = latest_update[0]
    time = latest_update[1:]
    places = [
        'Kita Ibaraki City', 'Takahagi City', 'Daigo Town', 'KEK', 'AIST (3F)',
        'AIST (Carpark)'
    ]
    try:
        data = get_levels(url_suffix, data)
    except:
        traceback.print_exc()
    try:
        data = get_kek(data)
    except:
        traceback.print_exc()
    try:
        data = get_aist(data)
    except:
        traceback.print_exc()
    cache.save_cache(data, CACHE)
    plot_data(places, dest_dir)
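The snippets above only show the call sites; the cache module itself never appears in the listing. Purely as a sketch of the load_cache(path) / save_cache(data, path) interface these scripts assume, a pickle-backed pair like the following would be compatible. Note that later examples call set_value on the loaded object, so the real module evidently returns a richer structure than a plain dict.

import os
import pickle


def load_cache(path):
    """Return the previously pickled cache, or an empty dict if none exists yet."""
    if not os.path.exists(path):
        return {}
    with open(path, 'rb') as f:
        return pickle.load(f)


def save_cache(data, path):
    """Pickle the cache object back to disk, replacing any previous contents."""
    with open(path, 'wb') as f:
        pickle.dump(data, f)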
Example No. 3
def update_cache(view):
    projects_cache = cache.projects_cache
    file_key, project_key = cache.get_keys(view)
    # if there is no project_key set the project_key as the file_key
    # so that we can cache on a per file basis
    if not project_key:
        project_key = file_key
    if project_key in projects_cache:
        _cache = projects_cache[project_key]
    else:
        _cache = {}

    for symbol in commands.symbol_dict:
        if '_command' in symbol:
            continue
        if symbol not in _cache:
            _cache[symbol] = {}
        _completions = completions.get_view_completions(view, symbol)
        if _completions:
            _cache[symbol][file_key] = _completions
        elif not _cache[symbol]:
            _cache.pop(symbol, None)
    if _cache:
        projects_cache[project_key] = _cache
        cache.save_cache()
Example No. 4
def main():
    """Entry point for RSS reader"""
    try:
        args = get_args()
        if args.verbose:
            logging.basicConfig(level=logging.INFO,
                                format='%(asctime)s %(message)s')

        if not args.date:
            response = check_response(go_for_rss(args.source))
            news_articles = xml_parser(response, args.limit)
            save_cache(news_articles, args.source)
        else:
            news_articles = read_cache(args.date, args.source, args.limit)

        if args.to_html or args.to_pdf:
            converter(news_articles, args.to_html, args.to_pdf)
        else:
            result = output_format(news_articles, args.json)
            print_result(result, args.limit)
    except (CacheNotFoundError, GoForRssError,
            WrongResponseTypeError, NoDataToConvertError) as ex:
        print(ex.__doc__)
Example No. 5
def update_data():
    f = open(LATEST_TXT, 'r')
    raw = unicode(f.read(), 'utf-8')
    f.close()

    start_line = re.search(r'\d{3}\s\d{1,2}:\d{1,2}', raw)
    end_line = re.search(r'\d+km', raw[start_line.end():])

    raw = raw[start_line.end():end_line.start() + start_line.end()]

    date = re.search(r'^(?P<mon>\d)(?P<day>\d{1,2})', raw, re.M)
    ts = re.search(r'^\s?(?P<hour>\d{1,2}):(?P<min>\d{2})', raw, re.M)
    raw = raw[ts.end():].strip()

    data = cache.load_cache(CACHE)
    ts = datetime.datetime(2011, int(date.group('mon')),
            int(date.group('day')), int(ts.group('hour')),
            int(ts.group('min')))
    cells = [m[0] for m in re.findall(r'((\d{1,2}\.\d{1,2})|-)? ?', raw)]
    if len(cells) > 12:
        # Hack because we can't use a variable-length lookbehind assertion
        # in the regex, so we almost always get an empty extra cell
        cells = cells[:12]
    while len(cells) < 12:
        cells.append('')
    for ii, c in enumerate(cells):
        if c.strip() == '':
            continue
        try:
            float(c.strip())
        except ValueError:
            continue
        data.set_value(ts, ii, c.strip())
    cache.save_cache(data, CACHE)
Example No. 6
    def sign_in(self, username, password):
        self.invalidate_token()  # don't pass a user token when we're trying to sign in
        result = self.make_api_call(
            "/o/token/",
            data="grant_type=password&username=%s&password=%s" % (username, password),
            method="POST",
            data_type="application/x-www-form-urlencoded")

        if "access_token" in result:
            cache_data = cache.get_cache()
            cache_data["token"] = result["access_token"]
            cache_data["token_type"] = result["token_type"]
            cache.save_cache()
        return result
Example No. 7
def load_txs():
    cache = load_cache()

    if 'last_load' in cache:
        logging.info("Last load was at %s", cache['last_load'])
        if cache['last_load'] + load_interval_max > datetime.utcnow():
            return cache['txs']

    last_tx_date = None
    for tx in cache['txs'].values():
        if not last_tx_date or tx.data['date'] > last_tx_date:
            last_tx_date = tx.data['date']

    logging.info("Latest tx cached from %s", last_tx_date)
    load_back = load_back_initial
    if last_tx_date and date.today() - last_tx_date < load_back_incremental:
        load_back = load_back_incremental
    now = date.today()
    back = now - load_back
    logging.info("Loading from %s to %s", back, now)
    acc = hbci.get_account()
    conn = hbci.get_connection()
    txs = conn.get_transactions(acc, back, now)
    new = 0
    shas_this = {}
    for tx in txs:
        sha = tx_id(tx)
        if sha not in cache['txs']:
            cache['txs'][sha] = tx
            new += 1
        if sha in shas_this:
            other = shas_this[sha]
            logging.warning("Duplicate SHA %s! \n old %s\n   vs\n new %s", sha,
                            pformat(other.data), pformat(tx.data))
        else:
            shas_this[sha] = tx
    logging.info("Fetched %d new txs. Got %d total.", new, len(cache['txs']))
    cache['last_load'] = datetime.utcnow()
    save_cache()
    return cache['txs']
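load_txs keys its cache by tx_id(tx), a content hash that makes re-downloaded transactions idempotent to store. The helper is not part of the listing; purely as an illustration of the idea, a hypothetical stand-in could hash the transaction's data dict:

import hashlib


def tx_id(tx):
    """Hypothetical stand-in: a stable SHA-256 over the transaction's data dict.
    The real helper may hash different or fewer fields; this only shows the shape."""
    canonical = repr(sorted(tx.data.items()))
    return hashlib.sha256(canonical.encode('utf-8')).hexdigest()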
Example No. 8
def cleanup():
    if not WORLD_INFO['id']:
        return False

    gfx.title('Saving cache...')
    cache.save_cache('items')
Example No. 9
class UDPRequestHandler(BaseRequestHandler):

    def get_data(self):
        return self.request[0].strip()

    def send_data(self, data):
        return self.request[1].sendto(data, self.client_address)


if __name__ == '__main__':
    host, port = '127.0.0.1', 53
    servers = [
        socketserver.ThreadingUDPServer(('', port), UDPRequestHandler)
    ]
    for s in servers:
        thread = threading.Thread(target=s.serve_forever)
        thread.daemon = True
        thread.start()
        print("%s server loop running in thread: %s" % (s.RequestHandlerClass.__name__[:3], thread.name))
    try:
        while 1:
            time.sleep(1)
            sys.stderr.flush()
            sys.stdout.flush()

    except KeyboardInterrupt:
        for s in servers:
            s.shutdown()
        cache.save_cache()
        os._exit(0)
Example No. 10
def update_stats_cache():
    global JxStateArray,JxStatsMap
    JxProfile('Load Cache')
    JxCache = load_cache()
    JxProfile('Load Cache ended')
    try:
        query = """
	select cards.factId,reviewHistory.cardId, reviewHistory.time, reviewHistory.lastInterval, reviewHistory.nextInterval, reviewHistory.ease 
	from reviewHistory,cards where cards.id = reviewHistory.cardId and cards.modified>%s 
	order by cards.factId,reviewHistory.cardId,reviewHistory.time""" % JxCache['TimeCached']
        JxStateArray = JxCache['StateArray']
    except:
        query = """
	select cards.factId ,reviewHistory.cardId, reviewHistory.time, reviewHistory.lastInterval, reviewHistory.nextInterval, reviewHistory.ease 
	from reviewHistory,cards where cards.id = reviewHistory.cardId order by cards.factId ,reviewHistory.cardId,reviewHistory.time"""
        JxStateArray = {}
    rows = mw.deck.s.all(query)	
    JxProfile("Query ended")

    length = len(rows)
    index = 0
    JxCardState = []
    JxCardStateArray = []
    StatusStart = 0
    # We will initialize other stuff on the fly !
    while index < length:
        # 0:FactId 1:CardId, 2:Time, 3: lastInterval, 4: next interval, 5:ease
        (FactId,CardId,Time,Interval,NextInterval,Ease) = rows[index]                  
        # first, we have to build a list of the days where status changes happened for this card (+ - + - + - ...)
        if (Interval <= 21 and NextInterval > 21): 
            #Card Status Change
            Day = int(Time / 86400.0)
            JxCardState.append(Day)
            if StatusStart == 0:
                StatusStart = 1
        elif (Interval > 21 and Ease == 1):
            #Card Status Change
            Day = int(Time / 86400.0)
            JxCardState.append(Day)
            if StatusStart == 0:
                StatusStart = -1		
        index += 1
        if index == length: 
            # we have finished parsing the entries. Flush the last fact and break.
            JxCardStateArray.append((StatusStart,JxCardState[:]))
            flush_facts(JxCardStateArray,CardId)
            break
            # we have finished parsing the Entries, flush the Status change
        elif CardId == rows[index][1]:
            # Same Card : Though it does nothing, we put this here for speed purposes because it happens a lot.
            pass
        elif FactId != rows[index][0]:                        
            # Fact change : Though it happens a bit less often than cardId change, we have to put it there or it won't be caught, flush the status change.
            JxCardStateArray.append((StatusStart,JxCardState[:]))
            flush_facts(JxCardStateArray,CardId)
            JxCardState = []
            JxCardStateArray = []
            StatusStart = 0
        else:
            # CardId change happens just a little bit more often than fact changes (if the deck has more than 3 card models). Store and init the card status change.
            JxCardStateArray.append((StatusStart, JxCardState[:]))
            JxCardState = []
            StatusStart = 0
    JxProfile("NewAlgorythm Ends")
    
    
    # let's partition the deck now
    #try:
        #query = """select id, factId, interval, reps from cards where modified>%s order by factId""" % dJxCache['TimeCached']
    #except:
    query = """select id, factId, interval, reps from cards order by factId"""

    rows = mw.deck.s.all(query)
    # let's create a list of Facts with all associated cards and their state : Known/Seen and produce the equivalent list for facts
    
    TempFacts={}
    def munge_row(x):
            if x[2] > 21:
                y = (x[0], 1) # Known
            elif x[3] > 0:
                y = (x[0], -1) # Seen
            else:
                y = (x[0], 0) # In Deck
            try:
                TempFacts[x[1]].append(y)
            except KeyError:
                TempFacts[x[1]] = [y]
    map(munge_row,rows)
    
    # now update the fact list to include the fact state 
    def partition(x):
            L = zip(*x[1])[1]
            if not any(L):
                Facts[x[0]]= (2, x[1])# InDeck                    
            elif sum(L)>=0 :
                Facts[x[0]]= (0, x[1])# Known
            else:
                Facts[x[0]]= (1, x[1])# Seen
    map(partition,TempFacts.iteritems())
    JxProfile(str(len(filter(lambda x:(x[0]==0),Facts.values())))+" "+str(len(filter(lambda x:(x[0]==1),Facts.values())))+" "+str(len(filter(lambda x:(x[0]==2),Facts.values()))))    


    
    
    # now cache the updated graphs
    JxCache['StateArray'] = JxStateArray
    JxCache['TimeCached'] = time.time() # among the few things that could corrupt the cache : 
    # new entries in the database before the cache was saved...sigh...
    save_cache(JxCache)
    JxProfile("Saving Cache")
Example No. 11
def compute_count(): 
    """Computes the stats"""
    
    global JxStatsArray,JxPartitionLists,NoType
    
    JxCache = load_cache()
    try:
        Query = """select cards.factId, cards.id, cards.reps, cards.interval from cards, 
	cards as mCards where mCards.modified>%s and cards.factId=mCards.factId 
	group by cards.id order by cards.factId""" % JxCache['TimeCached']
        JxStatsArray = JxCache['Stats']
        JxPartitionLists = JxCache['Partitions']
        NoType = JxCache['NoType']
    except:
        Query = """select factId, id, reps, interval from cards order by factId"""
        NoType = 0 # known/seen/in deck
        for (Type,List) in JxStatsMap.iteritems():
            for (k, Map) in enumerate(List):
                for (Key,String) in Map.Order+[('Other','Other')]:
                    if k != 1:
                        JxStatsArray[(Type,k,Key)] = (0, 0, 0, 
			        len([Item for (Item,Value) in Map.Dict.iteritems() if Value == Key])) 
                    elif Type =='Word':
                        JxStatsArray[(Type,k,Key)] = (0, 0, 0, sum([Jx_Word_Occurences[Item] 
				for (Item,Value) in Map.Dict.iteritems() if Value == Key]))
                    else:
                        JxStatsArray[(Type,k,Key)] = (0, 0, 0, sum([Jx_Kanji_Occurences[Item] 
			        for (Item,Value) in Map.Dict.iteritems() if Value == Key])) 
                    for Label in ['Known','Seen','InDeck']:
                        JxPartitionLists[(Type,k,Key,Label)] = []	
			
    # we compute known/seen/in deck/total stats for each value of each map and each type
    Rows = mw.deck.s.all(Query)  
    CardState = []
    Length = len(Rows)
    Index = 0
    while Index < Length:
        (FactId,CardId,CardRep,Interval) = Rows[Index]
        # set the card's status                       
        if Interval > 21 and CardRep:
            CardState.append(0)
        elif CardRep:
            CardState.append(1)
        else:
            CardState.append(2)
        Index += 1
        if Index == Length: 
            # we have finished parsing the entries. Flush the last fact and break.
            JxFlushFactStats(CardState,CardId)
            break
            # we have finished parsing the Entries, flush the Status change
        elif FactId == Rows[Index][0]:
            # Same Fact : Though it does nothing, we put this here for speed purposes because it happens a lot.
            pass
        else:                        
            # Fact change
            JxFlushFactStats(CardState,CardId)
            CardState = []

    # now cache the updated stats
    JxCache['Stats'] = JxStatsArray
    JxCache['Partitions'] = JxPartitionLists
    JxCache['NoType'] = NoType
    JxCache['TimeCached'] = time()  # among the few things that could corrupt the cache:
    # new entries in the database before the cache was saved... sigh...
    save_cache(JxCache)
Example No. 12
#!/usr/bin/env python

import datetime

import cache


f = open('previous.txt', 'r')
lines = f.readlines()[2:]
f.close()

dest = cache.load_cache('fukushima.dat')
for l in lines:
    cells = l.rstrip().split('\t')
    print cells, ' | ',
    ts = datetime.datetime.strptime(cells[1], '%m/%d/%Y %H:%M:%S')
    cells = cells[3:]
    print cells
    for ii, c in enumerate(cells[:7]):
        try:
            float(c.strip())
        except ValueError:
            continue
        dest.set_value(ts, ii, c.strip())
    if len(cells) >= 8:
        dest.set_value(ts, 8, cells[7].strip())
cache.save_cache(dest, 'fukushima.dat')

Example No. 13
    def invalidate_token(self):
        data = cache.get_cache()
        if "token" in data:
            del data["token"]
            cache.save_cache()
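This snippet and the sign_in example above use a different calling convention from the path-based examples: cache.get_cache() hands back a shared, mutable dict and cache.save_cache() persists it with no arguments. A minimal module with that shape might look like the sketch below; the JSON backing and file location are assumptions, not the project's actual storage.

import json
import os

CACHE_PATH = os.path.expanduser('~/.api_client_cache.json')  # hypothetical location

_cache = None


def get_cache():
    """Return the shared cache dict, loading it from disk on first access."""
    global _cache
    if _cache is None:
        if os.path.exists(CACHE_PATH):
            with open(CACHE_PATH) as f:
                _cache = json.load(f)
        else:
            _cache = {}
    return _cache


def save_cache():
    """Write the shared cache dict back to disk."""
    with open(CACHE_PATH, 'w') as f:
        json.dump(get_cache(), f)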