def parse_date_field(date_field):
    """
    Return a utcdatetime from `date_field`, which is in one of the
    following formats:

    YYYYMMDDhhmmssZ
    YYYYMMDDhhmmss+hhmm
    YYYYMMDDhhmmss-hhmm

    See http://pyopenssl.sourceforge.net/pyOpenSSL.html/openssl-x509.html
    """
    # ASN.1 GeneralizedTime: YYYYMMDDhhmmss followed by 'Z' or a +-hhmm offset
    match = re.match(
        r'(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})'
        r'(?P<hour>\d{2})(?P<minute>\d{2})(?P<second>\d{2})'
        r'(?P<timezone>[+-]\d{4}|Z)', date_field.decode('utf-8'))
    if match is None:
        raise ValueError('unparseable date field: {!r}'.format(date_field))

    isodate = '{}-{}-{}T{}:{}:{}{}'.format(
        match.group('year'),
        match.group('month'),
        match.group('day'),
        match.group('hour'),
        match.group('minute'),
        match.group('second'),
        match.group('timezone'))

    return utcdatetime.utcdatetime.from_datetime(parse_datetime(isodate))
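A minimal usage sketch with hypothetical values, assuming the surrounding module supplies re, utcdatetime, and parse_datetime; the byte strings mirror what pyOpenSSL's X509.get_notAfter() returns:

parse_date_field(b'20231231235959Z')       # -> utcdatetime for 2023-12-31 23:59:59 UTC
parse_date_field(b'20230615120000+0130')   # offset form, converted via the ISO string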
Example #2
def test_parse_datetime():
    d = iso8601.parse_datetime("2006-10-20T15:34:56Z")
    assert d.year == 2006
    assert d.month == 10
    assert d.day == 20
    assert d.hour == 15
    assert d.minute == 34
    assert d.second == 56
    assert d.tzinfo == iso8601.UTC
Example #3
def test_parse_datetime_tz():
    d = iso8601.parse_datetime("2006-10-20T15:34:56.123+02:30")
    assert d.year == 2006
    assert d.month == 10
    assert d.day == 20
    assert d.hour == 15
    assert d.minute == 34
    assert d.second == 56
    assert d.microsecond == 123000
    assert d.tzinfo.tzname(None) == "+02:30"
    offset = d.tzinfo.utcoffset(None)
    assert offset.days == 0
    assert offset.seconds == 60 * 60 * 2.5
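The offset assertions rely on how datetime.timedelta normalises its fields: 2 hours 30 minutes east of UTC is stored as 0 days and 9000 seconds. A quick standard-library check of that arithmetic:

from datetime import timedelta

offset = timedelta(hours=2, minutes=30)   # the +02:30 offset from the test above
assert offset.days == 0
assert offset.seconds == 9000             # 60 * 60 * 2.5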
Example #4
def test_parse_datetime_fraction_2():
    """From issue 6, allow slightly looser date parsing
    
    """
    d = iso8601.parse_datetime("2007-5-7T11:43:55.328Z'", strict=False)
    assert d.year == 2007
    assert d.month == 5
    assert d.day == 7
    assert d.hour == 11
    assert d.minute == 43
    assert d.second == 55
    assert d.microsecond == 328000
    assert d.tzinfo == iso8601.UTC
Example #5
def test_space_separator():
    """Handle a separator other than T
    
    """
    d = iso8601.parse_datetime("2007-06-23 06:40:34.00Z")
    assert d.year == 2007
    assert d.month == 6
    assert d.day == 23
    assert d.hour == 6
    assert d.minute == 40
    assert d.second == 34
    assert d.microsecond == 0
    assert d.tzinfo == iso8601.UTC
Example #6
def test_parse_no_timezone_no_strict():
    """issue 4 - Handle datetime string without timezone
    
    This tests what happens when you parse a date with no timezone. While not
    strictly correct this is quite common. I'll assume UTC for the time zone
    in this case.
    """
    d = iso8601.parse_datetime("2007-01-01T08:00:00", strict=False)
    assert d.year == 2007
    assert d.month == 1
    assert d.day == 1
    assert d.hour == 8
    assert d.minute == 0
    assert d.second == 0
    assert d.microsecond == 0
    assert d.tzinfo == iso8601.UTC
Example #7
def test_parse_invalid_datetime():
    # assumes the intended raises-assertion (lost in extraction): None is invalid
    with pytest.raises(iso8601.ParseError):
        iso8601.parse_datetime(None)
Example #8
def test_parse_issue_6_strict():
    # under default strict parsing, the sloppy string from issue 6 should fail
    with pytest.raises(iso8601.ParseError):
        iso8601.parse_datetime("2007-5-7T11:43:55.328Z'")
Example #9
def test_parse_datetime_different_timezone():
    tz = iso8601.FixedOffset(1, 0, "IST")
    d = iso8601.parse_datetime("2006-10-20T15:34:56", default_timezone=tz, strict=False)
    assert d.tzinfo == tz
Example #10
def test_parse_datetime_z():
    d = iso8601.parse_datetime("2006-10-20T15:34:56Z", default_timezone=iso8601.FixedOffset(1, 0, "IST"))
    assert d.tzinfo == iso8601.UTC
Example #11
def _set(self, value):
    # coerce ISO 8601 strings (str or unicode, Python 2) to datetimes
    if isinstance(value, (unicode, str)):
        value = iso8601.parse_datetime(value)
    super(DateTimeField, self)._set(value)
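A hedged usage sketch of the coercion path; the model instance and attribute name below are hypothetical, invented for illustration:

from datetime import datetime

# hypothetical model instance whose 'created' attribute is a DateTimeField
item.created = "2006-10-20T15:34:56Z"    # strings are parsed with iso8601 first
item.created = datetime(2006, 10, 20)    # non-strings pass through to the parent _set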
Example #12
def test_parse_incorrect_date_digits():
    # single-digit month/day are rejected under strict (default) parsing
    with pytest.raises(iso8601.ParseError):
        iso8601.parse_datetime("2007-1-1T08:00:00Z")
Example #13
def test_parse_no_timezone_strict():
    """Variation of issue 4: raise a ParseError when there is no time zone."""
    with pytest.raises(iso8601.ParseError):
        iso8601.parse_datetime("2007-01-01T08:00:00")
Example #14
def create_badges(gpx_inst, con_mngr, cache_mngr, force_tb_update,
                  force_owned_update):
    ''' Generate the badges from statistical data.
    
    Use the statistical data from the parser runs and the badge definitions to
    create badges and set their status.
    
    '''
    all = gpx_inst.all_caches  # note: shadows the builtin all()
    ##### LOGS #####
    if Pers.ownfoundlogcount > 0:
        avgwordcount = Pers.wordcount / Pers.ownfoundlogcount
    else:
        avgwordcount = 0
    print "Average word count: " + str(avgwordcount)
    badgeManager.setStatus('Author', avgwordcount)

    #### OVERALL COUNT #####
    badgeManager.setStatus('Geocacher', Pers.ownfoundlogcount)
    print "Geocaches " + str(Pers.ownfoundlogcount)
   
    ##### TYPES #####
    print '\n'
    types = ['Traditional Cache', 'Multi-cache', 'Unknown Cache',
             'Letterbox Hybrid', 'Earthcache', 'Wherigo Cache',
             'CITO Event Cache', 'Event Cache', 'Virtual Cache',
             'Mega Social Event Cache', 'Benchmark', 'Waymark', 'Webcam Cache',
             'Project Ape Cache']
   
    for type_ in types:
        generate_type_badges(type_)
    lostn = len([a for a in all if 'Lost and Found' in a.desc])
    badgeManager.setStatus('Lost', lostn)
    print '10 Years! Cache ' + str(lostn)
    
    ##### CONTAINERS #####
    print '\n',
    types = [u'Not chosen', u'Large', u'Micro', u'Regular', u'Small', u'Other']
    found_types = [a.cache.container for a in all]
    container_hist = {}
    for con in found_types:
        container_hist[con] = container_hist.get(con, 0) + 1

    for key in types:
        try:
            badgeManager.setStatus(key[:5], container_hist.get(key, 0))
            print key + ' ' + str(container_hist.get(key, 0))
        except NameError:
            print key + " No Match"

    ######### D/T Matrix #############
    print '\n\t',

    dtm = [(float(a.cache.difficulty), float(a.cache.terrain))
           for a in all]
    matrix = defaultdict(lambda: defaultdict(lambda: 0))
    for (dif, ter) in dtm:
        matrix[dif][ter] += 1
    difficult = terrain = [1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0]
    mcount = 0
    for dif in difficult:
        for ter in terrain:
            amount = matrix[dif][ter]
            print("%3d" % amount),
            if amount > 0:
                mcount += 1
        print "\n\n\t",
    print "Found %d of 81 D/T combinations" % mcount
   
    badgeManager.setStatus('Matrix', mcount)
   
    ####### OTHERS #####
    print '\n',
    try:
        hccs = [wpt.cache for wpt in all
                if wpt.cache.terrain == u'5' and wpt.cache.difficulty == u'5']
    except AttributeError:
        return all
    badgeManager.setStatus('Adventur', len(hccs))
    print('HCC Caches: ' + str(len(hccs)) +
          " (" + str([a.name for a in hccs]) + ")")
    
    ftfs = [a.cache for a in all if 'FTF' in a.cache.logs[0].text]
    badgeManager.setStatus('FTF', len(ftfs))
    print('FTF Caches: ' + str(len(ftfs)) +
          " (" + str([a.name for a in ftfs]) + ")")

    print('\n'),
    if not force_owned_update:
        try:
            owned = int(cache_mngr.get('OWNCACHES', 'caches_hidden'))
            ownedcaches = cache_mngr.get('OWNCACHES', 'caches')
            owned_events = int(cache_mngr.get('OWNCACHES', 'owned_events'))
        except NoOptionError:
            print "Cached data incomplete, please redownload with -o"
            owned = 0
            ownedcaches = []
            owned_events = 0
    else:
        print 'No list of hidden caches cached, retrieving new data ...'
        ownlist = con_mngr.get_owner_list()
        ownlist = re.compile("<script([^>]*)>.*?</script>",
                              re.DOTALL).sub("", ownlist)
        ownlist = re.compile("<span([^>]*)>.*?</span>",
                              re.DOTALL).sub("", ownlist)
        ownparser = OwnParser()
        ownparser.feed(ownlist)
        owned = ownparser.owncount
        owned_events = ownparser.own_event_count
        ownedcaches = ownparser.owncaches
        cache_mngr.set('OWNCACHES', 'caches_hidden', str(owned))
        cache_mngr.set('OWNCACHES', 'owned_events', str(owned_events))
        cache_mngr.set('OWNCACHES', 'caches', str(ownedcaches))
        with open('cache.dat', 'ab') as cachefile:
            cache_mngr.write(cachefile)
   
    badgeManager.setStatus('Owner', owned)
    print "Owner of " + str(owned) + ' Caches: ' + str(ownedcaches)

    badgeManager.setStatus('Host', owned_events)
    print "Hosted " + str(owned_events) + ' Events.'
    
    scuba = [a.name for a in all
             if (5,1) in [(b.id, b.inc) for b in a.cache.attributes]]
    badgeManager.setStatus('Scuba', len(scuba))
    
   
    ##### COUNTRIES #######
    print '\n',
    travel = [a.cache.country for a in all]
    travel_hist = {}
    for country in travel:
        travel_hist[country] = travel_hist.get(country, 0) + 1
    badgeManager.setStatus('Travelling', len(travel_hist))
    print 'Countries traveled ' + str(len(travel_hist)) + ': ' + str(travel_hist)
   
    try:
        with open("statelist.txt",'r') as filehandle:
            statelist = filehandle.read()
    except IOError:
        # Couldn't read file, download new.
        try:
            statelist = con_mngr.getcountrylist()
        except Exception:
            # Failed, abort.
            print "Not able to retrieve country list"
            raise
        else:
            # New statelist downloaded, saving for further use.
            try: 
                with open("statelist.txt",'w') as filehandle:
                    filehandle.write(statelist)
            except IOError:
                print("Could not write 'statelist.txt' file.\n"
                      "Continuing without saving")
    if statelist:
        # Only generate with valid statelist, else skip
        badgeManager.setCountryList(statelist)
        for country in travel_hist.keys():
            cbadge = stateBadge(country)
            cbadge.setStatus(len(Pers.stateList[country]))
            badgeManager.addBadge(cbadge)      
            print('Visited ' + str(len(Pers.stateList[country])) +
                  ' state(s) in ' + country + "\n\t" + 
                  str(Pers.stateList[country].keys()))
   
    ##### GEOGRAPHY #######
    print('\n'),
    badgeManager.setStatus('Clouds', Pers.hMax)
    print("Found cache above " + str(Pers.hMax) + "m N.N.")
    badgeManager.setStatus('Gound', Pers.hMin)
    print("Found cache below " + str(Pers.hMin) + "m N.N.")
    badgeManager.setStatus('Distance', Pers.max_distance[1])
    print("Found cache " + str(Pers.max_distance[0]) + " in " +
          str(Pers.max_distance[1]) + "km distance")
       
    #### COINS ##########
    print('\n'),
    if (cache_mngr.has_option('TRAVELITEMS', 'coins') and
            cache_mngr.has_option('TRAVELITEMS', 'travelbugs') and
            not force_tb_update):
        coins = int(cache_mngr.get('TRAVELITEMS', 'coins'))
        tbs   = int(cache_mngr.get('TRAVELITEMS', 'travelbugs'))
    else:
        print 'No Coin list cached, retrieving new data ...'
        coinlist = con_mngr.getmycoinlist()
        coinlist = re.compile("<script([^>]*)>.*?</script>", 
                              re.DOTALL).sub("", coinlist)
        coinlist = re.compile("<span([^>]*)>.*?</span>", 
                              re.DOTALL).sub("", coinlist)
        coinparser = CoinParser()
        coinparser.feed(coinlist)
        coins = coinparser.CoinCount
        tbs = coinparser.TBCount
        cache_mngr.set('TRAVELITEMS', 'travelbugs', str(tbs))
        cache_mngr.set('TRAVELITEMS', 'coins', str(coins))
        with open('cache.dat', 'wb') as cachefile:
            cache_mngr.write(cachefile)
   
    badgeManager.setStatus('Coin', coins)
    print "Coins " + str(coins)
    badgeManager.setStatus('Travelbug', tbs)
    print "Travelbugs " + str(tbs)
    
    #### DATE ##########
    print('\n'),
    cachebyday = defaultdict(lambda: 0)
    cachebydate = defaultdict(lambda: 0)
    for wpt in all:
        if 'Z' not in wpt.cache.logs[0].date:
            wpt.cache.logs[0].date += 'Z'
        found = wpt.cache.logs[0].date
        # key the first histogram by full date, the second by month-day
        cachebyday[parse_datetime(found).date()] += 1
        cachebydate[parse_datetime(found).date().strftime('%m-%d')] += 1
    maxfind = max(cachebyday.values()) if cachebyday else 0
    for (key, value) in cachebyday.items():
        if value == maxfind:
            maxfinddate = key
    badgeManager.setStatus('Busy', maxfind)
    # print("Found %i caches on %s" % (maxfind, maxfinddate))
    badgeManager.setStatus('Calendar', len(cachebydate))
    print("Found caches on %d dates" % len(cachebydate))
    days = sorted(cachebyday.keys())
    maxdays = dayscount = 1
    prev = None
    for date in days:
        # keys are datetime.date objects, so a streak is just a one-day gap;
        # this also handles month and year boundaries correctly
        if prev is not None and (date - prev).days == 1:
            dayscount += 1
        else:
            maxdays = max(dayscount, maxdays)
            dayscount = 1
        prev = date
    # a streak that runs through the final date still has to be counted
    maxdays = max(dayscount, maxdays)
    badgeManager.setStatus('Daily', maxdays)
    print "Found caches on %i consecutive days" % maxdays
Example #15
def EndElementHandler(self, name) :
    if self.level == 2 :
        if name == "array" :
            raise DoneException()
    if self.level == 3 :
        if name == "key" :
            self.currentmode = "data"
            self.currentdict[self.currentkey] = ""
        elif name == "dict" :
            # a closing <dict> at level 3 marks one complete track entry
            self.level = 2
            if "Album" not in self.currentdict :
                self.currentdict["Album"] = "*Unknown Album*"
            if "Artist" not in self.currentdict :
                self.currentdict["Artist"] = "*Unknown Artist*"
            albums = Album.objects.filter(album_library__id = self.lib.id, album_name = self.currentdict["Album"])
            album = None
            if len(albums) == 0 :
                album = Album(album_library = self.lib,
                              album_name = self.currentdict["Album"],
                              album_synchronized = True)
                album.save()
            else :
                album = albums[0]
                album.album_synchronized = True
                album.save()
            songs = Song.objects.filter(song_filename = self.currentdict["Location"]) # assuming filename is a good enough primary key
            song = None
            d = self.currentdict
            try :
                modified = iso8601.parse_datetime(d.get("Date Modified", None))
            except Exception :
                # missing or malformed "Date Modified" value
                modified = None
            if (not skipiTunesKind(d["Kind"])) and (modified is not None) :
                if len(songs) == 0 :
                    song = Song(song_name = d.get("Name", None),
                                song_grouping = d.get("Grouping", None),
                                song_composer = d.get("Composer", None),
                                song_artist = d.get("Artist", None),
                                song_album = album,
                                song_genre = d.get("Genre", None),
                                song_time = d.get("Total Time", None),
                                song_tracknum = d.get("Track Number", None),
                                song_numbertracks = d.get("Track Count", None),
                                song_discnum = d.get("Disc Number", None),
                                song_numberdiscs = d.get("Disc Count", None),
                                song_filetype = iTunesKindToMime(d["Kind"]),
                                song_filesize = d.get("Size", None),
                                song_bitrate = d.get("Bit Rate", None),
                                song_filename = d.get("Location", None),
                                song_synchronized = True,
                                song_modified = modified)
                    song.save()
                elif songs[0].song_modified < modified :
                    songs.update(song_name = d.get("Name", None),
                                 song_grouping = d.get("Grouping", None),
                                 song_composer = d.get("Composer", None),
                                 song_artist = d.get("Artist", None),
                                 song_album = album,
                                 song_genre = d.get("Genre", None),
                                 song_time = d.get("Total Time", None),
                                 song_tracknum = d.get("Track Number", None),
                                 song_numbertracks = d.get("Track Count", None),
                                 song_discnum = d.get("Disc Number", None),
                                 song_numberdiscs = d.get("Disc Count", None),
                                 song_filetype = iTunesKindToMime(d["Kind"]),
                                 song_filesize = d.get("Size", None),
                                 song_bitrate = d.get("Bit Rate", None),
                                 song_filename = d.get("Location", None),
                                 song_synchronized = True,
                                 song_modified = modified)
                else :
                    songs.update(song_synchronized = True)
            else :
                if modified is None :
                    self.skippedSongs.append((d.get("Location"), "Bad last modified string"))
                else :
                    self.skippedSongs.append((d.get("Location"), "Type \""+d["Kind"]+"\" is unsupported"))
        else :
            self.currentmode = None
Example #16
def test_parse_invalid_datetime2():
    # "23" alone is not a parseable datetime
    with pytest.raises(iso8601.ParseError):
        iso8601.parse_datetime("23")
Example #17
import sys
import json
import codecs
import iso8601
import time

col_names = ['date', 'size', 'activity', 'perms', 'uid', 'gid', 'inode', 'file']

mactime_file = open(sys.argv[1])
output_file = codecs.open(sys.argv[2], 'w', encoding="utf-8")

content = mactime_file.read()

# naive CSV split: assumes no commas inside the file-name field
records = [line.split(',') for line in content.strip().split('\n')]

for record in records:
    json_dict = dict()
    for index, value in enumerate(record):
        if index == 0:
            date = iso8601.parse_datetime(value)
            # note: mktime interprets the tuple as local time; tzinfo is dropped
            timestamp = time.mktime(date.timetuple())
            json_dict[col_names[index]] = int(timestamp)
        elif index == 1:  # convert the size to int for query selection purposes
            json_dict[col_names[index]] = int(value)
        else:
            json_dict[col_names[index]] = value
    json.dump(json_dict, output_file)
    output_file.write('\n')

mactime_file.close()
output_file.close()
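A hedged invocation sketch (script and file names hypothetical); the expected input is a comma-delimited mactime body-file timeline, such as The Sleuth Kit's mactime -d output:

python mactime_to_json.py timeline.csv timeline.jsonl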