Example #1
 def search(self, look_for, data=False, limit=20):
     if data:
         url = '%s/search/data/%s' % (self.source, look_for)
     else:
         url = '%s/search/%s/%s' % (self.source, look_for, limit)
         #url = '%s/search/%s' % (self.source, look_for)
     #GET:
     req = urllib2.urlopen(url)
     json_raw = req.read()
     req.close()
     result = json.loads(json_raw)
     found = []
     if data:
         #for e in result['matches']:
         for e in result:
             m = Moment(data=e['data'],
                        tags=e['tags'],
                        created=e['created'],
                        path=e['path'])
             #m = Moment(data=e['data'], tags=e['tags'], created=e['created'])
             found.append(m)
     else:
         #this should be a list of tags
         #found = result['matches']
         found = result
     return found
Example #2
    def range(self, start=None, end=None):
        """
        """
        expect_entries = True
        if end:
            start = Timestamp(start)
            end = Timestamp(end)
            url = '%s/range/%s/%s' % (self.source, start.compact(),
                                      end.compact())
        elif start:
            start = Timestamp(start)
            url = '%s/range/%s' % (self.source, start.compact())
        else:
            url = '%s/range' % (self.source)
            expect_entries = False

        #print url

        req = urllib2.urlopen(url)
        response = req.read()
        req.close()
        if expect_entries:
            result = json.loads(response)
            elist = []
            for e in result['entries']:
                m = Moment(data=e['data'],
                           tags=e['tags'],
                           created=e['created'],
                           path=e['path'])
                #m = Moment(data=e['data'], tags=e['tags'], created=e['created'])
                elist.append(m)
            return elist
        else:
            return response
Example #3
 def log_obj(ff):
     rel_path = ff.relative_to(self.sp_f)
     c.execute(
         'INSERT INTO sp (file_path, is_folder, tstamp) VALUES (?, ?, ?)',
         (str(rel_path), ff.is_folder, Moment(ff.timestamp).done()))
     if ff.is_folder:
         for fff in ff:
             log_obj(fff)
Example #4
    def __get_current_index(self) -> str:
        if not self.__daily_index: return self.__index

        # use an aware UTC datetime; utcnow() returns a naive value that
        # astimezone() would treat as local time
        today = datetime.now(timezone.utc).strftime("%a, %d %b %Y %H:%M:%S GMT")
        date_pattern = Moment(today).format(self._date_format)

        return self.__index + '-' + date_pattern
Example #5
 def make(self, data, tags=[], created=None, source='', position=0):
     """
     helper for making a new entry right in a journal object
     this way should not need to import moments.entry.Entry elsewhere
     """
     if not created:
         created = datetime.now()
     entry = Moment(data, tags, created, path=source)
     #print "Journal.make.position: %s" % position
     self.update(entry, position=position)
     return entry
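A brief usage sketch for make(); the Journal class name, the note text, tags, and source path below are illustrative assumptions, not part of the example above.

# Hypothetical usage; assumes make() belongs to a Journal-like class that
# also provides the update() method referenced above.
journal = Journal()
entry = journal.make("reviewed the release checklist",
                     tags=["work", "review"],
                     source="notes.txt")
print(entry.tags)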
Example #6
 def entry(self, item=''):
     url = '%s/entry/%s' % (self.source, item)
     #GET:
     req = urllib2.urlopen(url)
     json_raw = req.read()
     req.close()
     #print json_raw
     e = json.loads(json_raw)
     #print e
     m = Moment(data=e['data'],
                tags=e['tags'],
                created=e['created'],
                path=e['path'])
     #print m
     return m
Example #7
 def entries(self):
     url = '%s/entries' % (self.source)
     #GET:
     req = urllib2.urlopen(url)
     json_raw = req.read()
     req.close()
     result = json.loads(json_raw)
     elist = []
     for e in result['entries']:
         m = Moment(data=e['data'],
                    tags=e['tags'],
                    created=e['created'],
                    path=e['path'])
         #m = Moment(data=e['data'], tags=e['tags'], created=e['created'])
         elist.append(m)
     return elist
Example #8
 def tag(self, tag_key=''):
     url = '%s/tag/%s' % (self.source, tag_key)
     #print url
     #GET:
     req = urllib2.urlopen(url)
     json_raw = req.read()
     #print "json from server: %s" % json_raw
     req.close()
     result = json.loads(json_raw)
     elist = []
     for e in result[tag_key]:
         m = Moment(data=e['data'],
                    tags=e['tags'],
                    created=e['created'],
                    path=e['path'])
         #m = Moment(data=e['data'], tags=e['tags'], created=e['created'])
         elist.append(m)
     #return { tag_key:elist }
     return elist
Example #9
def _lookup(data, tags, created):
    """
    return the Moment containing matching content passed in

    *2011.07.09 10:46:16 
    abstract "check for existing" functionality in journal.update
    may be other cases it is useful
    as in a lookup entry on the journal server, before remove
    to make sure we get the equivalent moment (not creating a copy)

    similar to journal.date()

    I think the right answer is to make an entry
    use journal.date() to get any other entries at that time
    then for each entry (even if just one)
    check if the entry is_equal
    if so, remove

    """
    global j
    moment = Moment(data=data, created=created, tags=tags)
    options = j.date(created)
    if len(options):
        matches = []
        for o in options:
            print "type moment: %s, type o: %s" % (type(moment), type(o))
            if moment.is_equal(o):
                matches.append(o)

        #we should only have one item at most if there was a match
        assert len(matches) <= 1
        if len(matches):
            return matches[0]
        else:
            return None
    else:
        #no existing option
        return None
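A hedged sketch of how _lookup() could be called, following the approach in its docstring; the data, tags, and timestamp values are made up for illustration.

# Assumes the module-level journal `j` has already been loaded.
existing = _lookup("lunch with sam", ["food"], "2011-07-09 10:46:16")
if existing is not None:
    # an equivalent Moment already exists in the journal; act on that object
    print(existing.created)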
Example #10
 def date(self, date_key=''):
     values = {}
     params = urllib.urlencode(values)
     if isinstance(date_key, Timestamp):
         ts = date_key
     else:
         ts = Timestamp(compact=date_key)
     url = '%s/date/%s' % (self.source, ts.compact())
     #print url
     #GET:
     req = urllib2.urlopen(url)
     json_raw = req.read()
     #print "json from server: %s" % json_raw
     req.close()
     result = json.loads(json_raw)
     elist = []
     for e in result[ts.compact()]:
         m = Moment(data=e['data'],
                    tags=e['tags'],
                    created=e['created'],
                    path=e['path'])
         elist.append(m)
     return {ts.compact(): elist}
Example #11
def load_game(game_path, logger):
    '''
    Loads a game from a specified JSON path and returns a Game object.
    '''
    with open(game_path, 'r') as g:
        logger.info(
            "Reading in the raw SportVU data from {}...".format(game_path))
        d = json.loads(g.read())

        logger.info("Reading in the player and team data...")
        visitor_team_name = d['events'][0]['visitor']['name']
        visitor_players = list(
            set([
                Player(x['firstname'], x['lastname'], visitor_team_name,
                       x['jersey'], x['playerid'], x['position'])
                for y in d['events'] for x in y['visitor']['players']
            ]))
        home_team_name = d['events'][0]['home']['name']
        home_players = list(
            set([
                Player(x['firstname'], x['lastname'], home_team_name,
                       x['jersey'], x['playerid'], x['position'])
                for y in d['events'] for x in y['home']['players']
            ]))
        visitor_team = Team(visitor_team_name,
                            d['events'][0]['visitor']['teamid'],
                            d['events'][0]['visitor']['abbreviation'],
                            visitor_players)
        home_team = Team(home_team_name, d['events'][0]['home']['teamid'],
                         d['events'][0]['home']['abbreviation'], home_players)
        logger.info("Reading in the location data...")
        moments = []
        for event in d['events']:
            for moment_array in event['moments']:
                moments.append(Moment(moment_array, event['eventId']))
        return Game(d['gamedate'], d['gameid'], home_team, visitor_team,
                    moments)
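A minimal usage sketch for load_game(); the JSON path is a placeholder, and game.moments is an assumption about how Game stores the list passed to it (not shown in the example).

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("sportvu")

game = load_game('data/0021500001.json', logger)  # placeholder path
print(len(game.moments))  # assumes Game keeps the moments it was built with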
Example #12
    def sync(self):
        self.from_fs()
        self.from_sp()

        c = self.conn.cursor()
        c.execute(sync_query)

        for row in c.fetchall():
            sync = (row[1], Moment(row[2]).locale('UTC') if row[2] else None)
            sp = (row[3], Moment(row[4]) if row[4] else None)
            fs = (row[5], unix(row[6], utc=True) if row[6] else None)
            # Figure out which version is newest and sync that version to the other
            local_p = self.path / row[0]
            #            print(row[0], sync, sp, fs)
            if sync[0] is None:
                if sp[0] is None and fs[0] is not None:
                    # The file only exists on the FS and hasn't previously been seen
                    # so sync it to the server
                    self.sync_to_sp(row)
                elif sp[0] is not None and fs[0] is None:
                    # The file only exists on the server and hasn't previously been seen.
                    # Sync it to the FS
                    self.sync_to_fs(row)
                elif sp[0] is not None and fs[0] is not None:
                    # The file has appeared on both sides since the last sync.
                    print(' *** Error: file {} conflict'.format(row[0]))
                else:
                    # The file has been deleted on both sides since the last sync.
                    print(' --- Deleted from Both: {}'.format(row[0]))
                    self.remove_from_sync(row)
            else:
                if sp[0] is None or fs[0] is None:
                    if sp[0] is not None:
                        self.unlink_from_sp(row)
                    if fs[0] is not None:
                        self.unlink_from_fs(row)
                    if sp[0] is None and fs[0] is None:
                        print(' --- Deleted from Both: {}'.format(row[0]))
                    self.remove_from_sync(row)
                else:
                    if sync[1] >= sp[1] and sync[1] >= fs[1]:
                        # Both sides are older than the last sync
                        if sp[0]:
                            print('     Up to Date Folder: {}'.format(row[0]))
                        else:
                            print('     Up to Date: {}'.format(row[0]))
                    elif sp[0] and fs[0]:
                        # Both sides are folders.  Leave them be.
                        print('     Up to Date Folder: {}'.format(row[0]))
                    elif sync[1] < sp[1] and sync[1] < fs[1]:
                        print(' *** Error: file {} conflict'.format(row[0]))
                        resp = ''
                        while len(resp) == 0 or (resp[0] != 'r'
                                                 and resp[0] != 'l'):
                            resp = input(
                                '     Take [R]emote or [L]ocal? ').lower()
                        if resp[0] == 'l':
                            self.sync_to_sp(row)
                        else:
                            self.sync_to_fs(row)
                    elif sp[1] >= sync[1] and sp[1] >= fs[1]:
                        # SP version is newer
                        self.sync_to_fs(row)
                    elif fs[1] >= sync[1] and fs[1] >= sp[1]:
                        # Local version is newer
                        self.sync_to_sp(row)
        self.conn.commit()
Example #13
def format_published(input_datetime_str):
    moment_obj = Moment(datetime.now()).format('DD MMMM YYYY HH:mm:ss')
    if input_datetime_str:
        moment_obj = Moment(input_datetime_str).format('DD MMMM YYYY HH:mm:ss')
    return moment_obj
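A short usage sketch, assuming Moment can parse an ISO-style datetime string (the accepted input format is not shown in the example).

print(format_published(None))                    # falls back to the current time
print(format_published('2019-04-01 12:30:00'))   # assumed-parseable input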
Example #14
def date_sorter(left, right):
    return Moment(left).format('X') - Moment(right).format('X')
Example #15
    def to_entries(self, add_tags=[], add_time=False, moments_only=False):
        """
        convert log to a list of entry objects (essentially what a log is)

        if moments_only is true, only Moments will be created
        
        if add_time is false, and moments_only is true,
        upon reaching an Entry only (*... ) (no timestamp)
        that information will be added to the previous Moment
        (this is useful when parsing data that was not originally intended to
        be used as part of a moment... it may contain lines that start with '*')
        """
        entries = []

        entry_regex = r"\*"
        entry_search = re.compile(entry_regex)

        cur_entry = Moment()
        cur_entry.path = self.name

        new_entry = None

        try:
            self.seek(0)
            line = self.readline()
            line = unicode(line)
        except:
            print "Problem reading file"
            return entries

        #first line of a log should have an entry... this is our check
        if entry_search.match(line):
            self.has_entries = True
            while line:
                #we might have found a new entry...
                #see what kind, if any:
                (ts, line_tags) = timestamp.parse_line_for_time(line)
                if ts:
                    new_entry = Moment()
                    new_entry.created = timestamp.Timestamp(ts)
                elif entry_search.match(line):
                    if not moments_only:
                        new_entry = Moment()
                    elif add_time and moments_only:
                        #ok to make a default time for the entry
                        new_entry = Moment()
                        print "no timestamp found in this entry"
                    else:
                        #must be moments only,
                        #but we don't want to add a timestamp
                        #just include the data with the previous moment
                        new_entry = None

                if new_entry:
                    #finish up last entry...
                    #only need to add if it had information
                    if cur_entry.data or cur_entry.tags:
                        entries.append(cur_entry)

                    new_entry.path = self.name

                    current_tags = line_tags.strip().split()

                    if add_tags:
                        temp_tags = add_tags[:]
                        for t in current_tags:
                            if t not in temp_tags:
                                temp_tags.append(t)
                        current_tags = temp_tags

                    new_entry.tags.extend(current_tags)
                    cur_entry = new_entry
                    new_entry = None

                else:
                    # only want to add the entry itself
                    cur_entry.data += line

                line = unicode(self.readline())

            #need to get the last entry from the file, if there is one.
            if cur_entry.data:
                entries.append(cur_entry)

        #if not, don't scan
        else:
            print "File does not start with an entry: %s" % self.name

        return entries
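A hedged usage sketch for to_entries(); the Log class name and the file name below are hypothetical stand-ins for the file-like class this method belongs to.

log = Log("2011-07-09.txt")   # hypothetical file-like log class
for entry in log.to_entries(add_tags=["imported"], moments_only=True):
    print("%s %s" % (entry.created, entry.tags))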
Example #16
 def test_moment_time_datetime(self):
     """Test that Moment.time datatype is correct."""
     moment = Moment()
     assert isinstance(moment.time, datetime.date)