Example #1
    def get_dropbox_metadata(self, dest_dir, use_cache=True):
        # Serve metadata from the in-memory cache when allowed; otherwise fetch it from the API.
        if use_cache and dest_dir in self.dropbox_metadata:
            content = self.dropbox_metadata[dest_dir]
        else:
            resp, content = self.client.request(
                "https://api.dropbox.com/0/metadata" + urllib.quote(dest_dir), "GET"
            )
            if resp.status != 200:
                return content
            self.dropbox_metadata[dest_dir] = content
        content = json.loads(content)
        if dest_dir not in self.metadata:
            self.metadata[dest_dir] = {"modified": timelib.strtotime(content["modified"])}
        for dir_entry in content["contents"]:
            self.metadata["/dropbox" + dir_entry["path"]] = {
                "modified": timelib.strtotime(dir_entry["modified"]),
                "bytes": dir_entry["bytes"],
            }
        return content
Example #2
def add_task(out, x, chat):
    #print (x)
    with sqlite3.connect("reminders.db") as con:
        cursor = con.cursor()
        if 'now' in x:
            cursor.execute("INSERT INTO tasks VALUES (?, ?, ?)",
                           (out, timelib.strtotime(x.encode('utf-8')), chat))
        else:
            # 10800 s = 3 hours; the subtraction appears to compensate for a fixed timezone offset
            cursor.execute("INSERT INTO tasks VALUES (?, ?, ?)",
                           (out, timelib.strtotime(x.encode('utf-8')) - 10800, chat))
        con.commit()
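
A minimal usage sketch for add_task (an assumption, not part of the original bot): it presumes a tasks table whose three columns line up with the INSERT above; the column names are purely illustrative.

import sqlite3

# create the assumed three-column schema once so add_task's INSERT has a target
with sqlite3.connect("reminders.db") as con:
    con.execute("CREATE TABLE IF NOT EXISTS tasks (text TEXT, due INTEGER, chat INTEGER)")

# timelib accepts relative expressions such as "+2 hours" as well as absolute dates
add_task("buy milk", "+2 hours", 12345)
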
Example #3
    def parse_query_params(self):  # noqa C901
        query_params = {}

        since = self.request.query_params.get('since')
        if since:
            try:
                # first try to parse since with dateutil in case it is a datetime
                since_datetime = dateutil.parser.parse(since)
            except ValueError:
                try:
                    # normally we get here if since has a relative value, try to parse
                    # it with timelib
                    since_datetime = datetime.fromtimestamp(timelib.strtotime(bytes(since, 'utf-8')))
                except ValueError:
                    raise exceptions.ValidationError('Invalid value for since parameter.')

            # local timezone is assumed if no timezone is given
            if not since_datetime.tzinfo:
                since_datetime = timezone.make_aware(since_datetime)

            query_params['since'] = since_datetime

        for int_param in ('history', 'limit', 'temporal_resolution'):
            value = self.request.query_params.get(int_param)
            if value:
                try:
                    query_params[int_param] = int(value)
                except ValueError:
                    raise exceptions.ValidationError('Invalid value for %s parameter.' % int_param)

        self.parsed_query_params = query_params
Example #4
    def get(self, plow_id):
        args = parser.parse_args()
        history = args['history']

        since = args['since']
        if since:
            try:
                since = datetime.fromtimestamp(timelib.strtotime(since))
            except (ValueError, TypeError):
                since = None
Example #5
    def get(self):
        plows = db.session.query(Plow).order_by(desc('last_timestamp'))
        plow_res = SnowPlow()
        args = parser.parse_args()

        since = args['since']
        if since:
            try:
                since = datetime.fromtimestamp(timelib.strtotime(since))
            except (ValueError, TypeError):
                since = None
Example #6
def special_trust_managers(request):

    USER_EID = request.META['HTTP_UTLOGIN_EID']

    conn = TEDConnection(eid=SERVICE_EID, password=SERVICE_PASS)

    filt = '(&(utexasEduPersonSpecialTrustSw=Y)(eduPersonOrgDN=ou=UTAUS,ou=institutions,dc=entdir,dc=utexas,dc=edu)(manager=uid='+USER_EID+',ou=people,dc=entdir,dc=utexas,dc=edu))'

    attrs = ['utexasEduPersonEid','mail','displayName','utexasedupersonprimarytitle']

    search = conn.search(filt, attrs=attrs)

    subordinates = []

    for item in search:
        tmp = []
        tmp.append(item['utexasEduPersonEid'][0])
        tmp.append(item['mail'][0])
        tmp.append(item['displayName'][0])
        tmp.append(item['utexasEduPersonPrimaryTitle'][0])
        tmp.append(last_post_or_never(tmp[0]))
        
        if tmp[4] == 'Never' or strtotime(tmp[4]) < strtotime('-1 year -2 weeks'):
            tmp.append('<span class="overdue">Overdue</span>')
        elif strtotime(tmp[4]) <= strtotime('-1 year'):
            tmp.append('Due')
        else:
            tmp.append('Current')

        subordinates.append(tmp)
    
    subordinates.sort()

    person = {'eid':USER_EID}

    return render(request, 'forms/specialtrust_manager.html', {
        'person': person,
        'subordinates': subordinates
    })
Example #7
def update_posts_for_feed_task(partner):
    """
	Load and parse the RSS or ATOM feed associated with the given feed url, and for each entry, parse out the individual
	entries and save each one as a partner_feeds.
	"""
    from feedparser import parse
    from partner_feeds.models import Post
    import timelib, re, time

    feed = parse(partner.feed_url)

    for entry in feed.entries:
        p = Post()
        try:

            p.partner_id = partner.id
            p.title = entry.title

            p.subheader = entry.summary

            try:
                p.author = entry.author
            except AttributeError:
                pass

            try:
                p.guid = entry.id
            except AttributeError:
                p.guid = entry.link

            p.url = entry.link

            # try to get the date of the entry, otherwise, try the date of the feed
            try:
                entry_date = re.sub(r'\|', '', entry.date)
                entry_date = timelib.strtotime(
                    entry_date)  # convert to a timestamp
                entry_date = time.localtime(
                    entry_date
                )  # converts to a time.struct_time (with regards to local timezone)
                entry_date = time.strftime(
                    "%Y-%m-%d %H:%M:%S",
                    entry_date)  # converts to mysql date format
                p.date = entry_date
            except AttributeError:
                p.date = time.strftime("%Y-%m-%d %H:%M:%S", feed.date)

            p.save()
        except AttributeError:
            # needs logging
            pass
Example #8
def last_post(eid):
    db = MySQLdb.connect(MYSQL_HOST, MYSQL_USER, MYSQL_PASS, MYSQL_DB)
    cursor = db.cursor()
    cutoffdate = strtotime('-1 year -2 weeks -1 day')
    definitely_cc_boss_date = str(datetime.fromtimestamp(cutoffdate).strftime('%Y-%m-%d'))
    query = "SELECT COALESCE( MAX(timestamp), '{0}' ) FROM specialtrust WHERE eid = '{1}'".format(definitely_cc_boss_date, eid)
    last_post = ''
    try:
        cursor.execute(query)
        results = cursor.fetchall()
        last_post = results[0][0]
    except:
        # close the connection before bailing out so it is not leaked
        db.close()
        return ''

    db.close()
    return last_post
Example #9
def active_eids():
    db = MySQLdb.connect(MYSQL_HOST, MYSQL_USER, MYSQL_PASS, MYSQL_DB)
    cursor = db.cursor()
    timestamp = strtotime('-1 year')
    yearago = datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d')
    query = "SELECT eid FROM specialtrust WHERE timestamp >= '{0}' ORDER BY eid ASC".format(yearago)
    entries = []
    try:
        cursor.execute(query)
        results = cursor.fetchall()
        for row in results:
            entries.append(row[0])
    except:
        db.close()
        return ''
    
    db.close()
    return entries
Example #10
def last_valid_post(eid):
    db = MySQLdb.connect(MYSQL_HOST, MYSQL_USER, MYSQL_PASS, MYSQL_DB)
    cursor = db.cursor()
    timestamp = strtotime('-1 year')
    yearago = datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d')
    query = "SELECT st.* FROM ( SELECT eid, MAX( timestamp ) AS newest FROM specialtrust WHERE eid = '{0}' ) AS st_max INNER JOIN specialtrust AS st ON ( st.eid = st_max.eid AND st.timestamp = st_max.newest ) WHERE st.eid='{0}' AND st.timestamp >= '{1}'".format(eid, yearago)
    last_post = ''
    try:
        cursor.execute(query)
        last_post = cursor.fetchall()
    except:
        db.close()
        return ''

    db.close()

    # the query can come back empty when there is no post within the last year
    if not last_post:
        return ''

    date = last_post[0][0].strftime("%B %d, %Y %H:%M:%S")
    return date
Example #11
def special_trust_dump_csv(request):
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="dump_current.csv"'

    writer = csv.writer(response, quotechar='"',quoting=csv.QUOTE_NONNUMERIC,delimiter=',')
    writer.writerow(["Dept_Code", "Name", "EID", "Agree", "Timestamp", "Department", "Manager", "Manager_EID"])

    db = MySQLdb.connect(MYSQL_HOST, MYSQL_USER, MYSQL_PASS, MYSQL_DB)
    cursor = db.cursor()
    timestamp = strtotime('-1 year')
    yearago = datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d')
    query = "SELECT department, name, eid, agree, timestamp, dept_name, manager_name, manager FROM specialtrust WHERE timestamp >= '{0}'".format(yearago)
    try:
        cursor.execute(query)
        results = cursor.fetchall()
        for row in results:
            writer.writerow([row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7]])
    except:
        print("Error: unable to fetch data")

    db.close()

    return response
Example #12
def strtotime(s):
    if isinstance(s, str):
        s = s.encode("utf-8")
    return datetime.datetime.fromtimestamp(timelib.strtotime(s), tz=pytz.utc)
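
A self-contained usage sketch of this wrapper (assuming the timelib and pytz packages are installed); the definition is repeated so the snippet runs on its own, and the input strings are only illustrative.

import datetime
import pytz
import timelib

def strtotime(s):
    # timelib expects bytes, so encode str input first
    if isinstance(s, str):
        s = s.encode("utf-8")
    # interpret the parsed Unix timestamp as UTC to get an aware datetime
    return datetime.datetime.fromtimestamp(timelib.strtotime(s), tz=pytz.utc)

# both absolute dates and relative expressions are accepted
print(strtotime("2020-01-02 03:04:05"))
print(strtotime("-1 year -2 weeks"))  # resolved against the current time
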
Example #13
def special_trust_hrcontacts(request):

    USER_EID = request.META['HTTP_UTLOGIN_EID']

    conn = TEDConnection(eid=SERVICE_EID, password=SERVICE_PASS)
    
    filt = '(&(utexasEduRoleSource=OHSC)(member=uid='+USER_EID+',ou=people,dc=entdir,dc=utexas,dc=edu)(|(utexasEduRoleCode=0HC001)(utexasEduRoleCode=0DC200)(utexasEduRoleCode=0UN004))(utexasEduRoleAttribute=All))'

    attrs = ['utexasEduRoleScope']
    search = conn.search(filt, attrs=attrs)
    
    subordinates = []
    dept_search = ''
    filt = ''

    if len(search):

        for item in search:
            dept_search += "(eduPersonOrgUnitDn="+item['utexasEduRoleScope'][0]+")"

        # wrap multiple department clauses in an OR group; a single clause needs no wrapper
        if len(search) > 1:
            dept_search = "(|" + dept_search + ")"

        attrs = [
            'utexasEduPersonEid',
            'mail',
            'displayName',
            'title',
            'edupersonorgunitDN'
        ]

        filt = "(&(utexasEduPersonSpecialTrustSw=Y)(eduPersonOrgDN=ou=UTAUS,ou=institutions,dc=entdir,dc=utexas,dc=edu)" + dept_search+")"

        search = conn.search(filt, attrs=attrs)

        for item in search:
            if item['utexasEduPersonEid']:
                tmp = []
                tmp.append(item['displayName'][0])
                tmp.append(item['utexasEduPersonEid'][0])
                tmp.append(item['title'][0])
                
                if 'mail' in item:
                    tmp.append(item['mail'][0])
                else:
                    tmp.append('')
                
                tmp.append(last_post_or_never(tmp[1]))
                
                if tmp[4] == 'Never' or strtotime(tmp[4]) < strtotime('-1 year -2 weeks'):
                    tmp.append('<span class="overdue">Overdue</span>')
                elif strtotime(tmp[4]) <= strtotime('-1 year'):
                    tmp.append('Due')
                else:
                    tmp.append('Current')
                
                dept_name = item['eduPersonOrgUnitDn'][0].split(',')[0][6:].upper()
                tmp.append(dept_name)
                subordinates.append(tmp)  
        subordinates.sort()

    person = {'eid':USER_EID}

    return render(request, 'forms/specialtrust_hrcontacts.html', {
        'person': person,
        'subordinates': subordinates,
    })
Example #14
def test_epoch():
    d = timelib.strtotime("1970-01-01")
    assert d == 0, "epoch should be 0"
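
A companion test sketch (an assumption, not part of the original suite): relative expressions resolve against the current clock, so it only asserts ordering; some Python 3 builds of timelib require bytes input, hence the b-prefixed literals.

def test_relative_ordering():
    # "-1 hour" must land strictly before "now"
    an_hour_ago = timelib.strtotime(b"-1 hour")
    now = timelib.strtotime(b"now")
    assert an_hour_ago < now, "-1 hour should come before now"
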
Example #15
def str_to_time(date):
    # treat a missing date as epoch zero; otherwise let timelib parse the string
    if date is None:
        return 0
    return strtotime(date)
Example #16
def notify_users(request):
    conn = TEDConnection(eid=SERVICE_EID, password=SERVICE_PASS)
    notify_users = []

    filt = '(&(utexasEduPersonSpecialTrustSw=Y)(eduPersonOrgDN=ou=UTAUS,ou=institutions,dc=entdir,dc=utexas,dc=edu))'

    attrs = ['utexasEduPersonEid','mail','displayName']

    search = conn.search(filt, attrs=attrs)
    
    # GET SPECIAL TRUST USERS
    for item in search:
        if item['utexasEduPersonEid']:
            tmp = []
            tmp.append(item['displayName'][0])
            tmp.append(item['utexasEduPersonEid'][0])
            
            if 'mail' in item:
                tmp.append(item['mail'][0])
            else:
                tmp.append('')
            
            notify_users.append(tmp)  
    notify_users.sort()

    unable_to_notify = []

    active = active_eids()

    # Remove active eids from notify_users
    notify_users = [user for user in notify_users if user[1] not in active]
    timestamp = strtotime('-1 year -2 weeks')
    cc_manager_date = datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d')

    users_to_notify = []
    l = len(notify_users)

    for user in notify_users:
        name = user[0]
        eid = user[1]
        email = user[2]
        if email:
            attrs = ['cn','manager','edupersonorgunitdn']

            p = conn.get_by_eid(eid, attrs=attrs)

            manager = ''
            manager_email = ''
            dept_name = ''
            tmp = []

            if 'manager' in p:
                manager = p['manager'][0].split(',')[0][4:]
                attrs = ['cn','mail']
                conn1 = TEDConnection(eid=SERVICE_EID, password=SERVICE_PASS)
                if manager:
                    # only look up the manager's mail when an EID was actually extracted
                    q = conn1.get_by_eid(manager, attrs=attrs)
                    if 'mail' in q:
                        manager_email = q['mail'][0]

            if 'edupersonorgunitdn' in p:
                dept_name = p['edupersonorgunitdn']

            if len(dept_name) < 2:
                # if last_post(eid) <= cc_manager_date and manager:
            
                tmp.append(eid)
                tmp.append(name)
                tmp.append(email)
                tmp.append(manager_email)                
                tmp.append(last_post(eid))
                tmp.append(cc_manager_date)
                users_to_notify.append(tmp)
                    # cron_send_user_mail(eid,name,email,manager_email)
        else:
            unable_to_notify.append(eid)

        # cron_send_iso_mail( len(notify_users), unable_to_notify)

    #delete this after
    unable_to_notify.sort()

    return render(request, 'forms/notify_users.html', {
        'l':l,
        'users_count': len(users_to_notify),
        'users_to_notify': users_to_notify,
        'unable_count': len(unable_to_notify),
        'unable_to_notify': unable_to_notify
    })
Example #17
def update_posts_for_feed(partner):
    """ Load and parse the RSS or ATOM feed associated with the given feed url, and
    for each entry, parse out the individual entries and save each one as a partner_feeds.

    feedparser does a good job normalizing the data, but for a couple of fields we need to
    do a little more work
    """
    from feedparser import parse
    from partner_feeds.models import Post, Partner
    import timelib
    import time
    from datetime import datetime
    from django.utils.text import get_text_list

    feed = parse(partner.feed_url)

    for entry in feed.entries:

        # Required: title, link, skip the entry if it doesn't have them
        if 'title' in entry and 'link' in entry:
            p = Post(partner_id=partner.id, title=entry.title)

            # Links and GUID
            if 'id' in entry:
                p.guid = entry.id
            else:
                p.guid = entry.link
            p.url = entry.link

            # Date
            if 'date' in entry:
                entry_date = entry.date
            elif 'published' in entry:
                entry_date = entry.published
            elif 'date' in feed:
                entry_date = feed.date
            else:
                entry_date = None

            # entry.date and entry.published appear to be strings while
            # feed.date is a time.struct_time for some reason; skip parsing when no date was found
            if entry_date is not None and not isinstance(entry_date, time.struct_time):
                entry_date = timelib.strtotime(entry_date)  # convert to a timestamp
                entry_date = time.localtime(entry_date)  # converts to a time.struct_time (with regards to local timezone)

            if entry_date is not None:
                entry_date = time.strftime("%Y-%m-%d %H:%M:%S", entry_date)  # converts to mysql date format
            else:
                # no date anywhere in the feed, fall back to the current time
                entry_date = time.strftime("%Y-%m-%d %H:%M:%S")

            p.date = entry_date

            # feedparser doesn't seem to save the ATOM summary tag to
            # entry.description, but the summary is saved as one of the
            # rows in the entry.content list
            #
            # To find the summary, we loop through the list and
            # use the smallest field
            if 'content' in entry and len(entry.content) > 1:
                summary = entry.content.pop(0)['value']
                for content in entry.content:
                    if len(content['value']) < len(summary):
                        summary = content['value']
                p.description = summary
            elif 'description' in entry:
                p.description = entry.description

            if 'media_content' in entry and 'url' in entry.media_content[0]:
                p.image_url = entry.media_content[0]['url']

            if 'authors' in entry and entry.authors[0]:
                authors = [a['name'] for a in entry.authors if 'name' in a]
                p.byline = get_text_list(authors, 'and')
            elif 'author' in entry:
                p.byline = entry.author

            p.save()

    # Set the current time as when the partner feed was last retrieved
    # Needs to be an UPDATE and not a SAVE or else we will get an infinite loop
    Partner.objects.filter(
        pk=partner.pk).update(date_feed_updated=datetime.now())