Example #1
	def get_sensor_data_for(self, deviceId, type = 0, start = None, end = None):
		cursor = self.conn.cursor()
		
		params = (type,)
		whereAppend = ''
		if start is not None:
			whereAppend += 'AND sensor_data.timestamp > ? '
			params = params + (calendar.timegm(start),)
		if end is not None:
			whereAppend += 'AND sensor_data.timestamp < ? '
			params = params + (calendar.timegm(end),)
			
		sql = 'SELECT sensor_data.value, sensor_data.timestamp, devices.name, devices.type FROM sensor_data INNER JOIN devices ON sensor_data.device_id = devices.device_id '
		sql = sql + ' WHERE sensor_data.type = ? '
		if whereAppend:
			sql = sql + whereAppend
		sql = sql + 'ORDER BY sensor_data.timestamp ASC'
		
		print sql
		print whereAppend
		print params
		res = cursor.execute(sql, params)
		rows = res.fetchall()
		cursor.close()
		return rows
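Note that calendar.timegm() expects a UTC struct_time, so start and end are presumably passed as time tuples rather than datetime objects. A hypothetical call, with store standing in for the owning object:

    rows = store.get_sensor_data_for('dev-1', type=2, start=datetime(2014, 5, 26).utctimetuple())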
Example #2
 def test_listdir_full_list_get_everything(self):
     """Verify we get all the values, even if we can't fully check them."""
     path = self._setup_listdir()
     dirlist = self.fs.listdir(path, ignore_directories=False,
                               ignore_files=False, include_size=True,
                               include_type=True, include_time=True,
                               recursive=True)
     entries = [dd for dd in dirlist]
     self.assertEquals(6, len(entries), msg="%r" % entries)
     self.assertEquals(4, len(entries[0]), msg="%r" % entries)
     self.assertEquals(path + '/file1.dat', entries[0][0], msg="%r" % entries)
     self.assertEquals(0, entries[0][1], msg="%r" % entries)
     self.assertTrue(re.match(r'[-f]', entries[0][2]), msg="%r" % entries)
     self.assertTrue(timegm(datetime.now().timetuple()) -
                     timegm(entries[0][3].timetuple()) < TIMESTAMP_DELAY)
     self.assertEquals(4, len(entries[1]), msg="%r" % entries)
     self.assertEquals(path + '/file2.dat', entries[1][0], msg="%r" % entries)
     self.assertEquals(4, len(entries[2]), msg="%r" % entries)
     self.assertEquals(path + '/sub1', entries[2][0], msg="%r" % entries)
     self.assertEquals(4, len(entries[3]), msg="%r" % entries)
     self.assertEquals(path + '/sub1/file3.dat', entries[3][0], msg="%r" % entries)
     self.assertEquals(4, len(entries[4]), msg="%r" % entries)
     self.assertEquals(path + '/sub2', entries[4][0], msg="%r" % entries)
     self.assertEquals(4, len(entries[5]), msg="%r" % entries)
     self.assertEquals(path + '/sub2/file4.dat', entries[5][0], msg="%r" % entries)
Example #3
    def scan_list(self, start_time=None, end_time=None, **kwargs):
        """List scans stored in Security Center in a given time range.

        Time is given in UNIX timestamps, assumed to be UTC. If a `datetime` is
        passed it is converted. If `end_time` is not specified it is NOW. If
        `start_time` is not specified it is 30 days previous from `end_time`.

        :param start_time: start of range to filter
        :type start_time: date, datetime, int
        :param end_time: end of range to filter
        :type end_time: date, datetime, int

        :return: list of dictionaries representing scans

        """

        try:
            end_time = datetime.utcfromtimestamp(int(end_time))
        except TypeError:
            if end_time is None:
                end_time = datetime.utcnow()

        try:
            start_time = datetime.utcfromtimestamp(int(start_time))
        except TypeError:
            if start_time is None:
                start_time = end_time - timedelta(days=30)

        data = {"startTime": calendar.timegm(start_time.utctimetuple()),
                "endTime": calendar.timegm(end_time.utctimetuple())}
        data.update(kwargs)

        result = self.raw_query("scanResult", "getRange", data=data)
        return result["scanResults"]
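Hypothetical usage against a Security Center client instance sc; an int is interpreted as a UNIX timestamp, while a datetime is used as-is:

    scans = sc.scan_list(start_time=1400000000)
    scans = sc.scan_list(end_time=datetime(2014, 6, 1))  # start_time defaults to 30 days earlier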
Example #4
def verify_signature(payload, signing_input, header, signature, audience_sk,
                     verify_expiration=True, leeway=0, audience=None):

    if isinstance(leeway, timedelta):
        leeway = timedelta_total_seconds(leeway)

    issuer_pk = payload.get('iss')
    if issuer_pk is None:
        issuer_pk = payload.get('sub')

    if issuer_pk is None:
        raise DecodeError("Issuer or subject not found.")

    issuer_pk = xid_to_key(issuer_pk)

    if not _verify(signing_input, issuer_pk, audience_sk, signature):
        raise DecodeError('Signature verification failed')

    if 'nbf' in payload and verify_expiration:
        utc_timestamp = timegm(datetime.utcnow().utctimetuple())

        if payload['nbf'] > (utc_timestamp + leeway):
            raise ExpiredSignatureError('Signature not yet valid')

    if 'exp' in payload and verify_expiration:
        utc_timestamp = timegm(datetime.utcnow().utctimetuple())

        if payload['exp'] < (utc_timestamp - leeway):
            raise ExpiredSignatureError('Signature has expired')
Example #5
    def get_tables(self, dataset_id, app_id, start_time, end_time):
        """Retrieve a list of tables that are related to the given app id
        and are inside the range of start and end times.

        Args:
            dataset_id: The BigQuery dataset id to consider.
            app_id: The appspot name
            start_time: The datetime or unix time after which records will be
                        fetched.
            end_time: The datetime or unix time up to which records will be
                      fetched.

        Returns:
            A list of table names.
        """

        if isinstance(start_time, datetime):
            start_time = calendar.timegm(start_time.utctimetuple())

        if isinstance(end_time, datetime):
            end_time = calendar.timegm(end_time.utctimetuple())

        every_table = self._get_all_tables(dataset_id)
        app_tables = every_table.get(app_id, {})

        return self._filter_tables_by_time(app_tables, start_time, end_time)
Example #6
 def _get_utilization(self, cr, uid, ids, name, arg, context=None):
     res = {}
     for meter in self.browse(cr, uid, ids, context=context):
         Dn = 1.0*calendar.timegm(time.strptime(time.strftime('%Y-%m-%d',time.gmtime()),"%Y-%m-%d"))
         Da = Dn - 3600*24*meter.av_time
         meter_line_obj = self.pool.get('mro.pm.meter.line')
         meter_line_ids = meter_line_obj.search(cr, uid, [('meter_id', '=', meter.id),('date', '<=', time.strftime('%Y-%m-%d',time.gmtime(Da)))], limit=1, order='date desc')
         if not len(meter_line_ids):
             meter_line_ids = meter_line_obj.search(cr, uid, [('meter_id', '=', meter.id),('date', '>', time.strftime('%Y-%m-%d',time.gmtime(Da)))], limit=1, order='date')
             if not len(meter_line_ids):
                 res[meter.id] = meter.min_utilization
                 continue
         meter_line = meter_line_obj.browse(cr, uid, meter_line_ids[0])
         Dci = 1.0*calendar.timegm(time.strptime(meter_line.date, "%Y-%m-%d"))
         Ci = meter_line.total_value
         number = 0
         Us = 0
         meter_line_ids = meter_line_obj.search(cr, uid, [('meter_id', '=', meter.id),('date', '>',meter_line.date)], order='date')
         for meter_line in meter_line_obj.browse(cr, uid, meter_line_ids):
             Dci1 = 1.0*calendar.timegm(time.strptime(meter_line.date, "%Y-%m-%d"))
             Ci1 = meter_line.total_value
             if Dci1 != Dci:
                 Us = Us + (3600*24*(Ci1 - Ci))/(Dci1 - Dci)
                 Dci = Dci1
                 Ci = Ci1
                 number += 1
         if number:
             U = Us/number
             if U < meter.min_utilization:
                 U = meter.min_utilization
         else:
             U = meter.min_utilization
         res[meter.id] = U
     return res
Example #7
def time_parse(s):
    try:
        epoch = int(s)
        return epoch
    except ValueError:
        pass

    try:
        epoch = int(calendar.timegm(time.strptime(s, '%Y-%m-%d')))
        return epoch
    except ValueError:
        pass

    try:
        epoch = int(calendar.timegm(time.strptime(s, '%Y-%m-%d %H:%M:%S')))
        return epoch
    except ValueError:
        pass

    m = re.match(r'^(?=\d)(?:(\d+)w)?(?:(\d+)d)?(?:(\d+)h)?(?:(\d+)m)?(?:(\d+)s?)?$', s, re.I)
    if m:
        return -1*(int(m.group(1) or 0)*604800 +  \
                int(m.group(2) or 0)*86400+  \
                int(m.group(3) or 0)*3600+  \
                int(m.group(4) or 0)*60+  \
                int(m.group(5) or 0))

    raise ValueError('Invalid time: "%s"' % s)
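A quick sketch of the three input shapes time_parse() accepts; the date forms are interpreted as UTC (hypothetical calls):

    print(time_parse('1400000000'))  # 1400000000 (already an epoch)
    print(time_parse('2014-05-26'))  # 1401062400 (UTC midnight)
    print(time_parse('1h30m'))       # -5400 (durations come back as negative seconds)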
Example #8
    def test_age(self):
        now = datetime.now(tz=tz.tzutc())
        three_months = now - timedelta(90)
        two_months = now - timedelta(60)
        one_month = now - timedelta(30)

        def i(d):
            return instance(LaunchTime=d)

        fdata = {
            "type": "value",
            "key": "LaunchTime",
            "op": "less-than",
            "value_type": "age",
            "value": 32,
        }

        self.assertFilter(fdata, i(three_months), False)
        self.assertFilter(fdata, i(two_months), False)
        self.assertFilter(fdata, i(one_month), True)
        self.assertFilter(fdata, i(now), True)
        self.assertFilter(fdata, i(now.isoformat()), True)
        self.assertFilter(fdata, i(calendar.timegm(now.timetuple())), True)
        self.assertFilter(fdata, i(str(calendar.timegm(now.timetuple()))), True)
Example #9
 def __init__(self,file):
     while True:
         line=file.readline()
         if line[0]=='-':
             #end of header
             break
         lineSplitPos=line.find(":")
         if lineSplitPos>=0:
             lineType=line[0:lineSplitPos]
             lineValue=line[lineSplitPos+1:]
             if lineType=='UUID':
                 self.patientID=lineValue
             elif lineType=='Sampling Rate':
                 self.samplingRate=float(lineValue)
             elif lineType=='Start Time':
                 offsetPos=lineValue.find("Offset:")
                 if offsetPos>0:
                     self.startTime=calendar.timegm(time.strptime(lineValue[0:offsetPos],' %Y-%m-%d %H:%M:%S '))
                     self.hoursOffset=int(lineValue[offsetPos+7:])
                     self.startTime-=self.hoursOffset*60*60
                 else:
                     self.startTime=calendar.timegm(time.strptime(lineValue,' %Y-%m-%d %H:%M:%S '))
                     self.hoursOffset=0
     self.numSignals = 6
     fileLen = os.fstat(file.fileno()).st_size
     curPos = file.tell()
     self.recordCount = (fileLen - curPos) // 14
Example #10
    def perform_post_validation_configuration_changes(self, config):
        """
        Take the date strings entered into the data point config and set the start and end time elements

        Note
        ----
        This method also updates the GUID config element to ensure that the aggregation controller does not filter out
        content before it is sent to the search server based on time.
        """
        start_date = [e for e in config['elements'] if e['name'] == 'start_date'][0]['value']
        start_time_element = [e for e in config['elements'] if e['name'] == 'start_time'][0]
        start_time_element['value'] = '%i' % calendar.timegm(datetime.datetime.strptime(start_date, '%m/%d/%Y').timetuple())

        end_date = [e for e in config['elements'] if e['name'] == 'end_date'][0]['value']
        end_time_element = [e for e in config['elements'] if e['name'] == 'end_time'][0]
        end_time_element['value'] = '%i' % calendar.timegm(datetime.datetime.strptime(end_date, '%m/%d/%Y').timetuple())

        config['elements'].append({
            'name':'guid',
            'display_name':'Guid',
            'help':'',
            'type':'hidden',
            'value':config['id']})

        metrics_element = [e for e in config['elements'] if e['name'] == 'metrics'][0]
        formatted_selected_metrics = ['extensions_%s_f' % m.replace('ga:', '') for m in metrics_element['value']]
        config['meta_data'] = [m for m in config['meta_data'] if m['name'] in formatted_selected_metrics]

        return config
Example #11
    def test_can_get_domain_reviews(self):
        dt = datetime(2010, 11, 12, 13, 14, 15)
        dt_timestamp = calendar.timegm(dt.utctimetuple())

        dt2 = datetime(2011, 12, 13, 14, 15, 16)
        dt2_timestamp = calendar.timegm(dt2.utctimetuple())

        domain = DomainFactory.create(url="http://www.domain-details.com", name="domain-details.com")

        page = PageFactory.create(domain=domain, last_review_date=dt)
        page2 = PageFactory.create(domain=domain, last_review_date=dt2)

        ReviewFactory.create(page=page, is_active=True, is_complete=True, completed_date=dt, number_of_violations=20)
        ReviewFactory.create(page=page, is_active=False, is_complete=True, completed_date=dt2, number_of_violations=30)
        ReviewFactory.create(page=page2, is_active=True, is_complete=True, completed_date=dt2, number_of_violations=30)
        ReviewFactory.create(page=page2, is_active=False, is_complete=True, completed_date=dt, number_of_violations=20)

        response = yield self.http_client.fetch(
            self.get_url('/domains/%s/reviews/' % domain.name)
        )

        expect(response.code).to_equal(200)

        domain_details = loads(response.body)

        expect(domain_details['pages']).to_length(2)

        expect(domain_details['pages'][1]['url']).to_equal(page2.url)
        expect(domain_details['pages'][1]['uuid']).to_equal(str(page2.uuid))
        expect(domain_details['pages'][1]['completedAt']).to_equal(dt2_timestamp)

        expect(domain_details['pages'][0]['url']).to_equal(page.url)
        expect(domain_details['pages'][0]['uuid']).to_equal(str(page.uuid))
        expect(domain_details['pages'][0]['completedAt']).to_equal(dt_timestamp)
Example #12
def epoch_in_seconds(epoch):
    """
    >>> epoch_in_seconds(datetime_from_seconds(-12345678999.0001))
    -12345679000
    """
    if epoch == 0:
        # if you specify zero year, assume you meant AD 0001 (Anno Domini, Christ's birth date?)
        epoch = datetime.datetime(1, 1, 1)
    try:
        epoch = int(epoch)
    except (TypeError, ValueError):
        try:
            epoch = float(epoch)
        except (TypeError, ValueError):
            pass
    # None will use the default epoch (1970 on Unix)
    epoch = epoch or DEFAULT_DATETIME_EPOCH
    if epoch:
        try:
            return calendar.timegm(epoch.timetuple())
        except AttributeError:  # epoch is a number, not a date/datetime
            try:
                return calendar.timegm(datetime.date(epoch, 1, 1).timetuple())
            except (TypeError, ValueError):  # non-int or out-of-range epoch
                try:
                    epoch = float(epoch)
                    assert abs(int(float(epoch))) <= 5000
                    return calendar.timegm(datetime.date(int(epoch), 1, 1).timetuple())
                except (ValueError, AssertionError):
                    pass
    return epoch
Example #13
        def check_changes(r):
            self.assertEquals(len(self.request.addedChanges), 2)
            change = self.request.addedChanges[0]

            self.assertEquals(change["files"], ["filepath.rb"])
            self.assertEquals(change["repository"], "http://github.com/defunkt/github")
            self.assertEquals(calendar.timegm(change["when_timestamp"].utctimetuple()), 1203116237)
            self.assertEquals(change["author"], "Fred Flinstone <*****@*****.**>")
            self.assertEquals(change["revision"], "41a212ee83ca127e3c8cf465891ab7216a705f59")
            self.assertEquals(change["comments"], "okay i give in")
            self.assertEquals(change["branch"], "master")
            self.assertEquals(
                change["revlink"], "http://github.com/defunkt/github/commit/" "41a212ee83ca127e3c8cf465891ab7216a705f59"
            )

            change = self.request.addedChanges[1]
            self.assertEquals(change["files"], ["modfile", "removedFile"])
            self.assertEquals(change["repository"], "http://github.com/defunkt/github")
            self.assertEquals(calendar.timegm(change["when_timestamp"].utctimetuple()), 1203114994)
            self.assertEquals(change["author"], "Fred Flinstone <*****@*****.**>")
            self.assertEquals(change["src"], "git")
            self.assertEquals(change["revision"], "de8251ff97ee194a289832576287d6f8ad74e3d0")
            self.assertEquals(change["comments"], "update pricing a tad")
            self.assertEquals(change["branch"], "master")
            self.assertEquals(
                change["revlink"], "http://github.com/defunkt/github/commit/" "de8251ff97ee194a289832576287d6f8ad74e3d0"
            )
Example #14
def get_collection(request):
    if 'path' not in request.GET:
        raise HTTPBadRequest()
    path = request.GET['path']
    logger.debug(path)

    try:
        obj = DataStoreSession.collections.get(str(path))
    except CollectionDoesNotExist:
        try: 
            obj = DataStoreSession.data_objects.get(str(path))
        except DataObjectDoesNotExist:
            raise HTTPNotFound()

    logger.debug(obj)

    response = {
        'name': obj.name,
        'path': obj.path,
        'metadata': [m.__dict__ for m in obj.metadata.items()],
        'is_dir': isinstance(obj, iRODSCollection),
    }
    if isinstance(obj, iRODSDataObject):
        response['size'] = obj.size
        response['create_time'] = timegm(obj.create_time.utctimetuple())
        response['modify_time'] = timegm(obj.modify_time.utctimetuple())
        response['checksum'] = obj.checksum
    return response
Example #15
 def transform_python_types(self, obj):
     """handle special scalars, default to default json encoder
     """
     # Pandas Timestamp
     if is_pandas and isinstance(obj, pd.Timestamp):
         return obj.value / 10**6.0  # nanoseconds to milliseconds
     elif np.issubdtype(type(obj), np.floating):
         return float(obj)
     elif np.issubdtype(type(obj), np.integer):
         return int(obj)
     elif np.issubdtype(type(obj), np.bool_):
         return bool(obj)
     # Datetime
     # datetime is a subclass of date.
     elif isinstance(obj, dt.datetime):
         return calendar.timegm(obj.timetuple()) * 1000. + obj.microsecond / 1000.
     # Date
     elif isinstance(obj, dt.date):
         return calendar.timegm(obj.timetuple()) * 1000.
     # Numpy datetime64
     elif isinstance(obj, np.datetime64):
         epoch_delta = obj - np.datetime64('1970-01-01T00:00:00Z')
         return (epoch_delta / np.timedelta64(1, 'ms'))
     # Time
     elif isinstance(obj, dt.time):
         return (obj.hour * 3600 + obj.minute * 60 + obj.second) * 1000 + obj.microsecond / 1000.
     elif is_dateutil and isinstance(obj, relativedelta):
         return dict(years=obj.years, months=obj.months, days=obj.days, hours=obj.hours,
             minutes=obj.minutes, seconds=obj.seconds, microseconds=obj.microseconds)
     # Decimal
     elif isinstance(obj, decimal.Decimal):
         return float(obj)
     else:
         return super(BokehJSONEncoder, self).default(obj)
Example #16
    def check_changes(self, r, project='', codebase=None):
        self.assertEquals(len(self.changeHook.master.addedChanges), 2)
        change = self.changeHook.master.addedChanges[0]

        self.assertEquals(change["repository"], "git@localhost:diaspora.git")
        self.assertEquals(
            calendar.timegm(change["when_timestamp"].utctimetuple()),
            1323692851
        )
        self.assertEquals(change["author"], "Jordi Mallach <*****@*****.**>")
        self.assertEquals(change["revision"], 'b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327')
        self.assertEquals(change["comments"], "Update Catalan translation to e38cb41.")
        self.assertEquals(change["branch"], "master")
        self.assertEquals(change["revlink"], "http://localhost/diaspora/commits/b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327")

        change = self.changeHook.master.addedChanges[1]
        self.assertEquals(change["repository"], "git@localhost:diaspora.git")
        self.assertEquals(
            calendar.timegm(change["when_timestamp"].utctimetuple()),
            1325626589
        )
        self.assertEquals(change["author"], "GitLab dev user <gitlabdev@dv6700.(none)>")
        self.assertEquals(change["src"], "git")
        self.assertEquals(change["revision"], 'da1560886d4f094c3e6c9ef40349f7d38b5d27d7')
        self.assertEquals(change["comments"], "fixed readme")
        self.assertEquals(change["branch"], "master")
        self.assertEquals(change["revlink"], "http://localhost/diaspora/commits/da1560886d4f094c3e6c9ef40349f7d38b5d27d7")

        self.assertEquals(change.get("project"), project)
        self.assertEquals(change.get("codebase"), codebase)
Example #17
def calendar_events_list(start, end):
	# Get all events between the two dates:
	print 'Retrieve events between {} and {}'.format(start, end)
	calendar_begins = datetime.fromtimestamp(int(start))
	calendar_ends = datetime.fromtimestamp(int(end))
	import calendar
	print 'The calendar runs from {} to {}'.format(calendar_begins, calendar_ends)
	events = Event.objects.filter(date_time_end__gt=calendar_begins).filter(date_time_begin__lt=calendar_ends)
	events_list = []
	for event in events:
		events_list.append({
			'title': event.title,
			'start': calendar.timegm(event.date_time_begin.utctimetuple()),
			'end': calendar.timegm(event.date_time_end.utctimetuple()),
			'id': event.id,
			'url': '/vidburdur/'+str(event.id),
		})
	import pprint
	pprint.pprint(events_list)
	return HttpResponse(json.dumps(events_list), mimetype='application/javascript')
Example #18
def get_timestamp(s):
    d_s = datetime.datetime.strptime(s, '%d-%b-%y')
    td = datetime.timedelta(days=1)
    d_n = d_s+td
    t_d_s = calendar.timegm(d_s.utctimetuple())
    t_d_n = calendar.timegm(d_n.utctimetuple())
    return t_d_s, t_d_n
Example #19
 def read_BSM_series_from_db(self, start_date, end_date, callback = None, *args, **keywords):
     db = Database(**config['ERIconfig'])
     
 ##  get meters by id    
     MIDs = db.query(SQL_TEMPLATES['SELECT']['BMS_SCANDA_MID_QUERY_TEMPLATE'])
     
     for entry in MIDs:
         print(entry)
 ## set up querying range
     from datetime import date, datetime
     import calendar
 
     start_timestamp = calendar.timegm( date(*start_date).timetuple() ) #date(2014, 5, 26)
     end_timestamp = calendar.timegm( date(*end_date).timetuple() ) #date(2014, 7, 28)
 
 ##  retrieve data from database 
     for entry in MIDs:
         mid = entry['global_MID']
          
         series = db.query(SQL_TEMPLATES['SELECT']['BMS_SCANDA_TIMESTAMPS_QUERY_TEMPLATE'], 
                           {'tb_col':'power_kw','db_table':'ntu_scada_hdata_historic_mirror','global_MID':mid},
                           start_timestamp,
                           end_timestamp,
                           )
         try:
             # Call back routines
             callback(datetime(*start_date), datetime(*end_date), series, mid=mid)
             # for test purpose
             # break
         except IndexError as err:
             print('ERROR!:', entry, ' series : ', len(series))
             
Example #20
def user_reputation(request, user, context):
    reputes = (
        models.Repute.objects.filter(user=user)
        .select_related("question", "question__thread", "user")
        .order_by("-reputed_at")
    )

    # prepare data for the graph - last values go in first
    rep_list = ["[%s,%s]" % (calendar.timegm(datetime.datetime.now().timetuple()) * 1000, user.reputation)]
    for rep in reputes:
        rep_list.append("[%s,%s]" % (calendar.timegm(rep.reputed_at.timetuple()) * 1000, rep.reputation))
    reps = ",".join(rep_list)
    reps = "[%s]" % reps

    data = {
        "active_tab": "users",
        "page_class": "user-profile-page",
        "tab_name": "reputation",
        "tab_description": _("user reputation in the community"),
        "page_title": _("profile - user reputation"),
        "reputation": reputes,
        "reps": reps,
    }
    context.update(data)
    return render_into_skin("user_profile/user_reputation.html", context, request)
Example #21
def cal_run_hours(cal_date, compare_date=None):
    # evaluate the default at call time, not at import time
    if compare_date is None:
        compare_date = datetime.datetime.utcnow().strftime('%Y%m')
    compare_date = str(compare_date)
    if int(compare_date[-2:]) == 1:
        compare_date_year = int(compare_date[:4]) - 1
        compare_date_month = 12
    else:
        compare_date_year = int(compare_date[:4])
        compare_date_month = int(compare_date[-2:]) - 1

    compare_date_month_hours = calendar.monthrange(compare_date_year,compare_date_month)[1] * 24
    compare_date_begin_ts = calendar.timegm(datetime.datetime(compare_date_year,compare_date_month,1).timetuple())

    if compare_date_month == 12:
        compare_date_end_ts = calendar.timegm(datetime.datetime(compare_date_year+1,1,1).timetuple())
    else:
        compare_date_end_ts = calendar.timegm(datetime.datetime(compare_date_year,compare_date_month+1,1).timetuple())

    # print compare_date_begin_time,compare_date_end_time,compare_date_month_hours
    cal_date_ts = calendar.timegm(cal_date.timetuple())

    run_hours = 0

    if compare_date_begin_ts < cal_date_ts < compare_date_end_ts:
        run_hours = (compare_date_end_ts - cal_date_ts) // 3600

    if cal_date_ts < compare_date_begin_ts:
        run_hours = compare_date_month_hours

    return run_hours
Example #22
    def build_cookie_parameters(self, params):
        domain_hash = self._generate_domain_hash()
        params._utma = "%s.%s.%s.%s.%s.%s" % (
                domain_hash,
                self.visitor.unique_id,
                calendar.timegm(self.visitor.first_visit_time.timetuple()),
                calendar.timegm(self.visitor.previous_visit_time.timetuple()),
                calendar.timegm(self.visitor.current_visit_time.timetuple()),
                self.visitor.visit_count
            )
        params._utmb = '%s.%s.10.%s' % (
                domain_hash,
                self.session.track_count,
                calendar.timegm(self.session.start_time.timetuple()),
            )
        params._utmc = domain_hash
        cookies = []
        cookies.append('__utma=%s;' % params._utma)
        if params._utmz:
            cookies.append('__utmz=%s;' % params._utmz)
        if params._utmv:
            cookies.append('__utmv=%s;' % params._utmv)

        params.utmcc = '+'.join(cookies)
        return params
Example #23
    def test_get_id_token(self):
        """Verify that ID tokens are signed with the correct secret and generated with the correct claims."""
        token = get_id_token(self.user, self.client_name)

        payload = jwt.decode(
            token,
            self.oauth2_client.client_secret,
            audience=self.oauth2_client.client_id,
            issuer=settings.OAUTH_OIDC_ISSUER,
        )

        now = datetime.datetime.utcnow()
        expiration = now + datetime.timedelta(seconds=settings.OAUTH_ID_TOKEN_EXPIRATION)

        expected_payload = {
            'preferred_username': self.user.username,
            'name': self.user_profile.name,
            'email': self.user.email,
            'administrator': self.user.is_staff,
            'iss': settings.OAUTH_OIDC_ISSUER,
            'exp': calendar.timegm(expiration.utctimetuple()),
            'iat': calendar.timegm(now.utctimetuple()),
            'aud': self.oauth2_client.client_id,
            'sub': anonymous_id_for_user(self.user, None),
        }

        self.assertEqual(payload, expected_payload)
Example #24
def dtime2tstmp(DTime):
  if iterable(DTime):
    Shape = shape(DTime)
    return array([timegm(dtime.timetuple())
                  for dtime in DTime]).reshape(Shape)
  else:
    return timegm(DTime.timetuple())
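dtime2tstmp() relies on iterable, shape, and array being in scope, presumably star-imported from numpy; a minimal sketch under that assumption:

    from numpy import array, iterable, shape  # assumed imports
    from calendar import timegm
    from datetime import datetime

    print(dtime2tstmp(datetime(1970, 1, 2)))  # 86400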
Example #25
def time_duration_clock(start, finish):
	"""Returns duration in [x days] hh:mm:ss format (times in time.asctime format)

	x days shows up only on long (usually frozen) times
	"""

	# Construct time tuples
	try:
		start_time = calendar.timegm(time.strptime(start))
		finish_time = calendar.timegm(time.strptime(finish))
	except (TypeError, ValueError):
		return "?"

	# seconds of duration
	duration = finish_time - start_time

	days = duration // 86400
	duration %= 86400

	hours = duration // 3600
	duration %= 3600

	mins = duration // 60
	duration %= 60

	secs = duration

	ret = "%.2d:%.2d:%.2d" % (hours, mins, secs)

	if days:
		ret = "%d day(s) %s" % (days, ret)

	return ret
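A hypothetical call with two time.asctime-style strings one day and 01:30:05 apart:

    print(time_duration_clock('Mon May 26 12:00:00 2014', 'Tue May 27 13:30:05 2014'))
    # -> 1 day(s) 01:30:05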
Example #26
def base_timeseries(cat_id=None):
    redis = redis_util.Redis(REDIS_HOST, REDIS_PORT).conn
    get_all = request.args.get("all")
    start = request.args.get("start")
    end = request.args.get("end")

    if get_all:
        if cat_id:
            return category.Category(redis, cat_id=cat_id).timeseries.all()
        else:
            time_series = {}
            for cat in category.Category.get_all_categories(redis):
                time_series[cat.cat_id] = cat.timeseries.all()
            return time_series
    else:
        if start and end:
            start = calendar.timegm(parser.parse(start).utctimetuple())
            end = calendar.timegm(parser.parse(end).utctimetuple())
        else:
            end = time.time()
            start = end-DEFAULT_TIME_SLICE

        if cat_id:
            return category.Category(redis, cat_id=cat_id).timeseries.range(
                start, end)
        else:
            time_series = {}
            for cat in category.Category.get_all_categories(redis):
                time_series[cat.cat_id] = cat.timeseries.range(
                    start, end)
            return time_series
Example #27
    def test_build(self):
        expected = {}
        self.maxDiff = None
        expected['message_type'] = NotificationType.type_names[NotificationType.UPCOMING_MATCH]
        expected['message_data'] = {}
        expected['message_data']['event_key'] = self.event.key_name
        expected['message_data']['event_name'] = self.event.name
        expected['message_data']['match_key'] = self.match.key_name
        expected['message_data']['team_keys'] = self.match.team_key_names
        if self.match.time:
            expected['message_data']['scheduled_time'] = calendar.timegm(self.match.time.utctimetuple())
            expected['message_data']['predicted_time'] = calendar.timegm(self.match.time.utctimetuple())
        else:
            expected['message_data']['scheduled_time'] = None
            expected['message_data']['predicted_time'] = None
        expected['message_data']['webcast'] = {
            'channel': '6540154',
            'status': 'unknown',
            'stream_title': None,
            'type': 'ustream'
        }

        data = self.notification._build_dict()

        self.assertEqual(expected, data)
Example #28
def get_standardized_timestamp(timestamp, ts_format):
    """
  Given a timestamp string, return a time stamp in the epoch ms format. If no date is present in
  timestamp then today's date will be added as a prefix before conversion to epoch ms
  """
    if not timestamp:
        return None
    if timestamp == "now":
        timestamp = str(datetime.datetime.now())
    if not ts_format:
        ts_format = detect_timestamp_format(timestamp)
    try:
        if ts_format == "unknown":
            logger.error("Unable to determine timestamp format for : %s", timestamp)
            return -1
        elif ts_format == "epoch":
            ts = int(timestamp) * 1000
        elif ts_format == "epoch_ms":
            ts = timestamp
        elif ts_format == "epoch_fraction":
            ts = int(timestamp[:10]) * 1000 + int(timestamp[11:])
        elif ts_format in ("%H:%M:%S", "%H:%M:%S.%f"):
            date_today = str(datetime.date.today())
            dt_obj = datetime.datetime.strptime(date_today + " " + timestamp, "%Y-%m-%d " + ts_format)
            ts = calendar.timegm(dt_obj.utctimetuple()) * 1000 + dt_obj.microsecond / 1000
        else:
            dt_obj = datetime.datetime.strptime(timestamp, ts_format)
            ts = calendar.timegm(dt_obj.utctimetuple()) * 1000 + dt_obj.microsecond / 1000
    except ValueError:
        return -1
    return str(ts)
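A hypothetical call exercising the 'epoch' branch (the other branches depend on the external detect_timestamp_format() helper):

    print(get_standardized_timestamp('1400000000', 'epoch'))  # '1400000000000'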
Example #29
    def default(self, obj):
        try:
            if isinstance(obj, datetime):
                if obj.utcoffset() is not None:
                    obj = obj - obj.utcoffset()
                millis = int(
                        calendar.timegm(obj.timetuple()) * 1000 +
                        obj.microsecond / 1000
                )
                return millis

            if isinstance(obj, date):
                millis = int(calendar.timegm(obj.timetuple()) * 1000)
                return millis

            if isinstance(obj, np.int64):
                val = int(obj)
                return val
            iterable = iter(obj)
        except TypeError:
            pass
        else:
            return list(iterable)
        return JSONEncoder.default(self, obj)
Example #30
    def check_unsat(self, fname):
        a = XorToCNF()
        tmpfname = create_fuzz.unique_file("tmp_for_xor_to_cnf_convert")
        a.convert(fname, tmpfname)
        # execute with the other solver
        toexec = "lingeling -f %s" % tmpfname
        print "Solving with other solver: %s" % toexec
        currTime = calendar.timegm(time.gmtime())
        try:
            p = subprocess.Popen(toexec.rsplit(),
                                 stdout=subprocess.PIPE,
                                 preexec_fn=setlimits)
        except OSError:
            print "ERROR: Probably you don't have lingeling installed!"
            raise

        consoleOutput2 = p.communicate()[0]
        os.unlink(tmpfname)

        # if other solver was out of time, then we can't say anything
        diffTime = calendar.timegm(time.gmtime()) - currTime
        if diffTime > maxTime - maxTimeDiff:
            print "Other solver: too much time to solve, aborted!"
            return None

        # extract output from the other solver
        print "Checking other solver output..."
        otherSolverUNSAT, otherSolverSolution, _ = solution_parser.parse_solution_from_output(
            consoleOutput2.split("\n"), self.ignoreNoSolution)

        # check if the other solver agrees with us
        return otherSolverUNSAT
Example #31
def _format_date(date):
    """ Output an RFC822 date format. """
    if date is None:
        return None

    return formatdate(timegm(date.utctimetuple()))
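A minimal usage sketch, assuming formatdate comes from email.utils and timegm from calendar as the snippet implies:

    from calendar import timegm
    from datetime import datetime
    from email.utils import formatdate

    # -> 'Fri, 12 Nov 2010 13:14:15 -0000'
    print(formatdate(timegm(datetime(2010, 11, 12, 13, 14, 15).utctimetuple())))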
Example #32
def get_period_counts(active_findings,
                      findings,
                      findings_closed,
                      accepted_findings,
                      period_interval,
                      start_date,
                      relative_delta='months'):
    opened_in_period = list()
    active_in_period = list()
    accepted_in_period = list()
    opened_in_period.append(
        ['Timestamp', 'Date', 'S0', 'S1', 'S2', 'S3', 'Total', 'Closed'])
    active_in_period.append(
        ['Timestamp', 'Date', 'S0', 'S1', 'S2', 'S3', 'Total', 'Closed'])
    accepted_in_period.append(
        ['Timestamp', 'Date', 'S0', 'S1', 'S2', 'S3', 'Total', 'Closed'])

    for x in range(-1, period_interval):
        if relative_delta == 'months':
            # make interval the first through last of month
            end_date = (start_date + relativedelta(months=x)) + relativedelta(
                day=1, months=+1, days=-1)
            new_date = (start_date +
                        relativedelta(months=x)) + relativedelta(day=1)
        else:
            # week starts the monday before
            new_date = start_date + relativedelta(weeks=x, weekday=MO(1))
            end_date = new_date + relativedelta(weeks=1, weekday=MO(1))

        closed_in_range_count = findings_closed.filter(
            mitigated__range=[new_date, end_date]).count()

        if accepted_findings:
            risks_a = accepted_findings.filter(
                risk_acceptance__created__range=[
                    datetime(new_date.year, new_date.month, 1, tzinfo=localtz),
                    datetime(new_date.year,
                             new_date.month,
                             monthrange(new_date.year, new_date.month)[1],
                             tzinfo=localtz)
                ])
        else:
            risks_a = None

        crit_count, high_count, med_count, low_count, closed_count = [
            0, 0, 0, 0, 0
        ]
        for finding in findings:
            try:
                if new_date <= datetime.combine(
                        finding.date, datetime.min.time()).replace(
                            tzinfo=localtz) <= end_date:
                    if finding.severity == 'Critical':
                        crit_count += 1
                    elif finding.severity == 'High':
                        high_count += 1
                    elif finding.severity == 'Medium':
                        med_count += 1
                    elif finding.severity == 'Low':
                        low_count += 1
            except:
                if new_date <= finding.date <= end_date:
                    if finding.severity == 'Critical':
                        crit_count += 1
                    elif finding.severity == 'High':
                        high_count += 1
                    elif finding.severity == 'Medium':
                        med_count += 1
                    elif finding.severity == 'Low':
                        low_count += 1

        total = crit_count + high_count + med_count + low_count
        opened_in_period.append([
            (tcalendar.timegm(new_date.timetuple()) * 1000), new_date,
            crit_count, high_count, med_count, low_count, total,
            closed_in_range_count
        ])
        crit_count, high_count, med_count, low_count, closed_count = [
            0, 0, 0, 0, 0
        ]
        if risks_a is not None:
            for finding in risks_a:
                if finding.severity == 'Critical':
                    crit_count += 1
                elif finding.severity == 'High':
                    high_count += 1
                elif finding.severity == 'Medium':
                    med_count += 1
                elif finding.severity == 'Low':
                    low_count += 1

        total = crit_count + high_count + med_count + low_count
        accepted_in_period.append([
            (tcalendar.timegm(new_date.timetuple()) * 1000), new_date,
            crit_count, high_count, med_count, low_count, total
        ])
        crit_count, high_count, med_count, low_count, closed_count = [
            0, 0, 0, 0, 0
        ]
        for finding in active_findings:
            try:
                if datetime.combine(finding.date, datetime.min.time()).replace(
                        tzinfo=localtz) <= end_date:
                    if finding.severity == 'Critical':
                        crit_count += 1
                    elif finding.severity == 'High':
                        high_count += 1
                    elif finding.severity == 'Medium':
                        med_count += 1
                    elif finding.severity == 'Low':
                        low_count += 1
            except:
                if finding.date <= end_date:
                    if finding.severity == 'Critical':
                        crit_count += 1
                    elif finding.severity == 'High':
                        high_count += 1
                    elif finding.severity == 'Medium':
                        med_count += 1
                    elif finding.severity == 'Low':
                        low_count += 1
        total = crit_count + high_count + med_count + low_count
        active_in_period.append([
            (tcalendar.timegm(new_date.timetuple()) * 1000), new_date,
            crit_count, high_count, med_count, low_count, total
        ])

    return {
        'opened_per_period': opened_in_period,
        'accepted_per_period': accepted_in_period,
        'active_per_period': active_in_period
    }
Example #33
def UTCToLocal(utc_dt):
    # get integer timestamp to avoid precision lost
    timestamp = calendar.timegm(utc_dt.timetuple())
    local_dt = datetime.fromtimestamp(timestamp)
    assert utc_dt.resolution >= timedelta(microseconds=1)
    return local_dt.replace(microsecond=utc_dt.microsecond)
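A hedged usage sketch; the result depends on the machine's local timezone:

    utc = datetime(2014, 5, 26, 12, 0, 0, 250000)
    print(UTCToLocal(utc))  # e.g. 2014-05-26 08:00:00.250000 on a UTC-4 machine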
Example #34
def epoch_seconds():
    return calendar.timegm(time.gmtime())
Example #35
def date_nix(str_date):
    return calendar.timegm(str_date.timetuple())
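A quick consistency check tying the two helpers above together (hypothetical sketch; date_nix() accepts any object with a timetuple() method, such as datetime.date):

    import calendar, time
    from datetime import date

    assert abs(calendar.timegm(time.gmtime()) - int(time.time())) <= 1
    print(date_nix(date(1970, 1, 2)))  # 86400, one day after the epoch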
Example #36
    def _calculate_times(self):
        """
        Calculates and returns several time-related values that tend to be needed
        at the same time.

        :return:    tuple of numbers described below...
                    now_s: current time as seconds since the epoch
                    first_run_s: time of the first run as seconds since the epoch,
                        calculated based on self.first_run
                    since_first_s: how many seconds have elapsed since the first
                        run
                    run_every_s: how many seconds should elapse between runs of
                        this schedule
                    last_scheduled_run_s: the most recent time at which this
                        schedule should have run based on its schedule, as
                        seconds since the epoch
                    expected_runs: number of runs that should have happened based
                        on the first_run time and the interval
        :rtype:     tuple

        """
        now_s = time.time()
        first_run_dt = dateutils.to_utc_datetime(
            dateutils.parse_iso8601_datetime(self.first_run))
        first_run_s = calendar.timegm(first_run_dt.utctimetuple())
        since_first_s = now_s - first_run_s

        # An interval could be an isodate.Duration or a datetime.timedelta
        interval = self.as_schedule_entry().schedule.run_every
        if isinstance(interval, isodate.Duration):
            # Determine how long (in seconds) to wait between the last run and the next one. This
            # changes depending on the current time because a duration can be a month or a year.
            if self.last_run_at is not None:
                last_run_dt = dateutils.to_utc_datetime(
                    dateutils.parse_iso8601_datetime(str(self.last_run_at)))
                run_every_s = timedelta_total_seconds(
                    interval.totimedelta(start=last_run_dt))
            else:
                run_every_s = timedelta_total_seconds(
                    interval.totimedelta(start=first_run_dt))

            # This discovers how many runs should have occurred based on the schedule
            expected_runs = 0
            current_run = first_run_dt
            last_scheduled_run_s = first_run_s
            duration = self.as_schedule_entry().schedule.run_every
            while True:
                # The interval is determined by the date of the previous run
                current_interval = duration.totimedelta(start=current_run)
                current_run += current_interval

                # If time of this run is less than the current time, keep going
                current_run_s = calendar.timegm(current_run.utctimetuple())
                if current_run_s < now_s:
                    expected_runs += 1
                    last_scheduled_run_s += timedelta_total_seconds(
                        current_interval)
                else:
                    break
        else:
            run_every_s = timedelta_total_seconds(interval)
            # don't want this to be negative
            expected_runs = max(int(since_first_s / run_every_s), 0)
            last_scheduled_run_s = first_run_s + expected_runs * run_every_s

        return now_s, first_run_s, since_first_s, run_every_s, last_scheduled_run_s, expected_runs
Example #37
# age worm
# /info/experiment/worm/age
age_r = f['info']['experiment']['worm']['age']  #type u2
age = str(''.join(unichr(c) for c in age_r))

# /info/experiment/environment/food
food_r = f['info']['experiment']['environment']['food']  #type u2
food = str(''.join(unichr(c) for c in food_r))

# /info/experiment/environment/timestamp
timestamp_r = f['info']['experiment']['environment']['timestamp']  #type u2
timestamp = str(''.join(unichr(c) for c in timestamp_r))

# HH:MM:SS.mmmmmm
my_date_object = strptime(timestamp, '%Y-%m-%d %H:%M:%S.%f')
unix_time = timegm(my_date_object)  # UTC-based

# /info/video/length/time
time_recorded_r = f['info']['video']['length']['time']
time_recorded = time_recorded_r[0][0]

# /info/video/length/frames
frames_r = f['info']['video']['length']['frames']
frames = frames_r[0][0]

fps_r = f['info']['video']['resolution']['fps']
fps = fps_r[0][0]


def get_interv(ary_refs_start, ary_refs_end, writer_obj):
Example #38
    def __get_item_data(self, item):
        """
        gets information out of <item>..</item>

        @return:  output, note, properties, tags
                  variables for orgwriter.append_org_subitem
        """
        try:
            # logging.debug(item)
            properties = OrgProperties()
            guid = item['id']
            if not guid:
                logging.error("got no id")

            unformatted_link = item['link']
            short_link = OrgFormat.link(unformatted_link, "link")

            # if we found a url in title
            # then append the url in front of subject
            if re.search("http[s]?://", item['title']) is not None:
                output = short_link + ": " + item['title']
            else:
                output = OrgFormat.link(unformatted_link, item['title'])

            note = item['description']

            # converting updated_parsed UTC --> LOCALTIME
            # Karl 2018-09-22 this might be changed due to:
            # DeprecationWarning: To avoid breaking existing software
            # while fixing issue 310, a temporary mapping has been
            # created from `updated_parsed` to `published_parsed` if
            # `updated_parsed` doesn't exist. This fallback will be
            # removed in a future version of feedparser.
            timestamp = OrgFormat.date(
                time.localtime(calendar.timegm(item['updated_parsed'])), show_time=True)

            properties.add("guid", guid)

        except KeyError:
            logging.error("input is not a RSS 2.0")
            sys.exit(1)

        tags = []
        # Karl 2018-09-22 this might be changed due to:
        # DeprecationWarning: To avoid breaking existing software
        # while fixing issue 310, a temporary mapping has been created
        # from `updated_parsed` to `published_parsed` if
        # `updated_parsed` doesn't exist. This fallback will be
        # removed in a future version of feedparser.
        dont_parse = ['title', 'description', 'updated', 'summary',
                      'updated_parsed', 'link', 'links']
        for i in item:
            logging.debug(i)
            if i not in dont_parse:
                if isinstance(i, str) and \
                   isinstance(item[i], str) and item[i] != "":
                    if i == "id":
                        i = "guid"
                    properties.add(i, item[i])
                else:
                    if i == "tags":
                        for tag in item[i]:
                            logging.debug("found tag: %s", tag['term'])
                            tags.append(tag['term'])

        return output, note, properties, tags, timestamp
Example #39
def perf_steps(api):
    """Run Skia benchmarks."""
    b = api.properties['buildername']
    if upload_perf_results(b):
        api.flavor.create_clean_device_dir(
            api.flavor.device_dirs.perf_data_dir)

    # Run nanobench.
    properties = [
        '--properties',
        'gitHash',
        api.properties['revision'],
    ]
    if api.vars.is_trybot:
        properties.extend([
            'issue',
            api.vars.issue,
            'patchset',
            api.vars.patchset,
            'patch_storage',
            api.vars.patch_storage,
        ])
    properties.extend(['swarming_bot_id', api.vars.swarming_bot_id])
    properties.extend(['swarming_task_id', api.vars.swarming_task_id])

    target = 'nanobench'
    args = [
        target,
        '-i',
        api.flavor.device_dirs.resource_dir,
        '--skps',
        api.flavor.device_dirs.skp_dir,
        '--images',
        api.flavor.device_path_join(api.flavor.device_dirs.images_dir,
                                    'nanobench'),
    ]

    # Do not run svgs on Valgrind.
    if 'Valgrind' not in api.vars.builder_name:
        args.extend(['--svgs', api.flavor.device_dirs.svg_dir])

    args.extend(nanobench_flags(api, api.vars.builder_name))

    if 'Chromecast' in api.vars.builder_cfg.get('os', ''):
        # Due to limited disk space, run a watered down perf run on Chromecast.
        args = [target]
        if api.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
            args.extend(['--nogpu', '--config', '8888'])
        elif api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
            args.extend(['--nocpu', '--config', 'gles'])
        args.extend([
            '-i',
            api.flavor.device_dirs.resource_dir,
            '--images',
            api.flavor.device_path_join(api.flavor.device_dirs.resource_dir,
                                        'images', 'color_wheel.jpg'),
            '--skps',
            api.flavor.device_dirs.skp_dir,
            '--pre_log',
            '--match',  # skia:6687
            '~matrixconvolution',
            '~blur_image_filter',
            '~blur_0.01',
            '~GM_animated-image-blurs',
            '~blendmode_mask_',
            '~desk_carsvg.skp',
            '~^path_text_clipped',  # Bot times out; skia:7190
            '~shapes_rrect_inner_rrect_50_500x500',  # skia:7551
            '~compositing_images',
        ])

    if upload_perf_results(b):
        now = api.time.utcnow()
        ts = int(calendar.timegm(now.utctimetuple()))
        json_path = api.flavor.device_path_join(
            api.flavor.device_dirs.perf_data_dir,
            'nanobench_%s_%d.json' % (api.properties['revision'], ts))
        args.extend(['--outResultsFile', json_path])
        args.extend(properties)

        keys_blacklist = ['configuration', 'role', 'test_filter']
        args.append('--key')
        for k in sorted(api.vars.builder_cfg.keys()):
            if k not in keys_blacklist:
                args.extend([k, api.vars.builder_cfg[k]])

    # See skia:2789.
    if 'AbandonGpuContext' in api.vars.extra_tokens:
        args.extend(['--abandonGpuContext'])

    api.run(api.flavor.step, target, cmd=args, abort_on_failure=False)

    # Copy results to swarming out dir.
    if upload_perf_results(b):
        api.file.ensure_directory('makedirs perf_dir',
                                  api.flavor.host_dirs.perf_data_dir)
        api.flavor.copy_directory_contents_to_host(
            api.flavor.device_dirs.perf_data_dir,
            api.flavor.host_dirs.perf_data_dir)
Example #40
    def _process_one_minute(self, moment):
        """Generates exactly one self.stats_minute_cls.

        Always process logs in exactly 1 minute chunks. It is small so it won't take
        too long even under relatively high QPS.

        In theory a transaction should be used when saving the aggregated statistics
        in self.stats_hour_cls and self.stats_day_cls. In practice it is not
        necessary because:
        - The caller uses a lock to guard against concurrent calls.
        - Even if it were to become inconsistent or have 2 cron jobs run
          simultaneously, hours_bit|minutes_bit will stay internally consistent with
          the associated values snapshot in it in the respective
          self.stats_day_cls and self.stats_hour_cls entities.
        """
        minute_key_id = '%02d' % moment.minute

        # Fetch the entities. Do not use ndb's memcache but use in-process local
        # cache.
        opts = ndb.ContextOptions(use_memcache=False)
        future_day = self.stats_day_cls.get_or_insert_async(
            str(moment.date()),
            parent=self.root_key,
            values_compressed=self.snapshot_cls(),
            context_options=opts)
        future_hour = self.stats_hour_cls.get_or_insert_async(
            '%02d' % moment.hour,
            parent=self.day_key(moment.date()),
            values_compressed=self.snapshot_cls(),
            context_options=opts)
        future_minute = self.stats_minute_cls.get_by_id_async(
            minute_key_id, parent=self.hour_key(moment), use_memcache=False)

        day = future_day.get_result()
        hour = future_hour.get_result()
        # Normally 'minute' should be None.
        minute = future_minute.get_result()
        futures = []

        if not minute:
            # Call the harvesting function.
            end = moment + datetime.timedelta(minutes=1)
            minute_values = self._generate_snapshot(
                calendar.timegm(moment.timetuple()),
                calendar.timegm(end.timetuple()))

            minute = self.stats_minute_cls(id=minute_key_id,
                                           parent=hour.key,
                                           values_compressed=minute_values)
            futures.append(minute.put_async(use_memcache=False))
        else:
            minute_values = minute.values

        minute_bit = (1 << moment.minute)
        minute_bit_is_set = bool(hour.minutes_bitmap & minute_bit)
        if not minute_bit_is_set:
            hour.values.accumulate(minute_values)
            hour.minutes_bitmap |= minute_bit
            futures.append(hour.put_async(use_memcache=False))
            if hour.minutes_bitmap == self.stats_hour_cls.SEALED_BITMAP:
                logging.info('%s Hour is sealed: %s %s:00', self.root_key.id(),
                             day.key.id(), hour.key.id())

        # Adds data for the past hour back into day.
        if hour.minutes_bitmap == self.stats_hour_cls.SEALED_BITMAP:
            hour_bit = (1 << moment.hour)
            hour_bit_is_set = bool(day.hours_bitmap & hour_bit)
            if not hour_bit_is_set:
                day.values.accumulate(hour.values)
                day.hours_bitmap |= hour_bit
                futures.append(day.put_async(use_memcache=False))
                if day.hours_bitmap == self.stats_day_cls.SEALED_BITMAP:
                    logging.info('%s Day is sealed: %s', self.root_key.id(),
                                 day.key.id())

        if futures:
            ndb.Future.wait_all(futures)
Example #41
 def is_after(self, ts):
     ''' Compare this event's timestamp to a given timestamp. '''
     return self.timestamp >= int(calendar.timegm(ts.timetuple()))
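A hypothetical check, with evt standing in for an event whose timestamp is in epoch seconds; note that ts.timetuple() treats a naive datetime as UTC here:

    evt.is_after(datetime(2014, 5, 26))  # True iff evt.timestamp >= 1401062400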
Example #42
import argparse
import os
import sqlite3
import calendar
from datetime import datetime

parser = argparse.ArgumentParser(description="Timesuck migration tool")
parser.add_argument("olddb", type=str, help="Path to old timesuck database")
parser.add_argument("newdb", type=str, help="Path to current new database")
args = parser.parse_args()

connection = sqlite3.connect(os.path.expanduser(args.olddb))
cursor = connection.cursor()

current_db = sqlite3.connect(os.path.expanduser(args.newdb))
current_cursor = current_db.cursor()

for entry in cursor.execute("SELECT * FROM logs"):
    kind, name, start, end, duration = entry
    start = datetime.strptime(start, "%Y-%m-%d %H:%M:%S")
    end = datetime.strptime(end, "%Y-%m-%d %H:%M:%S")
    start_ts = calendar.timegm(start.utctimetuple())
    end_ts = calendar.timegm(end.utctimetuple())
    current_cursor.execute("INSERT INTO logs VALUES (?,?,?,?,?)",
                           (kind, name, start_ts, end_ts, duration))

current_db.commit()

cursor.close()
current_cursor.close()
Example #43
 def compute_from_times(self, times):
     dts = [datetime.datetime.utcfromtimestamp(i) for i in times]
     dts = [d.replace(hour=0, minute=0, second=0) for d in dts]
     return np.array([calendar.timegm(dt.timetuple()) for dt in dts])
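A small sketch: two timestamps on the same UTC day collapse to the same midnight value:

    print(compute_from_times([1401100000, 1401110000]))  # [1401062400 1401062400]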
Example #44
 def get_timestamp(self):
     if not self.timestr:
         return None
     t = time.strptime(self.timestr, "%Y%m%dT%H%M%SZ")
     return datetime.datetime.utcfromtimestamp(calendar.timegm(t))
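For reference, the round trip this getter performs, sketched standalone:

    import calendar, datetime, time

    t = time.strptime('20140526T120000Z', '%Y%m%dT%H%M%SZ')
    print(datetime.datetime.utcfromtimestamp(calendar.timegm(t)))  # 2014-05-26 12:00:00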
Example #45
    def _minutely_diff_poll(self):
        try:
            if not os.path.exists('state.txt'):
                log.error("No state file found to poll minutelies.")
                return

            seen_uids = {}
            new_changesets = {}
            seen_changesets = self.seen_changesets

            state = self.readState('state.txt')

            while self.fetchNextState(state):
                state = self.readState('state.txt')

                # Grab the next sequence number and build a URL out of it
                sqnStr = state['sequenceNumber'].zfill(9)
                url = "https://planet.openstreetmap.org/replication/minute/%s/%s/%s.osc.gz" % (sqnStr[0:3], sqnStr[3:6], sqnStr[6:9])

                log.info("Downloading change file (%s)." % (url))
                req = urllib2.Request(url, headers={'User-Agent': userAgent})
                content = urllib2.urlopen(req)
                content = StringIO.StringIO(content.read())
                gzipper = gzip.GzipFile(fileobj=content)

                handler = OscHandler()
                parseOsm(gzipper, handler)

                for (id, prim) in itertools.chain(handler.nodes.iteritems(), handler.ways.iteritems(), handler.relations.iteritems()):

                    changeset_id = str(prim['changeset'])
                    action = prim['action']
                    prim_type = prim['type']

                    changeset_data = seen_changesets.get(changeset_id)
                    if not changeset_data:
                        new_changesets[changeset_id] = {'username': prim['user']}
                        changeset_data = {}
                    cs_type_data = changeset_data.get(prim_type, {})
                    cs_type_data[action] = cs_type_data.get(action, 0) + 1
                    cs_type_data['total_changes'] = cs_type_data.get('total_changes', 0) + 1
                    changeset_data[prim_type] = cs_type_data
                    changeset_data['total_changes'] = changeset_data.get('total_changes', 0) + 1
                    changeset_data['last_modified'] = prim['timestamp']
                    seen_changesets[changeset_id] = changeset_data

                    uid = str(prim['uid'])
                    if uid in seen_uids:
                        continue
                    else:
                        seen_uids[uid] = {'changeset': prim['changeset'],
                                          'username': prim['user']}

                    if prim_type == 'node':
                        if 'lat' not in seen_uids[uid]:
                            seen_uids[uid]['lat'] = prim['lat']
                            seen_uids[uid]['lon'] = prim['lon']
                        if changeset_id in new_changesets and \
                                'lat' not in new_changesets[changeset_id]:
                            new_changesets[changeset_id]['lat'] = prim['lat']
                            new_changesets[changeset_id]['lon'] = prim['lon']

                #log.info("Changeset actions: %s" % json.dumps(seen_changesets))

                # Check the changesets for anomalies
                now = datetime.datetime.utcnow()
                cs_flags = []
                for (id, cs_data) in seen_changesets.items():
                    age = (now - cs_data['last_modified']).total_seconds()
                    if age > 3600:
                        del seen_changesets[id]
                        continue

                    total_changes = cs_data['total_changes']
                    node_changes = cs_data.get('node', {}).get('total_changes', 0)
                    way_changes = cs_data.get('way', {}).get('total_changes', 0)
                    relation_changes = cs_data.get('relation', {}).get('total_changes', 0)
                    node_pct = node_changes / float(total_changes)
                    way_pct = way_changes / float(total_changes)
                    relation_pct = relation_changes / float(total_changes)

                    # Flag a changeset that's big and made up of all one primitive type
                    if total_changes > 2000 and (node_pct > 0.97 or way_pct > 0.97 or relation_pct > 0.97):
                        cs_flags.append((id, "it is mostly changes to one data type"))

                    creates = cs_data.get('node', {}).get('create', 0) + cs_data.get('way', {}).get('create', 0) + cs_data.get('relation', {}).get('create', 0)
                    mods = cs_data.get('node', {}).get('modify', 0) + cs_data.get('way', {}).get('modify', 0) + cs_data.get('relation', {}).get('modify', 0)
                    deletes = cs_data.get('node', {}).get('delete', 0) + cs_data.get('way', {}).get('delete', 0) + cs_data.get('relation', {}).get('delete', 0)
                    create_pct = creates / float(total_changes)
                    mod_pct = mods / float(total_changes)
                    delete_pct = deletes / float(total_changes)

                    # Flag a changeset that's big and made up of only one change type
                    if total_changes > 2000 and (create_pct > 0.97 or mod_pct > 0.97 or delete_pct > 0.97):
                        cs_flags.append((id, "it is mostly creates, modifies, or deletes"))

                # Tell the channel about these problems
                irc = world.ircs[0]
                for (cs_id, reason) in cs_flags:
                    if cs_id in seen_changesets and seen_changesets[cs_id].get('alerted_already'):
                        continue

                    response = "Changeset %s is weird because %s. https://osm.org/changeset/%s" % (cs_id, reason, cs_id)

                    log.info(response)
                    for chan in irc.state.channels:
                        if chan == "#osm-bot":
                            msg = ircmsgs.privmsg(chan, response)
                            world.ircs[0].queueMsg(msg)
                    seen_changesets[cs_id]['alerted_already'] = True

            log.info("There were %s users editing this time." % len(seen_uids))
            if stathat:
                ts = isoToDatetime(state['timestamp'])
                ts = calendar.timegm(ts.timetuple())
                stathat.ez_post_value(stathatEmail, 'users editing this minute', len(seen_uids), ts)

            f = open('uid.txt', 'r')
            for line in f:
                for uid in seen_uids.keys():
                    if uid in line:
                        seen_uids.pop(uid)
                        continue
                if len(seen_uids) == 0:
                    break
            f.close()

            if stathat:
                ts = isoToDatetime(state['timestamp'])
                ts = calendar.timegm(ts.timetuple())
                stathat.ez_post_value(stathatEmail, 'new users this minute', len(seen_uids), ts)

            f = open('uid.txt', 'a')
            for (uid, data) in seen_uids.iteritems():
                f.write('%s\t%s\n' % (data['username'], uid))

                location = ""
                country_code = None
                if 'lat' in data:
                    try:
                        country_code, location = self.reverse_geocode(data['lat'], data['lon'])
                    except urllib2.HTTPError as e:
                        log.error("HTTP problem when looking for edit location: %s" % (e))

                response = "%s just started editing%s with changeset https://overpass-api.de/achavi/?changeset=%s" % (data['username'], location, data['changeset'])
                log.info(response)
                irc = world.ircs[0]
                for chan in irc.state.channels:
                    if chan == "#osm-bot" or country_code in _new_uid_edit_region_channels.get(chan, ()):
                        msg = ircmsgs.privmsg(chan, response)
                        world.ircs[0].queueMsg(msg)

            f.close()

            for (changeset_id, data) in new_changesets.iteritems():
                location = ""
                country_code = None
                if 'lat' in data:
                    try:
                        country_code, location = self.reverse_geocode(data['lat'], data['lon'])
                    except urllib2.HTTPError as e:
                        log.error("HTTP problem when looking for changeset location: %s" % (e))

                response = "%s edited%s with changeset https://overpass-api.de/achavi/?changeset=%s" % (data['username'], location, data['changeset'])
                log.info(response)
                irc = world.ircs[0]
                for chan in irc.state.channels:
                    if country_code in _all_edit_region_channels.get(chan, ()):
                        msg = ircmsgs.privmsg(chan, response)
                        irc.queueMsg(msg)

        except Exception as e:
            log.error("Exception processing new users: %s" % traceback.format_exc(e))
Example #46
0
class Geofon(EarthquakeCatalog):
    
    def __init__(self):
        self.events = {}
    
    def iter_event_names(self, time_range=None, nmax=10000, magmin=None, latmin=-90., latmax=90., lonmin=-180., lonmax=180.):
        dmin = time.strftime('%Y-%m-%d', time.gmtime(time_range[0]))
        dmax = time.strftime('%Y-%m-%d', time.gmtime(time_range[1]+24*60*60))
        
        if magmin is None:
            magmin = ''
        else:
            magmin = '%g' % magmin
       
        url = ('http://geofon.gfz-potsdam.de/db/eqinfo.php?' + '&'.join([
            'datemin=%s' % dmin,
            'datemax=%s' % dmax,
            'latmin=%g' % latmin,
            'latmax=%g' % latmax,
            'lonmin=%g' % lonmin,
            'lonmax=%g' % lonmax,
            'magmin=%s' % magmin,
            'fmt=html',
            'nmax=%i' % nmax]))
            
        page = urllib2.urlopen(url).read()
        events = self._parse_events_page(page)
        for ev in events:
            if time_range[0] <= ev.time and ev.time <= time_range[1]:
                self.events[ev.name] = ev
                yield ev.name

    def get_event(self, name):
        if name in self.events:
            return self.events[name]
        
        url = 'http://geofon.gfz-potsdam.de/db/eqpage.php?id=%s' % name
        page = urllib2.urlopen(url).read()
        d = self._parse_event_page(page)
        ev = model.Event(
              lat=d['epicenter'][0],
              lon=d['epicenter'][1], 
              time=d['time'],
              name=name,
              depth=d['depth'],
              magnitude=d['magnitude'],
              region=d['region'],
              catalog='GEOFON')
              
        return ev

        
    def _parse_events_page(self, page):
        page = re.sub('&nbsp([^;])', '&nbsp;\\1', page)  # fix broken &nbsp; tags
        page = re.sub('border=0', 'border="0"', page)
        try:
            doc = minidom.parseString(page)
        except ExpatError, e:
            lines = page.splitlines()
            r = max(e.lineno - 1 - 2, 0), min(e.lineno - 1 + 3, len(lines))
            ilineline = zip(range(r[0] + 1, r[1] + 1), lines[r[0]:r[1]])

            logger.error('A problem occurred while parsing HTML from GEOFON page (line=%i, col=%i):\n\n' % (e.lineno, e.offset) +
                         '\n'.join(['  line %i: %s' % (iline, line[:e.offset] + '### HERE ###' + line[e.offset:]) for (iline, line) in ilineline]))
            logger.error('... maybe the format of the GEOFON web catalog has changed.')
            raise
        
        events = []
        for tr in doc.getElementsByTagName("tr"):
            tds = tr.getElementsByTagName("td")
            if len(tds) == 9:
                elinks = tds[0].getElementsByTagName("a")
                if len(elinks) != 1: continue
                if not 'href' in elinks[0].attributes.keys(): continue
                link = elinks[0].attributes['href'].value.encode('ascii')
                m = re.search(r'\?id=(gfz[0-9]+[a-z]+)$', link)
                if not m: continue
                eid = m.group(1)
                vals = [ getTextR(td).encode('ascii') for td in tds ]
                tevent = calendar.timegm(time.strptime(vals[0][:19], '%Y-%m-%d %H:%M:%S'))
                mag = float(vals[1])
                epicenter = parse_location( vals[2]+' '+vals[3] )
                depth = float(vals[4])*1000.
                region = vals[7]
                ev = model.Event(
                    lat=epicenter[0],
                    lon=epicenter[1], 
                    time=tevent,
                    name=eid,
                    depth=depth,
                    magnitude=mag,
                    region=region,
                    catalog='GEOFON')
                
                logger.debug('Adding event from GEOFON catalog: %s' % ev)
                
                events.append(ev)

        return events
Example #47
0
 def writeObject(self, obj, output, setReferencePosition=False):
     """
     Public method to serialize the given object to the output.
     
     @param obj object to be serialized
     @param output output to be serialized to (bytes)
     @param setReferencePosition flag indicating, that the reference
         position the object was written to shall be recorded (boolean)
     @return new output
     """
     def proc_variable_length(format, length):
         result = b''  # bytes, since struct.pack output is appended below
         if length > 0b1110:
             result += pack('!B', (format << 4) | 0b1111)
             result = self.writeObject(length, result)
         else:
             result += pack('!B', (format << 4) | length)
         return result
     
     if setReferencePosition:
         self.referencePositions[obj] = len(output)
     
     if obj is None:
         output += pack('!B', 0b00000000)
     elif isinstance(obj, BoolWrapper):
         if obj.value is False:
             output += pack('!B', 0b00001000)
         else:
             output += pack('!B', 0b00001001)
     elif isinstance(obj, Uid):
         size = self.intSize(obj)
         output += pack('!B', (0b1000 << 4) | size - 1)
         output += self.binaryInt(obj)
     elif isinstance(obj, int):
         bytes = self.intSize(obj)
         root = math.log(bytes, 2)
         output += pack('!B', (0b0001 << 4) | int(root))
         output += self.binaryInt(obj)
     elif isinstance(obj, float):
         # just use doubles
         output += pack('!B', (0b0010 << 4) | 3)
         output += self.binaryReal(obj)
     elif isinstance(obj, datetime.datetime):
         timestamp = calendar.timegm(obj.utctimetuple())
         timestamp -= apple_reference_date_offset
         output += pack('!B', 0b00110011)
         output += pack('!d', float(timestamp))
     elif isinstance(obj, Data):
         output += proc_variable_length(0b0100, len(obj))
         output += obj
     elif isinstance(obj, str):
         # Python 3 uses unicode strings only
         bytes = obj.encode('utf_16_be')
         output += proc_variable_length(0b0110, len(bytes) // 2)
         output += bytes
     elif isinstance(obj, HashableWrapper):
         obj = obj.value
         if isinstance(obj, (set, list, tuple)):
             if isinstance(obj, set):
                 output += proc_variable_length(0b1100, len(obj))
             else:
                 output += proc_variable_length(0b1010, len(obj))
         
             objectsToWrite = []
             for objRef in obj:
                 (isNew, output) = self.writeObjectReference(objRef, output)
                 if isNew:
                     objectsToWrite.append(objRef)
             for objRef in objectsToWrite:
                 output = self.writeObject(
                     objRef, output, setReferencePosition=True)
         elif isinstance(obj, dict):
             output += proc_variable_length(0b1101, len(obj))
             keys = []
             values = []
             objectsToWrite = []
             for key, value in obj.items():
                 keys.append(key)
                 values.append(value)
             for key in keys:
                 (isNew, output) = self.writeObjectReference(key, output)
                 if isNew:
                     objectsToWrite.append(key)
             for value in values:
                 (isNew, output) = self.writeObjectReference(value, output)
                 if isNew:
                     objectsToWrite.append(value)
             for objRef in objectsToWrite:
                 output = self.writeObject(
                     objRef, output, setReferencePosition=True)
     return output
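The marker bytes that proc_variable_length emits above pack a 4-bit object-type code next to a 4-bit length, with 0b1111 in the length nibble signalling that the real length follows as a separate integer object. A small standalone check of the fits-in-a-nibble case (the helper name is illustrative):

from struct import pack, unpack

def marker_byte(format, length):
    assert length <= 0b1110, 'longer lengths are encoded as a trailing int object'
    return pack('!B', (format << 4) | length)

b = marker_byte(0b0110, 5)              # e.g. a 5-code-unit UTF-16 string
code = unpack('!B', b)[0]
assert (code >> 4, code & 0x0F) == (0b0110, 5)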
Example #48
0
def fake_monotonic():
    if _should_use_real_time():
        return real_monotonic()
    current_time = get_current_time()
    return calendar.timegm(current_time.timetuple()) + current_time.microsecond / 1000000.0
Example #49
0
    def __add_send_item(self, record, event_type):
        send_item = {"host": "", "key": "", "value": "", "clock": ""}
        message = json.loads(record['Sns']['Message'])
        if event_type == "AutoScaling":
            send_item["host"] = "AutoScaling"
            value = []
            value.append("Event : " + message['Event'])
            value.append("Service : " + message['Service'])
            value.append("Description : " + message['Description'])
            value.append("AvailabilityZone : " +
                         message['Details']['Availability Zone'])
            value.append("AutoScalingGroupName : " +
                         message['AutoScalingGroupName'])
            value.append("Cause : " + message['Cause'])
            value.append("StatusCode : " + message['StatusCode'])
            value.append("StatusMessage : " + message['StatusMessage'])
            send_item["value"] = os.linesep.join(value)

        elif event_type == "RDS":
            send_item["host"] = message["Source ID"]

            value = []
            value.append("EventSource : " + message["Event Source"])
            value.append("IdentifierLink : " + message["Identifier Link"])
            value.append("SourceId : " + message["Source ID"])
            value.append("EventId : " + message["Event ID"])
            value.append("EventMessage : " + message["Event Message"])
            value.append("TopicArn : " + record['Sns']['TopicArn'])
            send_item["value"] = os.linesep.join(value)

        elif event_type == "CloudWatch":
            send_item["host"] = message['Trigger']['Dimensions'][0]['value']
            value = []
            value.append("NewStatus : " + message['NewStateValue'])
            value.append("MetricNamespace : " +
                         message['Trigger']['Namespace'])
            value.append("Dimensions : " +
                         message['Trigger']['Dimensions'][0]['name'] + " = " +
                         message['Trigger']['Dimensions'][0]['value'])
            value.append("MetricName : " + message['Trigger']['MetricName'])
            value.append("NewStateReason : " + message['NewStateReason'])
            value.append("Region : " + message['Region'])
            value.append("TopicArn : " + record['Sns']['TopicArn'])
            send_item["value"] = os.linesep.join(value)

        elif event_type == "EC2RDS":
            send_item["host"] = message['Trigger']['Namespace'].replace(
                'AWS/', "")
            value = []
            value.append("NewStatus : " + message['NewStateValue'])
            value.append("Dimensions : " +
                         json.dumps(message['Trigger']['Dimensions']))
            value.append("MetricName : " + message['Trigger']['MetricName'])
            value.append("NewStateReason : " + message['NewStateReason'])
            value.append("Region :" + message['Region'])
            value.append("TopicArn : " + record['Sns']['TopicArn'])
            send_item["value"] = os.linesep.join(value)

        else:
            send_item["host"] = "Other"
            # For unrecognized event types, forward the parsed message as the value.
            send_item["value"] = json.loads(record['Sns']['Message'])

        send_item["key"] = "sns.event"
        event_timestamp = dateutil.parser.parse(record['Sns']['Timestamp'])
        send_item["clock"] = calendar.timegm(event_timestamp.utctimetuple())
        self.send_items.append(send_item)
Example #50
0
    def _notes_rss_poll(self):
        url_templ = 'https://api.openstreetmap.org/api/0.6/notes/%d.json'
        short_text_len = 64

        try:
            if not os.path.exists('notes_state.txt'):
                log.error("No notes_state file found to poll note feed.")
                return

            notes_state = self.readState('notes_state.txt')
            log.info('Note state is %s' % json.dumps(notes_state))
            last_note_id = int(notes_state.get('last_note_id', 0))
            last_note_time = isoToDatetime(notes_state.get('last_note_timestamp', ''))

            while True:
                last_note_id += 1
                url = url_templ % last_note_id
                log.info("Requesting %s" % url)
                try:
                    req = urllib2.Request(url, headers={'User-Agent': userAgent})
                    result = urllib2.urlopen(req)
                    note = json.load(result)
                    attrs = note.get('properties')
                    if len(attrs['comments']) > 0:
                        opening_comment = attrs['comments'][0]
                        author = opening_comment['user'].encode('utf-8') if 'user' in opening_comment else 'Anonymous'
                        full_text = _note_cleaning_re.sub(' ', opening_comment['text'])
                        short_text = ((full_text[:short_text_len-1] + u'\u2026') if len(full_text) > short_text_len else full_text).encode('utf-8')
                    else:
                        author = "Unknown"
                        short_text = "-No comment specified-"

                    date_created = datetime.datetime.strptime(attrs['date_created'], "%Y-%m-%d %H:%M:%S %Z")
                    geo = note.get('geometry').get('coordinates')
                    link = 'https://osm.org/note/%d' % last_note_id
                    location = ""
                    country_code = None

                    if stathat:
                        ts = calendar.timegm(date_created.timetuple())
                        stathat.ez_post_count(stathatEmail, 'new notes', 1, ts)

                    last_note_time = date_created

                    if (datetime.datetime.utcnow() - last_note_time).total_seconds() < 3600:
                        # Only reverse-geocode for newer notes
                        try:
                            country_code, location = self.reverse_geocode(geo[1], geo[0])
                        except urllib2.HTTPError as e:
                            log.error("HTTP problem when looking for note location: %s" % (e))

                    response = '%s posted a new note%s %s ("%s")' % (author, location, link, short_text)
                    log.info("Response is %s" % response)
                    irc = world.ircs[0]
                    for chan in irc.state.channels:
                        if chan == "#osm-bot" or country_code in _note_edit_region_channels.get(chan, ()):
                            msg = ircmsgs.privmsg(chan, response)
                            world.ircs[0].queueMsg(msg)
                except urllib2.HTTPError as e:
                    if e.code == 410:
                        log.info("%s is gone. Skipping." % last_note_id)
                        last_note_id += 1
                    elif e.code == 404:
                        log.info("%s doesn't exist. Stopping." % last_note_id)
                        last_note_id -= 1

                        # If it's been 15 minutes, check the RSS feed for the latest note
                        if (datetime.datetime.utcnow() - last_note_time).total_seconds() > 900:
                            note_feed_url = "https://api.openstreetmap.org/api/0.6/notes/feed"
                            req = urllib2.Request(note_feed_url, headers={'User-Agent': userAgent})
                            xml = urllib2.urlopen(req)
                            tree = ElementTree.ElementTree(file=xml)

                            note_id = None
                            note_time = None
                            for item in tree.iterfind('channel/item'):
                                title = item.findtext('title')
                                if 'new note' in title:
                                    note_time_str = item.findtext('pubDate')
                                    note_time = datetime.datetime.strptime(note_time_str, '%a, %d %b %Y %H:%M:%S +0000')

                                    if last_note_time > note_time:
                                        # Stop iteration if the note at this point in the feed is older than the most recent
                                        # so note_id ends up with the oldest note *after* the last_note_time
                                        break

                                    guid = item.findtext('guid')
                                    note_id = int(guid.rsplit('/', 1)[1].split('#', 1)[0])

                            if note_id and note_time:
                                last_note_time = note_time
                                last_note_id = note_id

                        # If it's been 60 minutes, tell the admin something's broken
                        if (datetime.datetime.utcnow() - last_note_time).total_seconds() > 3600:
                            msg = ircmsgs.privmsg(privmsgNick, "No new notes since %s." % prettyDate(last_note_time))
                            world.ircs[0].queueMsg(msg)

                        break
                    else:
                        raise

            with open('notes_state.txt', 'w') as f:
                f.write('last_note_id=%s\n' % last_note_id)
                f.write('last_note_timestamp=%sZ\n' % last_note_time.isoformat())

        except Exception as e:
            log.error("Exception processing new notes: %s" % traceback.format_exc(e))
Example #51
0
    def test_constructor_with_stringoffset(self):
        # GH 7833
        base_str = '2014-07-01 11:00:00+02:00'
        base_dt = datetime.datetime(2014, 7, 1, 9)
        base_expected = 1404205200000000000

        # confirm base representation is correct
        import calendar
        self.assertEqual(calendar.timegm(base_dt.timetuple()) * 1000000000, base_expected)

        tests = [(base_str, base_expected),
                 ('2014-07-01 12:00:00+02:00', base_expected + 3600 * 1000000000),
                 ('2014-07-01 11:00:00.000008000+02:00', base_expected + 8000),
                 ('2014-07-01 11:00:00.000000005+02:00', base_expected + 5)]

        tm._skip_if_no_pytz()
        tm._skip_if_no_dateutil()
        import pytz
        import dateutil
        timezones = [(None, 0), ('UTC', 0), (pytz.utc, 0),
                     ('Asia/Tokyo', 9), ('US/Eastern', -4),
                     ('dateutil/US/Pacific', -7),
                     (pytz.FixedOffset(-180), -3), (dateutil.tz.tzoffset(None, 18000), 5)]

        for date_str, expected in tests:
            for result in [Timestamp(date_str)]:
                # only with timestring
                self.assertEqual(result.value, expected)
                self.assertEqual(tslib.pydt_to_i8(result), expected)

                # re-creation shouldn't affect to internal value
                result = Timestamp(result)
                self.assertEqual(result.value, expected)
                self.assertEqual(tslib.pydt_to_i8(result), expected)

            # with timezone
            for tz, offset in timezones:
                result = Timestamp(date_str, tz=tz)
                expected_tz = expected
                self.assertEqual(result.value, expected_tz)
                self.assertEqual(tslib.pydt_to_i8(result), expected_tz)

                # should preserve tz
                result = Timestamp(result)
                self.assertEqual(result.value, expected_tz)
                self.assertEqual(tslib.pydt_to_i8(result), expected_tz)

                # should convert to UTC
                result = Timestamp(result, tz='UTC')
                expected_utc = expected
                self.assertEqual(result.value, expected_utc)
                self.assertEqual(tslib.pydt_to_i8(result), expected_utc)

        # This should be 2013-11-01 05:00 in UTC -> converted to Chicago tz
        result = Timestamp('2013-11-01 00:00:00-0500', tz='America/Chicago')
        self.assertEqual(result.value, Timestamp('2013-11-01 05:00').value)
        expected_repr = "Timestamp('2013-11-01 00:00:00-0500', tz='America/Chicago')"
        self.assertEqual(repr(result), expected_repr)
        self.assertEqual(result, eval(repr(result)))

        # This should be 2013-11-01 05:00 in UTC -> converted to Tokyo tz (+09:00)
        result = Timestamp('2013-11-01 00:00:00-0500', tz='Asia/Tokyo')
        self.assertEqual(result.value, Timestamp('2013-11-01 05:00').value)
        expected_repr = "Timestamp('2013-11-01 14:00:00+0900', tz='Asia/Tokyo')"
        self.assertEqual(repr(result), expected_repr)
        self.assertEqual(result, eval(repr(result)))
Example #52
0
import calendar

def datetime_to_ms(dt):
    """
    Converts a datetime to a millisecond-accuracy timestamp.
    """
    seconds = calendar.timegm(dt.utctimetuple())
    return seconds * 1000 + dt.microsecond // 1000  # integer division keeps the result an int
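A quick usage check for datetime_to_ms, assuming a naive datetime interpreted as UTC (2014-07-01 09:00 UTC is 1404205200 seconds):

import datetime

dt = datetime.datetime(2014, 7, 1, 9, 0, 0, 123000)
assert datetime_to_ms(dt) == 1404205200123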
Example #53
0
    def _update_kube_events(self, instance, pods_list, event_items):
        """
        Process kube events and send Datadog events.
        The namespace filtering is done here instead of in KubeEventRetriever
        to avoid interfering with service discovery.
        """
        node_ip, node_name = self.kubeutil.get_node_info()
        self.log.debug('Processing events on {} [{}]'.format(
            node_name, node_ip))

        k8s_namespaces = instance.get('namespaces', DEFAULT_NAMESPACES)
        if not isinstance(k8s_namespaces, list):
            self.log.warning(
                'Configuration key "namespaces" is not a list: fallback to the default value'
            )
            k8s_namespaces = DEFAULT_NAMESPACES

        # handle old config value
        if 'namespace' in instance and instance.get('namespace') not in (
                None, 'default'):
            self.log.warning(
                '''The 'namespace' parameter is deprecated and will stop being supported starting '''
                '''from 5.13. Please use 'namespaces' and/or 'namespace_name_regexp' instead.'''
            )
            k8s_namespaces.append(instance.get('namespace'))

        if self.k8s_namespace_regexp:
            namespaces_endpoint = '{}/namespaces'.format(
                self.kubeutil.kubernetes_api_url)
            self.log.debug('Kubernetes API endpoint to query namespaces: %s' %
                           namespaces_endpoint)

            namespaces = self.kubeutil.retrieve_json_auth(
                namespaces_endpoint).json()
            for namespace in namespaces.get('items', []):
                name = namespace.get('metadata', {}).get('name', None)
                if name and self.k8s_namespace_regexp.match(name):
                    k8s_namespaces.append(name)

        k8s_namespaces = set(k8s_namespaces)

        for event in event_items:
            event_ts = calendar.timegm(
                time.strptime(event.get('lastTimestamp'),
                              '%Y-%m-%dT%H:%M:%SZ'))
            involved_obj = event.get('involvedObject', {})

            # filter events by whitelisted namespaces (an empty namespace belongs to 'default')
            if involved_obj.get('namespace', 'default') not in k8s_namespaces:
                continue

            tags = self.kubeutil.extract_event_tags(event)
            tags.extend(instance.get('tags', []))
            tags += ['cluster_name:%s' % self.cluster_name]

            title = '{} {} on {}'.format(involved_obj.get('name'),
                                         event.get('reason'), node_name)
            message = event.get('message')
            source = event.get('source')
            k8s_event_type = event.get('type')
            alert_type = K8S_ALERT_MAP.get(k8s_event_type, 'info')

            if source:
                message += '\nSource: {} {}\n'.format(
                    source.get('component', ''), source.get('host', ''))
            msg_body = "%%%\n{}\n```\n{}\n```\n%%%".format(title, message)
            dd_event = {
                'timestamp': event_ts,
                'host': node_ip,
                'event_type': EVENT_TYPE,
                'msg_title': title,
                'msg_text': msg_body,
                'source_type_name': EVENT_TYPE,
                'alert_type': alert_type,
                'event_object':
                'kubernetes:{}'.format(involved_obj.get('name')),
                'tags': tags,
            }
            self.event(dd_event)
Example #54
0
import calendar
import struct
import time

def get_windows_timestamp():
    # Windows FILETIME: 100-nanosecond intervals since 1601-01-01,
    # packed into a little-endian 64-bit structure.
    timestamp = struct.pack(
        '<q', (116444736000000000 + calendar.timegm(time.gmtime()) * 10000000))

    return timestamp
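A converse check for the FILETIME conversion above; the constant 116444736000000000 is the count of 100 ns intervals between 1601-01-01 and the Unix epoch (the helper name here is illustrative):

import calendar
import struct
import time

def windows_to_unix(ts_bytes):
    (filetime,) = struct.unpack('<q', ts_bytes)
    return (filetime - 116444736000000000) // 10000000

unix_now = calendar.timegm(time.gmtime())
assert abs(windows_to_unix(get_windows_timestamp()) - unix_now) <= 1  # allow a tick boundary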
Example #55
0
json_string = f.read()
parsed_json = json.loads(json_string)

high = parsed_json['high']
sock.sendall('xrp.high ' + str(high) + ' source=' + sourceName + ' \n')
last = parsed_json['last']
sock.sendall('xrp.last ' + str(last) + ' source=' + sourceName + ' \n')

bid = parsed_json['bid']
sock.sendall('xrp.bid ' + str(bid) + ' source=' + sourceName + ' \n')

low = parsed_json['low']
sock.sendall('xrp.low ' + str(low) + ' source=' + sourceName + ' \n')

ask = parsed_json['ask']
sock.sendall('xrp.ask ' + str(ask) + ' source=' + sourceName + ' \n')

ropen = parsed_json['open']
sock.sendall('xrp.open ' + str(ropen) + ' source=' + sourceName + ' \n')

volume = parsed_json['volume']
sock.sendall('xrp.volume ' + str(volume) + ' source=' + sourceName + ' \n')

syslog.syslog('Ripple Price logged at ' + str(calendar.timegm(time.gmtime())) + ' ' + str(parsed_json['timestamp']))

f.close()
sock.close()

Example #56
0
        ET.SubElement(note_attr, "altitude").text = '0'

    # reminder and reminder-order from the application deadline
    if position.find('jp_application_deadline') is not None and position.find(
            'jp_application_deadline').text is not None:
        datevar = datetime.datetime.strptime(
            position.find('jp_application_deadline').text, "%Y-%m-%d %H:%M:%S")
        year_corr = max(min(datevar.year,
                            datetime.date.today().year + 1),
                        datetime.date.today().year)
        try:
            datevar = datetime.date(year_corr, datevar.month, datevar.day)
        except ValueError:
            # February 29 in a non-leap year: fall back to the previous day.
            datevar = datetime.date(year_corr, datevar.month, datevar.day - 1)
        ET.SubElement(note_attr, "reminder-order").text = str(
            calendar.timegm(datevar.timetuple()))
        ET.SubElement(
            note_attr,
            "reminder-time").text = datevar.strftime("%Y%m%dT%H%M%SZ")

    # clean up the objects
    note_attr = None
    note = None

with open(evernote_xmlfile, 'w') as f:
    f.write(
        '<?xml version="1.0" encoding="UTF-8" ?>\n<!DOCTYPE en-export SYSTEM "http://xml.evernote.com/pub/evernote-export3.dtd">\n'
    )
    ET.ElementTree(root2).write(f, 'utf-8')
Example #57
0
def process_request(req):

    # --------------------------
    # For session
    # s = requests.Session()

    # ---------------------------

    global unknown_flag
    global employ_id
    global email
    global feedback, survey_details
    global feed
    req.update({
        "date": datetime.date(datetime.now()).isoformat(),
        "time": datetime.time(datetime.now()).isoformat()
    })

    # req.update({"employ_id":employ_id["employ_id"]})
    # today = date.today()
    # req.update({"today date":today.strftime("%B %d, %Y")})
    now = datetime.now()
    timestamp = datetime.timestamp(now)
    timestamp1 = int(timestamp * (10**3))
    req.update({"timestamp": timestamp1})

    try:
        # planUS.insert(req, check_keys=False)
        history.insert(req, check_keys=False)
    except:
        pass  # best-effort history insert; ignore storage errors
    try:
        action = req.get("queryResult").get("action")
        knowledge = req.get('queryResult').get('intent').get('displayName')
        if action == "input.welcome":
            print("Webhook Successfully connected.")

        elif action == "emp_id":
            parameters = req.get("queryResult").get("parameters")
            parameters["employ_id"] = parameters["employ_id"].upper()
            print(parameters)

            #For session----------------------------
            # print("------------")
            # setcookiesurl = "http://httpbin.org/cookies/set"
            # getcookiesurl = "http://httpbin.org/cookies"
            # s.get(setcookiesurl, params=parameters)
            # r = s.get(getcookiesurl)
            # print(r.text)
            # print("------------")
            # --------------------------------------------

            filtered_parameters = {
                key: val
                for key, val in parameters.items() if val != ''
            }  # Removing empty parameters
            print(filtered_parameters)
            contact_info = employee_details.find_one(filtered_parameters)
            if parameters and contact_info:
                # req.update({"employ_id": employ_id["employ_id"]})

                employ_id = filtered_parameters
                print("employ id " + str(employ_id))
                print(type(employ_id))
                email = contact_info.get("email_ID")
                print(email)
                to_email = email
                otp = random.randrange(1000, 9999)
                employee_details.find_one_and_update(
                    filtered_parameters, {"$set": {
                        "temp_otp": otp
                    }},
                    upsert=True)
                print(otp)
                subject = "Qrata - verification code"
                body = "Your verification code is :- " + str(
                    otp
                ) + " please enter the code in the chatbot for completing your verification process"
                utils.send_mail(to_email, subject, body)
                message = {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [
                        {
                            "text": {
                                "text": [
                                    "Enter the OTP send to your registered Email-ID"
                                ]
                            },
                            "platform": "FACEBOOK"
                        },
                    ],
                }

            else:
                message = {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [{
                        "text": {
                            "text": ["Employee ID not valid"]
                        },
                        "platform": "FACEBOOK"
                    }, {
                        "quickReplies": {
                            "title": "Try again",
                            "quickReplies": [
                                "Get Started",
                                "Existing Employee",
                            ]
                        },
                        "platform": "FACEBOOK"
                    }],
                }

            return message

        elif action == "otp":
            otp = req.get("queryResult").get("queryText")
            print(otp)
            print(type(otp))
            contact_info = employee_details.find_one(employ_id)
            original_otp = contact_info.get("temp_otp")

            print(employ_id)
            print(original_otp)
            print(type(original_otp))
            if int(otp) == original_otp:

                return {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [{
                        "quickReplies": {
                            "title":
                            "Thank you for verification. I am Qi, your virtual HR assistant and I can help you in these following things.",
                            "quickReplies": [
                                "My Leave & Absence", "My General Support",
                                "My Pay & Benefits", "Happify Me",
                                "My Learning"
                            ]
                        },
                        "platform": "FACEBOOK"
                    }, {
                        "text": {
                            "text": [""]
                        }
                    }]
                }
            else:
                return {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [{
                        "quickReplies": {
                            "title":
                            "You employee id or otp is incorrect. Please try again ",
                            "quickReplies": ["Existing Employee"]
                        },
                        "platform": "FACEBOOK"
                    }, {
                        "text": {
                            "text": [""]
                        }
                    }]
                }

        elif "Knowledge.KnowledgeBase" in knowledge:
            answer = (req.get('queryResult').get('fulfillmentMessages'))

            return {
                "source":
                "webhook",
                "fulfillmentMessages":
                answer,
                "outputContexts": [{
                    "name":
                    "projects/hr-bot-2-0-qfiwte/agent/sessions/8361885c-2b57-509c-c8b4-a86057fac036/contexts/existingemployee",
                    "lifespanCount": 1,
                    "parameters": {
                        "number": 1234,
                        "number.original": "1234"
                    }
                }]
            }

        for i in range(0, 5):
            question, op1, op2, op3, op4, op5, ans, option, hint1, hint2, hint3, hint4, hint5, quest = source.data(
                i)

            if action == "question" + str(i + 1):
                answer = req.get("queryResult").get("parameters").get("ans")
                data = {"question" + str(i): answer}
                survey_details.update(data)
                # survey_details.pop("question0")

                print(survey_details)

                # calling first question
                return {
                    "fulfillmentText":
                    "This is a text response",
                    "fulfillmentMessages": [
                        # {
                        #     "card": {
                        #         "title": quest,
                        #     }
                        # },
                        {
                            "text": {
                                "text": [quest]
                            }
                        },
                        {
                            "quickReplies": {
                                "quickReplies": ["1", "2", "3", "4", "5"]
                            }
                        },
                    ]
                }

        if action == "completed":
            answer = req.get("queryResult").get("parameters").get("ans")
            data = {"question5": answer}
            survey_details.update(data)
            print(survey_details)
            survey_details.pop("question0")
            survey.insert(survey_details)
            print(survey_details)
            survey_details = {}

        elif action == "askhr":
            query = req.get("queryResult").get("parameters").get("query")
            print(query)
            token = random.randint(1000, 9999)
            issue = "ISU" + str(token)
            tickets.insert_one({
                "issue_no": issue,
                "token_id": tickets.count() + 1,
                "description": query,
                "priority": "high",
                "status": "open",
                "created_date": "date",
                # "created_date": datetime.datetime.now().isoformat(),
                "due_date": "",
                "comment": "",
            })
            print(tickets)
            return {
                "source":
                "webhook",
                "fulfillmentMessages": [{
                    "text": {
                        "text": ["Issue No : " + issue]
                    },
                    "platform": "FACEBOOK"
                }, {
                    "quickReplies": {
                        "title":
                        "Great. I will notify our HR about your query, and they resolve it as soon as "
                        "possible.",
                        "quickReplies": [
                            "Verify Documents", "See Induction",
                            "Offer Letter", "ASK HR ", "Code of Compliance",
                            "On boarding Feedback"
                        ]
                    },
                    "platform": "FACEBOOK"
                }]
            }

        elif action == "new_joinee":
            parameters = req.get("queryResult").get("parameters")
            print(parameters)
            parameters["email_id"] = parameters["email_id"].lower()
            print("lwer")
            print(parameters)
            filtered_parameters = {
                key: val
                for key, val in parameters.items() if val != ''
            }  # Removing empty parameters
            print(filtered_parameters)
            contact_info = new_joinee.find_one(filtered_parameters)
            print(contact_info)
            if parameters and contact_info:
                email = parameters
                email_id = contact_info.get("email_id")
                to_email = email_id
                otp = random.randrange(1000, 9999)
                new_joinee.find_one_and_update(filtered_parameters,
                                               {"$set": {
                                                   "otp": otp
                                               }},
                                               upsert=True)
                print(otp)
                subject = "Qrata - Verification OTP"
                body = "Your verification code is :- " + str(
                    otp
                ) + " please enter the code in the chatbot for completing your verification process"
                utils.send_mail(to_email, subject, body)
                message = {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [
                        {
                            "text": {
                                "text": [
                                    "Enter the OTP send to your registered Email-ID"
                                ]
                            },
                            "platform": "FACEBOOK"
                        },
                    ],
                }

            else:
                message = {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [{
                        "text": {
                            "text":
                            ["Sorry !! your Email ID is not registered "]
                        },
                        "platform": "FACEBOOK"
                    }, {
                        "quickReplies": {
                            "title": "What would you like to do next?",
                            "quickReplies":
                            ["Existing Employee", "New Joinee"]
                        },
                        "platform": "FACEBOOK"
                    }],
                }

            return message

        elif action == "feedback.score.1":
            score1 = req.get("queryResult").get("parameters").get("number")
            feedback.append(score1)
            feed["score1"] = score1

            print(feedback)

        elif action == "feedback.score.2":
            score2 = req.get("queryResult").get("parameters").get("number")
            feedback.append(score2)
            feed["score2"] = score2

            print(feedback)
        #
        # elif action == "Feedback":
        #     userfeedback = req.get("queryResult").get("parameters").get("feedback")
        #     # print(userfeedback)
        #
        #     # print(feedback)
        #     # print("****************************************************************************************************************************************")
        #     # print(feedback)

        elif action == "newjoinee.otp":
            otp = req.get("queryResult").get("queryText")
            print(otp)
            print(type(otp))
            contact_info = new_joinee.find_one(email)
            print(email)
            original_otp = contact_info.get("otp")

            print(original_otp)
            print(type(original_otp))
            name = contact_info.get("name")

            if int(otp) == original_otp:

                return {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [
                        {
                            "text": {
                                "text": [
                                    "Congratulations " + name +
                                    " on being part of the team! The whole company welcomes you and we look forward to a successful journey with you! Welcome aboard!"
                                    # "Hi "+name+ ", welcome to Qrata !! "
                                ]
                            }
                        },
                        {
                            "payload": {
                                "facebook": {
                                    "attachment": {
                                        "payload": {
                                            "elements": [{
                                                "url":
                                                "https://www.facebook.com/109485067074411/videos/504973897123117/",
                                                "media_type": "video"
                                            }],
                                            "template_type":
                                            "media"
                                        },
                                        "type": "template"
                                    }
                                }
                            },
                            "platform": "FACEBOOK"
                        },
                        {
                            "quickReplies": {
                                "title":
                                "Onboarding Menu",
                                "quickReplies": [
                                    "Verify Documents", "See Induction",
                                    "Offer Letter", "ASK HR ",
                                    "Code of Compliance", "Onboarding Feedback"
                                ]
                            },
                            "platform": "FACEBOOK"
                        },
                    ]
                }
            else:
                return {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [{
                        "quickReplies": {
                            "title": " OTP Not valid",
                            "quickReplies":
                            ["Existing Employee", "New Joinee"]
                        },
                        "platform": "FACEBOOK"
                    }, {
                        "text": {
                            "text": [""]
                        }
                    }]
                }

        elif action == "remaining_leaves":
            if employ_id:
                contact_info = employee_details.find_one(employ_id)
                remaining_leave = contact_info.get("leaves")
                print(employ_id)
                print(remaining_leave)
                return {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [{
                        "text": {
                            "text": [
                                "You have " + str(remaining_leave) +
                                " leaves out of 18 and these are going to expire by 31 December, 2019."
                            ]
                        },
                        "platform": "FACEBOOK"
                    }, {
                        "quickReplies": {
                            "title":
                            "What would you like to do next?",
                            "quickReplies": [
                                "Apply for Leave", "Cancel a Leave",
                                "Get Started"
                            ]
                        },
                        "platform": "FACEBOOK"
                    }]
                }
            else:
                return {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [{
                        "text": {
                            "text": [
                                "please validate yourself as a existing employee"
                            ]
                        },
                        "platform": "FACEBOOK"
                    }, {
                        "quickReplies": {
                            "title": "What would you like to do next?",
                            "quickReplies": ["existing employee"]
                        },
                        "platform": "FACEBOOK"
                    }]
                }

        elif action == "request.leave":
            date_string = req.get("queryResult").get("parameters").get("date")
            return {
                "source":
                "webhook",
                "fulfillmentMessages": [{
                    "text": {
                        "text": [
                            "Okay, you applied a leave for " +
                            utils.date2text(date_string) +
                            ",  a mail has been send to your manager for approval "
                        ]
                    },
                    "platform": "FACEBOOK"
                }, {
                    "quickReplies": {
                        "title": "What would you like to do next?",
                        "quickReplies": ["Get Started", "Check Leave Balance"]
                    },
                    "platform": "FACEBOOK"
                }]
            }
        elif action == "request.vacation":
            start_date = req.get("queryResult").get("parameters").get(
                "date-period").get("startDate")
            end_date = req.get("queryResult").get("parameters").get(
                "date-period").get("endDate")
            return {
                "source":
                "webhook",
                "fulfillmentMessages": [{
                    "text": {
                        "text": [
                            "Okay, I will request your manager to grant you a leave from "
                            + utils.date2text(start_date) + " to " +
                            utils.date2text(end_date)
                        ]
                    },
                    "platform": "FACEBOOK"
                }, {
                    "quickReplies": {
                        "title": "What would you like to do next?",
                        "quickReplies": [
                            "Get Started",
                            "Check Leave Balance",
                        ]
                    },
                    "platform": "FACEBOOK"
                }],
            }

        elif action == "ProvideSalarySlips.TakeEmailAddress":
            to_email = req.get("queryResult").get("parameters").get("email")
            start_date = req.get("queryResult").get("parameters").get(
                "date-period").get("startDate")
            end_date = req.get("queryResult").get("parameters").get(
                "date-period").get("endDate")
            subject = "Salary Slips"
            body = "This is mail to provide a salary slips from " + utils.date2text(start_date) + " to " + \
                   utils.date2text(end_date)
            utils.send_mail(to_email, subject, body)

            return {
                "source":
                "webhook",
                "fulfillmentMessages": [{
                    "text": {
                        "text": [
                            "Your Salary Slips are on your way. Please check your mail."
                        ]
                    },
                    "platform": "FACEBOOK"
                }, {
                    "quickReplies": {
                        "title":
                        "What would you like to do next?",
                        "quickReplies": [
                            "Get Started", "Reimbursement",
                            "My Health Insurance"
                        ]
                    },
                    "platform": "FACEBOOK"
                }],
            }

        elif action == "find.colleague":
            parameters = req.get("queryResult").get("parameters")
            filtered_parameters = {
                key: val
                for key, val in parameters.items() if val != ''
            }  # Removing empty parameters
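            # Dialogflow sends every declared parameter, using "" when the user
            # did not supply a value, so the empty ones must be dropped before
            # the exact-match Mongo lookup below.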
            contact_info = employee_details.find_one(filtered_parameters)

            if contact_info and filtered_parameters:
                message = {
                    "card": {
                        "title":
                        contact_info.get("name"),
                        "subtitle":
                        contact_info.get('designation') + " | " +
                        contact_info.get('department') + "\n" + "Phone: " +
                        str(contact_info.get("contact_number")),
                        "imageUri":
                        "https://www.cristianmonroy.com/wp-content/uploads/2017/11/avatars"
                        "-avataaars.png",
                        "buttons": [{
                            "text": "View Profile"
                        }]
                    },
                    "platform": "FACEBOOK"
                }
            else:
                message = {
                    "text": {
                        "text":
                        ["Sorry, I was not able to find the given person."]
                    },
                    "platform": "FACEBOOK"
                }

            return {
                "source":
                "webhook",
                "fulfillmentMessages": [
                    message, {
                        "quickReplies": {
                            "title":
                            "What would you like to do next?",
                            "quickReplies":
                            ["Get Started", "Search other employees"]
                        },
                        "platform": "FACEBOOK"
                    }
                ]
            }

        # elif action == "remaining_leaves":

        elif action == "Feedback":
            userfeedback = req.get("queryResult").get("parameters").get(
                'feedback')

            feedback.append(userfeedback)
            timec = datetime.now().date().isoformat()
            feed["feedback"] = userfeedback
            ts = calendar.timegm(time.gmtime())
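            # calendar.timegm(time.gmtime()) is the current UTC time as integer
            # epoch seconds; feed keeps both this and the ISO date string above.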
            feed["time"] = str(timec)
            feed["timestamp"] = ts
            print(feed)
            text = TextBlob(userfeedback)
            sentiment = text.sentiment.polarity
            subjective = text.sentiment.subjectivity
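            # TextBlob polarity ranges from -1.0 (negative) to +1.0 (positive) and
            # subjectivity from 0.0 (objective) to 1.0 (subjective); the +/-0.15
            # cutoffs below are this bot's own choice, not a TextBlob default.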
            feedback.clear()
            print(subjective)
            if sentiment >= 0.15 or userfeedback == "🙂":
                feed["sentiment"] = "positive"
                feedbackdata.insert_one(feed)
                print(feed)
                message = u"\U0001F600 " + f"We are glad that you like our culture."
                feed.clear()
            elif sentiment <= -0.15 or userfeedback == "☹️":
                feed["sentiment"] = "negative"
                feedbackdata.insert_one(feed)
                print(feed)

                message = "Sorry to hear that. We will make sure to improve our culture and make this " \
                          "a better place to work."
                feed.clear()
            else:
                feed["sentiment"] = "positive"
                feedbackdata.insert_one(feed)
                message = "Alright, I have noted the feedback."
                feed.clear()

            return {
                "source":
                "webhook",
                "fulfillmentMessages": [{
                    "text": {
                        "text": [message]
                    },
                    "platform": "FACEBOOK"
                }, {
                    "quickReplies": {
                        "title": "What would you like to do next?",
                        "quickReplies": ["Get Started", "Submit An idea"]
                    },
                    "platform": "FACEBOOK"
                }]
            }

        elif action == "search_employee":
            inputname = req.get("queryResult").get("parameters").get(
                "name").get("name")
            print(inputname)
            inputname = inputname.lower()
            contact_info = employee_details.find({
                "name": {
                    "$regex": inputname
                }
            }).limit(3)
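            # $regex here is a case-sensitive substring match, which is why the
            # input was lowercased above; this assumes names are stored lowercase.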
            print(contact_info)
            if contact_info.count() != 0:
                return {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [{
                        "card": {
                            "title":
                            emp["name"],
                            "subtitle":
                            emp["designation"] + " | " + "Phone: " + " | " +
                            str(emp["contact_number"]),
                            "imageUri":
                            "https://www.cristianmonroy.com/wp-content/uploads/2017/11/avatars-avataaars"
                            ".png",
                            "buttons": [{
                                "text": "Profile",
                                "postback": emp["profile"]
                            }]
                        },
                        "platform": "FACEBOOK"
                    } for emp in contact_info] + [{
                        "quickReplies": {
                            "title":
                            "What would you like to do next?",
                            "quickReplies":
                            ["Get Started", "Search other employees"]
                        },
                        "platform": "FACEBOOK"
                    }]
                }
            else:
                return {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [{
                        "text": {
                            "text": [
                                "Sorry, I was not able to find the given person."
                            ]
                        },
                        "platform": "FACEBOOK"
                    }, {
                        "quickReplies": {
                            "title":
                            "What would you like to do next?",
                            "quickReplies":
                            ["Get Started", "Search other employees"]
                        },
                        "platform": "FACEBOOK"
                    }]
                }

        elif action == "search_employee_emp":
            parameters = req.get("queryResult").get("parameters")
            print(parameters)
            parameters["employ_id"] = parameters["employ_id"].upper()
            filtered_parameters = {
                key: val
                for key, val in parameters.items() if val != ''
            }  # Removing empty parameters
            print(filtered_parameters)
            contact_info = employee_details.find_one(filtered_parameters)
            if contact_info and filtered_parameters:
                message = {
                    "card": {
                        "title":
                        contact_info.get("name"),
                        "subtitle":
                        contact_info.get('designation') + " | " + "Phone: " +
                        str(contact_info.get("contact_number")),
                        "imageUri":
                        "https://www.cristianmonroy.com/wp-content/uploads/2017/11/avatars-avataaars"
                        ".png",
                        "buttons": [{
                            "text": "View Profile"
                        }]
                    },
                    "platform": "FACEBOOK"
                }
            else:
                message = {
                    "text": {
                        "text":
                        ["Sorry, I was not able to find the given person."]
                    },
                    "platform": "FACEBOOK"
                }
            return {
                "source":
                "webhook",
                "fulfillmentMessages": [
                    message, {
                        "quickReplies": {
                            "title":
                            "What would you like to do next?",
                            "quickReplies":
                            ["Get Started", "Search other employees"]
                        },
                        "platform": "FACEBOOK"
                    }
                ]
            }

        elif action == "show.all.public.holidays":
            state = req.get("queryResult").get("parameters").get("geo-state")
            public_holidays_string = public_holidays[
                public_holidays["State"] == state].to_string(
                    columns=["Date", "Holiday"], header=False, index=False)
            return {
                "source":
                "webhook",
                "fulfillmentMessages": [{
                    "text": {
                        "text": [
                            "Here is the list of all public holidays in " +
                            state + "\n\n" + public_holidays_string
                        ]
                    },
                    "platform": "FACEBOOK"
                }, {
                    "quickReplies": {
                        "title":
                        "What would you like to do next?",
                        "quickReplies": [
                            "Get Started", "Check Leave Balance",
                            "Apply for leave"
                        ]
                    },
                    "platform": "FACEBOOK"
                }]
            }

        elif action == "show.all.jobs":
            jobs_search = jobs.find({
                "statusVisible":
                "enum.Hiring_JobPositionStatusVisible.Public"
            }).limit(10)

            if jobs_search.count() != 0:
                return {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [{
                        "text": {
                            "text": [
                                "Here are some job openings available in our organisation."
                            ]
                        },
                        "platform": "FACEBOOK"
                    }] + [{
                        "card": {
                            "title":
                            job["jobTitle"],
                            "subtitle":
                            job["companyName"] + " | " + job["locality"] +
                            " | " + job["region"],
                            "imageUri":
                            "https://akm-img-a-in.tosshub.com/sites/btmt/images/stories/jobs660_090518050232_103118054303_022119084317.jpg",
                            "buttons": [{
                                "text": "Refer this Job",
                                "postback": job["jobDetailsUrl"]
                            }]
                        },
                        "platform": "FACEBOOK"
                    } for job in jobs_search] + [{
                        "quickReplies": {
                            "title": "What would you like to do next?",
                            "quickReplies": ["Get Started"]
                        },
                        "platform": "FACEBOOK"
                    }]
                }
            else:
                return {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [{
                        "text": {
                            "text": [
                                "Sorry to inform you that currently we don't have any job openings"
                            ]
                        },
                        "platform": "FACEBOOK"
                    }, {
                        "quickReplies": {
                            "title": "What would you like to do next?",
                            "quickReplies": ["Get Started"]
                        },
                        "platform": "FACEBOOK"
                    }]
                }

        elif action == "raise.ticket":
            query = req.get("queryResult").get("parameters").get("query")
            print(query)
            token = random.randint(1000, 9999)
            issue = "ISU" + str(token)
            tickets.insert_one({
                "issue_no": issue,
                "token_id": tickets.count() + 1,
                "employee_id": "EMP" + str(random.randint(1000, 9999)),
                "description": query,
                "priority": "high",
                "status": "open",
                "created_date": datetime.now().isoformat(),
                "due_date": "",
                "comment": "",
            })
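            # Note: tickets.count() + 1 is not collision-safe under concurrent
            # inserts; an atomic counter or the document's ObjectId would be safer.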
            print(tickets)
            return {
                "source":
                "webhook",
                "fulfillmentMessages": [{
                    "text": {
                        "text": ["Issue No : " + issue]
                    },
                    "platform": "FACEBOOK"
                }, {
                    "quickReplies": {
                        "title":
                        "Great. I will notify our HR about your query, and they resolve it as soon as "
                        "possible.",
                        "quickReplies": ["Get Started"]
                    },
                    "platform": "FACEBOOK"
                }]
            }

        elif action == "input.unknown":
            unknown_flag += 1

            if unknown_flag >= 2:
                unknown_flag = 0
                query = req.get("queryResult").get("queryText")
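                # The output context below (lifespan 1) carries the failed query
                # into the raise_ticket follow-up if the user taps "Raise the Ticket".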
                return {
                    "source":
                    "webhook",
                    "fulfillmentMessages": [{
                        "quickReplies": {
                            "title":
                            "If I am not able to fulfill your request, you can raise the ticket so that "
                            "our HR team can respond you directly.",
                            "quickReplies":
                            ["Raise the Ticket", "Get Started"]
                        },
                        "platform": "FACEBOOK"
                    }],
                    "outputContexts": [{
                        "name":
                        "projects/internal-hr-bot-womtev/agent/sessions/f6ec5940-9c6d-d669-af33-45426780ba5d/contexts/raise_ticket",
                        "lifespanCount": 1,
                        "parameters": {
                            "query": query,
                        }
                    }]
                }

    except Exception as e:
        print("Error:", e)
        traceback.print_exc()
        return {
            "source":
            "webhook",
            "fulfillmentMessages": [{
                "quickReplies": {
                    "title":
                    "Sorry, I am not able to help you at the moment. This are some topics I can help you with",
                    "quickReplies": [
                        "My Leave & Absence", "My General Support",
                        "My Pay & Benefits", "Happify Me", "My Learning"
                    ]
                },
                "platform": "FACEBOOK"
            }, {
                "text": {
                    "text": [""]
                }
            }]
        }
Example #58
0
def _timestamp(dt):
    return int(calendar.timegm(dt.utctimetuple()))
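# A minimal usage sketch (assumes dt is a naive UTC datetime and that
# `from datetime import datetime` and `import calendar` are in scope):
#   _timestamp(datetime(2020, 1, 1))  # -> 1577836800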
Example #59
0
def populate_database(minimal=False):
    logger.debug("Populating the DB with test data.")

    # Check if the data already exists. If so, we skip. This can happen between calls from the
    # "old style" tests and the new py.test's.
    try:
        User.get(username="******")
        logger.debug("DB already populated")
        return
    except User.DoesNotExist:
        pass

    # Note: databases set up with a "real" schema (via Alembic) will not have these
    # types, so we create them here if necessary.
    try:
        ImageStorageLocation.get(name="local_eu")
        ImageStorageLocation.get(name="local_us")
    except ImageStorageLocation.DoesNotExist:
        ImageStorageLocation.create(name="local_eu")
        ImageStorageLocation.create(name="local_us")

    try:
        NotificationKind.get(name="test_notification")
    except NotificationKind.DoesNotExist:
        NotificationKind.create(name="test_notification")

    new_user_1 = model.user.create_user("devtable", "password",
                                        "*****@*****.**")
    new_user_1.verified = True
    new_user_1.stripe_id = TEST_STRIPE_ID
    new_user_1.save()

    if minimal:
        logger.debug(
            "Skipping most db population because user requested mininal db")
        return

    UserRegion.create(user=new_user_1,
                      location=ImageStorageLocation.get(name="local_us"))
    model.release.set_region_release("quay", "us", "v0.1.2")

    model.user.create_confirm_email_code(new_user_1,
                                         new_email="*****@*****.**")

    disabled_user = model.user.create_user("disabled", "password",
                                           "*****@*****.**")
    disabled_user.verified = True
    disabled_user.enabled = False
    disabled_user.save()

    dtrobot = model.user.create_robot("dtrobot", new_user_1)
    dtrobot2 = model.user.create_robot("dtrobot2", new_user_1)

    new_user_2 = model.user.create_user("public", "password",
                                        "*****@*****.**")
    new_user_2.verified = True
    new_user_2.save()

    new_user_3 = model.user.create_user("freshuser", "password",
                                        "*****@*****.**")
    new_user_3.verified = True
    new_user_3.save()

    another_robot = model.user.create_robot("anotherrobot", new_user_3)

    new_user_4 = model.user.create_user("randomuser", "password",
                                        "*****@*****.**")
    new_user_4.verified = True
    new_user_4.save()

    new_user_5 = model.user.create_user("unverified", "password",
                                        "*****@*****.**")
    new_user_5.save()

    reader = model.user.create_user("reader", "password", "*****@*****.**")
    reader.verified = True
    reader.save()

    creatoruser = model.user.create_user("creator", "password",
                                         "*****@*****.**")
    creatoruser.verified = True
    creatoruser.save()

    outside_org = model.user.create_user("outsideorg", "password",
                                         "*****@*****.**")
    outside_org.verified = True
    outside_org.save()

    model.notification.create_notification(
        "test_notification",
        new_user_1,
        metadata={
            "some": "value",
            "arr": [1, 2, 3],
            "obj": {
                "a": 1,
                "b": 2
            }
        },
    )

    from_date = datetime.utcnow()
    to_date = from_date + timedelta(hours=1)
    notification_metadata = {
        "from_date": formatdate(calendar.timegm(from_date.utctimetuple())),
        "to_date": formatdate(calendar.timegm(to_date.utctimetuple())),
        "reason": "database migration",
    }
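    # email.utils.formatdate over calendar.timegm renders each UTC datetime as an
    # RFC 2822 date string for the maintenance-window metadata.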
    model.notification.create_notification("maintenance",
                                           new_user_1,
                                           metadata=notification_metadata)

    __generate_repository(
        new_user_4,
        "randomrepo",
        "Random repo repository.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )

    simple_repo = __generate_repository(
        new_user_1,
        "simple",
        "Simple repository.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )

    # Add some labels to the latest tag's manifest.
    repo_ref = RepositoryReference.for_repo_obj(simple_repo)
    tag = registry_model.get_repo_tag(repo_ref, "latest")
    manifest = registry_model.get_manifest_for_tag(tag)
    assert manifest

    first_label = registry_model.create_manifest_label(manifest, "foo", "bar",
                                                       "manifest")
    registry_model.create_manifest_label(manifest, "foo", "baz", "api")
    registry_model.create_manifest_label(manifest, "anotherlabel", "1234",
                                         "internal")
    registry_model.create_manifest_label(manifest, "jsonlabel",
                                         '{"hey": "there"}', "internal",
                                         "application/json")

    label_metadata = {
        "key": "foo",
        "value": "bar",
        "id": first_label._db_id,
        "manifest_digest": manifest.digest,
    }

    logs_model.log_action(
        "manifest_label_add",
        new_user_1.username,
        performer=new_user_1,
        timestamp=datetime.now(),
        metadata=label_metadata,
        repository=simple_repo,
    )

    model.blob.initiate_upload(new_user_1.username, simple_repo.name,
                               str(uuid4()), "local_us", {})
    model.notification.create_repo_notification(simple_repo, "repo_push",
                                                "quay_notification", {}, {})

    __generate_repository(
        new_user_1,
        "sharedtags",
        "Shared tags repository",
        False,
        [(new_user_2, "read"), (dtrobot[0], "read")],
        (
            2,
            [
                (3, [], ["v2.0", "v2.1", "v2.2"]),
                (
                    1,
                    [(1, [(1, [], ["prod", "581a284"])
                          ], ["staging", "8423b58"]), (1, [], None)],
                    None,
                ),
            ],
            None,
        ),
    )

    __generate_repository(
        new_user_1,
        "history",
        "Historical repository.",
        False,
        [],
        (4, [(2, [], "#latest"), (3, [], "latest")], None),
    )

    __generate_repository(
        new_user_1,
        "complex",
        "Complex repository with many branches and tags.",
        False,
        [(new_user_2, "read"), (dtrobot[0], "read")],
        (
            2,
            [(3, [], "v2.0"),
             (1, [(1, [(2, [], ["prod"])], "staging"), (1, [], None)], None)],
            None,
        ),
    )

    __generate_repository(
        new_user_1,
        "gargantuan",
        None,
        False,
        [],
        (
            2,
            [
                (3, [], "v2.0"),
                (1, [(1, [(1, [], ["latest", "prod"])], "staging"),
                     (1, [], None)], None),
                (20, [], "v3.0"),
                (5, [], "v4.0"),
                (1, [(1, [], "v5.0"), (1, [], "v6.0")], None),
            ],
            None,
        ),
    )

    trusted_repo = __generate_repository(
        new_user_1,
        "trusted",
        "Trusted repository.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )
    trusted_repo.trust_enabled = True
    trusted_repo.save()

    publicrepo = __generate_repository(
        new_user_2,
        "publicrepo",
        "Public repository pullable by the world.",
        True,
        [],
        (10, [], "latest"),
    )

    __generate_repository(outside_org, "coolrepo", "Some cool repo.", False,
                          [], (5, [], "latest"))

    __generate_repository(
        new_user_1,
        "shared",
        "Shared repository, another user can write.",
        False,
        [(new_user_2, "write"), (reader, "read")],
        (5, [], "latest"),
    )

    __generate_repository(
        new_user_1,
        "text-full-repo",
        "This is a repository for testing text search",
        False,
        [(new_user_2, "write"), (reader, "read")],
        (5, [], "latest"),
    )

    building = __generate_repository(
        new_user_1,
        "building",
        "Empty repository which is building.",
        False,
        [(new_user_2, "write"), (reader, "read")],
        (0, [], None),
    )

    new_token = model.token.create_access_token(building, "write",
                                                "build-worker")

    trigger = model.build.create_build_trigger(building,
                                               "github",
                                               "123authtoken",
                                               new_user_1,
                                               pull_robot=dtrobot[0])
    trigger.config = json.dumps({
        "build_source": "jakedt/testconnect",
        "subdir": "",
        "dockerfile_path": "Dockerfile",
        "context": "/",
    })
    trigger.save()

    repo = "ci.devtable.com:5000/%s/%s" % (building.namespace_user.username,
                                           building.name)
    job_config = {
        "repository": repo,
        "docker_tags": ["latest"],
        "build_subdir": "",
        "trigger_metadata": {
            "commit": "3482adc5822c498e8f7db2e361e8d57b3d77ddd9",
            "ref": "refs/heads/master",
            "default_branch": "master",
        },
    }

    model.repository.star_repository(new_user_1, simple_repo)

    record = model.repository.create_email_authorization_for_repo(
        new_user_1.username, "simple", "*****@*****.**")
    record.confirmed = True
    record.save()

    model.repository.create_email_authorization_for_repo(
        new_user_1.username, "simple", "*****@*****.**")

    build2 = model.build.create_repository_build(
        building,
        new_token,
        job_config,
        "68daeebd-a5b9-457f-80a0-4363b882f8ea",
        "build-name",
        trigger,
    )
    build2.uuid = "deadpork-dead-pork-dead-porkdeadpork"
    build2.save()

    build3 = model.build.create_repository_build(
        building,
        new_token,
        job_config,
        "f49d07f9-93da-474d-ad5f-c852107c3892",
        "build-name",
        trigger,
    )
    build3.uuid = "deadduck-dead-duck-dead-duckdeadduck"
    build3.save()

    build1 = model.build.create_repository_build(
        building, new_token, job_config, "701dcc3724fb4f2ea6c31400528343cd",
        "build-name", trigger)
    build1.uuid = "deadbeef-dead-beef-dead-beefdeadbeef"
    build1.save()

    org = model.organization.create_organization("buynlarge",
                                                 "*****@*****.**",
                                                 new_user_1)
    org.stripe_id = TEST_STRIPE_ID
    org.save()

    liborg = model.organization.create_organization(
        "library", "*****@*****.**", new_user_1)
    liborg.save()

    titiorg = model.organization.create_organization("titi",
                                                     "*****@*****.**",
                                                     new_user_1)
    titiorg.save()

    thirdorg = model.organization.create_organization(
        "sellnsmall", "*****@*****.**", new_user_1)
    thirdorg.save()

    model.user.create_robot("coolrobot", org)

    oauth_app_1 = model.oauth.create_application(
        org,
        "Some Test App",
        "http://localhost:8000",
        "http://localhost:8000/o2c.html",
        client_id="deadbeef",
    )

    model.oauth.create_application(
        org,
        "Some Other Test App",
        "http://quay.io",
        "http://localhost:8000/o2c.html",
        client_id="deadpork",
        description="This is another test application",
    )

    model.oauth.create_access_token_for_testing(new_user_1,
                                                "deadbeef",
                                                "repo:admin",
                                                access_token="%s%s" %
                                                ("b" * 40, "c" * 40))

    oauth_credential = Credential.from_string("dswfhasdf1")
    OAuthAuthorizationCode.create(
        application=oauth_app_1,
        code="Z932odswfhasdf1",
        scope="repo:admin",
        data='{"somejson": "goeshere"}',
        code_name="Z932odswfhasdf1Z932o",
        code_credential=oauth_credential,
    )

    model.user.create_robot("neworgrobot", org)

    ownerbot = model.user.create_robot("ownerbot", org)[0]
    creatorbot = model.user.create_robot("creatorbot", org)[0]

    owners = model.team.get_organization_team("buynlarge", "owners")
    owners.description = "Owners have unfettered access across the entire org."
    owners.save()

    org_repo = __generate_repository(
        org,
        "orgrepo",
        "Repository owned by an org.",
        False,
        [(outside_org, "read")],
        (4, [], ["latest", "prod"]),
    )

    __generate_repository(
        org,
        "anotherorgrepo",
        "Another repository owned by an org.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )

    creators = model.team.create_team("creators", org, "creator",
                                      "Creators of orgrepo.")

    reader_team = model.team.create_team("readers", org, "member",
                                         "Readers of orgrepo.")
    model.team.add_or_invite_to_team(new_user_1, reader_team, outside_org)
    model.permission.set_team_repo_permission(reader_team.name,
                                              org_repo.namespace_user.username,
                                              org_repo.name, "read")

    model.team.add_user_to_team(new_user_2, reader_team)
    model.team.add_user_to_team(reader, reader_team)
    model.team.add_user_to_team(ownerbot, owners)
    model.team.add_user_to_team(creatorbot, creators)
    model.team.add_user_to_team(creatoruser, creators)

    sell_owners = model.team.get_organization_team("sellnsmall", "owners")
    sell_owners.description = "Owners have unfettered access across the entire org."
    sell_owners.save()

    model.team.add_user_to_team(new_user_4, sell_owners)

    sync_config = {
        "group_dn": "cn=Test-Group,ou=Users",
        "group_id": "somegroupid"
    }
    synced_team = model.team.create_team("synced", org, "member",
                                         "Some synced team.")
    model.team.set_team_syncing(synced_team, "ldap", sync_config)

    another_synced_team = model.team.create_team("synced", thirdorg, "member",
                                                 "Some synced team.")
    model.team.set_team_syncing(another_synced_team, "ldap",
                                {"group_dn": "cn=Test-Group,ou=Users"})

    __generate_repository(
        new_user_1,
        "superwide",
        None,
        False,
        [],
        [
            (10, [], "latest2"),
            (2, [], "latest3"),
            (2, [(1, [], "latest11"), (2, [], "latest12")], "latest4"),
            (2, [], "latest5"),
            (2, [], "latest6"),
            (2, [], "latest7"),
            (2, [], "latest8"),
            (2, [], "latest9"),
            (2, [], "latest10"),
            (2, [], "latest13"),
            (2, [], "latest14"),
            (2, [], "latest15"),
            (2, [], "latest16"),
            (2, [], "latest17"),
            (2, [], "latest18"),
        ],
    )

    mirror_repo = __generate_repository(
        new_user_1,
        "mirrored",
        "Mirrored repository.",
        False,
        [(dtrobot[0], "write"), (dtrobot2[0], "write")],
        (4, [], ["latest", "prod"]),
    )
    mirror_rule = model.repo_mirror.create_mirroring_rule(
        mirror_repo, ["latest", "3.3*"])
    mirror_args = (mirror_repo, mirror_rule, dtrobot[0], "quay.io/coreos/etcd",
                   60 * 60 * 24)
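    # 60 * 60 * 24 is presumably the sync interval in seconds, i.e. mirror once a day.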
    mirror_kwargs = {
        "external_registry_username": "******",
        "external_registry_password": "******",
        "external_registry_config": {},
        "is_enabled": True,
        "sync_start_date": datetime.utcnow(),
    }
    mirror = model.repo_mirror.enable_mirroring_for_repository(
        *mirror_args, **mirror_kwargs)

    read_only_repo = __generate_repository(
        new_user_1,
        "readonly",
        "Read-Only Repo.",
        False,
        [],
        (4, [], ["latest", "prod"]),
    )
    read_only_repo.state = RepositoryState.READ_ONLY
    read_only_repo.save()

    model.permission.add_prototype_permission(org,
                                              "read",
                                              activating_user=new_user_1,
                                              delegate_user=new_user_2)
    model.permission.add_prototype_permission(org,
                                              "read",
                                              activating_user=new_user_1,
                                              delegate_team=reader_team)
    model.permission.add_prototype_permission(org,
                                              "write",
                                              activating_user=new_user_2,
                                              delegate_user=new_user_1)

    today = datetime.today()
    week_ago = today - timedelta(6)
    six_ago = today - timedelta(5)
    four_ago = today - timedelta(4)
    yesterday = datetime.combine(date.today(),
                                 datetime.min.time()) - timedelta(hours=6)

    __generate_service_key("kid1", "somesamplekey", new_user_1, today,
                           ServiceKeyApprovalType.SUPERUSER)
    __generate_service_key(
        "kid2",
        "someexpiringkey",
        new_user_1,
        week_ago,
        ServiceKeyApprovalType.SUPERUSER,
        today + timedelta(days=14),
    )

    __generate_service_key("kid3", "unapprovedkey", new_user_1, today, None)

    __generate_service_key(
        "kid4",
        "autorotatingkey",
        new_user_1,
        six_ago,
        ServiceKeyApprovalType.KEY_ROTATION,
        today + timedelta(days=1),
        rotation_duration=timedelta(hours=12).total_seconds(),
    )

    __generate_service_key(
        "kid5",
        "key for another service",
        new_user_1,
        today,
        ServiceKeyApprovalType.SUPERUSER,
        today + timedelta(days=14),
        service="different_sample_service",
    )

    __generate_service_key(
        "kid6",
        "someexpiredkey",
        new_user_1,
        week_ago,
        ServiceKeyApprovalType.SUPERUSER,
        today - timedelta(days=1),
    )

    __generate_service_key(
        "kid7",
        "somewayexpiredkey",
        new_user_1,
        week_ago,
        ServiceKeyApprovalType.SUPERUSER,
        today - timedelta(days=30),
    )

    # Add the test pull key as pre-approved for local and unittest registry testing.
    # Note: this must match the private key found in the local/test config.
    _TEST_JWK = {
        "e":
        "AQAB",
        "kty":
        "RSA",
        "n":
        "yqdQgnelhAPMSeyH0kr3UGePK9oFOmNfwD0Ymnh7YYXr21VHWwyM2eVW3cnLd9KXywDFtGSe9oFDbnOuMCdUowdkBcaHju-isbv5KEbNSoy_T2Rip-6L0cY63YzcMJzv1nEYztYXS8wz76pSK81BKBCLapqOCmcPeCvV9yaoFZYvZEsXCl5jjXN3iujSzSF5Z6PpNFlJWTErMT2Z4QfbDKX2Nw6vJN6JnGpTNHZvgvcyNX8vkSgVpQ8DFnFkBEx54PvRV5KpHAq6AsJxKONMo11idQS2PfCNpa2hvz9O6UZe-eIX8jPo5NW8TuGZJumbdPT_nxTDLfCqfiZboeI0Pw",
    }
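    # This JWK holds only the RSA public key material ("kty", "n", "e",
    # base64url-encoded per RFC 7517); the matching private key lives in the
    # local/test config referenced above.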

    key = model.service_keys.create_service_key("test_service_key",
                                                "test_service_key", "quay",
                                                _TEST_JWK, {}, None)

    model.service_keys.approve_service_key(
        key.kid,
        ServiceKeyApprovalType.SUPERUSER,
        notes="Test service key for local/test registry testing",
    )

    # Add an app specific token.
    token = model.appspecifictoken.create_token(new_user_1, "some app")
    token.token_name = "a" * 60
    token.token_secret = "b" * 60
    token.save()

    logs_model.log_action(
        "org_create_team",
        org.username,
        performer=new_user_1,
        timestamp=week_ago,
        metadata={"team": "readers"},
    )

    logs_model.log_action(
        "org_set_team_role",
        org.username,
        performer=new_user_1,
        timestamp=week_ago,
        metadata={
            "team": "readers",
            "role": "read"
        },
    )

    logs_model.log_action(
        "create_repo",
        org.username,
        performer=new_user_1,
        repository=org_repo,
        timestamp=week_ago,
        metadata={
            "namespace": org.username,
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "change_repo_permission",
        org.username,
        performer=new_user_2,
        repository=org_repo,
        timestamp=six_ago,
        metadata={
            "username": new_user_1.username,
            "repo": "orgrepo",
            "role": "admin"
        },
    )

    logs_model.log_action(
        "change_repo_permission",
        org.username,
        performer=new_user_1,
        repository=org_repo,
        timestamp=six_ago,
        metadata={
            "username": new_user_2.username,
            "repo": "orgrepo",
            "role": "read"
        },
    )

    logs_model.log_action(
        "add_repo_accesstoken",
        org.username,
        performer=new_user_1,
        repository=org_repo,
        timestamp=four_ago,
        metadata={
            "repo": "orgrepo",
            "token": "deploytoken"
        },
    )

    logs_model.log_action(
        "push_repo",
        org.username,
        performer=new_user_2,
        repository=org_repo,
        timestamp=today,
        metadata={
            "username": new_user_2.username,
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "pull_repo",
        org.username,
        performer=new_user_2,
        repository=org_repo,
        timestamp=today,
        metadata={
            "username": new_user_2.username,
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "pull_repo",
        org.username,
        repository=org_repo,
        timestamp=today,
        metadata={
            "token": "sometoken",
            "token_code": "somecode",
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "delete_tag",
        org.username,
        performer=new_user_2,
        repository=org_repo,
        timestamp=today,
        metadata={
            "username": new_user_2.username,
            "repo": "orgrepo",
            "tag": "sometag"
        },
    )

    logs_model.log_action(
        "pull_repo",
        org.username,
        repository=org_repo,
        timestamp=today,
        metadata={
            "token_code": "somecode",
            "repo": "orgrepo"
        },
    )

    logs_model.log_action(
        "pull_repo",
        new_user_2.username,
        repository=publicrepo,
        timestamp=yesterday,
        metadata={
            "token_code": "somecode",
            "repo": "publicrepo"
        },
    )

    logs_model.log_action(
        "build_dockerfile",
        new_user_1.username,
        repository=building,
        timestamp=today,
        metadata={
            "repo": "building",
            "namespace": new_user_1.username,
            "trigger_id": trigger.uuid,
            "config": json.loads(trigger.config),
            "service": trigger.service.name,
        },
    )

    model.message.create([{
        "content": "We love you, Quay customers!",
        "severity": "info",
        "media_type": "text/plain",
    }])

    model.message.create([{
        "content": "This is a **development** install of Quay",
        "severity": "warning",
        "media_type": "text/markdown",
    }])

    fake_queue = WorkQueue("fakequeue", tf)
    fake_queue.put(["canonical", "job", "name"], "{}")

    model.user.create_user_prompt(new_user_4, "confirm_username")

    while True:
        to_count = model.repositoryactioncount.find_uncounted_repository()
        if not to_count:
            break

        model.repositoryactioncount.count_repository_actions(
            to_count,
            datetime.utcnow().day)
        model.repositoryactioncount.update_repository_score(to_count)
Example #60
0
 def set_experiment_id(self):
     self.experiment_id = calendar.timegm(time.gmtime())
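     # calendar.timegm(time.gmtime()) is the current UTC time as integer epoch
     # seconds, so each run gets a second-resolution, sortable experiment id.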