Code Example #1
File: test_games.py  Project: yorick-ne/api
def test_games_query_all_parameters(test_client, maps, game_stats, game_player_stats, ladder, login):
    response = test_client.get('/games?filter[players]=testUser1,testUser3&filter[map_name]=testMap2'
                               '&filter[max_rating]=2000&filter[min_rating]=500&filter[game_type]=1'
                               '&filter[rating_type]=ladder&filter[map_exclude]=true&filter[max_player_count]=4'
                               '&filter[min_player_count]=3&filter[mod]=gfmod1&filter[min_datetime]=1997-07-16T19:20'
                               '&filter[max_datetime]=1997-07-25T19:20')

    assert response.status_code == 200
    assert response.content_type == 'application/vnd.api+json'

    result = json.loads(response.data.decode('utf-8'))
    results_data = result['data'][0]
    assert 'data' in result
    assert len(result['data']) == 1
    player_data = results_data['attributes']['players']
    assert len(player_data) == 4
    assert results_data['id'] == '234'
    assert results_data['attributes']['game_name'] == testGameName
    assert results_data['attributes']['validity'] == 'TOO_MANY_DESYNCS'
    assert results_data['attributes']['victory_condition'] == 'DOMINATION'
    assert all([(player['mean'] - 3 * player['deviation'] <= 2000) and
                (player['mean'] - 3 * player['deviation'] >= 500)
                for player in results_data['attributes']['players']])
    assert parse_date('1997-07-16T19:20') <= parse_date(results_data['attributes']['start_time']) <= parse_date(
        '1997-07-25T19:20')
Code Example #2
File: util.py  Project: LifeCoaching/commcare-hq
def adjust_datetimes(data, parent=None, key=None):
    """
    find all datetime-like strings within data (deserialized json)
    and format them uniformly, in place.

    """
    # this strips the timezone like we've always done
    # todo: in the future this will convert to UTC
    if isinstance(data, basestring):
        if re_loose_datetime.match(data):
            if phone_timezones_should_be_processed():
                parent[key] = json_format_datetime(
                    iso8601.parse_date(data).astimezone(pytz.utc)
                    .replace(tzinfo=None)
                )
            else:
                parent[key] = json_format_datetime(
                    iso8601.parse_date(data).replace(tzinfo=None))

    elif isinstance(data, dict):
        for key, value in data.items():
            adjust_datetimes(value, parent=data, key=key)
    elif isinstance(data, list):
        for i, value in enumerate(data):
            adjust_datetimes(value, parent=data, key=i)

    # return data, just for convenience in testing
    # this is the original input, modified, not a new data structure
    return data
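The project-specific helpers (`re_loose_datetime`, `phone_timezones_should_be_processed`, `json_format_datetime`) are not shown above; a minimal, self-contained sketch of the same recurse-and-rewrite-in-place idea, assuming only iso8601 and the standard library and mimicking the timezone-stripping branch:

import re
import iso8601

# loose "looks like a datetime" check -- an assumption, not CommCare's actual regex
_loose_dt = re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}")

def normalize_datetimes(data, parent=None, key=None):
    """Rewrite ISO 8601 strings in place as naive timestamps (offset dropped)."""
    if isinstance(data, str) and _loose_dt.match(data):
        parent[key] = iso8601.parse_date(data).replace(tzinfo=None).isoformat()
    elif isinstance(data, dict):
        for k, v in data.items():
            normalize_datetimes(v, parent=data, key=k)
    elif isinstance(data, list):
        for i, v in enumerate(data):
            normalize_datetimes(v, parent=data, key=i)
    return data

doc = {"form": {"received_on": "2014-07-07T06:37:56.494-04:00"}}
normalize_datetimes(doc)
# doc["form"]["received_on"] is now "2014-07-07T06:37:56.494000" (wall clock kept, offset dropped)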
Code Example #3
File: iso_8601.py  Project: B-Rich/heat
 def validate(self, value, context):
     try:
         iso8601.parse_date(value)
     except Exception:
         return False
     else:
         return True
Code Example #4
def test_parse_invalid_date_with_space():
    try:
        iso8601.parse_date("2013-02-21T03:45:24 +0400")
    except iso8601.ParseError:
        pass
    else:
        assert 1 == 2
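With pytest available, the same expectation is usually written with `pytest.raises` instead of the try/except/else plus `assert 1 == 2` pattern; a minimal equivalent:

import iso8601
import pytest

def test_parse_invalid_date_with_space_pytest():
    # the space before the UTC offset makes this string invalid for iso8601
    with pytest.raises(iso8601.ParseError):
        iso8601.parse_date("2013-02-21T03:45:24 +0400")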
Code Example #5
    def test_availability_zone_detail(self):
        resp_dict = self.controller.detail(self.req)

        self.assertIn('availabilityZoneInfo', resp_dict)
        zones = resp_dict['availabilityZoneInfo']
        self.assertEqual(len(zones), 3)
        timestamp = iso8601.parse_date("2012-12-26T14:45:25Z")
        nova_network_timestamp = iso8601.parse_date("2012-12-26T14:45:24Z")
        expected = [{'zoneName': 'zone-1',
                    'zoneState': {'available': True},
                    'hosts': {'fake_host-1': {
                        'nova-compute': {'active': True, 'available': True,
                                         'updated_at': timestamp}}}},
                   {'zoneName': 'internal',
                    'zoneState': {'available': True},
                    'hosts': {'fake_host-1': {
                        'nova-sched': {'active': True, 'available': True,
                                       'updated_at': timestamp}},
                              'fake_host-2': {
                                  'nova-network': {
                                      'active': True,
                                      'available': False,
                                      'updated_at': nova_network_timestamp}}}},
                   {'zoneName': 'zone-2',
                    'zoneState': {'available': False},
                    'hosts': None}]
        self.assertEqual(expected, zones)
Code Example #6
    def _create_entry_parameter_set(self, dataset, entryId, updated):
        '''
        Creates or updates schema for dataset with populated 'EntryID' and 'Updated' fields
        ''' 
        schema = AtomImportSchemas.get_schema(Schema.DATASET)
        # I'm not sure why mgr.set_param always creates additional parametersets. Anyway
        # we can't use it. --SB.
        try:
            p = DatasetParameter.objects.get(parameterset__dataset=dataset, parameterset__schema=schema,
                                        name__name=IngestOptions.PARAM_ENTRY_ID)
        except DatasetParameter.DoesNotExist:
            
            mgr = ParameterSetManager(parentObject=dataset, schema=schema.namespace)
            mgr.new_param(IngestOptions.PARAM_ENTRY_ID, entryId)
        try:
            p = DatasetParameter.objects.get(parameterset__dataset=dataset, parameterset__schema=schema,
                                        name__name=IngestOptions.PARAM_UPDATED)

            i=iso8601.parse_date(updated)
            l=get_local_time_naive(i)
            p.datetime_value = l
            p.save()
        except DatasetParameter.DoesNotExist:            
            mgr = ParameterSetManager(parentObject=dataset, schema=schema.namespace)
                       
            t = get_local_time_naive(iso8601.parse_date(updated))
            logging.getLogger(__name__).debug("Setting update parameter with datetime %s" % t)  
            mgr.new_param(IngestOptions.PARAM_UPDATED, t)
Code Example #7
    def test_list_extensions_json(self):
        app = compute.APIRouter(init_only=('extensions',))
        request = webob.Request.blank("/fake/extensions")
        response = request.get_response(app)
        self.assertEqual(200, response.status_int)

        # Make sure we have all the extensions, extra extensions being OK.
        data = jsonutils.loads(response.body)
        names = [str(x['name']) for x in data['extensions']
                 if str(x['name']) in self.ext_list]
        names.sort()
        self.assertEqual(names, self.ext_list)

        # Ensure all the timestamps are valid according to iso8601
        for ext in data['extensions']:
            iso8601.parse_date(ext['updated'])

        # Make sure that at least Fox in Sox is correct.
        (fox_ext, ) = [
            x for x in data['extensions'] if x['alias'] == 'FOXNSOX']
        self.assertEqual(
            {'namespace': 'http://www.fox.in.socks/api/ext/pie/v1.0',
                'name': 'Fox In Socks',
                'updated': '2011-01-22T13:25:27-06:00',
                'description': 'The Fox In Socks Extension.',
                'alias': 'FOXNSOX',
                'links': []}, fox_ext)

        for ext in data['extensions']:
            url = '/fake/extensions/%s' % ext['alias']
            request = webob.Request.blank(url)
            response = request.get_response(app)
            output = jsonutils.loads(response.body)
            self.assertEqual(output['extension']['alias'], ext['alias'])
Code Example #8
File: osf.py  Project: CenterForOpenScience/osf-sync
 def __init__(self, request_session, data, *, parent=None):
     super().__init__(request_session, data)
     self.parent = parent
     if hasattr(self, 'date_modified') and self.date_modified:
         self.date_modified = iso8601.parse_date(self.date_modified)
     if hasattr(self, 'last_touched') and self.last_touched:
         self.last_touched = iso8601.parse_date(self.last_touched)
Code Example #9
 def __init__(self, d):
   self.created_at = iso8601.parse_date(d.get('created_at'))
   self.updated_at = iso8601.parse_date(d.get('updated_at'))
   self.login = d.get('owner').get('login')
   self.id = d.get('id')
   self.description = d.get('description').encode('utf-8').title()
   self.filename = d.get('files').keys()[0].encode('utf-8').title()
Code Example #10
File: tests.py  Project: adlnet/LR-Lite
 def test_get_list_until_from(self):
     params = {"from": "2012-11-26T15:14:02.972834Z",
               "until": "2012-10-26T15:14:02.972834Z"}
     until_date = iso8601.parse_date(params["until"])
     from_date = iso8601.parse_date(params["from"])
     request = self._prepare_request(params)
     self.assertRaises(HTTPBadRequest, retrieve_list, request)
Code Example #11
def get_trust_anchors_as_ds(zone, digests, verbose):
    """Get currently valid Trust Anchors as DS RRset"""

    now = time.time()
    valid_ds_rdata = []

    for keydigest in digests:

        keydigest_id = keydigest['@id']
        keytag = keydigest['KeyTag']

        if '@validFrom' in keydigest:
            valid_from = iso8601.parse_date(keydigest['@validFrom']).timestamp()
            if now < valid_from:
                if verbose:
                    emit_warning('TA {} ({}) not yet valid'.format(keytag, keydigest_id))
                continue

        if '@validUntil' in keydigest:
            valid_until = iso8601.parse_date(keydigest['@validUntil']).timestamp()
            if now > valid_until:
                if verbose:
                    emit_warning('TA {} ({}) expired'.format(keytag, keydigest_id))
                continue

        if verbose:
            emit_info('TA {} ({}) valid'.format(keytag, keydigest_id))
        valid_ds_rdata.append(ds_rdata_from_keydigest(keydigest))

    rrset = dns.rrset.from_rdata_list(dns.name.from_text(zone), 0,
                                      valid_ds_rdata)
    return rrset
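The window checks above rely on `iso8601.parse_date` returning timezone-aware datetimes, so `.timestamp()` gives correct POSIX seconds whatever offset the XML carries; for instance:

import iso8601

a = iso8601.parse_date("2017-02-02T00:00:00+00:00")
b = iso8601.parse_date("2017-02-02T04:00:00+04:00")
assert a.timestamp() == b.timestamp()  # same instant expressed with different offsets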
Code Example #12
File: views.py  Project: sunxin3/thinkcloud2
    def get_data(self):
        charge_subscriptions_by_user = []
        try:
            charge_subscriptions = api.nova.charge_subscription_list(self.request)
            for charge_subscription in charge_subscriptions:
                if self.request.user.id == charge_subscription.user_id:
                    #non-admin user can not have the permission querying the keystone's user info
                    #charge_subscription.user_id = api.keystone.user_get(self.request, charge_subscription.user_id).name
                    charge_subscription.user_id = self.request.user.username
                    try:
                        #TODO: fixme need to test it in the production environment.
                        #TODO: fixme need to add resource_name get syncing 
                        charge_subscription.resource_uuid = api.nova.server_get(self.request, charge_subscription.resource_uuid).name
                    except:
                        pass

                    if charge_subscription.approver_id:
                        #non-admin user can not have the permission querying the keystone's user info
                        #charge_subscription.approver_id = api.keystone.user_get(self.request, charge_subscription.approver_id).name
                        charge_subscription.approver_id = "admin"
                    else:
                        charge_subscription.approver_id = 'N/A'

                    charge_subscription.applied_at = iso8601.parse_date(charge_subscription.applied_at).strftime("%Y-%m-%d %H:%M:%S")
                    if charge_subscription.approved_at:
                        charge_subscription.approved_at = iso8601.parse_date(charge_subscription.approved_at).strftime("%Y-%m-%d %H:%M:%S")
                    if charge_subscription.expires_at:
                        charge_subscription.expires_at = iso8601.parse_date(charge_subscription.expires_at).strftime("%Y-%m-%d %H:%M:%S")

                    charge_subscriptions_by_user.append(charge_subscription)
        except:
            exceptions.handle(self.request,
                              _('Unable to retrieve charge subscriptions'))
        return charge_subscriptions_by_user
Code Example #13
File: tests.py  Project: adlnet/LR-Lite
 def test_get_list_from_until(self):
     params = {"until": "2012-11-26T15:14:02.972834Z",
               "from": "2012-10-26T15:14:02.972834Z"}
     until_date = iso8601.parse_date(params["until"])
     from_date = iso8601.parse_date(params["from"])
     self._list_test_generic(
         params, lambda date: date <= until_date and date >= from_date)
Code Example #14
File: query.py  Project: aiqiliu/csv2InfluxDB
def query():
    # aggregation every four minutes
    getCurrDBs()
    queryDb = raw_input("Enter the database that you're querying from: ")
    client = InfluxDBClient('localhost', 8086, 'root', 'root', queryDb)

    minTime, maxTime = getTimeRange(client)

    # collection of the minimum results in the 4-minute windows
    results = []
    windowStart = minTime
    iterator = 1
    while iso8601.parse_date(windowStart) < iso8601.parse_date(maxTime):
        # upper bound for the 4-minute window
        windowEnd = iso8601.parse_date(windowStart) + datetime.timedelta(minutes=4)
        if windowEnd > iso8601.parse_date(maxTime):
            windowEnd = iso8601.parse_date(maxTime)
        windowEnd = str(windowEnd).replace(' ', 'T')
        print '==============' + ' Window ' + str(iterator) + ' ' + '=============='
        print "Parsing time window: " + windowStart + ' - ' + str(windowEnd)
        countQuerymsg = "SELECT COUNT(TEMP) FROM /.*/ WHERE time >= " + "'" + windowStart + "'" + ' AND time <= ' + "'" + windowEnd + "'"
        count = client.query(countQuerymsg)

        # rule: query MIN only when the timestamp count in the 4-minute window > 1
        # get the min based on the rule
        currMin = queryCases(client, windowStart, windowEnd, count)
        results.append(currMin)
        windowStart = windowEnd
        iterator += 1

    print results
    print "Num of time windows parsed: " + str(iterator - 1)
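Because `iso8601.parse_date` returns comparable datetime objects, the string round-trips above can be avoided by keeping the window bounds as datetimes and formatting them only when the query string is built; a rough sketch under the same assumption that minTime/maxTime are ISO strings:

import datetime
import iso8601

def four_minute_windows(minTime, maxTime):
    """Yield (start, end) datetime pairs covering [minTime, maxTime) in 4-minute steps."""
    start = iso8601.parse_date(minTime)
    stop = iso8601.parse_date(maxTime)
    while start < stop:
        end = min(start + datetime.timedelta(minutes=4), stop)
        yield start, end
        start = end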
Code Example #15
File: horst.py  Project: erdincay/HORST
def update_matchtimes_for_day(season, day):
    c.execute('''SELECT hometeam, guestteam, date, home_id, guest_id
                 FROM bundesliga
                 WHERE season=? AND day=?''', (season, day))
    db_data = c.fetchall()
    url = "http://openligadb-json.heroku.com/api/matchdata_by_group_league_saison?league_saison=%s&league_shortcut=bl1&group_order_id=%s" % (season-1, day)
    daydata = urllib2.urlopen(url).read()
    daydata = json.loads(daydata)['matchdata']
    changes = []
    for db_game in db_data:
        for game in daydata:
            db_home_id = int(db_game['home_id'])
            db_guest_id = int(db_game['guest_id'])
            if (db_home_id == int(game['id_team1']) and
                  db_guest_id == int(game['id_team2'])):
                db_date = db_game['date']
                datetm = game['match_date_time']
                if iso8601.parse_date(db_date) == iso8601.parse_date(datetm):
                    pass
                else:
                    newdate = str(iso8601.parse_date(datetm))
                    newdate = newdate[:newdate.find('+')]
                    c.execute('''UPDATE bundesliga
                     SET date=?
                     WHERE season=? AND day=? AND home_id=? AND guest_id=?''',
                              (newdate, season, day, db_home_id, db_guest_id))
                    changes.append((db_game['hometeam'], db_game['guestteam']))
                break
    conn.commit()
    print "Anstosszeiten fuer Spieltag %s, %s \
wurden geaendert:" %(day,season), changes
Code Example #16
File: sync.py  Project: rcorreia/jirasurvivor
def create_issue(jira_issue):
    "Creates a `survivor.models.Issue` from a `jira.resources.Issue`."
    issue = Issue(
        key=jira_issue.key,
        title=jira_issue.fields.description,
        state=jira_issue.fields.status.name.lower(),
        opened=iso8601.parse_date(jira_issue.fields.created),
        updated=iso8601.parse_date(jira_issue.fields.updated),
        url=jira_issue.self,
    )

    if jira_issue.fields.reporter is not None:
        issue.reporter = get_or_create_user(jira_issue.fields.reporter)
    else:
        issue.reporter = get_or_create_user(jira_issue.fields.assignee)

    if jira_issue.fields.resolutiondate:
        issue.closed = iso8601.parse_date(jira_issue.fields.resolutiondate)
        resolution_type = jira_issue.fields.resolution.name
        issue.finished_or_fixed = resolution_type == "Finished" or resolution_type == "Fixed"

    if jira_issue.fields.assignee:
        issue.assignee = get_or_create_user(jira_issue.fields.assignee)

    # TODO comments, labels

    return issue.save()
Code Example #17
File: test_carbonara.py  Project: luo-zn/gnocchi
 def test_fetch_basic(self):
     ts = carbonara.AggregatedTimeSerie.from_data(
         timestamps=[datetime64(2014, 1, 1, 12, 0, 0),
                     datetime64(2014, 1, 1, 12, 0, 4),
                     datetime64(2014, 1, 1, 12, 0, 9)],
         values=[3, 5, 6],
         aggregation=carbonara.Aggregation(
             "mean", numpy.timedelta64(1, 's'), None))
     self.assertEqual(
         [(datetime64(2014, 1, 1, 12), 3),
          (datetime64(2014, 1, 1, 12, 0, 4), 5),
          (datetime64(2014, 1, 1, 12, 0, 9), 6)],
         list(ts.fetch()))
     self.assertEqual(
         [(datetime64(2014, 1, 1, 12, 0, 4), 5),
          (datetime64(2014, 1, 1, 12, 0, 9), 6)],
         list(ts.fetch(
             from_timestamp=datetime64(2014, 1, 1, 12, 0, 4))))
     self.assertEqual(
         [(datetime64(2014, 1, 1, 12, 0, 4), 5),
          (datetime64(2014, 1, 1, 12, 0, 9), 6)],
         list(ts.fetch(
             from_timestamp=numpy.datetime64(iso8601.parse_date(
                 "2014-01-01 12:00:04")))))
     self.assertEqual(
         [(datetime64(2014, 1, 1, 12, 0, 4), 5),
          (datetime64(2014, 1, 1, 12, 0, 9), 6)],
         list(ts.fetch(
             from_timestamp=numpy.datetime64(iso8601.parse_date(
                 "2014-01-01 13:00:04+01:00")))))
Code Example #18
def test_parse_invalid_date2():
    try:
        iso8601.parse_date("23")
    except iso8601.ParseError:
        pass
    else:
        assert 1 == 2
Code Example #19
def test_query2_function_invalid_argument_count():
    qname = "asd"
    starttime = iso8601.parse_date("1970-01-01")
    endtime = iso8601.parse_date("1970-01-02")
    example_query = "RETURN=nop(nop())"
    with pytest.raises(QueryInterpretException):
        result = query(qname, example_query, starttime, endtime, None)
Code Example #20
 def test_set_auctionPeriod_today(self):
     now = datetime.now(TZ)
     response = self.api.patch_json(self.app.app.registry.api_url + 'tenders/' + self.tender_id, {
         'data': {
             "enquiryPeriod": {
                 "endDate": now.isoformat()
             },
             'tenderPeriod': {
                 'startDate': now.isoformat(),
                 'endDate': (now + timedelta(days=7 - now.weekday())).replace(hour=1).isoformat()
             }
         }
     })
     response = self.app.get('/resync/' + self.tender_id)
     self.assertEqual(response.status, '200 OK')
     self.assertNotEqual(response.json, None)
     response = self.api.get(self.app.app.registry.api_url + 'tenders/' + self.tender_id)
     tender = response.json['data']
     self.assertEqual(tender['status'], 'active.tendering')
     response = self.app.get('/resync/' + self.tender_id)
     self.assertEqual(response.status, '200 OK')
     self.assertNotEqual(response.json, None)
     response = self.api.get(self.app.app.registry.api_url + 'tenders/' + self.tender_id)
     tender = response.json['data']
     self.assertEqual(tender['status'], 'active.tendering')
     if self.initial_lots:
         self.assertIn('auctionPeriod', tender['lots'][0])
         self.assertEqual(parse_date(tender['lots'][0]['auctionPeriod']['startDate'], TZ).weekday(), 0)
     else:
         self.assertIn('auctionPeriod', tender)
         self.assertEqual(parse_date(tender['auctionPeriod']['startDate'], TZ).weekday(), 0)
Code Example #21
File: views.py  Project: sunxin3/thinkcloud2
    def get_data(self):
        charge_subscriptions_to_approve = []
        try:
            charge_subscriptions = api.nova.charge_subscription_list(self.request)
            for charge_subscription in charge_subscriptions:
                if charge_subscription.status == 'apply':
                    charge_subscription.user_id = api.keystone.user_get(self.request, charge_subscription.user_id).name
                    try:
                        #TODO: fixme need to test it in the production environment.
                        charge_subscription.resource_uuid = api.nova.server_get(self.request, charge_subscription.resource_uuid).name
                    except:
                        pass

                    if charge_subscription.approver_id:
                        charge_subscription.approver_id = api.keystone.user_get(self.request, charge_subscription.approver_id).name
                    else:
                        charge_subscription.approver_id = 'N/A'

                    charge_subscription.applied_at = iso8601.parse_date(charge_subscription.applied_at).strftime("%Y-%m-%d %H:%M:%S")
                    if charge_subscription.approved_at:
                        charge_subscription.approved_at = iso8601.parse_date(charge_subscription.approved_at).strftime("%Y-%m-%d %H:%M:%S")
                    if charge_subscription.expires_at:
                        charge_subscription.expires_at = iso8601.parse_date(charge_subscription.expires_at).strftime("%Y-%m-%d %H:%M:%S")

                    charge_subscriptions_to_approve.append(charge_subscription)
        except:
            exceptions.handle(self.request,
                              _('Unable to retrieve charge subscriptions'))
        return charge_subscriptions_to_approve
Code Example #22
File: views.py  Project: adlnet/LR-Lite
def _parse_retrieve_params(req):
    params = {"limit": _PAGE_SIZE, "stale": "update_after"}
    include_docs = req.GET.get(_INCLUDE_DOCS, "false")
    try:
        include_docs = json.loads(include_docs)
    except Exception as ex:
        raise HTTPBadRequest("Invalid JSON for include_docs")
    params[_INCLUDE_DOCS] = include_docs
    try:
        time = iso8601.parse_date(req.GET.get(_FROM, datetime.min.isoformat()))
        params[_START_KEY] = calendar.timegm(time.utctimetuple())
    except Exception as ex:
        raise HTTPBadRequest("Invalid from time, must be ISO 8601 format")
    try:
        time = iso8601.parse_date(
            req.GET.get(_UNTIL, datetime.utcnow().isoformat()))
        params[_END_KEY] = calendar.timegm(time.utctimetuple())
    except Exception as ex:
        raise HTTPBadRequest("Invalid until time, must be ISO 8601 format")
    if params[_END_KEY] < params[_START_KEY]:
        raise HTTPBadRequest("From date cannot come after until date")
    if _PAGE in req.GET:
        try:
            page = int(req.GET.get(_PAGE))
            params['skip'] = page * _PAGE_SIZE
        except:
            raise HTTPBadRequest("Page must be a valid integer")
    return params
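`calendar.timegm(parsed.utctimetuple())` is offset-safe because `utctimetuple()` converts the aware datetime to UTC before building the struct_time; for example:

import calendar
import iso8601

t = iso8601.parse_date("2013-02-21T03:45:24+04:00")
assert calendar.timegm(t.utctimetuple()) == 1361403924  # 2013-02-20 23:45:24 UTC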
Code Example #23
File: test_export.py  Project: Tanych/gh-sfm-ui
    def test_seedset_export(self, mock_rabbit_worker_class):
        mock_rabbit_worker = MagicMock(spec=RabbitWorker)
        mock_rabbit_worker_class.side_effect = [mock_rabbit_worker]

        export = Export.objects.create(user=self.user,
                                       export_type="test_type",
                                       export_format="json",
                                       dedupe=True,
                                       item_date_start=datetime.datetime.now(get_localzone()),
                                       item_date_end=datetime.datetime.now(get_localzone()),
                                       harvest_date_start=datetime.datetime.now(get_localzone()),
                                       harvest_date_end=datetime.datetime.now(get_localzone()))
        export.seed_set = self.seedset
        export.save()

        request_export(export)

        # Export start message sent
        name, args, kwargs = mock_rabbit_worker.mock_calls[0]
        self.assertEqual("send_message", name)
        message = args[0]
        self.assertEqual(message["id"], export.export_id)
        self.assertEqual(message["path"], export.path)
        self.assertEqual(message["type"], export.export_type)
        self.assertEqual(message["format"], export.export_format)
        self.assertTrue(message["dedupe"])
        self.assertEqual(iso8601.parse_date(message["item_date_start"]), export.item_date_start)
        self.assertEqual(iso8601.parse_date(message["item_date_end"]), export.item_date_end)
        self.assertEqual(iso8601.parse_date(message["harvest_date_start"]), export.harvest_date_start)
        self.assertEqual(iso8601.parse_date(message["harvest_date_end"]), export.harvest_date_end)
        self.assertEqual(message["seedset"]["id"], export.seed_set.seedset_id)
        self.assertEqual("export.start.test_platform.test_type", args[1])
Code Example #24
    def test_filter_by_datetime(self, simple_populated_db):
        """
        Check default requests behaviour when passed datetime-params.
        """
        dbinf = simple_populated_db
        cutoff_time = fake.default_start_dt + datetime.timedelta(days=0.75)
        filters = {
            FilterKeys.authored_since: cutoff_time
        }

        qualifying_packets = [p for p in dbinf.insert_packets
                              if iso8601.parse_date(p.Who.Date.text) >= cutoff_time]

        # Grab a copy of the full response dict to check that datetimes
        # are acceptably formatted by requests:
        rv = wrappers.get_summary_response(endpoint=Endpoints.count,
                                           filters=filters,
                                           host=vr.default_host)
        rd = rv.json()
        dt_string = rd[ResultKeys.querystring][FilterKeys.authored_since][0]
        assert iso8601.parse_date(dt_string) == cutoff_time

        count = wrappers.get_summary_data(endpoint=Endpoints.count,
                                          filters=filters,
                                          host=vr.default_host)
        assert count != 0
        assert count < dbinf.n_inserts
        assert count == len(qualifying_packets)
Code Example #25
def validate_lastmod(key, data, errors, context):
    if data[key] == u'':
        return
    try:
        iso8601.parse_date(data[key])
    except iso8601.ParseError, ve:
        errors[key].append(_('Invalid date format, must be like 2012-12-31T13:12:11'))
Code Example #26
File: disciple.py  Project: bencochran/disciple
def index(request):
    cache = env('cache')
    repos = cache.get('disciple_repos')
    
    if not repos:
        try:
            gh = github.GitHub(config.username, config.token)
        except AttributeError, e:
            gh = github.GitHub()

        repos = []
        for repo in config.repos:
            info = gh.repos.show(repo.user, repo.repo)
            commits = gh.commits.forBranch(repo.user, repo.repo, repo.branch)
            if not isinstance(commits, list):
                commits = []
            for commit in commits:
                committed_date = iso8601.parse_date(commit.committed_date)
                commit.committed_date = committed_date.strftime('%a %b %d %H:%M:%S %z %Y')
                authored_date = iso8601.parse_date(commit.authored_date)
                commit.authored_date = authored_date.strftime('%a %b %d %H:%M:%S %z %Y')
                
            repos.append(AttrDict({'info':info, 'commits':commits}))
        
        # cache it for 10 minutes
        cache.set("disciple_repos", repos, time=600)
Code Example #27
def get_event_time_as_utc(voevent, index=0):
    """
    Extracts the event time from a given `WhereWhen.ObsDataLocation`.

    Returns a datetime (timezone-aware, UTC).

    Accesses a `WhereWhen.ObsDataLocation.ObservationLocation`
    element and returns the AstroCoords.Time.TimeInstant.ISOTime element,
    converted to a (UTC-timezoned) datetime.

    Note that a packet may include multiple 'ObsDataLocation' entries
    under the 'WhereWhen' section, for example giving locations of an object
    moving over time. Most packets will have only one, however, so the
    default is to access the first.

    This function now implements conversion from the
    TDB (Barycentric Dynamical Time) time scale in ISOTime format,
    since this is the format used by GAIA VOEvents.
    (See also http://docs.astropy.org/en/stable/time/#time-scale )

    Other timescales (i.e. TT, GPS) will presumably be formatted as a
    TimeOffset; parsing this format is not yet implemented.

    Args:
        voevent (:class:`voeventparse.voevent.Voevent`): Root node of the VOevent
            etree.
        index (int): Index of the ObsDataLocation to extract an ISOtime from.

    Returns:
        :class:`datetime.datetime`: Datetime representing the event-timestamp,
        converted to UTC (timezone aware).

    """
    try:
        od = voevent.WhereWhen.ObsDataLocation[index]
        ol = od.ObservationLocation
        coord_sys = ol.AstroCoords.attrib['coord_system_id']
        timesys_identifier = coord_sys.split('-')[0]

        if timesys_identifier == 'UTC':
            isotime_str = str(ol.AstroCoords.Time.TimeInstant.ISOTime)
            return iso8601.parse_date(isotime_str)
        elif (timesys_identifier == 'TDB'):
            isotime_str = str(ol.AstroCoords.Time.TimeInstant.ISOTime)
            isotime_dtime = iso8601.parse_date(isotime_str)
            tdb_time = astropy.time.Time(isotime_dtime, scale='tdb')
            return tdb_time.utc.to_datetime().replace(tzinfo=pytz.UTC)
        elif (timesys_identifier == 'TT' or timesys_identifier == 'GPS'):
            raise NotImplementedError(
                "Conversion from time-system '{}' to UTC not yet implemented".format(
                    timesys_identifier)
            )
        else:
            raise ValueError(
                'Unrecognised time-system: {} (badly formatted VOEvent?)'.format(
                    timesys_identifier
                )
            )

    except AttributeError:
        return None
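A stand-alone sketch of the TDB branch above, with an illustrative (hypothetical) ISOTime value; it assumes astropy, iso8601 and pytz are installed:

import astropy.time
import iso8601
import pytz

isotime_dtime = iso8601.parse_date("2016-09-25T20:42:02.165")  # hypothetical GAIA ISOTime
tdb_time = astropy.time.Time(isotime_dtime, scale='tdb')
utc_dtime = tdb_time.utc.to_datetime().replace(tzinfo=pytz.UTC)
# TDB ran roughly 68 seconds ahead of UTC in 2016, so utc_dtime is slightly earlier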
Code Example #28
File: views.py  Project: NLeSC/eEcology-Annotation-WS
def tracker(request):
    """Returns gps+accel data of tracker in a certain time range"""
    cur = request.db.cursor()
    tracker_id = int(request.matchdict['id'])
    start = parse_date(request.matchdict['start']).isoformat()
    end = parse_date(request.matchdict['end']).isoformat()
    return fetch_track(cur, tracker_id, start, end)
Code Example #29
File: assist.py  Project: elvisfernandes/restfulx
def update_model_from_params(model, params):
    for k, v in params.items():
        if k.endswith("_id"):
            if v == "":
                setattr(model, k.replace("_id", ""), None)
            else:
                setattr(model, k.replace("_id", ""), db.Key(v))
        elif hasattr(model, k):
            if isinstance(getattr(model, k), bool):
                if v == "false" or v == "":
                    setattr(model, k, False)
                else:
                    setattr(model, k, True)
            elif isinstance(getattr(model, k), float) and v != "":
                setattr(model, k, float(v))
            elif isinstance(getattr(model, k), int) and v != "":
                setattr(model, k, int(v))
            elif isinstance(getattr(model, k), datetime.datetime) and v != "":
                value = iso8601.parse_date(v)
                setattr(model, k, value)
            elif isinstance(getattr(model, k), datetime.date) and v != "":
                value = datetime.datetime.strptime(v, "%Y-%m-%d")
                setattr(model, k, datetime.date(value.year, value.month, value.day))
            elif isinstance(getattr(model, k), datetime.time) and v != "":
                value = iso8601.parse_date(v)
                setattr(model, k, datetime.time(value.hour, value.minute, value.second))
            else:
                setattr(model, k, v)

    model.put()
Code Example #30
File: calendar_free.py  Project: flora/pinchats
def datetime_in_calendar(t_interval, list_times):
    for interval in list_times:
        start = iso8601.parse_date(interval["start"]).replace(tzinfo=None)
        end = iso8601.parse_date(interval["end"]).replace(tzinfo=None)
        if t_interval[0] >= start and t_interval[1] <= end:
            return True
    return False
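Both interval bounds are made naive with `.replace(tzinfo=None)`, so the caller's `t_interval` is compared against the calendar entries' wall-clock times; a quick usage sketch with hypothetical values:

import datetime

busy = [{"start": "2020-05-01T09:00:00-07:00", "end": "2020-05-01T10:00:00-07:00"}]
meeting = (datetime.datetime(2020, 5, 1, 9, 15), datetime.datetime(2020, 5, 1, 9, 45))
print(datetime_in_calendar(meeting, busy))  # True: 09:15-09:45 falls inside the 09:00-10:00 slot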
Code Example #31
 def create_datetime(iso_string):
   return iso8601.parse_date(iso_string)
Code Example #32
File: views_old.py  Project: ad0v0/enhydris
 def date_at_pos(pos, tz):
     s = linecache.getline(datafilename, pos)
     return iso8601.parse_date(s.split(",")[0], default_timezone=tz)
Code Example #33
File: views_old.py  Project: ad0v0/enhydris
def old_code_for_timeseries_data(request, *args, datafilename,
                                 **kwargs):  # NOQA
    def date_at_pos(pos, tz):
        s = linecache.getline(datafilename, pos)
        return iso8601.parse_date(s.split(",")[0], default_timezone=tz)

    def timedeltadivide(a, b):
        """Divide timedelta a by timedelta b."""
        a = a.days * 86400 + a.seconds
        b = b.days * 86400 + b.seconds
        return float(a) / float(b)

    # Return the nearest record number to the specified date
    # The second argument is 0 for exact match, -1 if no
    # exact match and the date is after the record found,
    # 1 if no exact match and the date is before the record.
    def find_line_at_date(adatetime, totlines, tz):
        if totlines < 2:
            return totlines
        i1, i2 = 1, totlines
        d1 = date_at_pos(i1, tz)
        d2 = date_at_pos(i2, tz)
        if adatetime <= d1:
            return (i1, 0 if d1 == adatetime else 1)
        if adatetime >= d2:
            return (i2, 0 if d2 == adatetime else -1)
        while True:
            i = i1 + int(
                round(
                    float(i2 - i1) * timedeltadivide(adatetime - d1, d2 - d1)))
            d = date_at_pos(i, tz)
            if d == adatetime:
                return (i, 0)
            if (i == i1) or (i == i2):
                return (i, -1 if i == i1 else 1)
            if d < adatetime:
                d1, i1 = d, i
            if d > adatetime:
                d2, i2 = d, i

    def add_to_stats(date, value):
        if not gstats["max"]:
            gstats["max"] = value
            gstats["min"] = value
            gstats["sum"] = 0
            gstats["vsum"] = [0.0, 0.0]
            gstats["count"] = 0
            gstats["vectors"] = [0] * 8
        if value >= gstats["max"]:
            gstats["max"] = value
            gstats["max_tstmp"] = date
        if value <= gstats["min"]:
            gstats["min"] = value
            gstats["min_tstmp"] = date
        if is_vector:
            value2 = value
            if value2 >= 360:
                value2 -= 360
            if value2 < 0:
                value2 += 360
            if value2 < 0 or value2 > 360:
                return
            # reversed order of x, y since atan2 definition is
            # math.atan2(y, x)
            gstats["vsum"][1] += math.cos(value2 * math.pi / 180)
            gstats["vsum"][0] += math.sin(value2 * math.pi / 180)
            value2 = value2 + 22.5 if value2 < 337.5 else value2 - 337.5
            gstats["vectors"][int(value2 / 45)] += 1
        gstats["sum"] += value
        gstats["last"] = value
        gstats["last_tstmp"] = date
        gstats["count"] += 1

    def inc_datetime(adate, unit, steps):
        if unit == "day":
            return adate + steps * timedelta(days=1)
        elif unit == "week":
            return adate + steps * timedelta(weeks=1)
        elif unit == "month":
            return add_months_to_datetime(adate, steps)
        elif unit == "year":
            return add_months_to_datetime(adate, 12 * steps)
        elif unit == "moment":
            return adate
        elif unit == "hour":
            return adate + steps * timedelta(minutes=60)
        elif unit == "twohour":
            return adate + steps * timedelta(minutes=120)
        else:
            raise Http404

    if (request.method != "GET") or ("object_id" not in request.GET):
        raise Http404

    response = HttpResponse(content_type="application/json")
    response.status_code = 200
    try:
        object_id = int(request.GET["object_id"])
        timeseries = Timeseries.objects.get(pk=object_id)
    except (ValueError, Timeseries.DoesNotExist):
        raise Http404
    tz = timeseries.time_zone.as_tzinfo
    chart_data = []
    if "start_pos" in request.GET and "end_pos" in request.GET:
        start_pos = int(request.GET["start_pos"])
        end_pos = int(request.GET["end_pos"])
    else:
        end_pos = bufcount(datafilename)
        tot_lines = end_pos
        if "last" in request.GET:
            if request.GET.get("date", False):
                datetimestr = request.GET["date"]
                datetimefmt = "%Y-%m-%d"
                if request.GET.get("time", False):
                    datetimestr = datetimestr + " " + request.GET["time"]
                    datetimefmt = datetimefmt + " %H:%M"
                try:
                    first_date = datetime.strptime(datetimestr, datetimefmt)
                    last_date = inc_datetime(first_date, request.GET["last"],
                                             1)
                    (end_pos,
                     is_exact) = find_line_at_date(last_date, tot_lines, tz)
                    if request.GET.get("exact_datetime",
                                       False) and (is_exact != 0):
                        raise Http404
                except ValueError:
                    raise Http404
            else:
                last_date = date_at_pos(end_pos, tz)
                first_date = inc_datetime(last_date, request.GET["last"], -1)
                # This is a somewhat ugly workaround to exclude the first
                # record from sums, i.e. when we need the 144 10-minute
                # values from a day.
                if "start_offset" in request.GET:
                    offset = float(request.GET["start_offset"])
                    first_date += timedelta(minutes=offset)
            start_pos = find_line_at_date(first_date, tot_lines, tz)[0]
        else:
            start_pos = 1

    length = end_pos - start_pos + 1
    step = int(length / settings.ENHYDRIS_TS_GRAPH_BIG_STEP_DENOMINATOR) or 1
    fine_step = int(
        step / settings.ENHYDRIS_TS_GRAPH_FINE_STEP_DENOMINATOR) or 1
    if not step % fine_step == 0:
        step = fine_step * settings.ENHYDRIS_TS_GRAPH_FINE_STEP_DENOMINATOR
    pos = start_pos
    amax = ""
    prev_pos = -1
    tick_pos = -1
    is_vector = request.GET.get("vector", False)
    gstats = {
        "max": None,
        "min": None,
        "count": 0,
        "max_tstmp": None,
        "min_tstmp": None,
        "sum": None,
        "avg": None,
        "vsum": None,
        "vavg": None,
        "last": None,
        "last_tstmp": None,
        "vectors": None,
    }
    afloat = 0.01
    try:
        linecache.checkcache(datafilename)
        while pos < start_pos + length:
            s = linecache.getline(datafilename, pos)
            if s.isspace():
                pos += fine_step
                continue
            t = s.split(",")
            # Use the following exception handling to catch incomplete
            # reads from the cache. It retries only once; if the error on
            # the same line persists, it raises.
            try:
                k = iso8601.parse_date(t[0], default_timezone=tz)
                v = t[1]
            except Exception:
                if pos > prev_pos:
                    prev_pos = pos
                    linecache.checkcache(datafilename)
                    continue
                else:
                    raise
            if v != "":
                afloat = float(v)
                add_to_stats(k, afloat)
                if amax == "":
                    amax = afloat
                else:
                    amax = afloat if afloat > amax else amax
            if (pos - start_pos) % step == 0:
                tick_pos = pos
                if amax == "":
                    amax = "null"
                chart_data.append(
                    [calendar.timegm(k.timetuple()) * 1000,
                     str(amax), pos])
                amax = ""
            # Sometimes linecache tries to read a file being written (from
            # timeseries.write_file). So every 5000 lines refresh the
            # cache.
            if (pos - start_pos) % 5000 == 0:
                linecache.checkcache(datafilename)
            pos += fine_step
        if length > 0 and tick_pos < end_pos:
            if amax == "":
                amax = "null"
            chart_data[-1] = [
                calendar.timegm(k.timetuple()) * 1000,
                str(amax), end_pos
            ]
    finally:
        linecache.clearcache()
    if chart_data:
        if gstats["count"] > 0:
            gstats["avg"] = gstats["sum"] / gstats["count"]
            if is_vector:
                gstats["vavg"] = math.atan2(*gstats["vsum"]) * 180 / math.pi
                if gstats["vavg"] < 0:
                    gstats["vavg"] += 360
            for item in ("max_tstmp", "min_tstmp", "last_tstmp"):
                gstats[item] = calendar.timegm(gstats[item].timetuple()) * 1000
        response.content = json.dumps({"data": chart_data, "stats": gstats})
    else:
        response.content = json.dumps("")
    callback = request.GET.get("jsoncallback", None)
    if callback:
        response.content = "%s(%s)" % (callback, response.content)
    return response
Code Example #34
 def to_opt_datetime(k):
     x = s_or(k)
     return iso8601.parse_date(x) if x is not None else None
Code Example #35
 def to_t(x):
     return iso8601.parse_date(se(x))
Code Example #36
    'loan_status': {
        'mapValue': {
            'fields': {
                'message': {
                    'nullValue': None
                },
                'status': {
                    'stringValue': 'draft'
                }
            }
        }
    },
}

neat_data = {
    'approved_at': None,
    'bank_id': 'HDBCVNVX',
    'created_at': iso8601.parse_date('2019-12-15T03:42:30.691Z'),
    'loan_amount': 200000000,
    'isEmployee': False,
    'clients': ['web_s_housing_dev', 'web_s_housing_prod'],
    'loan_status': {
        'message': None,
        'status': 'draft'
    }
}


def test_convert():
    assert tidy_doc(orig_data) == neat_data
Code Example #37
 def day_of_year(iso_date_string):
   dt = iso8601.parse_date(iso_date_string)
   doy = dt.timetuple().tm_yday
   return doy
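Usage is straightforward; `timetuple().tm_yday` counts from 1, so for example:

print(day_of_year("2019-12-15T03:42:30.691Z"))  # 349 (the 349th day of 2019)
print(day_of_year("2020-01-01T00:00:00Z"))      # 1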
Code Example #38
File: loadendpoints.py  Project: nvdnkpr/lapidus
                                raise CommandError(
                                    "No endpoint url provided for {project} metric '{metric}' in ENDPOINTS_CONFIG file\n"
                                    .format(project=p.name,
                                            metric=endpt['name']))

                            period_key = endpt.get('period', DEFAULT_PERIOD)
                            if period_key not in PERIOD_DICT:
                                raise CommandError(
                                    "period '{0}' not a valid choice\n".format(
                                        period_key))
                            if verbosity >= 2:
                                self.stdout.write(
                                    "period: {0}\n".format(period_key))

                            if len(args):
                                from_date = iso8601.parse_date(args[0])
                            else:
                                from_date = datetime.datetime.combine(
                                    datetime.datetime.now() -
                                    datetime.timedelta(1),
                                    datetime.time(0, 0, 0))

                            if len(args) > 1:
                                to_date = iso8601.parse_date(args[1])
                                datedelta = to_date - from_date
                            else:
                                to_date = datedelta = None

                            if verbosity >= 2:
                                self.stdout.write(
                                    "from_date: {0}\n".format(from_date))
Code Example #39
 def test_invalid(self, secret, token, now, ttl_sec, backend, monkeypatch):
     f = Fernet(secret.encode("ascii"), backend=backend)
     current_time = calendar.timegm(iso8601.parse_date(now).utctimetuple())
     monkeypatch.setattr(time, "time", lambda: current_time)
     with pytest.raises(InvalidToken):
         f.decrypt(token.encode("ascii"), ttl=ttl_sec)
Code Example #40
    # Continue only if temp folder is big enough
    total_size = sum(
        os.path.getsize(os.path.join("temp", f)) for f in os.listdir("temp/")
        if os.path.isfile(os.path.join("temp", f)))
    if total_size < 500 * 1024 * 1024:
        printc(
            f"* Temp folder is too small ({total_size / 1024 / 1024} MB). Aborting.",
            utils.BColors.YELLOW)
        exit(0)

    # Filter only desired C14 buckets by name and sort them by creation date (most recent first)
    if config.is_c14:
        sync_archives = sorted([{
            **x, "unix_creation_date":
            int(time.mktime(
                iso8601.parse_date(x["creation_date"]).timetuple()))
        } for x in archives if x["name"].lower() == config["C14_SYNC_NAME"]],
                               key=lambda x: x["unix_creation_date"],
                               reverse=True)
        if not sync_archives:
            raise CriticalError("No C14 archives matching given name found!")

        # Make sure that ALL sync archives are 'active', if there's something 'busy' or 'deleting', we want
        # to wait for these pending operation to finish before extracting our archive again
        if not all(x["status"] == "active" for x in sync_archives):
            raise CriticalError(
                "Found sync archive(s), but not all of them are 'active'. "
                "There's probably an (un)archive operation in progress. Retry later"
            )

        # Delete old sync archives if needed
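One caveat in the snippet above: `time.mktime(...)` interprets the time tuple as local time, so `unix_creation_date` shifts with the machine's timezone (usually harmless for sorting, since all archives shift equally). A timezone-safe variant, assuming the `creation_date` strings carry an explicit offset or are UTC:

import iso8601

def creation_epoch(archive):
    """Offset-aware epoch seconds for a C14 archive dict (hypothetical helper)."""
    return int(iso8601.parse_date(archive["creation_date"]).timestamp())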
Code Example #41
File: test_secondary.py  Project: adityasaky/uptane
  def test_20_update_time(self):
    """
    Tests uptane.clients.secondary.Secondary::update_time()
    """

    # We'll just test one of the three client instances, since it shouldn't
    # make a difference.
    instance = secondary_instances[0]

    # Try a good time attestation first, signed by an expected timeserver key,
    # with an expected nonce (previously "received" from a Secondary)
    original_time_attestation = time_attestation = {
        'signed': {'nonces': [nonce], 'time': '2016-11-02T21:06:05Z'},
        'signatures': [{
          'method': 'ed25519',
          'sig': 'aabffcebaa57f1d6397bdc5647764261fd23516d2996446c3c40b3f30efb2a4a8d80cd2c21a453e78bf99dafb9d0f5e56c4e072db365499fa5f2f304afec100e',
          'keyid': '79c796d7e87389d1ebad04edce49faef611d139ee41ea9fb1931732afbfaac2e'}]}

    # Make sure that the Secondary thinks that it sent the nonce listed in the
    # sample data above.
    instance.last_nonce_sent = nonce

    if tuf.conf.METADATA_FORMAT == 'der':
      # Convert this time attestation to the expected ASN.1/DER format.
      time_attestation = asn1_codec.convert_signed_metadata_to_der(
          original_time_attestation, DATATYPE_TIME_ATTESTATION,
          private_key=TestSecondary.key_timeserver_pri, resign=True)

    # Check expected base conditions before updating time:
    # The only timeserver times registered should be two "now"s added during
    # initialization.  Because the clock override is a module variable in TUF,
    # its value (whether None or already set) depends on whether or not other
    # tests resulting in time attestation verification have occurred (e.g.
    # those for the Primary).
    self.assertEqual(2, len(instance.all_valid_timeserver_times))

    # If the time_attestation is not deemed valid, an exception will be raised.
    instance.update_time(time_attestation)

    # Check results.
    self.assertEqual(3, len(instance.all_valid_timeserver_times))
    # self.assertIsNotNone(tuf.conf.CLOCK_OVERRIDE)
    self.assertEqual(
        int(tuf.formats.datetime_to_unix_timestamp(iso8601.parse_date(
        '2016-11-02T21:06:05Z'))), tuf.conf.CLOCK_OVERRIDE)


    # Prepare to try again with a bad signature.
    # This test we will conduct differently depending on TUF's current format:
    if tuf.conf.METADATA_FORMAT == 'der':
      # Fail to re-sign the DER, so that the signature is over JSON instead,
      # which results in a bad signature.
      time_attestation__badsig = asn1_codec.convert_signed_metadata_to_der(
          original_time_attestation, DATATYPE_TIME_ATTESTATION, resign=False)

    else: # 'json' format
      # Rewrite the first 9 digits of the signature ('sig') to something
      # invalid.
      time_attestation__badsig = {
          'signed': {'nonces': [nonce], 'time': '2016-11-02T21:06:05Z'},
          'signatures': [{
            'method': 'ed25519',
            'sig': '987654321a57f1d6397bdc5647764261fd23516d2996446c3c40b3f30efb2a4a8d80cd2c21a453e78bf99dafb9d0f5e56c4e072db365499fa5f2f304afec100e',
            'keyid': '79c796d7e87389d1ebad04edce49faef611d139ee41ea9fb1931732afbfaac2e'}]}

    # Now actually perform the bad signature test.
    with self.assertRaises(tuf.BadSignatureError):
      instance.update_time(time_attestation__badsig)

    # Check results.  The bad attestation should change none of these.
    self.assertEqual(3, len(instance.all_valid_timeserver_times))
    # self.assertIsNotNone(tuf.conf.CLOCK_OVERRIDE)
    self.assertEqual(
        int(tuf.formats.datetime_to_unix_timestamp(iso8601.parse_date(
        '2016-11-02T21:06:05Z'))), tuf.conf.CLOCK_OVERRIDE)


    self.assertNotEqual(500, nonce, msg='Programming error: bad and good '
        'test nonces are equal.')

    time_attestation__wrongnonce = {
        'signed': {'nonces': [500], 'time': '2016-11-02T21:15:00Z'},
        'signatures': [{
          'method': 'ed25519',
          'sig': '4d01df35ca829fd7ead1408c250950c444db8ac51fa929a7f0288578fbf81016f0e81ed35789689481aee6b7af28ab311306397ef38572732854fb6cf2072604',
          'keyid': '79c796d7e87389d1ebad04edce49faef611d139ee41ea9fb1931732afbfaac2e'}]}

    if tuf.conf.METADATA_FORMAT == 'der':
      # Convert this time attestation to the expected ASN.1/DER format.
      time_attestation__wrongnonce = asn1_codec.convert_signed_metadata_to_der(
          time_attestation__wrongnonce, DATATYPE_TIME_ATTESTATION,
          private_key=TestSecondary.key_timeserver_pri, resign=True)

    with self.assertRaises(uptane.BadTimeAttestation):
      instance.update_time(time_attestation__wrongnonce)
Code Example #42
def iso_to_datetime_tuple(iso):
    """
    Converts an ISO time string to a timezone-aware datetime object
    """
    return iso8601.parse_date(iso)
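Despite the name, `iso8601.parse_date` returns a `datetime`, not a tuple; if an actual time tuple is wanted it can be chained with `timetuple()` or `utctimetuple()`, e.g.:

import iso8601

dt = iso8601.parse_date("2014-07-07T06:37:56-04:00")
as_tuple = dt.utctimetuple()   # time.struct_time in UTC
print(as_tuple.tm_hour)        # 10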
Code Example #43
    def __init__(self, message):
        result = DEVICE_STATUS_RE.match(message.topic)
        if result:
            self.payload = json.loads(message.payload.decode("utf-8"))
            self.deviceType = result.group(1)
            self.deviceId = result.group(2)
            self.device = self.deviceType + ":" + self.deviceId
            '''
			Properties from the "Connect" status are common in "Disconnect" status too
			{
			u'ClientAddr': u'195.212.29.68', 
			u'Protocol': u'mqtt-tcp', 
			u'ClientID': u'd:bcaxk:psutil:001', 
			u'User': u'use-token-auth', 
			u'Time': u'2014-07-07T06:37:56.494-04:00', 
			u'Action': u'Connect', 
			u'ConnectTime': u'2014-07-07T06:37:56.493-04:00', 
			u'Port': 1883
			}
			'''

            self.clientAddr = self.payload['ClientAddr'] if (
                'ClientAddr' in self.payload) else None
            self.protocol = self.payload['Protocol'] if (
                'Protocol' in self.payload) else None
            self.clientId = self.payload['ClientID'] if (
                'ClientID' in self.payload) else None
            self.user = self.payload['User'] if ('User'
                                                 in self.payload) else None
            self.time = iso8601.parse_date(
                self.payload['Time']) if ('Time' in self.payload) else None
            self.action = self.payload['Action'] if ('Action'
                                                     in self.payload) else None
            self.connectTime = iso8601.parse_date(
                self.payload['ConnectTime']) if ('ConnectTime'
                                                 in self.payload) else None
            self.port = self.payload['Port'] if ('Port'
                                                 in self.payload) else None
            '''
			Additional "Disconnect" status properties
			{
			u'WriteMsg': 0, 
			u'ReadMsg': 872, 
			u'Reason': u'The connection has completed normally.', 
			u'ReadBytes': 136507, 
			u'WriteBytes': 32, 
			}
			'''
            self.writeMsg = self.payload['WriteMsg'] if (
                'WriteMsg' in self.payload) else None
            self.readMsg = self.payload['ReadMsg'] if (
                'ReadMsg' in self.payload) else None
            self.reason = self.payload['Reason'] if ('Reason'
                                                     in self.payload) else None
            self.readBytes = self.payload['ReadBytes'] if (
                'ReadBytes' in self.payload) else None
            self.writeBytes = self.payload['WriteBytes'] if (
                'WriteBytes' in self.payload) else None

        else:
            raise ibmiotf.InvalidEventException(
                "Received device status on invalid topic: %s" %
                (message.topic))
Code Example #44
 def parse_iso_str(self, iso_str):
     """
     Parses an ISO 8601 datetime string and returns a localized datetime
     object.
     """
     return iso8601.parse_date(iso_str).astimezone(self.tz)
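A usage sketch, assuming `self.tz` is a concrete tzinfo such as `zoneinfo.ZoneInfo` (purely illustrative):

from zoneinfo import ZoneInfo
import iso8601

tz = ZoneInfo("America/New_York")
local = iso8601.parse_date("2014-07-07T10:37:56Z").astimezone(tz)
print(local.isoformat())  # 2014-07-07T06:37:56-04:00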
Code Example #45
 def on_content(self, content):
     # convert 2010-08-16 18:39:58 +0400
     # to      2010-08-16 18:39:58+04:00
     string = self.date_re.sub(r"+\1:\2", content.strip())
     self.value = iso8601.parse_date(string)
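`self.date_re` is not shown in the snippet; judging from the comment it matches a trailing ` +HHMM` offset so it can be rewritten with a colon. A hypothetical equivalent (an assumption, not the project's actual pattern):

import re
import iso8601

date_re = re.compile(r"\s*([+-]\d{2})(\d{2})$")
content = "2010-08-16 18:39:58 +0400"
value = iso8601.parse_date(date_re.sub(r"\1:\2", content.strip()))  # 2010-08-16 18:39:58+04:00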
Code Example #46
def process_repository_hook(hook_data, via="webhook", fetched_at=None, commit=True,
                            requestor_id=None, repo_id=None):
    hook_id = hook_data.get("id")
    if not hook_id:
        raise MissingData("no hook ID")

    if not repo_id:
        url = hook_data.get("url")
        if not url:
            raise MissingData("no hook url")

        # parse repo info from url
        path = URLObject(url).path
        assert path.segments[0] == "repos"
        repo_owner = path.segments[1]
        repo_name = path.segments[2]

        # fetch repo from database
        repo_query = (Repository.query
            .filter(Repository.owner_login == repo_owner)
            .filter(Repository.name == repo_name)
        )
        try:
            repo = repo_query.one()
        except NoResultFound:
            msg = "Repo {owner}/{repo} not loaded in webhookdb".format(
                owner=repo_owner, repo=repo_name,
            )
            raise NotFound(msg, {
                "type": "repo_hook",
                "owner": repo_owner,
                "repo": repo_name,
            })
        except MultipleResultsFound:
            msg = "Repo {owner}/{repo} found multiple times!".format(
                owner=repo_owner, repo=repo_name,
            )
            raise DatabaseError(msg, {
                "type": "repo_hook",
                "owner": repo_owner,
                "repo": repo_name,
            })
        repo_id = repo.id

    # fetch the object from the database,
    # or create it if it doesn't exist in the DB
    hook = RepositoryHook.query.get(hook_id)
    if not hook:
        hook = RepositoryHook(id=hook_id, repo_id=repo_id)

    # should we update the object?
    fetched_at = fetched_at or datetime.now()
    if hook.last_replicated_at > fetched_at:
        raise StaleData()

    # update the object
    fields = (
        "name", "config", "events", "active", "last_response",
    )
    for field in fields:
        if field in hook_data:
            setattr(hook, field, hook_data[field])
    dt_fields = ("created_at", "updated_at")
    for field in dt_fields:
        if hook_data.get(field):
            dt = parse_date(hook_data[field]).replace(tzinfo=None)
            setattr(hook, field, dt)

    # `url` is special -- it's the value in the `config` object,
    # NOT the top-level `url` property
    hook.url = hook_data.get("config", {}).get("url")

    # update replication timestamp
    replicated_dt_field = "last_replicated_via_{}_at".format(via)
    if hasattr(hook, replicated_dt_field):
        setattr(hook, replicated_dt_field, fetched_at)

    # add to DB session, so that it will be committed
    db.session.add(hook)

    if commit:
        db.session.commit()

    return hook
コード例 #47
0
def get_data(data_source, start, end):
    start = start.replace(tzinfo=None)
    end = end.replace(tzinfo=None)
    results = None
    if data_source.transform_function_language == "INTERNAL":
        return get_internal_data(data_source, start, end)
    # Check cache
    print("CHECK CACHE")
    data_ranges = DataRange.query.filter(
        DataRange.data_source == data_source,
        DataRange.start <= start,
        DataRange.end >= end
    )
    if data_ranges.count() == 1:
        print("------ RETURNING CACHE")
        data = Data.query.filter(
            Data.data_source == data_source,
            Data.data_range == data_ranges.one(),
            Data.timestamp >= start,
            Data.timestamp <= end
        ).all()
        data_dump = datas_schema.dump(data)
        return data_dump.data
    else:
        # No full cache hit, but check for a partial cache hit
        # from start to somewhere in the middle
        # and filling in the gap to the end will only take one fetch
        data_ranges = DataRange.query.filter(DataRange.data_source == data_source,
                                             DataRange.end > start,
                                             DataRange.end <= end,
                                             DataRange.start <= start)
        if data_ranges.count() == 1:
            print("FILLING IN END GAP")
            data_range = data_ranges.one()
            cached_data = Data.query.filter(
                Data.data_source == data_source,
                Data.data_range == data_range,
                Data.timestamp >= start,
                Data.timestamp <= end).all()
            cached_data_dump = datas_schema.dump(cached_data)
            new_data_dump = get_data(data_source, data_range.end, end)
            # new data won't hit the cache
            # new data will overlap cached_data range and auto join
            print("FILLING IN END GAP RESULTS:")
            print(cached_data_dump.data)
            print(type(cached_data_dump.data))
            print(new_data_dump)
            print(type(new_data_dump))
            results = list(cached_data_dump.data) + list(new_data_dump)
            return results

    if results is None:
        # Fetch dependencies
        print("FETCHING DEPENDENCIES:")
        dependent_data = {}
        for dependency in data_source.dependencies:
            print("FETCHING - " + str(dependency.name))
            dependent_data[dependency.name] = get_data(dependency, start, end)

        results = compute(data_source.transform_function, dependent_data, start, end)

    # Validate results
    print(data_source.transform_function)
    print("RESULTS:")
    print(results)
    if type(results) is not list:
        print(type(results))
        raise Exception("results is not a list!")
    for r in results:
        if type(r) is not dict:
            print(r)
            print(type(r))  # right now it's a data model, not a dict!
            raise Exception('results element is not a dict!')
        # parsing also serves as validation: an invalid timestamp raises a parse error here
        r_timestamp_date = iso8601.parse_date(r["timestamp"])
        if type(r["value"]) not in [str, int, float]:
            print(r["value"])
            print(type(r["value"]))
            raise Exception("result element's value field is not a str, int, or float")

    print("CACHING RESULTS")
    cache_results(data_source, start, end, results)

    print("RETURNING")
    data = Data.query.filter(
        Data.data_source == data_source,
        Data.timestamp >= start,
        Data.timestamp <= end
    ).all()
    data_dump = datas_schema.dump(data)
    return data_dump.data
コード例 #48
0
ファイル: twitter_util.py プロジェクト: luzi82/v11bot
            ret['tweet_list'].append({
                'id': tweet['id'],
                'created_at': time64,
                'created_at_str': time_str,
                'retweet_count': retweet_count,
                'is_retweet': is_retweet,
                'is_reply': is_reply,
                'is_accept': is_accept,
            })

            if is_accept:
                ret['retweet_count'] += retweet_count

            max_id = tweet['id'] - 1 if max_id is None else min(
                max_id, tweet['id'] - 1)

        if not good:
            break

    return ret


if __name__ == '__main__':
    from luzi82.v11bot import _conf
    import iso8601
    start_time = '2016-06-15T00:00:00+09:00'
    start_time = iso8601.parse_date(start_time)
    print(
        json.dumps(get_retweet_report(_conf.auth, 'Venus11Vivid', start_time)))
#     print(parse_time('Tue Jun 28 10:16:41 +0000 2016'))
コード例 #49
0
def post_process_AuditMetadata(auditMetadata):
    auditMetadata_cp = auditMetadata.copy()
    auditMetadata_cp['date'] = iso8601.parse_date(auditMetadata_cp['date'])
    return auditMetadata_cp
コード例 #50
0
def convert_datetime_for_delivery(isodate):
    iso_dt = parse_date(isodate)
    date_string = iso_dt.strftime("%Y-%m-%d %H:%M")
    return date_string
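Usage sketch: the strftime format keeps the wall-clock time from the ISO string and drops the offset:

print(convert_datetime_for_delivery('2012-03-24T16:44:21+00:00'))  # '2012-03-24 16:44'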
コード例 #51
0
 def __parse_date(self, date):
     last_update = helpers.convertToISO8601UTC(date)
     last_update_date = iso8601.parse_date(date)  # parsed value is unused; the call only validates the input
     return helpers.harvestTimeFormat(last_update)
コード例 #52
0
ファイル: xsd.py プロジェクト: Eveler/dmsic
 def pythonvalue(self, value):
     if value is None or value == 'nil':
         return None
     else:
         return iso8601.parse_date(value)
コード例 #53
0
def patch_tender_negotiation_contract(self):
    response = self.app.get('/tenders/{}/contracts'.format(self.tender_id))
    self.contract_id = response.json['data'][0]['id']

    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
        self.tender_id, self.contract_id, self.tender_token),
        {"data": {"status": "active"}},
        status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertIn("Can't sign contract before stand-still period end (", response.json['errors'][0]["description"])

    response = self.app.get('/tenders/{}/awards'.format(self.tender_id))
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']), 1)
    award = response.json['data'][0]
    start = parse_date(award['complaintPeriod']['startDate'])
    end = parse_date(award['complaintPeriod']['endDate'])
    delta = end - start
    self.assertEqual(delta.days, 0 if SANDBOX_MODE else self.stand_still_period_days)

    # at next steps we test to patch contract in 'complete' tender status
    tender = self.db.get(self.tender_id)
    for i in tender.get('awards', []):
        i['complaintPeriod']['endDate'] = i['complaintPeriod']['startDate']
    self.db.save(tender)

    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
        self.tender_id, self.contract_id, self.tender_token), {"data": {"value": {"currency": "USD"}}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.json['errors'][0]["description"], "Can\'t update currency for contract value")

    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
        self.tender_id, self.contract_id, self.tender_token),
        {"data": {"value": {"valueAddedTaxIncluded": False}}},
        status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can\'t update valueAddedTaxIncluded for contract value")

    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
        self.tender_id, self.contract_id, self.tender_token), {"data": {"value": {"amount": 501}}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Value amount should be less or equal to awarded amount (469.0)")

    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
        self.tender_id, self.contract_id, self.tender_token), {"data": {"value": {"amount": 238}}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']['value']['amount'], 238)

    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
        self.tender_id, self.contract_id, self.tender_token), {"data": {"status": "active"}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']["status"], "active")
    self.assertIn(u"dateSigned", response.json['data'])

    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
        self.tender_id, self.contract_id, self.tender_token), {"data": {"status": "cancelled"}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't update contract in current (complete) tender status")

    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
        self.tender_id, self.contract_id, self.tender_token), {"data": {"status": "pending"}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't update contract in current (complete) tender status")

    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
        self.tender_id, self.contract_id, self.tender_token), {"data": {"status": "active"}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't update contract in current (complete) tender status")

    # at next steps we test to patch contract in 'cancelled' tender status
    response = self.app.post_json('/tenders?acc_token={}', {"data": self.initial_data})
    self.assertEqual(response.status, '201 Created')
    tender_id = response.json['data']['id']
    tender_token = response.json['access']['token']

    response = self.app.post_json('/tenders/{}/awards?acc_token={}'.format(tender_id, tender_token),
                                  {'data': {'suppliers': [test_organization], 'status': 'pending'}})
    award_id = response.json['data']['id']
    response = self.app.patch_json('/tenders/{}/awards/{}?acc_token={}'.format(tender_id, award_id, tender_token),
                                   {"data": {'qualified': True, "status": "active"}})

    response = self.app.get('/tenders/{}/contracts'.format(tender_id))
    contract_id = response.json['data'][0]['id']

    response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(tender_id, tender_token),
                                  {'data': {'reason': 'cancellation reason', 'status': 'active'}})
    self.assertEqual(response.status, '201 Created')

    response = self.app.get('/tenders/{}'.format(tender_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']['status'], 'cancelled')

    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
        tender_id, contract_id, tender_token),
        {"data": {"awardID": "894917dc8b1244b6aab9ab0ad8c8f48a"}},
        status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')

    response = self.app.patch_json('/tenders/{}/contracts/{}?acc_token={}'.format(
        tender_id, contract_id, tender_token), {"data": {"status": "active"}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't update contract in current (cancelled) tender status")

    response = self.app.patch_json('/tenders/{}/contracts/some_id?acc_token={}'.format(
        self.tender_id, self.tender_token), {"data": {"status": "active"}}, status=404)
    self.assertEqual(response.status, '404 Not Found')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Not Found', u'location':
            u'url', u'name': u'contract_id'}
    ])

    response = self.app.patch_json('/tenders/some_id/contracts/some_id', {"data": {"status": "active"}}, status=404)
    self.assertEqual(response.status, '404 Not Found')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['status'], 'error')
    self.assertEqual(response.json['errors'], [
        {u'description': u'Not Found', u'location':
            u'url', u'name': u'tender_id'}
    ])

    response = self.app.get('/tenders/{}/contracts/{}'.format(self.tender_id, self.contract_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']["status"], "active")
コード例 #54
0
ファイル: ReMM.py プロジェクト: retrodans/ReMM
                part="snippet,status",
                body=dict(snippet=dict(
                    title=CLIENT_DATA['playlists'][i]['title'],
                    description=CLIENT_DATA['playlists'][i]['description']),
                          status=dict(privacyStatus=CLIENT_DATA['playlists'][i]
                                      ['privacyStatus']))).execute()
            PID = playlists_insert_response["id"]

        # LOOP playlists in config
        for sp in CLIENT_DATA['playlists'][i]['playlists']:
            # GET items in playlist
            playlist_query = youtube.playlistItems().list(
                part="snippet", playlistId=sp, maxResults=5).execute()
            # LOOP videos in YouTube playlist
            for pq in playlist_query['items']:
                # strftime('%s') yields the epoch seconds as a string (a platform-specific
                # directive); convert it to int so it can be compared with LAST_CRON_TIME
                publishedAtTimestamp = iso8601.parse_date(
                    pq['snippet']['publishedAt']).strftime('%s')
                publishedAtTimestamp = int(publishedAtTimestamp)

                # Add video to playlist if new since cron
                if publishedAtTimestamp > LAST_CRON_TIME and pq['snippet'][
                        'title'] != "Deleted video":
                    logging.info("#2 %s is a new video (%i > %i)",
                                 pq['snippet']['title'], publishedAtTimestamp,
                                 LAST_CRON_TIME)
                    playlist_query = youtube.playlistItems().insert(
                        part="snippet",
                        body={
                            "snippet": {
                                "playlistId": PID,
コード例 #55
0
 def test_query_usage_report(self):
     timestamp_from = '2012-03-24T16:44:21+00:00'
     timestamp_to = '2012-03-24T16:46:21+00:00'
     datetime_from = iso8601.parse_date(timestamp_from)
     datetime_to = iso8601.parse_date(timestamp_to)
     project_subscriptions = [
         {
             'id': self.subscription_id,
             'resource_uuid': self.resource_uuid,
             'resource_name': self.resource_name,
             'created_at': datetime_from,
             'expires_at': datetime_to,
             'product': {
                 'region': {
                     'name': self.region_name,
                     },
                 'item': {
                     'name': self.item_name,
                     },
                 'item_type': {
                     'name': self.item_type_name,
                     },
                 'order_unit': 'hours',
                 'order_size': 1,
                 'price': self.product_price,
                 'currency': 'CNY',
                 }
             },
         {
             'id': 60,
             'resource_uuid': 'a-fake-uuid-1',
             'resource_name': 'a_fake_name_1',
             'created_at': datetime_from,
             'expires_at': datetime_to,
             'product': {
                 'region': {
                     'name': 'default',
                     },
                 'item': {
                     'name': 'instance',
                     },
                 'item_type': {
                     'name': 'm1.large',
                     },
                 'order_unit': 'months',
                 'order_size': 1,
                 'price': 2100.00,
                 'currency': 'CNY',
                 }
             },
         {
             'id': 61,
             'resource_uuid': 'a-fake-uuid-2',
             'resource_name': '10.211.23.45',
             'created_at': datetime_from,
             'expires_at': datetime_to,
             'product': {
                 'region': {
                     'name': 'default',
                     },
                 'item': {
                     'name': 'floating_ip',
                     },
                 'item_type': {
                     'name': 'default',
                     },
                 'order_unit': 'days',
                 'order_size': 1,
                 'price': 1.10,
                 'currency': 'CNY',
                 }
             },
         {
             'id': 62,
             'resource_uuid': 'a-fake-uuid-3',
             'resource_name': '170.1.223.5',
             'created_at': datetime_from,
             'expires_at': datetime_to,
             'product': {
                 'region': {
                     'name': 'default',
                     },
                 'item': {
                     'name': 'floating_ip',
                     },
                 'item_type': {
                     'name': 'default',
                     },
                 'order_unit': 'days',
                 'order_size': 1,
                 'price': 1.10,
                 'currency': 'CNY',
                 }
             },
         {
             'id': 63,
             'resource_uuid': 'a-fake-uuid-4',
             'resource_name': 'a_fake_name_4',
             'created_at': datetime_from,
             'expires_at': datetime_to,
             'product': {
                 'region': {
                     'name': 'default',
                     },
                 'item': {
                     'name': 'load_balancer',
                     },
                 'item_type': {
                     'name': 'default',
                     },
                 'order_unit': 'days',
                 'order_size': 1,
                 'price': 2.70,
                 'currency': 'CNY',
                 }
             },
         {
             'id': 64,
             'resource_uuid': 'a-fake-uuid-5',
             'resource_name': 'a_fake_name_5',
             'created_at': datetime_from,
             'expires_at': datetime_to,
             'product': {
                 'region': {
                     'name': 'default',
                     },
                 'item': {
                     'name': 'load_balancer',
                     },
                 'item_type': {
                     'name': 'default',
                     },
                 'order_unit': 'days',
                 'order_size': 1,
                 'price': 2.70,
                 'currency': 'CNY',
                 }
             },
         {
             'id': 65,
             'resource_uuid': self.resource_uuid,
             'resource_name': '192.168.0.2',
             'created_at': datetime_from,
             'expires_at': datetime_to,
             'product': {
                 'region': {
                     'name': self.region_name,
                     },
                 'item': {
                     'name': 'network',
                     },
                 'item_type': {
                     'name': 'default',
                     },
                 'order_unit': 'KBytes',
                 'order_size': 1,
                 'price': 0.70,
                 'currency': 'CNY',
                 }
             },
         {
             'id': 66,
             'resource_uuid': 'a-fake-uuid-1',
             'resource_name': '192.168.0.3',
             'created_at': datetime_from,
             'expires_at': datetime_to,
             'product': {
                 'region': {
                     'name': 'default',
                     },
                 'item': {
                     'name': 'network',
                     },
                 'item_type': {
                     'name': 'default',
                     },
                 'order_unit': 'KBytes',
                 'order_size': 1,
                 'price': 0.70,
                 'currency': 'CNY',
                 }
             },
         ]
     purchases1 = [
         {
             'quantity': 6,
             'line_total': 14.40,
             },
         {
             'quantity': 8,
             'line_total': 19.20,
             },
         {
             'quantity': 2,
             'line_total': 4.80,
             },
         ]
     purchases2 = [
         {
             'quantity': 1,
             'line_total': 2100.00,
             },
         ]
     purchases3 = [
         {
             'quantity': 6,
             'line_total': 6.60,
             },
         {
             'quantity': 8,
             'line_total': 8.80,
             },
         {
             'quantity': 5,
             'line_total': 5.50,
             },
         ]
     purchases4 = [
         {
             'quantity': 6,
             'line_total': 6.60,
             },
         {
             'quantity': 3,
             'line_total': 3.30,
             },
         {
             'quantity': 2,
             'line_total': 2.20,
             },
         ]
     purchases5 = [
         {
             'quantity': 6,
             'line_total': 16.20,
             },
         {
             'quantity': 3,
             'line_total': 8.10,
             },
         {
             'quantity': 4,
             'line_total': 10.80,
             },
         ]
     purchases6 = [
         {
             'quantity': 6,
             'line_total': 16.20,
             },
         {
             'quantity': 8,
             'line_total': 21.60,
             },
         {
             'quantity': 13,
             'line_total': 35.10,
             },
         ]
     purchases7 = [
         {
             'quantity': 1000,
             'line_total': 700.00,
             },
         {
             'quantity': 800,
             'line_total': 560.00,
             },
         {
             'quantity': 52,
             'line_total': 36.40,
             },
         ]
     purchases8 = [
         {
             'quantity': 9000,
             'line_total': 6300.00,
             },
         {
             'quantity': 800,
             'line_total': 560.00,
             },
         {
             'quantity': 53,
             'line_total': 37.10,
             },
         ]
     usage_report = {
         'default': {
             'load_balancer': [
                 ('a-fake-uuid-4', 'a_fake_name_4', 'default', 'days',
                  2.70, 'CNY', 13, 35.10, timestamp_from, timestamp_to),
                 ('a-fake-uuid-5', 'a_fake_name_5', 'default', 'days',
                  2.70, 'CNY', 27, 72.90, timestamp_from, timestamp_to),
                 ],
             'instance': [
                 (self.resource_uuid, self.resource_name,
                  self.item_type_name, 'hours',
                  self.product_price, 'CNY', 16, 38.40,
                  timestamp_from, timestamp_to),
                 ('a-fake-uuid-1', 'a_fake_name_1', 'm1.large', 'months',
                  2100.00, 'CNY', 1, 2100.00, timestamp_from, timestamp_to),
                 ],
             'floating_ip': [
                 ('a-fake-uuid-2', '10.211.23.45', 'default', 'days',
                  1.10, 'CNY', 19, 20.90, timestamp_from, timestamp_to),
                 ('a-fake-uuid-3', '170.1.223.5', 'default', 'days',
                  1.10, 'CNY', 11, 12.10, timestamp_from, timestamp_to),
                 ],
             'network': [
                 (self.resource_uuid, '192.168.0.2', 'default', 'KBytes',
                  0.70, 'CNY', 1852, 1296.40, timestamp_from, timestamp_to),
                 ('a-fake-uuid-1', '192.168.0.3', 'default', 'KBytes',
                  0.70, 'CNY', 9853, 6897.10, timestamp_from, timestamp_to),
                 ],
             },
         }
     self.mox.StubOutWithMock(db, 'subscription_get_all_by_project')
     _purchase_func = 'purchase_get_all_by_subscription_and_timeframe'
     self.mox.StubOutWithMock(db, _purchase_func)
     db.subscription_get_all_by_project(self.context, self.tenant_id).\
             AndReturn(project_subscriptions)
     db.purchase_get_all_by_subscription_and_timeframe(self.context,
             self.subscription_id, datetime_from, datetime_to).\
                     InAnyOrder().AndReturn(purchases1)
     db.purchase_get_all_by_subscription_and_timeframe(self.context,
             60, datetime_from, datetime_to).\
                     InAnyOrder().AndReturn(purchases2)
     db.purchase_get_all_by_subscription_and_timeframe(self.context,
             61, datetime_from, datetime_to).\
                     InAnyOrder().AndReturn(purchases3)
     db.purchase_get_all_by_subscription_and_timeframe(self.context,
             62, datetime_from, datetime_to).\
                     InAnyOrder().AndReturn(purchases4)
     db.purchase_get_all_by_subscription_and_timeframe(self.context,
             63, datetime_from, datetime_to).\
                     InAnyOrder().AndReturn(purchases5)
     db.purchase_get_all_by_subscription_and_timeframe(self.context,
             64, datetime_from, datetime_to).\
                     InAnyOrder().AndReturn(purchases6)
     db.purchase_get_all_by_subscription_and_timeframe(self.context,
             65, datetime_from, datetime_to).\
                     InAnyOrder().AndReturn(purchases7)
     db.purchase_get_all_by_subscription_and_timeframe(self.context,
             66, datetime_from, datetime_to).\
                     InAnyOrder().AndReturn(purchases8)
     self.mox.ReplayAll()
     result = api.query_usage_report(self.context,
                                     timestamp_from,
                                     timestamp_to)
     self.mox.VerifyAll()
     expect_keys = sorted(usage_report.keys())
     actual_keys = sorted(result['data'].keys())
     self.assertEqual(expect_keys, actual_keys)
     for k, v in result['data'].iteritems():
         new_report = dict()
         for region_name, usage_data in v.iteritems():
             new_data = new_report.setdefault(region_name, list())
             for a, b, c, d, e, f, g, h, i, j in usage_data:
                 datum = (a, b, c, d, e, f, g,
                          float(Decimal(h).quantize(Decimal('0.01'))), i, j)
                 new_data.append(datum)
         self.assertEqual(new_report, usage_report[k])
コード例 #56
0
def dateobj(s, timezone='UTC'):
    localtz = pytz.timezone(timezone)
    return iso8601.parse_date(s).astimezone(localtz)
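For example, converting a UTC timestamp into a named zone (values are illustrative only):

print(dateobj('2012-03-24T16:44:21+00:00', timezone='Asia/Shanghai'))
# 2012-03-25 00:44:21+08:00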
コード例 #57
0
 def _unformat_date_value(self, v):
     if isinstance(v, string_types) and v.startswith('#'):
         v = iso8601.parse_date(v[1:])
     return v
コード例 #58
0
def localtime_from_iso_datestr(date_str: str) -> str:
    return datetime_to_local_timezone(iso8601.parse_date(date_str))
コード例 #59
0
 def date_is_valid(item):
     try:
         iso8601.parse_date(item)
     except iso8601.ParseError:
         return False
     return True
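A quick check of the helper's behaviour, called here as a plain function for illustration:

print(date_is_valid('2012-03-24T16:44:21+00:00'))  # True
print(date_is_valid('not a timestamp'))            # False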
コード例 #60
0
import sys, os, re

from airflow import DAG
from airflow.operators.bash_operator import BashOperator

from datetime import datetime, timedelta
import iso8601

PROJECT_HOME = os.environ["PROJECT_HOME"]

default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': iso8601.parse_date("2016-12-01"),
    'email': ['*****@*****.**'],
    'email_on_failure': True,
    'email_on_retry': True,
    'retries': 3,
    'retry_delay': timedelta(minutes=5),
}

training_dag = DAG('agile_data_science_batch_prediction_model_training',
                   default_args=default_args)

# We use the same two commands for all our PySpark tasks
pyspark_bash_command = """
spark-submit --master {{ params.master }} \
  {{ params.base_path }}/{{ params.filename }} \
  {{ params.base_path }}
"""
pyspark_date_bash_command = """