def test_full_hydrate(self):
        """full_hydrate() should copy the field data onto the backing instance."""
        blank = BasicRepresentation()

        # A representation built without data exposes empty field values.
        self.assertEqual(blank.name.value, None)
        self.assertEqual(blank.view_count.value, None)
        self.assertEqual(blank.date_joined.value, None)

        joined_at = datetime.datetime(2010, 2, 15, 12, 0, 0)
        rep = BasicRepresentation(
            data={"name": "Daniel", "view_count": 6, "date_joined": joined_at}
        )

        # Fields pick the data up immediately, but no instance exists yet.
        self.assertEqual(rep.name.value, "Daniel")
        self.assertEqual(rep.view_count.value, 6)
        self.assertEqual(rep.date_joined.value, joined_at)
        self.assertEqual(rep.instance, None)

        rep.full_hydrate()

        # Field values are untouched and the new instance mirrors them.
        self.assertEqual(rep.name.value, "Daniel")
        self.assertEqual(rep.view_count.value, 6)
        self.assertEqual(rep.date_joined.value, joined_at)
        self.assertEqual(rep.instance.name, "Daniel")
        self.assertEqual(rep.instance.view_count, 6)
        self.assertEqual(rep.instance.date_joined, joined_at)
Example #2
0
    def test_all_transactions(self):
        """A portfolio's transactions exclude other portfolios and sort chronologically."""
        other = Portfolio.objects.create(name='trend', description='trending', owner=self.p1.owner)
        when = [
            dt.datetime(2016, 1, 1, 11, 20),
            dt.datetime(2016, 1, 10, 14, 10),
            dt.datetime(2016, 1, 6, 9, 40),
            dt.datetime(2016, 1, 3, 13, 5),
        ]
        mine = [
            transaction_factory('buy', self.p1, self.s1, when[0], price=1, shares=100),
            transaction_factory('sell', self.p1, self.s1, when[1], price=1, shares=50),
            transaction_factory('split', self.p1, self.s1, when[2], ratio=1.5),
            transaction_factory('dividend', self.p1, self.s1, when[3], dividend=10),
        ]
        theirs = [
            transaction_factory('buy', other, self.s1, when[0], price=2, shares=200),
            transaction_factory('sell', other, self.s1, when[1], price=2, shares=100),
            transaction_factory('split', other, self.s1, when[2], ratio=2),
            transaction_factory('dividend', other, self.s1, when[3], dividend=50),
        ]

        txns = self.p1.transactions.all()

        # Only p1's own transactions may appear.
        for txn in mine:
            self.assertIn(txn, txns)
        for txn in theirs:
            self.assertNotIn(txn, txns)

        # Results come back ordered by datetime, not by insertion order.
        expected_order = [when[0], when[3], when[2], when[1]]
        for position, stamp in enumerate(expected_order):
            self.assertEqual(txns[position].datetime, stamp)
    def test_comprehensive_file_finder_two_files_one_file_not_comprehensive_old_encoder(self):
        """The finder returns only the comprehensive retail input file.

        Two files are inserted for the same company and interval - one
        non-comprehensive, one comprehensive - and only the latter should be
        found, with its as_of_date serialized by the old (ISO string) encoder.

        Fix: leading-zero decimal integer literals (e.g. ``05``) are a
        SyntaxError on Python 3; the date components are now plain decimals.
        """
        company_id = insert_test_company(name='woot')
        start = datetime.datetime(2013, 5, 1)
        end = datetime.datetime(2013, 5, 3)

        data = {
            'is_comprehensive': False,
            'company_id': str(company_id),
            'type': 'retail_input_file',
            'as_of_date': datetime.datetime(2013, 5, 2)
        }

        # This file falls inside the interval but must be excluded because it
        # is not comprehensive.
        non_comp_mds_file_id = ensure_id(insert_test_file(name='woot', data=data))

        data = {
            'is_comprehensive': True,
            'company_id': str(company_id),
            'type': 'retail_input_file',
            'as_of_date': datetime.datetime(2013, 5, 2, 12, 30)
        }

        comp_mds_file_id = ensure_id(insert_test_file(name='woot', data=data))

        sorted_file_list = find_comprehensive_retail_input_files_for_company_on_interval(company_id, [start, end])

        self.test_case.assertEqual(sorted_file_list[0]['_id'], comp_mds_file_id)
        self.test_case.assertEqual(sorted_file_list[0]['data']['is_comprehensive'], True)
        self.test_case.assertEqual(sorted_file_list[0]['data']['company_id'], str(company_id))
        self.test_case.assertEqual(sorted_file_list[0]['data']['type'], 'retail_input_file')
        # The old encoder serializes datetimes to ISO-8601 strings.
        self.test_case.assertEqual(sorted_file_list[0]['data']['as_of_date'], '2013-05-02T12:30:00')
Example #4
0
 def test_chainof(self):
     """ChainOf: adapt to int, require a positive value, then build a datetime."""
     chain = V.ChainOf(
         V.AdaptTo(int),
         V.Condition(lambda value: value > 0),
         V.AdaptBy(datetime.utcfromtimestamp),
     )
     epoch = 1373475820
     expected = datetime(2013, 7, 10, 17, 3, 40)
     self._testValidation(
         chain,
         adapted=[(epoch, expected), (str(epoch), expected)],
         invalid=["nan", -epoch],
     )
Example #5
0
    def test_named_relative_truncated(self):
        """Named relative paths truncate units below 'day' (hour) to zero."""
        units = ["year", "month", "day", "hour"]

        def check(name, date, expected):
            # All expected paths end in 0 because the hour is truncated.
            self.assertEqual(expected, self.cal.named_relative_path(name, units, date))

        date = datetime(2012, 3, 1, 10, 30)
        check("lastweek", date, [2012, 2, 20, 0])
        check("last3weeks", date, [2012, 2, 6, 0])

        date = datetime(2012, 3, 12)
        check("lastmonth", date, [2012, 2, 1, 0])
        check("last12months", date, [2011, 3, 1, 0])
        check("nextmonth", date, [2012, 4, 1, 0])
        check("next12months", date, [2013, 3, 1, 0])
        check("lastquarter", date, [2011, 10, 1, 0])
        check("lastyear", date, [2011, 1, 1, 0])
Example #6
0
 def test_integrity_checks_on_update(self):
     """
     Try to update a model instance introducing a FK constraint violation.
     If it fails it should fail with IntegrityError.
     """
     # Create an Article.
     models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
     # Retrieve it from the DB
     a1 = models.Article.objects.get(headline="Test article")
     # 30 is assumed not to be an existing Reporter pk, so saving should
     # violate the FK constraint on backends that enforce it.
     a1.reporter_id = 30
     try:
         a1.save()
     except IntegrityError:
         pass
     else:
         # No IntegrityError means this backend does not enforce FK
         # constraints at all, so the rest of the test is meaningless.
         self.skipTest("This backend does not support integrity checks.")
     # Now that we know this backend supports integrity checks we make sure
     # constraints are also enforced for proxy models. Refs #17519
     # Create another article
     r_proxy = models.ReporterProxy.objects.get(pk=self.r.pk)
     models.Article.objects.create(headline='Another article',
                                   pub_date=datetime.datetime(1988, 5, 15),
                                   reporter=self.r, reporter_proxy=r_proxy)
     # Retrieve the second article from the DB
     a2 = models.Article.objects.get(headline='Another article')
     # Same trick via the proxy-model FK: must also raise.
     a2.reporter_proxy_id = 30
     with self.assertRaises(IntegrityError):
         a2.save()
Example #7
0
def blog_month(request, blog_slug, year, month, page_no=1, blog_root=None):
    """Render the archive page listing one month of a blog's posts.

    Raises Http404 for an invalid year/month or when the month has no posts.
    """
    page_no = int(page_no)

    year = int(year)
    month = int(month)

    blog = get_channel_or_blog(blog_slug)
    blog_root = blog_root or blog.get_absolute_url()

    try:
        # Half-open interval [start_date, end_date) spanning the requested
        # month; December rolls over into January of the next year.
        start_date = datetime(year, month, 1)
        year_end = year
        next_month = month + 1
        if next_month == 13:
            next_month = 1
            year_end += 1
        end_date = datetime(year_end, next_month, 1)
    except ValueError:
        # Out-of-range month/year (e.g. month=14) -> not found.
        raise Http404

    title = blog.title

    posts = blog.posts().filter(display_time__gte=start_date, display_time__lt=end_date).select_related()

    if not posts.count():
        raise Http404

    archives = tools.collate_archives(blog, blog_root)

    def get_page_url(page_no, num_pages):
        # Pagination URL builder; page 1 uses the bare month URL, out-of-range
        # pages yield an empty string.
        if page_no < 1 or page_no > num_pages:
            return ""
        if page_no == 1:
            return "%s%i/%i/" % (blog_root, year, month)
            # return reverse("blog_month", kwargs = dict(blog_slug=blog_slug, year=year, month=month, blog_root=blog_root))
        else:
            return "%s%i/%i/page/%i/" % (blog_root, year, month, page_no)

    td = get_blog_list_data(request, posts, get_page_url, page_no)

    sections = blog.description_data.get("sections", None)

    td.update(
        dict(
            blog_root=blog_root,
            blog=blog,
            sections=sections,
            title=title,
            page_title=title,
            tagline=blog.tagline,
            archives=archives,
            archive_month=month,
            archive_year=year,
        )
    )

    # NOTE(review): the processed result is not written back into td, so the
    # template sees the raw sections unless extendedmarkup.process mutates its
    # input in place - confirm against extendedmarkup's contract.
    sections = extendedmarkup.process(sections, td)

    return render_to_response(blog.get_template_names("month.html"), td, context_instance=RequestContext(request))
Example #8
0
    def test_datetime_six_col(self):
        years = np.array([2007, 2008])
        months = np.array([1, 2])
        days = np.array([3, 4])
        hours = np.array([5, 6])
        minutes = np.array([7, 8])
        seconds = np.array([9, 0])
        expected = np.array([datetime(2007, 1, 3, 5, 7, 9),
                             datetime(2008, 2, 4, 6, 8, 0)])

        result = conv.parse_all_fields(years, months, days,
                                       hours, minutes, seconds)

        assert (result == expected).all()

        data = """\
year, month, day, hour, minute, second, a, b
2001, 01, 05, 10, 00, 0, 0.0, 10.
2001, 01, 5, 10, 0, 00, 1., 11.
"""
        datecols = {'ymdHMS': [0, 1, 2, 3, 4, 5]}
        df = self.read_csv(StringIO(data), sep=',', header=0,
                           parse_dates=datecols,
                           date_parser=conv.parse_all_fields)
        assert 'ymdHMS' in df
        assert df.ymdHMS.loc[0] == datetime(2001, 1, 5, 10, 0, 0)
Example #9
0
def setupObsInstance():
    """Build a one-day observation setup: sample times, source direction,
    and station position/rotation."""
    # Observation interval (approx vernal equinox), hourly samples,
    # endpoints included.
    begin = datetime(2011, 3, 20, 0, 0, 0)
    end = datetime(2011, 3, 21, 0, 0, 0)
    step = timedelta(minutes=60)
    nr_samples = int((end - begin).total_seconds() / step.seconds) + 1
    Times = [begin + ti * step for ti in range(nr_samples)]

    # Source direction: the celestial origin.  CasA values kept for reference:
    # celSrcTheta_CasA = np.pi/2-1.026515
    # celSrcPhi_CasA = 6.123487
    celSrcTheta = 0.0 * math.pi / 2
    celSrcPhi = 0.
    celSrcDir = celSrcPhi, (math.pi / 2 - celSrcTheta), 'J2000'

    # Station position and rotation, chosen arbitrarily at (0deg, 0deg, 0m).
    me = measures()
    stnPos_meWGS = measures().position('wgs84', '0deg', '0deg', '0m')
    stnPos_meITRF = me.measure(stnPos_meWGS, 'ITRF')
    stnPos = stnPos_meITRF['m2']['value'] * sph2crt_me(stnPos_meITRF)[:, np.newaxis]
    stnRot = antpat.reps.sphgridfun.pntsonsphere.rot3Dmat(0., 0., 1 * math.pi / 2)
    # Alternative: official LOFAR geodetic data:
    # stnPos, stnRot, stnRelPos = getArrayBandParams('SE607', 'LBA')

    return Times, celSrcDir, stnPos, stnRot
Example #10
0
    def test_parse_dates_noconvert_thousands(self):
        # see gh-14066
        # A '.' thousands separator must not mangle dotted date strings in
        # columns that are being parsed as dates.
        data = 'a\n04.15.2016'

        # Date column requested explicitly by name.
        expected = DataFrame([datetime(2016, 4, 15)], columns=['a'])
        result = self.read_csv(StringIO(data), parse_dates=['a'],
                               thousands='.')
        tm.assert_frame_equal(result, expected)

        # Same data, but the date column is the index.
        exp_index = DatetimeIndex(['2016-04-15'], name='a')
        expected = DataFrame(index=exp_index)
        result = self.read_csv(StringIO(data), index_col=0,
                               parse_dates=True, thousands='.')
        tm.assert_frame_equal(result, expected)

        data = 'a,b\n04.15.2016,09.16.2013'

        # Two date columns parsed by name.
        expected = DataFrame([[datetime(2016, 4, 15),
                               datetime(2013, 9, 16)]],
                             columns=['a', 'b'])
        result = self.read_csv(StringIO(data), parse_dates=['a', 'b'],
                               thousands='.')
        tm.assert_frame_equal(result, expected)

        # Two date columns forming a MultiIndex.
        expected = DataFrame([[datetime(2016, 4, 15),
                               datetime(2013, 9, 16)]],
                             columns=['a', 'b'])
        expected = expected.set_index(['a', 'b'])
        result = self.read_csv(StringIO(data), index_col=[0, 1],
                               parse_dates=True, thousands='.')
        tm.assert_frame_equal(result, expected)
Example #11
0
    def test_parse_date_time(self):
        # conv.parse_date_time combines separate date and time columns into
        # datetime values, both called directly and via read_csv.
        dates = np.array(['2007/1/3', '2008/2/4'], dtype=object)
        times = np.array(['05:07:09', '06:08:00'], dtype=object)
        expected = np.array([datetime(2007, 1, 3, 5, 7, 9),
                             datetime(2008, 2, 4, 6, 8, 0)])

        result = conv.parse_date_time(dates, times)
        assert (result == expected).all()

        data = """\
date, time, a, b
2001-01-05, 10:00:00, 0.0, 10.
2001-01-05, 00:00:00, 1., 11.
"""
        # The two source columns collapse into one combined 'date_time' column.
        datecols = {'date_time': [0, 1]}
        df = self.read_csv(StringIO(data), sep=',', header=0,
                           parse_dates=datecols,
                           date_parser=conv.parse_date_time)
        assert 'date_time' in df
        assert df.date_time.loc[0] == datetime(2001, 1, 5, 10, 0, 0)

        data = ("KORD,19990127, 19:00:00, 18:56:00, 0.8100\n"
                "KORD,19990127, 20:00:00, 19:56:00, 0.0100\n"
                "KORD,19990127, 21:00:00, 20:56:00, -0.5900\n"
                "KORD,19990127, 21:00:00, 21:18:00, -0.9900\n"
                "KORD,19990127, 22:00:00, 21:56:00, -0.5900\n"
                "KORD,19990127, 23:00:00, 22:56:00, -0.5900")

        # Column 1 is shared by both combined date columns.
        date_spec = {'nominal': [1, 2], 'actual': [1, 3]}
        # NOTE(review): no assertions follow this read - it only smoke-tests
        # that overlapping multi-column date parsing does not raise.
        df = self.read_csv(StringIO(data), header=None, parse_dates=date_spec,
                           date_parser=conv.parse_date_time)
    def setUp(self):
        """Create a temporary netCDF file with three differently-sized time
        variables ('time', 'time2', 'time3') for the date-handling tests."""
        self.standardtime = self.TestTime(datetime(1950, 1, 1), 366, 24,
                                          'hours since 1900-01-01', 'standard')

        # delete=False so the file survives until tearDown removes it.
        self.file = tempfile.NamedTemporaryFile(suffix='.nc', delete=False).name
        f = Dataset(self.file, 'w')
        # 'time': unlimited dimension filled from the synthetic standard times.
        f.createDimension('time', None)
        time = f.createVariable('time', float, ('time',))
        time.units = 'hours since 1900-01-01'
        time[:] = self.standardtime[:]
        # 'time2': single value encoded in days.
        f.createDimension('time2', 1)
        time2 = f.createVariable('time2', 'f8', ('time2',))
        time2.units = 'days since 1901-01-01'
        self.first_timestamp = datetime(2000, 1, 1)
        time2[0] = date2num(self.first_timestamp, time2.units)
        # 'time3': int32 seconds starting in 2037 (past the 32-bit epoch rollover).
        ntimes = 21
        f.createDimension("record", ntimes)
        time3 = f.createVariable("time3", numpy.int32, ("record", ))
        time3.units = "seconds since 1970-01-01 00:00:00"
        date = datetime(2037,1,1,0)
        dates = [date]
        for ndate in range(ntimes-1):
            # NOTE(review): the step grows each iteration ((ndate+1) hours),
            # giving non-uniform spacing - confirm a fixed hourly step was not
            # intended.
            date += (ndate+1)*timedelta(hours=1)
            dates.append(date)
        time3[:] = date2num(dates,time3.units)
        f.close()
Example #13
0
    def test_getWindowTimeBlackedOut(self):
        """getWindowTimeBlackedOut() counts only blackout hours overlapping the
        window's ranges.

        Fix: ``assertEquals`` is a deprecated alias of ``assertEqual``
        (removed in Python 3.12).
        """
        # No blackouts yet.
        bHrs = self.w.getWindowTimeBlackedOut()
        self.assertEqual(0.0, bHrs)

        # make blackout that overlaps this window
        # start = datetime(2009, 6, 1)
        # dur   = 7 # days
        blackout = create_blackout(project = self.w.session.project,
                                   start   = datetime(2009, 6, 3),
                                   end     = datetime(2009, 6, 4),
                                   repeat  = 'Once')

        # and another that doesn't
        blackout = create_blackout(project = self.w.session.project,
                                   start   = datetime(2009, 6, 8, 12),
                                   end     = datetime(2009, 6, 9, 12),
                                   repeat  = 'Once')

        # Only the first (24-hour) blackout overlaps.
        bHrs = self.w.getWindowTimeBlackedOut()
        self.assertEqual(24.0, bHrs)

        # now extend this window and make it non-contigious
        # and see how the new blackouts *dont* picked up.
        wr = WindowRange(window = self.w
                       , start_date = datetime(2009, 6, 10)
                       , duration = 2)
        wr.save()

        # the second window range misses the second blackout out
        # But it needs to be fresh from the DB
        w = Window.objects.get(id = self.w.id)

        bHrs = w.getWindowTimeBlackedOut()
        self.assertEqual(24.0, bHrs)
Example #14
0
    def test_timezones(self):
        """Exercise the timezone-related dateformat specifiers
        (O, r, T, e, U, u, Z, I) for naive and aware datetimes."""
        my_birthday = datetime(1979, 7, 8, 22, 00)
        # Either side of a DST transition.
        summertime = datetime(2005, 10, 30, 1, 00)
        wintertime = datetime(2005, 10, 30, 4, 00)
        timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456)

        # 3h30m to the west of UTC
        tz = get_fixed_timezone(-210)
        aware_dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=tz)

        # These expectations assume the test process runs in a CET-style
        # local timezone (gated by TZ_SUPPORT).
        if TZ_SUPPORT:
            self.assertEqual(dateformat.format(my_birthday, 'O'), '+0100')
            self.assertEqual(dateformat.format(my_birthday, 'r'), 'Sun, 8 Jul 1979 22:00:00 +0100')
            self.assertEqual(dateformat.format(my_birthday, 'T'), 'CET')
            self.assertEqual(dateformat.format(my_birthday, 'e'), '')
            self.assertEqual(dateformat.format(aware_dt, 'e'), '-0330')
            self.assertEqual(dateformat.format(my_birthday, 'U'), '300315600')
            self.assertEqual(dateformat.format(timestamp, 'u'), '123456')
            self.assertEqual(dateformat.format(my_birthday, 'Z'), '3600')
            self.assertEqual(dateformat.format(summertime, 'I'), '1')
            self.assertEqual(dateformat.format(summertime, 'O'), '+0200')
            self.assertEqual(dateformat.format(wintertime, 'I'), '0')
            self.assertEqual(dateformat.format(wintertime, 'O'), '+0100')

        # Ticket #16924 -- We don't need timezone support to test this
        self.assertEqual(dateformat.format(aware_dt, 'O'), '-0330')
    def test_blog_post_created_in_words(self):
        """created_in_words renders the post's age relative to a frozen "now".

        Fix: ``assertEquals`` is a deprecated alias of ``assertEqual``
        (removed in Python 3.12).
        """
        # Freeze utcnow so the age computation is deterministic.
        datetime_patched = self.add_patch('pyramid_blogr.models.blog_record.datetime')
        datetime_patched.datetime.utcnow.return_value = datetime.datetime(2016, 2, 10)

        blog_record = factories.BlogRecordFactory.create(created=datetime.datetime(2015, 10, 10))

        # 2015-10-10 -> 2016-02-10 is four months.
        self.assertEqual(blog_record.created_in_words, '4 months')
Example #16
0
    def test_services_list(self):
        """The services index should list every service with its host, zone,
        status, state and last-updated timestamp."""
        req = FakeRequest()
        res_dict = self.controller.index(req)

        # Expected fixture data: two binaries on each of two hosts.
        response = {'services': [
                    {'binary': 'nova-scheduler',
                    'host': 'host1',
                    'zone': 'internal',
                    'status': 'disabled',
                    'state': 'up',
                    'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
                    {'binary': 'nova-compute',
                     'host': 'host1',
                     'zone': 'nova',
                     'status': 'disabled',
                     'state': 'up',
                     'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
                    {'binary': 'nova-scheduler',
                     'host': 'host2',
                     'zone': 'internal',
                     'status': 'enabled',
                     'state': 'down',
                     'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
                    {'binary': 'nova-compute',
                     'host': 'host2',
                     'zone': 'nova',
                     'status': 'disabled',
                     'state': 'down',
                     'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
        self.assertEqual(res_dict, response)
 def sanity_insert_or_merge_entity(self):
     """Insert-or-merge an entity, then read it back and verify every property.

     Fixes: ``datetime(1991,10,04)`` uses a leading-zero integer literal,
     which is a SyntaxError on Python 3; ``assertEquals`` is a deprecated
     alias of ``assertEqual`` (removed in Python 3.12).
     """
     ln = u'Lastname'
     fn = u'Firstname'
     resp = self.tc.insert_or_merge_entity(TABLE_NO_DELETE,
                                           ln,
                                           fn,
                                           {'PartitionKey':'Lastname',
                                            'RowKey':'Firstname',
                                            'age': u'abc', #changed type
                                            'sex':'male', #changed value
                                            'birthday':datetime(1991, 10, 4),
                                            'sign' : 'aquarius' #new
                                           })
     # A successful insert-or-merge returns no body.
     self.assertEqual(resp, None)

     resp = self.tc.get_entity(TABLE_NO_DELETE,
                               ln,
                               fn,
                               '')
     # The merged entity carries both the changed and the new properties.
     self.assertEqual(resp.PartitionKey, ln)
     self.assertEqual(resp.RowKey, fn)
     self.assertEqual(resp.age, u'abc')
     self.assertEqual(resp.sex, u'male')
     self.assertEqual(resp.birthday, datetime(1991, 10, 4))
     self.assertEqual(resp.sign, u'aquarius')
Example #18
0
 def test_services_detail_with_delete_extension(self):
     """With the delete extension enabled, each listed service additionally
     carries its database ``id``."""
     self.ext_mgr.extensions['os-extended-services-delete'] = True
     req = FakeRequest()
     res_dict = self.controller.index(req)
     # Same fixture data as the plain listing, plus an 'id' per service.
     response = {'services': [
         {'binary': 'nova-scheduler',
          'host': 'host1',
          'id': 1,
          'zone': 'internal',
          'status': 'disabled',
          'state': 'up',
          'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
         {'binary': 'nova-compute',
          'host': 'host1',
          'id': 2,
          'zone': 'nova',
          'status': 'disabled',
          'state': 'up',
          'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
         {'binary': 'nova-scheduler',
          'host': 'host2',
          'id': 3,
          'zone': 'internal',
          'status': 'enabled',
          'state': 'down',
          'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
         {'binary': 'nova-compute',
          'host': 'host2',
          'id': 4,
          'zone': 'nova',
          'status': 'disabled',
          'state': 'down',
          'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
     self.assertEqual(res_dict, response)
Example #19
0
    def archives():
        """Return the list of month datetimes from the first article up to
        now, newest first."""
        # archives = cache.get("archives")
        archives = None
        if archives is None:
            begin_post = Article.query.order_by('created').first()

            now = datetime.datetime.now()

            # With no articles yet, the range collapses to "now" alone.
            start = begin_post.created if begin_post else now

            # Number of month steps between the first post and now.
            months = (now.year - start.year) * 12 - start.month + now.month
            archives = [start]

            current = start
            for _ in range(months):
                # Advance to the first day of the following month.
                if current.month < 12:
                    current = datetime.datetime(current.year, current.month + 1, 1)
                else:
                    current = datetime.datetime(current.year + 1, 1, 1)
                archives.append(current)
            archives.reverse()
            # cache.set("archives", archives)
        return archives
Example #20
0
    def testMetaDateInDatetimeFields(self):
        """Dates in the form's <meta> block are parsed into datetime metadata.

        Fix: the fixture file handle was opened without being closed
        (resource leak); it is now managed with a ``with`` block.
        """
        file_path = os.path.join(os.path.dirname(__file__), "data", "date_in_meta.xml")
        with open(file_path, "rb") as fixture:
            xml_data = fixture.read()
        xform_generic = FormProcessorInterface.post_xform(xml_data)

        self.assertEqual(datetime(2014, 7, 10), xform_generic.metadata.timeStart)
        self.assertEqual(datetime(2014, 7, 11), xform_generic.metadata.timeEnd)
Example #21
0
        def instance_create(context, inst):
            """Fake DB create: build and return a fake instance record seeded
            from the request dict ``inst``."""
            inst_type = flavors.get_flavor_by_flavor_id(3)
            image_uuid = "76fa36fc-c930-4bf3-8c8a-ea2a2420deb6"
            def_image_ref = "http://localhost/images/%s" % image_uuid
            # Monotonically increasing fake primary key, shared via the
            # enclosing test case.
            self.instance_cache_num += 1
            instance = fake_instance.fake_db_instance(
                **{
                    "id": self.instance_cache_num,
                    "display_name": inst["display_name"] or "test",
                    "uuid": fakes.FAKE_UUID,
                    "instance_type": inst_type,
                    "access_ip_v4": "1.2.3.4",
                    "access_ip_v6": "fead::1234",
                    # Fall back to a fixed image when the request omits one.
                    "image_ref": inst.get("image_ref", def_image_ref),
                    "user_id": "fake",
                    "project_id": "fake",
                    "reservation_id": inst["reservation_id"],
                    "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
                    "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
                    "progress": 0,
                    "fixed_ips": [],
                    "task_state": "",
                    "vm_state": "",
                    "root_device_name": inst.get("root_device_name", "vda"),
                }
            )

            return instance
Example #22
0
    def test_simple(self):
        """Render every HTML5 field type and verify both the rendered markup
        and the coerced ``data`` value for each."""
        b = self._build_value
        # Each entry: (key, form input, expected html fragment[, coerced data]).
        VALUES = (
            b('search', 'search', 'type="search"'),
            b('telephone', '123456789', 'type="tel"'),
            b('url', 'http://wtforms.simplecodes.com/', 'type="url"'),
            b('email', 'foo@bar.com', 'type="email"'),
            b('datetime', '2013-09-05 00:23:42', 'type="datetime"', datetime(2013, 9, 5, 0, 23, 42)),
            b('date', '2013-09-05', 'type="date"', date(2013, 9, 5)),
            b('dt_local', '2013-09-05 00:23:42', 'type="datetime-local"', datetime(2013, 9, 5, 0, 23, 42)),
            b('integer', '42', '<input id="integer" name="integer" step="1" type="number" value="42">', 42),
            b('decimal', '43.5', '<input id="decimal" name="decimal" step="any" type="number" value="43.5">', Decimal('43.5')),
            b('int_range', '4', '<input id="int_range" name="int_range" step="1" type="range" value="4">', 4),
            b('decimal_range', '58', '<input id="decimal_range" name="decimal_range" step="any" type="range" value="58">', 58),
        )
        # Feed all inputs through one simulated POST.
        formdata = DummyPostData()
        kw = {}
        for item in VALUES:
            formdata[item['key']] = item['form_input']
            kw[item['key']] = item['data']

        form = self.F(formdata)
        for item in VALUES:
            field = form[item['key']]
            render_value = field()
            # Manual assertions (instead of assertEqual) to report the field
            # key alongside the mismatching value.
            if render_value != item['expected_html']:
                tmpl = 'Field {key} render mismatch: {render_value!r} != {expected_html!r}'
                raise AssertionError(tmpl.format(render_value=render_value, **item))
            if field.data != item['data']:
                tmpl = 'Field {key} data mismatch: {field.data!r} != {data!r}'
                raise AssertionError(tmpl.format(field=field, **item))
    def test_get(self):
        """get(pk=...) should populate each representation's fields from the
        stored note: plain, custom, and related variants."""
        note = NoteRepresentation()
        note.get(pk=1)
        self.assertEqual(note.content.value, u"This is my very first post using my shiny new API. Pretty sweet, huh?")
        self.assertEqual(note.created.value, datetime.datetime(2010, 3, 30, 20, 5))
        self.assertEqual(note.is_active.value, True)
        self.assertEqual(note.slug.value, u"first-post")
        self.assertEqual(note.title.value, u"First Post!")
        self.assertEqual(note.updated.value, datetime.datetime(2010, 3, 30, 20, 5))

        # Custom representation adds derived fields (author, constant).
        custom = CustomNoteRepresentation()
        custom.get(pk=1)
        self.assertEqual(custom.content.value, u"This is my very first post using my shiny new API. Pretty sweet, huh?")
        self.assertEqual(custom.created.value, datetime.datetime(2010, 3, 30, 20, 5))
        self.assertEqual(custom.is_active.value, True)
        self.assertEqual(custom.author.value, u"johndoe")
        self.assertEqual(custom.title.value, u"First Post!")
        self.assertEqual(custom.constant.value, 20)

        # Related representation resolves FK/M2M fields to resource URIs.
        related = RelatedNoteRepresentation(api_name="v1", resource_name="notes")
        related.get(pk=1)
        self.assertEqual(
            related.content.value, u"This is my very first post using my shiny new API. Pretty sweet, huh?"
        )
        self.assertEqual(related.created.value, datetime.datetime(2010, 3, 30, 20, 5))
        self.assertEqual(related.is_active.value, True)
        self.assertEqual(related.author.value, "/api/v1/users/1/")
        self.assertEqual(related.title.value, u"First Post!")
        self.assertEqual(related.subjects.value, ["/api/v1/subjects/1/", "/api/v1/subjects/2/"])
Example #24
0
 def in_session(self,session='day'):
     """Return True when the current wall-clock time falls inside the given
     trading session: 'day' is 09:00-15:00, anything else is the night
     session 21:00-02:30 (spanning midnight)."""
     now = datetime.now()

     def today_at(hour, minute=0):
         # Same calendar day as "now", at the given time of day.
         return datetime(now.year, now.month, now.day, hour, minute, 0)

     if session == 'day':
         return today_at(9) < now < today_at(15)
     # Night session wraps around midnight, hence the "or".
     return now > today_at(21) or now < today_at(2, 30)
Example #25
0
    def test_named_relative(self):
        """Named relative dates resolve to the expected [year, month, day] paths."""
        units = ["year", "month", "day"]

        def check(name, date, expected):
            self.assertEqual(expected, self.cal.named_relative_path(name, units, date))

        date = datetime(2012, 3, 1)
        check("tomorrow", date, [2012, 3, 2])
        # 2012 is a leap year, so "yesterday" lands on Feb 29.
        check("yesterday", date, [2012, 2, 29])
        check("weekago", date, [2012, 2, 23])
        check("3weeksago", date, [2012, 2, 9])

        date = datetime(2012, 3, 12)
        check("monthago", date, [2012, 2, 12])
        check("12monthsago", date, [2011, 3, 12])
        check("monthforward", date, [2012, 4, 12])
        check("12monthsforward", date, [2013, 3, 12])
Example #26
0
    def upload_timetable_specified_sessions_only(self):
        """upload_timetable() creates events only for the session ids it is given.

        Fix: ``assertEquals`` is a deprecated alias of ``assertEqual``
        (removed in Python 3.12).
        """
        start_date = datetime(2016, 3, 22, tzinfo=timezone.utc) # tues
        end_date = datetime(2016, 3, 23, tzinfo=timezone.utc) # wed
        self.assertEqual(Event.objects.all().count(), 0)

        # create some timetabled sessions for mondays, tuesdays and Wednesdays
        tues_sessions = mommy.make_recipe('booking.tue_session', _quantity=3)
        mommy.make_recipe('booking.wed_session', _quantity=3)

        session_ids = [
            session.id for session in Session.objects.all() if
            session in tues_sessions
            ]
        # choose tues-wed as date range, but specify the tues sessions only
        upload_timetable(start_date, end_date, session_ids)
        # check that there are now classes on the dates specified
        tue_classes = Event.objects.filter(
            date__gte=self._start_of_day(start_date),
            date__lte=self._end_of_day(start_date)
            )
        wed_classes = Event.objects.filter(
            date__gte=self._start_of_day(end_date),
            date__lte=self._end_of_day(end_date)
            )
        # total number of classes created is 3, as no wed classes created
        self.assertEqual(tue_classes.count(), 3)
        self.assertEqual(wed_classes.count(), 0)
        self.assertEqual(Event.objects.count(), 3)
    def test_select_competitive_companies(self):
        """Companies sharing the queried company's industry compete at full
        strength; companies in a linked competing industry inherit the link's
        weight and interval."""
        # create two industries
        industry_id1 = ensure_id(insert_test_industry())
        industry_id2 = ensure_id(insert_test_industry())

        # create three companies
        company_id1 = ensure_id(insert_test_company(workflow_status = "published"))
        company_id2 = ensure_id(insert_test_company(workflow_status = "published"))
        company_id3 = ensure_id(insert_test_company(workflow_status = "published"))

        # add primary industries to all three companies.  Company 3 gets a different industry.
        self.main_access.mds.call_add_link("company", company_id1, 'primary_industry_classification', 'industry', industry_id1, "primary_industry", "industry_classification", self.context)
        self.main_access.mds.call_add_link("company", company_id2, 'primary_industry_classification', 'industry', industry_id1, "primary_industry", "industry_classification", self.context)
        self.main_access.mds.call_add_link("company", company_id3, 'primary_industry_classification', 'industry', industry_id2, "primary_industry", "industry_classification", self.context)

        # make industries 1 and 2 compete with each other
        link_interval = [datetime.datetime(2012, 1, 1), datetime.datetime(2013, 2, 2)]
        link_data = {"home_to_away": {"weight": .7}, "away_to_home": {"weight": .7}}
        self.main_access.mds.call_add_link("industry", industry_id1, 'competitor', 'industry', industry_id2, "competitor", "industry_competition", self.context, link_interval = link_interval,
                                           link_data = link_data)

        # query the competitions of company
        competitive_companies = select_competitive_companies(company_id1)

        # sort both the expected and real array so that the order doesn't matter
        # Companies 1 and 2 share the industry (strength 1, no interval);
        # company 3 competes via the industry link (strength .7, link interval).
        expected_competitive_companies = sorted([
            { "_id": str(company_id1), "interval": None, "competition_strength": 1 },
            { "_id": str(company_id2), "interval": None, "competition_strength": 1 },
            { "_id": str(company_id3), "interval": [datetime.datetime(2012, 1, 1), datetime.datetime(2013, 2, 2)], "competition_strength": .7 }
        ])
        competitive_companies = sorted(competitive_companies)

        # make sure the competitions are correct
        self.test_case.assertEqual(competitive_companies, expected_competitive_companies)
Example #28
0
    def test_upload_timetable(self):
        """
        create classes between given dates

        Sessions exist for Mon/Tue/Wed, but the upload range only spans
        Tue-Wed, so only Tue and Wed Events should be created.
        """
        start_date = datetime(2016, 3, 22, tzinfo=timezone.utc) # tues
        end_date = datetime(2016, 3, 23, tzinfo=timezone.utc) # wed
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual throughout.
        self.assertEqual(Event.objects.all().count(), 0)

        # create some timetabled sessions for mondays, tuesdays and Wednesdays
        mommy.make_recipe('booking.mon_session', _quantity=3)
        mommy.make_recipe('booking.tue_session', _quantity=3)
        mommy.make_recipe('booking.wed_session', _quantity=3)

        session_ids = [session.id for session in Session.objects.all()]

        upload_timetable(start_date, end_date, session_ids)
        # check that there are now classes on the dates specified
        tue_classes = Event.objects.filter(
            date__gte=self._start_of_day(start_date),
            date__lte=self._end_of_day(start_date)
            )
        wed_classes = Event.objects.filter(
            date__gte=self._start_of_day(end_date),
            date__lte=self._end_of_day(end_date)
            )
        # total number of classes created is 6, as no monday classes created
        self.assertEqual(tue_classes.count(), 3)
        self.assertEqual(wed_classes.count(), 3)
        self.assertEqual(Event.objects.count(), 6)
Example #29
0
    def test_get_with_since(self):
        """PUT a state with an older `updated` timestamp, then verify a GET
        with a later `since` parameter excludes it (xAPI state filtering)."""
        state_id = "old_state_test"
        testparamssince = {"stateId": state_id, "activityId": self.activityId, "agent": self.testagent}
        # Python 2 urllib; on Python 3 this would be urllib.parse.urlencode.
        path = '%s?%s' % (self.url, urllib.urlencode(testparamssince))
        teststatesince = {"test":"get w/ since","obj":{"agent":"test"}}
        # Store the state with an updated time of June 2012 -- i.e. BEFORE
        # the `since` value used below, so it should be filtered out later.
        updated =  datetime.datetime(2012, 6, 12, 12, 00).replace(tzinfo=timezone.get_default_timezone())
        put1 = self.client.put(path, teststatesince, content_type=self.content_type, updated=updated.isoformat(), Authorization=self.auth, X_Experience_API_Version="1.0.0")

        # A successful state PUT returns 204 No Content with an empty body.
        self.assertEqual(put1.status_code, 204)
        self.assertEqual(put1.content, '')
        
        # Sanity-check a plain GET (no `since`) returns the stored document.
        r = self.client.get(self.url, testparamssince, X_Experience_API_Version="1.0.0", Authorization=self.auth)
        self.assertEqual(r.status_code, 200)
        
        # Response body is a dict literal; literal_eval parses it safely.
        robj = ast.literal_eval(r.content)
        self.assertEqual(robj['test'], teststatesince['test'])
        self.assertEqual(robj['obj']['agent'], teststatesince['obj']['agent'])
        # ETag is the quoted SHA-1 of the response body.
        self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())

        # Now list state ids updated since July 2012: the June document above
        # must be excluded, while stateId/stateId2 (presumably stored more
        # recently by setUp -- not visible here) are included.
        since = datetime.datetime(2012, 7, 1, 12, 00).replace(tzinfo=utc)
        params2 = {"activityId": self.activityId, "agent": self.testagent, "since": since}
        r = self.client.get(self.url, params2, X_Experience_API_Version="1.0.0", Authorization=self.auth)
        self.assertEqual(r.status_code, 200)
        self.assertIn(self.stateId, r.content)
        self.assertIn(self.stateId2, r.content)
        self.assertNotIn(state_id, r.content)
        self.assertNotIn(self.stateId3, r.content)
        self.assertNotIn(self.stateId4, r.content)

        # Clean up the state created by this test.
        del_r = self.client.delete(self.url, testparamssince, Authorization=self.auth, X_Experience_API_Version="1.0.0")
Example #30
0
    def create_test_fixture(self):
        """Seed the session with two MagnitudeMeasure rows sharing an event
        source but differing in agency, scale, value and origin time."""
        event_source = "AnEventSource"

        agency_one = "Tatooine"
        agency_two = 'Alderaan'

        # Both origins use the same key and position; only `time` differs.
        origin_one = dict(
            origin_key="test",
            position=geoalchemy.WKTSpatialElement('POINT(-81.40 38.08)'),
            time=datetime(1950, 2, 19, 23, 14, 5),
            depth=1)

        origin_two = dict(
            origin_key="test",
            position=geoalchemy.WKTSpatialElement('POINT(-81.40 38.08)'),
            time=datetime(1987, 2, 6, 9, 14, 15),
            depth=1)

        # Origin fields are splatted into the measure constructor, so each
        # measure carries its own origin data.
        measure_one = catalogue.MagnitudeMeasure(
            event_source=event_source,
            event_key='1st',
            agency=agency_one, scale='mL', value=5.0,
            **origin_one)
        self.session.add(measure_one)

        measure_two = catalogue.MagnitudeMeasure(
            event_source=event_source,
            event_key='2nd',
            agency=agency_two,
            scale='mb', value=6.0, **origin_two)
        self.session.add(measure_two)