def test_full_hydrate(self):
        basic = BasicRepresentation()

        # Sanity check.
        self.assertEqual(basic.name.value, None)
        self.assertEqual(basic.view_count.value, None)
        self.assertEqual(basic.date_joined.value, None)

        basic = BasicRepresentation(
            data={"name": "Daniel", "view_count": 6, "date_joined": datetime.datetime(2010, 2, 15, 12, 0, 0)}
        )

        # Sanity check.
        self.assertEqual(basic.name.value, "Daniel")
        self.assertEqual(basic.view_count.value, 6)
        self.assertEqual(basic.date_joined.value, datetime.datetime(2010, 2, 15, 12, 0, 0))
        self.assertEqual(basic.instance, None)

        # Now load up the data.
        basic.full_hydrate()

        self.assertEqual(basic.name.value, "Daniel")
        self.assertEqual(basic.view_count.value, 6)
        self.assertEqual(basic.date_joined.value, datetime.datetime(2010, 2, 15, 12, 0, 0))
        self.assertEqual(basic.instance.name, "Daniel")
        self.assertEqual(basic.instance.view_count, 6)
        self.assertEqual(basic.instance.date_joined, datetime.datetime(2010, 2, 15, 12, 0, 0))
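A minimal sketch of the behaviour full_hydrate is being tested for, built only from what the assertions above show (fields exposing a .value attribute, and an instance attribute populated on hydrate); this is an assumption-based stand-in, not the library's implementation:

class _Field:
    def __init__(self, value=None):
        self.value = value

class SketchRepresentation:
    FIELD_NAMES = ('name', 'view_count', 'date_joined')

    def __init__(self, data=None):
        data = data or {}
        for key in self.FIELD_NAMES:
            setattr(self, key, _Field(data.get(key)))
        self.instance = None

    def full_hydrate(self):
        # copy each field's deserialized value onto a fresh plain object
        self.instance = type('Obj', (), {})()
        for key in self.FIELD_NAMES:
            setattr(self.instance, key, getattr(self, key).value)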
Example #2
    def create_test_fixture(self):
        event_source = "AnEventSource"

        agency_one = "Tatooine"
        agency_two = 'Alderaan'

        origin_one = dict(
            origin_key="test",
            position=geoalchemy.WKTSpatialElement('POINT(-81.40 38.08)'),
            time=datetime(1950, 2, 19, 23, 14, 5),
            depth=1)

        origin_two = dict(
            origin_key="test",
            position=geoalchemy.WKTSpatialElement('POINT(-81.40 38.08)'),
            time=datetime(1987, 2, 6, 9, 14, 15),
            depth=1)

        measure_one = catalogue.MagnitudeMeasure(
            event_source=event_source,
            event_key='1st',
            agency=agency_one, scale='mL', value=5.0,
            **origin_one)
        self.session.add(measure_one)

        measure_two = catalogue.MagnitudeMeasure(
            event_source=event_source,
            event_key='2nd',
            agency=agency_two,
            scale='mb', value=6.0, **origin_two)
        self.session.add(measure_two)
Example #3
    def test_get_with_since(self):
        state_id = "old_state_test"
        testparamssince = {"stateId": state_id, "activityId": self.activityId, "agent": self.testagent}
        path = '%s?%s' % (self.url, urllib.urlencode(testparamssince))
        teststatesince = {"test":"get w/ since","obj":{"agent":"test"}}
        updated = datetime.datetime(2012, 6, 12, 12, 0).replace(tzinfo=timezone.get_default_timezone())
        put1 = self.client.put(path, teststatesince, content_type=self.content_type, updated=updated.isoformat(), Authorization=self.auth, X_Experience_API_Version="1.0.0")

        self.assertEqual(put1.status_code, 204)
        self.assertEqual(put1.content, '')
        
        r = self.client.get(self.url, testparamssince, X_Experience_API_Version="1.0.0", Authorization=self.auth)
        self.assertEqual(r.status_code, 200)
        
        robj = ast.literal_eval(r.content)
        self.assertEqual(robj['test'], teststatesince['test'])
        self.assertEqual(robj['obj']['agent'], teststatesince['obj']['agent'])
        self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())

        since = datetime.datetime(2012, 7, 1, 12, 00).replace(tzinfo=utc)
        params2 = {"activityId": self.activityId, "agent": self.testagent, "since": since}
        r = self.client.get(self.url, params2, X_Experience_API_Version="1.0.0", Authorization=self.auth)
        self.assertEqual(r.status_code, 200)
        self.assertIn(self.stateId, r.content)
        self.assertIn(self.stateId2, r.content)
        self.assertNotIn(state_id, r.content)
        self.assertNotIn(self.stateId3, r.content)
        self.assertNotIn(self.stateId4, r.content)

        del_r = self.client.delete(self.url, testparamssince, Authorization=self.auth, X_Experience_API_Version="1.0.0")
Example #4
    def test_select_competitive_companies(self):

        # create two industries
        industry_id1 = ensure_id(insert_test_industry())
        industry_id2 = ensure_id(insert_test_industry())

        # create three companies
        company_id1 = ensure_id(insert_test_company(workflow_status="published"))
        company_id2 = ensure_id(insert_test_company(workflow_status="published"))
        company_id3 = ensure_id(insert_test_company(workflow_status="published"))

        # add primary industries to all three companies.  Company 3 gets a different industry.
        self.main_access.mds.call_add_link("company", company_id1, 'primary_industry_classification', 'industry', industry_id1, "primary_industry", "industry_classification", self.context)
        self.main_access.mds.call_add_link("company", company_id2, 'primary_industry_classification', 'industry', industry_id1, "primary_industry", "industry_classification", self.context)
        self.main_access.mds.call_add_link("company", company_id3, 'primary_industry_classification', 'industry', industry_id2, "primary_industry", "industry_classification", self.context)

        # make industries 1 and 2 compete with each other
        link_interval = [datetime.datetime(2012, 1, 1), datetime.datetime(2013, 2, 2)]
        link_data = {"home_to_away": {"weight": .7}, "away_to_home": {"weight": .7}}
        self.main_access.mds.call_add_link("industry", industry_id1, 'competitor', 'industry', industry_id2, "competitor", "industry_competition", self.context, link_interval = link_interval,
                                           link_data = link_data)

        # query the competitions of company
        competitive_companies = select_competitive_companies(company_id1)

        # sort both the expected and actual arrays by _id so that order doesn't
        # matter (plain sorted() on dicts raises TypeError in Python 3)
        expected_competitive_companies = sorted([
            { "_id": str(company_id1), "interval": None, "competition_strength": 1 },
            { "_id": str(company_id2), "interval": None, "competition_strength": 1 },
            { "_id": str(company_id3), "interval": [datetime.datetime(2012, 1, 1), datetime.datetime(2013, 2, 2)], "competition_strength": .7 }
        ], key=lambda company: company["_id"])
        competitive_companies = sorted(competitive_companies, key=lambda company: company["_id"])

        # make sure the competitions are correct
        self.test_case.assertEqual(competitive_companies, expected_competitive_companies)
Example #5
    def test_all_transactions(self):
        p2 = Portfolio.objects.create(name='trend', description='trending', owner=self.p1.owner)
        _dt = [
            dt.datetime(2016, 1, 1, 11, 20),
            dt.datetime(2016, 1, 10, 14, 10),
            dt.datetime(2016, 1, 6, 9, 40),
            dt.datetime(2016, 1, 3, 13, 5),
        ]
        s1 = [
            transaction_factory('buy', self.p1, self.s1, _dt[0], price=1, shares=100),
            transaction_factory('sell', self.p1, self.s1, _dt[1], price=1, shares=50),
            transaction_factory('split', self.p1, self.s1, _dt[2], ratio=1.5),
            transaction_factory('dividend', self.p1, self.s1, _dt[3], dividend=10),
        ]
        s2 = [
            transaction_factory('buy', p2, self.s1, _dt[0], price=2, shares=200),
            transaction_factory('sell', p2, self.s1, _dt[1], price=2, shares=100),
            transaction_factory('split', p2, self.s1, _dt[2], ratio=2),
            transaction_factory('dividend', p2, self.s1, _dt[3], dividend=50),
        ]

        all_txn = self.p1.transactions.all()

        # test only p1 transactions are included
        for s in s1:
            self.assertIn(s, all_txn)
        for s in s2:
            self.assertNotIn(s, all_txn)

        # test the transaction array is properly sorted according to datetime
        self.assertEqual(all_txn[0].datetime, _dt[0])
        self.assertEqual(all_txn[1].datetime, _dt[3])
        self.assertEqual(all_txn[2].datetime, _dt[2])
        self.assertEqual(all_txn[3].datetime, _dt[1])
Example #6
    def test_comprehensive_file_finder_two_files_one_file_not_comprehensive_old_encoder(self):

        company_id = insert_test_company(name='woot')
        start = datetime.datetime(2013, 5, 1)
        end = datetime.datetime(2013, 5, 3)

        data = {
            'is_comprehensive': False,
            'company_id': str(company_id),
            'type': 'retail_input_file',
            'as_of_date': datetime.datetime(2013, 5, 2)
        }

        non_comp_mds_file_id = ensure_id(insert_test_file(name='woot', data=data))

        data = {
            'is_comprehensive': True,
            'company_id': str(company_id),
            'type': 'retail_input_file',
            'as_of_date': datetime.datetime(2013, 5, 2, 12, 30)
        }

        comp_mds_file_id = ensure_id(insert_test_file(name='woot', data=data))

        sorted_file_list = find_comprehensive_retail_input_files_for_company_on_interval(company_id, [start, end])

        self.test_case.assertEqual(sorted_file_list[0]['_id'], comp_mds_file_id)
        self.test_case.assertEqual(sorted_file_list[0]['data']['is_comprehensive'], True)
        self.test_case.assertEqual(sorted_file_list[0]['data']['company_id'], str(company_id))
        self.test_case.assertEqual(sorted_file_list[0]['data']['type'], 'retail_input_file')
        self.test_case.assertEqual(sorted_file_list[0]['data']['as_of_date'], '2013-05-02T12:30:00')
Example #7
    def test_named_relative(self):
        date = datetime(2012, 3, 1)

        units = ["year", "month", "day"]
        path = self.cal.named_relative_path("tomorrow", units, date)
        self.assertEqual([2012, 3, 2], path)

        path = self.cal.named_relative_path("yesterday", units, date)
        self.assertEqual([2012, 2, 29], path)

        path = self.cal.named_relative_path("weekago", units, date)
        self.assertEqual([2012, 2, 23], path)

        path = self.cal.named_relative_path("3weeksago", units, date)
        self.assertEqual([2012, 2, 9], path)

        date = datetime(2012, 3, 12)

        path = self.cal.named_relative_path("monthago", units, date)
        self.assertEqual([2012, 2, 12], path)

        path = self.cal.named_relative_path("12monthsago", units, date)
        self.assertEqual([2011, 3, 12], path)

        path = self.cal.named_relative_path("monthforward", units, date)
        self.assertEqual([2012, 4, 12], path)

        path = self.cal.named_relative_path("12monthsforward", units, date)
        self.assertEqual([2013, 3, 12], path)
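The named offsets exercised above can be approximated with dateutil; the Calendar class under test is not shown in this excerpt, so the mapping below is an assumption rather than its actual code:

from datetime import datetime
from dateutil.relativedelta import relativedelta

NAMED_OFFSETS = {
    'tomorrow': relativedelta(days=+1),
    'yesterday': relativedelta(days=-1),
    'weekago': relativedelta(weeks=-1),
    '3weeksago': relativedelta(weeks=-3),
    'monthago': relativedelta(months=-1),
    '12monthsago': relativedelta(months=-12),
    'monthforward': relativedelta(months=+1),
    '12monthsforward': relativedelta(months=+12),
}

def named_relative_path(name, units, date):
    # shift the anchor date, then project it onto the requested units
    shifted = date + NAMED_OFFSETS[name]
    return [getattr(shifted, unit) for unit in units]

# reproduces the leap-year assertion above
assert named_relative_path('yesterday', ['year', 'month', 'day'],
                           datetime(2012, 3, 1)) == [2012, 2, 29]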
Example #8
 def test_chainof(self):
     self._testValidation(V.ChainOf(V.AdaptTo(int),
                                    V.Condition(lambda x: x > 0),
                                    V.AdaptBy(datetime.utcfromtimestamp)),
                          adapted=[(1373475820, datetime(2013, 7, 10, 17, 3, 40)),
                                   ("1373475820", datetime(2013, 7, 10, 17, 3, 40))],
                          invalid=["nan", -1373475820])
    def test_get(self):
        note = NoteRepresentation()
        note.get(pk=1)
        self.assertEqual(note.content.value, u"This is my very first post using my shiny new API. Pretty sweet, huh?")
        self.assertEqual(note.created.value, datetime.datetime(2010, 3, 30, 20, 5))
        self.assertEqual(note.is_active.value, True)
        self.assertEqual(note.slug.value, u"first-post")
        self.assertEqual(note.title.value, u"First Post!")
        self.assertEqual(note.updated.value, datetime.datetime(2010, 3, 30, 20, 5))

        custom = CustomNoteRepresentation()
        custom.get(pk=1)
        self.assertEqual(custom.content.value, u"This is my very first post using my shiny new API. Pretty sweet, huh?")
        self.assertEqual(custom.created.value, datetime.datetime(2010, 3, 30, 20, 5))
        self.assertEqual(custom.is_active.value, True)
        self.assertEqual(custom.author.value, u"johndoe")
        self.assertEqual(custom.title.value, u"First Post!")
        self.assertEqual(custom.constant.value, 20)

        related = RelatedNoteRepresentation(api_name="v1", resource_name="notes")
        related.get(pk=1)
        self.assertEqual(
            related.content.value, u"This is my very first post using my shiny new API. Pretty sweet, huh?"
        )
        self.assertEqual(related.created.value, datetime.datetime(2010, 3, 30, 20, 5))
        self.assertEqual(related.is_active.value, True)
        self.assertEqual(related.author.value, "/api/v1/users/1/")
        self.assertEqual(related.title.value, u"First Post!")
        self.assertEqual(related.subjects.value, ["/api/v1/subjects/1/", "/api/v1/subjects/2/"])
Example #10
    def test_named_relative_truncated(self):
        date = datetime(2012, 3, 1, 10, 30)

        units = ["year", "month", "day", "hour"]

        path = self.cal.named_relative_path("lastweek", units, date)
        self.assertEqual([2012, 2, 20, 0], path)

        path = self.cal.named_relative_path("last3weeks", units, date)
        self.assertEqual([2012, 2, 6, 0], path)

        date = datetime(2012, 3, 12)

        path = self.cal.named_relative_path("lastmonth", units, date)
        self.assertEqual([2012, 2, 1, 0], path)

        path = self.cal.named_relative_path("last12months", units, date)
        self.assertEqual([2011, 3, 1, 0], path)

        path = self.cal.named_relative_path("nextmonth", units, date)
        self.assertEqual([2012, 4, 1, 0], path)

        path = self.cal.named_relative_path("next12months", units, date)
        self.assertEqual([2013, 3, 1, 0], path)

        path = self.cal.named_relative_path("lastquarter", units, date)
        self.assertEqual([2011, 10, 1, 0], path)

        path = self.cal.named_relative_path("lastyear", units, date)
        self.assertEqual([2011, 1, 1, 0], path)
Example #11
        def instance_create(context, inst):
            inst_type = flavors.get_flavor_by_flavor_id(3)
            image_uuid = "76fa36fc-c930-4bf3-8c8a-ea2a2420deb6"
            def_image_ref = "http://localhost/images/%s" % image_uuid
            self.instance_cache_num += 1
            instance = fake_instance.fake_db_instance(
                **{
                    "id": self.instance_cache_num,
                    "display_name": inst["display_name"] or "test",
                    "uuid": fakes.FAKE_UUID,
                    "instance_type": inst_type,
                    "access_ip_v4": "1.2.3.4",
                    "access_ip_v6": "fead::1234",
                    "image_ref": inst.get("image_ref", def_image_ref),
                    "user_id": "fake",
                    "project_id": "fake",
                    "reservation_id": inst["reservation_id"],
                    "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
                    "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
                    "progress": 0,
                    "fixed_ips": [],
                    "task_state": "",
                    "vm_state": "",
                    "root_device_name": inst.get("root_device_name", "vda"),
                }
            )

            return instance
Example #12
 def sanity_insert_or_merge_entity(self):
     ln = u'Lastname'
     fn = u'Firstname'
     resp = self.tc.insert_or_merge_entity(TABLE_NO_DELETE, 
                                           ln, 
                                           fn, 
                                           {'PartitionKey':'Lastname', 
                                            'RowKey':'Firstname', 
                                            'age': u'abc', #changed type 
                                            'sex':'male', #changed value
                                            'birthday': datetime(1991, 10, 4),
                                            'sign' : 'aquarius' #new
                                           })
     self.assertEquals(resp, None)
     
     resp = self.tc.get_entity(TABLE_NO_DELETE, 
                               ln, 
                               fn, 
                               '')
     self.assertEquals(resp.PartitionKey, ln)
     self.assertEquals(resp.RowKey, fn)
     self.assertEquals(resp.age, u'abc')
     self.assertEquals(resp.sex, u'male')
     self.assertEquals(resp.birthday, datetime(1991, 10, 4))
     self.assertEquals(resp.sign, u'aquarius')
Example #13
    def archives():
        """
        返回从第一篇文章开始到现在所经历的月份列表
        """
        # archives = cache.get("archives")
        archives = None
        if archives is None:
            begin_post = Article.query.order_by('created').first()

            now = datetime.datetime.now()

            begin_s = begin_post.created if begin_post else now
            end_s = now

            begin = begin_s
            end = end_s

            total = (end.year - begin.year) * 12 - begin.month + end.month
            archives = [begin]

            date = begin
            for i in range(total):
                if date.month < 12:
                    date = datetime.datetime(date.year, date.month + 1, 1)
                else:
                    date = datetime.datetime(date.year + 1, 1, 1)
                archives.append(date)
            archives.reverse()
            # cache.set("archives", archives)
        return archives
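A worked check of the month count above: for begin 2019-11 and end 2020-02, total = (2020 - 2019) * 12 - 11 + 2 = 3, so the loop appends three month starts after the first entry (Dec, Jan, Feb) for four entries overall. The same walk is shorter with dateutil; a sketch, not the blog's code (note it normalizes the first entry to the start of the month, where the code above keeps begin_post.created as-is):

from datetime import datetime
from dateutil.relativedelta import relativedelta

def month_starts(begin, end):
    # one entry per month from begin's month through end's month, newest first
    total = (end.year - begin.year) * 12 - begin.month + end.month
    first = datetime(begin.year, begin.month, 1)
    return [first + relativedelta(months=i) for i in range(total + 1)][::-1]

assert month_starts(datetime(2019, 11, 5),
                    datetime(2020, 2, 1))[-1] == datetime(2019, 11, 1)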
Example #14
 def test_integrity_checks_on_update(self):
     """
     Try to update a model instance introducing a FK constraint violation.
     If it fails it should fail with IntegrityError.
     """
     # Create an Article.
     models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
     # Retrieve it from the DB
     a1 = models.Article.objects.get(headline="Test article")
     a1.reporter_id = 30
     try:
         a1.save()
     except IntegrityError:
         pass
     else:
         self.skipTest("This backend does not support integrity checks.")
     # Now that we know this backend supports integrity checks we make sure
     # constraints are also enforced for proxy models. Refs #17519
     # Create another article
     r_proxy = models.ReporterProxy.objects.get(pk=self.r.pk)
     models.Article.objects.create(headline='Another article',
                                   pub_date=datetime.datetime(1988, 5, 15),
                                   reporter=self.r, reporter_proxy=r_proxy)
     # Retrieve the second article from the DB
     a2 = models.Article.objects.get(headline='Another article')
     a2.reporter_proxy_id = 30
     with self.assertRaises(IntegrityError):
         a2.save()
Example #15
    def test_upload_timetable(self):
        """
        create classes between given dates
        """
        start_date = datetime(2016, 3, 22, tzinfo=timezone.utc) # tues
        end_date = datetime(2016, 3, 23, tzinfo=timezone.utc) # wed
        self.assertEquals(Event.objects.all().count(), 0)

        # create some timetabled sessions for Mondays, Tuesdays and Wednesdays
        mommy.make_recipe('booking.mon_session', _quantity=3)
        mommy.make_recipe('booking.tue_session', _quantity=3)
        mommy.make_recipe('booking.wed_session', _quantity=3)

        session_ids = [session.id for session in Session.objects.all()]

        upload_timetable(start_date, end_date, session_ids)
        # check that there are now classes on the dates specified
        tue_classes = Event.objects.filter(
            date__gte=self._start_of_day(start_date),
            date__lte=self._end_of_day(start_date)
            )
        wed_classes = Event.objects.filter(
            date__gte=self._start_of_day(end_date),
            date__lte=self._end_of_day(end_date)
            )
        # total number of classes created is 6, as no Monday classes are created
        self.assertEquals(tue_classes.count(), 3)
        self.assertEquals(wed_classes.count(), 3)
        self.assertEquals(Event.objects.count(), 6)
Example #16
    def test_blog_post_created_in_words(self):
        datetime_patched = self.add_patch('pyramid_blogr.models.blog_record.datetime')
        datetime_patched.datetime.utcnow.return_value = datetime.datetime(2016, 2, 10)

        blog_record = factories.BlogRecordFactory.create(created=datetime.datetime(2015, 10, 10))

        self.assertEquals(blog_record.created_in_words, '4 months')
Example #17
 def in_session(self, session='day'):
     # session is either 'day' or 'night'
     ctime = datetime.now()
     if session == 'day':
         return datetime(ctime.year, ctime.month, ctime.day, 9, 0, 0) < ctime < datetime(ctime.year, ctime.month, ctime.day, 15, 0, 0)
     else:
         return ctime > datetime(ctime.year, ctime.month, ctime.day, 21, 0, 0) or ctime < datetime(ctime.year, ctime.month, ctime.day, 2, 30, 0)
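To exercise the window logic without freezing the clock, the same checks can take an explicit timestamp; a hedged standalone variant (the 09:00-15:00 and 21:00-02:30 windows are copied from the code above, the function name is hypothetical):

from datetime import datetime

def in_session_at(ctime, session='day'):
    day = (ctime.year, ctime.month, ctime.day)
    if session == 'day':
        return datetime(*day, 9, 0, 0) < ctime < datetime(*day, 15, 0, 0)
    # the night session wraps midnight
    return ctime > datetime(*day, 21, 0, 0) or ctime < datetime(*day, 2, 30, 0)

assert in_session_at(datetime(2024, 1, 2, 10, 0), 'day')      # mid-morning
assert not in_session_at(datetime(2024, 1, 2, 15, 0), 'day')  # boundary excluded
assert in_session_at(datetime(2024, 1, 2, 23, 0), 'night')    # wraps midnight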
Example #18
    def upload_timetable_specified_sessions_only(self):
        start_date = datetime(2016, 3, 22, tzinfo=timezone.utc) # tues
        end_date = datetime(2016, 3, 23, tzinfo=timezone.utc) # wed
        self.assertEquals(Event.objects.all().count(), 0)

        # create some timetabled sessions for Mondays, Tuesdays and Wednesdays
        tues_sessions = mommy.make_recipe('booking.tue_session', _quantity=3)
        mommy.make_recipe('booking.wed_session', _quantity=3)

        session_ids = [
            session.id for session in Session.objects.all() if
            session in tues_sessions
            ]
        # choose tues-wed as date range, but specify the tues sessions only
        upload_timetable(start_date, end_date, session_ids)
        # check that there are now classes on the dates specified
        tue_classes = Event.objects.filter(
            date__gte=self._start_of_day(start_date),
            date__lte=self._end_of_day(start_date)
            )
        wed_classes = Event.objects.filter(
            date__gte=self._start_of_day(end_date),
            date__lte=self._end_of_day(end_date)
            )
        # total number of classes created is 3, as no Wednesday classes are created
        self.assertEquals(tue_classes.count(), 3)
        self.assertEquals(wed_classes.count(), 0)
        self.assertEquals(Event.objects.count(), 3)
Example #19
    def testMetaDateInDatetimeFields(self):
        file_path = os.path.join(os.path.dirname(__file__), "data", "date_in_meta.xml")
        xml_data = open(file_path, "rb").read()
        xform_generic = FormProcessorInterface.post_xform(xml_data)

        self.assertEqual(datetime(2014, 7, 10), xform_generic.metadata.timeStart)
        self.assertEqual(datetime(2014, 7, 11), xform_generic.metadata.timeEnd)
Example #20
    def test_simple(self):
        b = self._build_value
        VALUES = (
            b('search', 'search', 'type="search"'),
            b('telephone', '123456789', 'type="tel"'),
            b('url', 'http://wtforms.simplecodes.com/', 'type="url"'),
            b('email', '*****@*****.**', 'type="email"'),
            b('datetime', '2013-09-05 00:23:42', 'type="datetime"', datetime(2013, 9, 5, 0, 23, 42)),
            b('date', '2013-09-05', 'type="date"', date(2013, 9, 5)),
            b('dt_local', '2013-09-05 00:23:42', 'type="datetime-local"', datetime(2013, 9, 5, 0, 23, 42)),
            b('integer', '42', '<input id="integer" name="integer" step="1" type="number" value="42">', 42),
            b('decimal', '43.5', '<input id="decimal" name="decimal" step="any" type="number" value="43.5">', Decimal('43.5')),
            b('int_range', '4', '<input id="int_range" name="int_range" step="1" type="range" value="4">', 4),
            b('decimal_range', '58', '<input id="decimal_range" name="decimal_range" step="any" type="range" value="58">', 58),
        )
        formdata = DummyPostData()
        kw = {}
        for item in VALUES:
            formdata[item['key']] = item['form_input']
            kw[item['key']] = item['data']

        form = self.F(formdata)
        for item in VALUES:
            field = form[item['key']]
            render_value = field()
            if render_value != item['expected_html']:
                tmpl = 'Field {key} render mismatch: {render_value!r} != {expected_html!r}'
                raise AssertionError(tmpl.format(render_value=render_value, **item))
            if field.data != item['data']:
                tmpl = 'Field {key} data mismatch: {field.data!r} != {data!r}'
                raise AssertionError(tmpl.format(field=field, **item))
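The b/_build_value helper is not shown in this excerpt. Below is a sketch consistent with how the loop consumes its dicts (key, form_input, expected_html, data) and with the short 'type=...' fragments apparently being expanded into full <input> tags; the expansion rule is inferred, not confirmed:

def _build_value(key, form_input, expected_html, data=None):
    # when no separate data value is given, the form input doubles as the data
    if data is None:
        data = form_input
    if expected_html.startswith('type='):
        # expand a bare attribute fragment into the full rendered input tag
        expected_html = '<input id="{0}" name="{0}" {1} value="{2}">'.format(
            key, expected_html, form_input)
    return {'key': key, 'form_input': form_input,
            'expected_html': expected_html, 'data': data}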
Example #21
    def test_services_list(self):
        req = FakeRequest()
        res_dict = self.controller.index(req)

        response = {'services': [
                    {'binary': 'nova-scheduler',
                    'host': 'host1',
                    'zone': 'internal',
                    'status': 'disabled',
                    'state': 'up',
                    'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
                    {'binary': 'nova-compute',
                     'host': 'host1',
                     'zone': 'nova',
                     'status': 'disabled',
                     'state': 'up',
                     'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
                    {'binary': 'nova-scheduler',
                     'host': 'host2',
                     'zone': 'internal',
                     'status': 'enabled',
                     'state': 'down',
                     'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
                    {'binary': 'nova-compute',
                     'host': 'host2',
                     'zone': 'nova',
                     'status': 'disabled',
                     'state': 'down',
                     'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
        self.assertEqual(res_dict, response)
Example #22
 def test_services_detail_with_delete_extension(self):
     self.ext_mgr.extensions['os-extended-services-delete'] = True
     req = FakeRequest()
     res_dict = self.controller.index(req)
     response = {'services': [
         {'binary': 'nova-scheduler',
          'host': 'host1',
          'id': 1,
          'zone': 'internal',
          'status': 'disabled',
          'state': 'up',
          'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
         {'binary': 'nova-compute',
          'host': 'host1',
          'id': 2,
          'zone': 'nova',
          'status': 'disabled',
          'state': 'up',
          'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
         {'binary': 'nova-scheduler',
          'host': 'host2',
          'id': 3,
          'zone': 'internal',
          'status': 'enabled',
          'state': 'down',
          'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
         {'binary': 'nova-compute',
          'host': 'host2',
          'id': 4,
          'zone': 'nova',
          'status': 'disabled',
          'state': 'down',
          'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
     self.assertEqual(res_dict, response)
Example #23
    def test_datetime_six_col(self):
        years = np.array([2007, 2008])
        months = np.array([1, 2])
        days = np.array([3, 4])
        hours = np.array([5, 6])
        minutes = np.array([7, 8])
        seconds = np.array([9, 0])
        expected = np.array([datetime(2007, 1, 3, 5, 7, 9),
                             datetime(2008, 2, 4, 6, 8, 0)])

        result = conv.parse_all_fields(years, months, days,
                                       hours, minutes, seconds)

        assert (result == expected).all()

        data = """\
year, month, day, hour, minute, second, a, b
2001, 01, 05, 10, 00, 0, 0.0, 10.
2001, 01, 5, 10, 0, 00, 1., 11.
"""
        datecols = {'ymdHMS': [0, 1, 2, 3, 4, 5]}
        df = self.read_csv(StringIO(data), sep=',', header=0,
                           parse_dates=datecols,
                           date_parser=conv.parse_all_fields)
        assert 'ymdHMS' in df
        assert df.ymdHMS.loc[0] == datetime(2001, 1, 5, 10, 0, 0)
Example #24
    def test_parse_date_time(self):
        dates = np.array(['2007/1/3', '2008/2/4'], dtype=object)
        times = np.array(['05:07:09', '06:08:00'], dtype=object)
        expected = np.array([datetime(2007, 1, 3, 5, 7, 9),
                             datetime(2008, 2, 4, 6, 8, 0)])

        result = conv.parse_date_time(dates, times)
        assert (result == expected).all()

        data = """\
date, time, a, b
2001-01-05, 10:00:00, 0.0, 10.
2001-01-05, 00:00:00, 1., 11.
"""
        datecols = {'date_time': [0, 1]}
        df = self.read_csv(StringIO(data), sep=',', header=0,
                           parse_dates=datecols,
                           date_parser=conv.parse_date_time)
        assert 'date_time' in df
        assert df.date_time.loc[0] == datetime(2001, 1, 5, 10, 0, 0)

        data = ("KORD,19990127, 19:00:00, 18:56:00, 0.8100\n"
                "KORD,19990127, 20:00:00, 19:56:00, 0.0100\n"
                "KORD,19990127, 21:00:00, 20:56:00, -0.5900\n"
                "KORD,19990127, 21:00:00, 21:18:00, -0.9900\n"
                "KORD,19990127, 22:00:00, 21:56:00, -0.5900\n"
                "KORD,19990127, 23:00:00, 22:56:00, -0.5900")

        date_spec = {'nominal': [1, 2], 'actual': [1, 3]}
        df = self.read_csv(StringIO(data), header=None, parse_dates=date_spec,
                           date_parser=conv.parse_date_time)
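For context: a parse_dates dict maps a new column name onto the source column positions to combine, and pandas' default parser can handle the simple case without a custom date_parser. A trimmed standalone sketch of the same idea:

import pandas as pd
from io import StringIO

data = ("KORD,19990127, 19:00:00\n"
        "KORD,19990127, 20:00:00\n")
# columns 1 and 2 are joined ('19990127 19:00:00') and parsed into one
# datetime column named 'nominal'
df = pd.read_csv(StringIO(data), header=None, skipinitialspace=True,
                 parse_dates={'nominal': [1, 2]})
assert df['nominal'].iloc[0] == pd.Timestamp(1999, 1, 27, 19, 0, 0)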
Example #25
def blog_month(request, blog_slug, year, month, page_no=1, blog_root=None):

    page_no = int(page_no)

    year = int(year)
    month = int(month)

    blog = get_channel_or_blog(blog_slug)
    blog_root = blog_root or blog.get_absolute_url()

    try:
        start_date = datetime(year, month, 1)
        year_end = year
        next_month = month + 1
        if next_month == 13:
            next_month = 1
            year_end += 1
        end_date = datetime(year_end, next_month, 1)
    except ValueError:
        raise Http404

    title = blog.title

    posts = blog.posts().filter(display_time__gte=start_date, display_time__lt=end_date).select_related()

    if not posts.count():
        raise Http404

    archives = tools.collate_archives(blog, blog_root)

    def get_page_url(page_no, num_pages):
        if page_no < 1 or page_no > num_pages:
            return ""
        if page_no == 1:
            return "%s%i/%i/" % (blog_root, year, month)
            # return reverse("blog_month", kwargs = dict(blog_slug=blog_slug, year=year, month=month, blog_root=blog_root))
        else:
            return "%s%i/%i/page/%i/" % (blog_root, year, month, page_no)

    td = get_blog_list_data(request, posts, get_page_url, page_no)

    sections = blog.description_data.get("sections", None)

    td.update(
        dict(
            blog_root=blog_root,
            blog=blog,
            sections=sections,
            title=title,
            page_title=title,
            tagline=blog.tagline,
            archives=archives,
            archive_month=month,
            archive_year=year,
        )
    )

    sections = extendedmarkup.process(sections, td)

    return render_to_response(blog.get_template_names("month.html"), td, context_instance=RequestContext(request))
Example #26
    def setUp(self):
        self.standardtime = self.TestTime(datetime(1950, 1, 1), 366, 24,
                                          'hours since 1900-01-01', 'standard')

        self.file = tempfile.NamedTemporaryFile(suffix='.nc', delete=False).name
        f = Dataset(self.file, 'w')
        f.createDimension('time', None)
        time = f.createVariable('time', float, ('time',))
        time.units = 'hours since 1900-01-01'
        time[:] = self.standardtime[:]
        f.createDimension('time2', 1)
        time2 = f.createVariable('time2', 'f8', ('time2',))
        time2.units = 'days since 1901-01-01'
        self.first_timestamp = datetime(2000, 1, 1)
        time2[0] = date2num(self.first_timestamp, time2.units)
        ntimes = 21
        f.createDimension("record", ntimes)
        time3 = f.createVariable("time3", numpy.int32, ("record", ))
        time3.units = "seconds since 1970-01-01 00:00:00"
        date = datetime(2037, 1, 1, 0)
        dates = [date]
        for ndate in range(ntimes - 1):
            date += (ndate + 1) * timedelta(hours=1)
            dates.append(date)
        time3[:] = date2num(dates, time3.units)
        f.close()
Example #27
    def test_parse_dates_noconvert_thousands(self):
        # see gh-14066
        data = 'a\n04.15.2016'

        expected = DataFrame([datetime(2016, 4, 15)], columns=['a'])
        result = self.read_csv(StringIO(data), parse_dates=['a'],
                               thousands='.')
        tm.assert_frame_equal(result, expected)

        exp_index = DatetimeIndex(['2016-04-15'], name='a')
        expected = DataFrame(index=exp_index)
        result = self.read_csv(StringIO(data), index_col=0,
                               parse_dates=True, thousands='.')
        tm.assert_frame_equal(result, expected)

        data = 'a,b\n04.15.2016,09.16.2013'

        expected = DataFrame([[datetime(2016, 4, 15),
                               datetime(2013, 9, 16)]],
                             columns=['a', 'b'])
        result = self.read_csv(StringIO(data), parse_dates=['a', 'b'],
                               thousands='.')
        tm.assert_frame_equal(result, expected)

        expected = DataFrame([[datetime(2016, 4, 15),
                               datetime(2013, 9, 16)]],
                             columns=['a', 'b'])
        expected = expected.set_index(['a', 'b'])
        result = self.read_csv(StringIO(data), index_col=[0, 1],
                               parse_dates=True, thousands='.')
        tm.assert_frame_equal(result, expected)
Example #28
    def test_timezones(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)
        summertime = datetime(2005, 10, 30, 1, 00)
        wintertime = datetime(2005, 10, 30, 4, 00)
        timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456)

        # 3h30m to the west of UTC
        tz = get_fixed_timezone(-210)
        aware_dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=tz)

        if TZ_SUPPORT:
            self.assertEqual(dateformat.format(my_birthday, 'O'), '+0100')
            self.assertEqual(dateformat.format(my_birthday, 'r'), 'Sun, 8 Jul 1979 22:00:00 +0100')
            self.assertEqual(dateformat.format(my_birthday, 'T'), 'CET')
            self.assertEqual(dateformat.format(my_birthday, 'e'), '')
            self.assertEqual(dateformat.format(aware_dt, 'e'), '-0330')
            self.assertEqual(dateformat.format(my_birthday, 'U'), '300315600')
            self.assertEqual(dateformat.format(timestamp, 'u'), '123456')
            self.assertEqual(dateformat.format(my_birthday, 'Z'), '3600')
            self.assertEqual(dateformat.format(summertime, 'I'), '1')
            self.assertEqual(dateformat.format(summertime, 'O'), '+0200')
            self.assertEqual(dateformat.format(wintertime, 'I'), '0')
            self.assertEqual(dateformat.format(wintertime, 'O'), '+0100')

        # Ticket #16924 -- We don't need timezone support to test this
        self.assertEqual(dateformat.format(aware_dt, 'O'), '-0330')
Example #29
    def test_getWindowTimeBlackedOut(self):

        bHrs = self.w.getWindowTimeBlackedOut()
        self.assertEquals(0.0, bHrs)

        # make blackout that overlaps this window
        # start = datetime(2009, 6, 1)
        # dur   = 7 # days
        blackout = create_blackout(project = self.w.session.project,
                                   start   = datetime(2009, 6, 3),
                                   end     = datetime(2009, 6, 4),
                                   repeat  = 'Once')

        # and another that doesn't
        blackout = create_blackout(project = self.w.session.project,
                                   start   = datetime(2009, 6, 8, 12),
                                   end     = datetime(2009, 6, 9, 12),
                                   repeat  = 'Once')

        bHrs = self.w.getWindowTimeBlackedOut()
        self.assertEquals(24.0, bHrs)

        # now extend this window and make it non-contiguous,
        # and see how the new blackouts *don't* get picked up.
        wr = WindowRange(window = self.w
                       , start_date = datetime(2009, 6, 10)
                       , duration = 2)
        wr.save()

        # the second window range misses the second blackout,
        # but it needs to be fresh from the DB
        w = Window.objects.get(id = self.w.id)

        bHrs = w.getWindowTimeBlackedOut()
        self.assertEquals(24.0, bHrs)
Example #30
def setupObsInstance():
    # Observation interval (approx vernal equinox)
    beginTime = datetime(2011, 3, 20, 0, 0, 0)
    endTime = datetime(2011, 3, 21, 0, 0, 0)
    stepTime = timedelta(minutes=60)
    td = endTime - beginTime
    Times = []
    nrTimSamps = int(td.total_seconds() / stepTime.seconds) + 1
    for ti in range(0, nrTimSamps):
        Times.append(beginTime + ti * stepTime)

    # Source direction
    #   CasA:
    # celSrcTheta_CasA = np.pi/2-1.026515
    # celSrcPhi_CasA = 6.123487
    #   Celestial origin:
    celSrcTheta = 0.0*math.pi/2
    celSrcPhi = 0.
    celSrcDir = celSrcPhi, (math.pi/2-celSrcTheta), 'J2000'

    # Station position and rotation
    #   Alt1 arbitrarily:
    me = measures()
    stnPos_meWGS = measures().position('wgs84','0deg','0deg','0m')
    stnPos_meITRF = me.measure(stnPos_meWGS,'ITRF')
    stnPos = stnPos_meITRF['m2']['value']*sph2crt_me(stnPos_meITRF)[:,np.newaxis]
    stnRot = antpat.reps.sphgridfun.pntsonsphere.rot3Dmat(0.,0.,1*math.pi/2)
    #   Alt2 set via official LOFAR geodetic data:
    # stnPos, stnRot, stnRelPos = getArrayBandParams('SE607', 'LBA')

    return Times, celSrcDir, stnPos, stnRot
Example #31
def main():
    # build week number dictionary
    current_date = datetime.datetime(2016, 7, 25)
    start_week0 = datetime.datetime(2016, 7, 18)
    end_week0 = datetime.datetime(2016, 7, 24)
    start_midsem = datetime.datetime(2016, 9, 26)
    end_midsem = datetime.datetime(2016, 10, 2)
    day_delta = timedelta(days=1)
    week_delta = timedelta(days=7)
    week_counter = 1
    day_counter = 1
    week_day = {}

    # assigns a week number to each date
    while week_counter < 13:
        while day_counter < 8:
            week_day[current_date] = week_counter
            day_counter += 1
            current_date = current_date + day_delta
        # don't advance the week counter during the mid-semester break
        if not start_midsem <= current_date <= end_midsem:
            week_counter += 1
        day_counter = 1

    # set defaults until user changes them
    download_mode = 'default'
    default_path = 'Downloads/lectureDL'
    home_dir = expanduser('~')
    download_dir = os.path.join(home_dir, default_path)
    skipped_lectures = []
    downloaded_lectures = []
    dates_list = []
    all_switch = False
    print('Welcome to lectureDL.py')

    # simple command-line args
    if len(argv) > 1:
        if '-v' in argv:
            download_mode = 'video'
            print('-v argument passed. Videos will be downloaded.')
        elif '-a' in argv:
            download_mode = 'audio'
            print('-a argument passed. Audio will be downloaded.')
        if '-all' in argv:
            all_switch = True
            print('-all argument passed. All lectures will be downloaded.')
        if '-clearconfig' in argv:
            if os.path.isfile('lectureDL.ini'):
                # delete 'user' section
                os.remove('lectureDL.ini')
                print('Deleted configuration file')
            else:
                print('No configuration file found')

    # setup config file. if it exists, load it
    config = configparser.ConfigParser()
    if os.path.isfile('lectureDL.ini'):
        config.read('lectureDL.ini')

    if len(argv) > 2:
        if '-path' in argv:
            path_input = argv[(argv.index('-path') + 1)]
            chosen_path = os.path.join(home_dir, path_input)
            if not os.path.exists(chosen_path):
                os.makedirs(chosen_path)
            # find way to add properly
            if 'user' not in config:
                config.add_section('user')
            config['user']['download_path'] = chosen_path
            with open('lectureDL.ini', 'w') as configfile:
                config.write(configfile)
            print('Saved path', chosen_path, 'to config file')
    if 'user' in config:
        if 'download_path' in config['user']:
            download_dir = config['user']['download_path']

    # setup download folders
    video_folder = os.path.join(download_dir, 'Lecture videos')
    audio_folder = os.path.join(download_dir, 'Lecture audio')

    # if they don't exist, make them
    if not os.path.exists(video_folder):
        os.makedirs(video_folder)
    if not os.path.exists(audio_folder):
        os.makedirs(audio_folder)

    # set download mode
    while download_mode == 'default':
        print('Enter \'v\' to download videos or \'a\' to download audio')
        user_choice = input('> ')
        if user_choice == 'a':
            download_mode = 'audio'
        elif user_choice == 'v':
            download_mode = 'video'
        elif user_choice == 'x':
            exit()
        else:
            print('That wasn\'t an option.')

    # set date range for lecture downloads
    if not all_switch:
        print('Would you like to download lectures from' +
              ' specific weeks or since a particular date?')
        while dates_list == []:
            valid_input = True
            print('Enter a range of weeks (eg. 1-5 or 1,3,4)' +
                  ' or a date (DD/MM/2016) to download videos' +
                  ' that have since been released.')
            user_dates_input = input('> ')

            # if left blank, download all videos
            if user_dates_input == '':
                dates_list = [
                    start_week0 + datetime.timedelta(n) for n in range(
                        int((datetime.datetime.today() + day_delta -
                             start_week0).days))
                ]

            # if user enters comma-separated weeks, or just one,
            # make a list for each and then concatenate
            elif ',' in user_dates_input or user_dates_input.isdigit():
                chosen_weeks = user_dates_input.replace(' ', '').split(',')

                # validate to see if weeks are ints between 1 and 12 inclusive
                for item in chosen_weeks:
                    # check isdigit() first so int() never sees a non-digit
                    if not item.isdigit() or int(item) < 1 or int(item) > 12:
                        print('Invalid input. Weeks must be integers' +
                              ' between 1 and 12 inclusive.')
                        valid_input = False

                # build date lists for each week and then concatenate
                print('Lectures will be downloaded for: ')
                for item in chosen_weeks:
                    start_date = start_week0 + (int(item) * week_delta)
                    end_date = end_week0 + (int(item) * week_delta)
                    dates_in_week = [
                        start_date + datetime.timedelta(n)
                        for n in range(int((end_date - start_date).days))
                    ]
                    if valid_input:
                        dates_list += dates_in_week
                        print('Week ', item)

            # entering a week range or a start date
            # both generate a range between start and end
            elif '-' in user_dates_input or '/' in user_dates_input:

                # validate to see if weeks are ints between 1 and 12 inclusive
                if '-' in user_dates_input:
                    chosen_weeks = user_dates_input.split('-')
                    for item in chosen_weeks:
                        # check isdigit() first so int() never sees a non-digit
                        if not item.isdigit() or int(item) < 1 \
                                              or int(item) > 12:
                            print('Invalid input. Weeks must be integers' +
                                  ' between 1 and 12 inclusive.')
                            valid_input = False

                    # validate to check that the end week comes after the first
                    # (compare as ints; string comparison would put '10' < '9')
                    if valid_input and int(chosen_weeks[1]) > int(chosen_weeks[0]):
                        start_week = chosen_weeks[0]
                        end_week = chosen_weeks[1]
                        start_date = (start_week0 +
                                      (int(start_week) * week_delta))
                        end_date = end_week0 + (int(end_week) * week_delta)
                        if valid_input:
                            dates_list = [
                                start_date + datetime.timedelta(n)
                                for n in range(
                                    int((end_date - start_date).days))
                            ]
                    else:
                        print('Invalid input. The second week' +
                              ' must come after the first week.')

                elif '/' in user_dates_input:
                    # if in DD/MM/YYYY format, create a range
                    # between start_date and today
                    try:
                        start_date = datetime.datetime.strptime(
                            user_dates_input, '%d/%m/%Y')
                        end_date = datetime.datetime.today() + day_delta
                        dates_list = [
                            start_date + datetime.timedelta(n)
                            for n in range(int((end_date - start_date).days))
                        ]
                    except ValueError:
                        print('Invalid input. Enter string' +
                              ' in the format DD/MM/YYYY.')

                # if list has been appended, print range
                if not dates_list == []:
                    print(
                        'Lectures will be downloaded for the dates between ' +
                        datetime.datetime.strftime(dates_list[0], '%d %B') +
                        ' and ' +
                        datetime.datetime.strftime(dates_list[-1], '%d %B'))

            # catch-all for anything else
            else:
                print('Invalid input')

    # if all_switch is true
    else:
        dates_list = [
            start_week0 + datetime.timedelta(n)
            for n in range(int((datetime.datetime.today() - start_week0).days))
        ]

    # startup chrome instance
    print('Starting up Chrome instance')
    operating_system = platform.system()
    driver_dict = {
        'Linux': 'ChromeDriver/chromedriver-linux',
        'Windows': 'ChromeDriver/chromedriver-win.exe',
        'Darwin': 'ChromeDriver/chromedriver-mac',
    }
    driver = webdriver.Chrome(driver_dict[operating_system])
    print('Starting login process')
    driver.get('http://app.lms.unimelb.edu.au')

    ask_to_save = False

    # check config file for user settings, else ask for input
    config_yes = 'user' in config \
                 and 'username' in config['user'] \
                 and 'password' in config['user']

    if config_yes:
        input_user = config['user']['username']
        input_password = config['user']['password']
    else:
        input_user = input('Please enter your username: ')
        # the password prompt and login-form submission were masked out
        # ('******') in the source listing; getpass is assumed here so the
        # password is not echoed
        input_password = getpass.getpass('Please enter your password: ')
        ask_to_save = True

    if ask_to_save:
        print('Would you like to save your username' +
              ' and password for next time? (y/n)')
        save_choice = input('> ')
        if save_choice == 'y':
            if 'user' not in config:
                config.add_section('user')
            config['user']['username'] = input_user
            config['user']['password'] = input_password
            print('Saving config file.')
            with open('lectureDL.ini', 'w') as configfile:
                config.write(configfile)
        else:
            print('Credentials not saved.')

    WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, 'ul.courseListing')))
    # list items in list class 'courseListing'
    course_list = driver.find_element_by_css_selector('ul.courseListing')
    # only get links with target='_top' to single out subject headings
    course_links = course_list.find_elements_by_css_selector('a[target=_top]')
    # list to be appended with [subj_code, subj_name, subj_link]
    subject_list = []
    subj_num = 1
    # print status
    print('Building list of subjects')

    # build namedtuple for subject

    Subject = namedtuple("Subject", ["code", "name", "link", "num"])

    # get subject info from list of 'a' elements
    for link in course_links:
        # get title eg 'LING30001_2016_SM2: Exploring Linguistic Diversity'
        full_string = link.text
        # call functions to get code and name
        subj_code = get_subj_code(full_string)
        subj_name = get_subj_name(full_string)
        # get subject link
        subj_link = link.get_attribute('href')
        subj_tuple = Subject(subj_code, subj_name, subj_link, subj_num)
        subject_list.append(subj_tuple)
        # add one to subj_num counter
        subj_num += 1

    # create lists for subjects to download and skip
    user_subjects = []
    skipped_subjects = []

    if not all_switch:
        # print subjects to choose from
        print('Subject list:')
        for subject in subject_list:
            # print subject code: subject title
            print("{num}. {code}: {name}".format(num=str(subject.num),
                                                 code=subject.code,
                                                 name=subject.name))

        # choose from subjects to download
        while user_subjects == []:
            print('Please enter subjects you would like to download' +
                  ' (eg. 1,2,3) or leave blank to download all')
            user_choice = input('> ')
            if not user_choice == '':
                chosen_subj_nums = user_choice.split(',')
                for subj_num in chosen_subj_nums:
                    # validate to check if numbers are between 1
                    # and however big the list is
                    # check isdigit() first so int() never sees a non-digit
                    invalid_input = not subj_num.isdigit() \
                                    or int(subj_num) < 1 \
                                    or int(subj_num) > len(subject_list)
                    if invalid_input:
                        print(
                            'Invalid input. Subject numbers must be between' +
                            ' 1 and {0} inclusive.'.format(
                                str(len(subject_list))))
                    for subj in subject_list:
                        if not int(subj_num) == subj.num:
                            skipped_subjects.append(subj)
                        else:
                            user_subjects.append(subj)
            # if left blank, download all subjects
            else:
                user_subjects = subject_list

    # if all_switch is true
    else:
        user_subjects = subject_list

    print('Subjects to be downloaded:')
    for subject in user_subjects:
        # print subject code: subject title
        print("{code}: {name}".format(code=subject.code, name=subject.name))

    # for each subject, navigate through site and download lectures
    for subject in user_subjects:
        # print status
        print('Now working on {code}: {name}'.format(code=subject.code,
                                                     name=subject.name))

        # go to subject page and find Lecture Recordings page
        driver.get(subject.link)
        recs_page = search_link_text(
            driver, ['Recordings', 'Capture', 'recordings', 'capture'])

        # if no recordings page found, skip to next subject
        if recs_page is None:
            print('No recordings page found,' +
                  ' can you find the name of the page?')
            # search for something else? ask user to input page
            search_input = input('> ')
            recs_page = search_link_text(driver, [search_input])

        recs_page.click()

        # sometimes sidebar links goes directly to echo page,
        # sometimes there's a page in between
        # if there's no iframe, it's on the page in between
        if len(driver.find_elements_by_tag_name('iframe')) == 0:
            links_list = driver.find_element_by_css_selector('ul.contentList')
            recs_page2 = search_link_text(
                links_list, ['Recordings', 'Capture', 'recordings', 'capture'])

            recs_page2.click()

        driver.implicitly_wait(10)

        # now on main page. navigate through iframes
        iframe = driver.find_elements_by_css_selector('iframe#contentFrame')[0]
        driver.switch_to_frame(iframe)
        driver.implicitly_wait(10)
        iframe2 = driver.find_elements_by_tag_name('iframe')[0]
        driver.switch_to_frame(iframe2)
        driver.implicitly_wait(10)
        iframe3 = driver.find_elements_by_tag_name('iframe')[0]
        driver.switch_to_frame(iframe3)

        # find ul element, list of recordings
        recs_ul = driver.find_element_by_css_selector('ul#echoes-list')
        recs_list = recs_ul.find_elements_by_css_selector('li.li-echoes')

        # setup for recordings
        multiple_lectures = False
        lectures_list = []
        to_download = []

        # print status
        print('Building list of lectures')
        # for each li element, build up filename info and add to download list
        for item in recs_list:
            # click on each recording to get different download links
            date_div = item.find_element_by_css_selector('div.echo-date')
            date_div.click()
            # scroll div so lectures are always in view
            driver.execute_script('return arguments[0].scrollIntoView();',
                                  date_div)
            driver.implicitly_wait(2)

            # convert string into datetime.datetime object
            # date is formatted like 'August 02 3:20 PM' --> 'August 02 2016'
            # so I need to get rid of time and add year
            date_string = ' '.join(date_div.text.split(' ')[:-2]) + ' 2016'
            try:
                date = datetime.datetime.strptime(date_string, '%B %d %Y')
            except ValueError:
                date = datetime.datetime.strptime(date_string, '%d %B %Y')

            # lookup week number and set default lecture number
            week_num = week_day[date]
            lec_num = 1

            # get link to initial download page for either audio or video
            if download_mode == 'audio':
                link = driver.find_element_by_partial_link_text(
                    'Audio File').get_attribute('href')
            else:
                link = driver.find_element_by_partial_link_text(
                    'Video File').get_attribute('href')

            # check if week_num is already in to_download
            for later_lecture in lectures_list:
                if later_lecture['week_num'] == week_num:
                    # set multiple_lectures to true so that
                    # filenames include lecture numbers
                    multiple_lectures = True
                    # add 1 to lec_num of earlier video
                    later_lecture['lec_num'] += 1

            # add info to download list
            lectures_list.append({
                'link': link,
                'subject_code': subject.code,
                'week_num': week_num,
                'lec_num': lec_num,
                'date': date,
            })
            time.sleep(1)

        # assign filenames
        # made it a separate loop because in the loop above
        # it's constantly updating earlier values etc
        for lecture in lectures_list:
            filename = lecture['subject_code'] + ' Week ' \
                     + str(lecture['week_num']) + ' Lecture'
            if multiple_lectures:
                filename = filename + ' ' + str(lecture['lec_num'])
            if download_mode == 'audio':
                filename_with_ext = filename + '.mp3'
                file_path = os.path.join(audio_folder, filename_with_ext)
            else:
                filename_with_ext = filename + '.m4v'
                file_path = os.path.join(video_folder, filename_with_ext)
            lecture['filename'] = filename
            lecture['file_path'] = file_path

        # only add lectures to be downloaded if they are inside date range.
        for lecture in lectures_list:

            download_yes = lecture['date'] in dates_list \
                and not os.path.isfile(lecture['file_path'])

            if download_yes:
                to_download.append(lecture)
            else:
                # if both outside date range and already exists
                if not lecture['date'] in dates_list \
                          and os.path.isfile(lecture['file_path']):
                    lecture['skip_reason'] = ('Outside date range and '
                                              'file already exists')

                # if just outside date range
                elif not lecture['date'] in dates_list:
                    lecture['skip_reason'] = 'Outside date range'
                # if just already exists
                elif os.path.isfile(lecture['file_path']):
                    lecture['skip_reason'] = 'File already exists'
                skipped_lectures.append(lecture)
                print('Skipping {name}: {reason}'.format(
                    name=lecture['filename'], reason=lecture['skip_reason']))

        # print list of lectures to be downloaded
        if len(to_download) > 0:
            print('Lectures to be downloaded:')
            for lecture in to_download:
                print(lecture['filename'])
        else:
            print('No lectures to be downloaded.')

        # download each lecture in the list
        for lecture in to_download:
            print('Now working on', lecture['filename'])
            # go to initial download page and find actual download link
            driver.get(lecture['link'])
            driver.implicitly_wait(10)
            dl_link = driver.find_element_by_partial_link_text(
                'Download media file.').get_attribute('href')
            # send javascript to stop download redirect
            driver.execute_script('stopCounting=true')

            # download file using urllib.request.urlretrieve
            print('Downloading to', lecture['file_path'])
            urllib.request.urlretrieve(dl_link, lecture['file_path'])
            print('Completed! Going to next file!')
            downloaded_lectures.append(lecture)
            driver.implicitly_wait(10)

        # when finished with subject
        print('Finished downloading files for', subject.code)

    # when finished with all subjects
    print('All done!')

    # list downloaded lectures
    if len(downloaded_lectures) > 0:
        if len(downloaded_lectures) == 1:
            print('Downloaded 1 lecture:')
        else:
            print('Downloaded ' + str(len(downloaded_lectures)) + ' lectures:')
        for lecture in downloaded_lectures:
            print(lecture['filename'])

    # list skipped lectures
    if len(skipped_lectures) > 0:
        if len(skipped_lectures) == 1:
            print('Skipped 1 lecture:')
        else:
            print('Skipped ' + str(len(skipped_lectures)) + ' lectures:')
        for lecture in skipped_lectures:
            print(lecture['filename'] + ': ' + lecture['skip_reason'])

    print('Saving config file.')
    with open('lectureDL.ini', 'w') as configfile:
        config.write(configfile)
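
# A minimal standalone sketch of the two-format date parsing used above,
# assuming the scraped label is either 'August 02' or '02 August' and the
# year is known out of band ('2016' here, as hard-coded in the snippet).
import datetime

def parse_lecture_date(label, year=2016):
    date_string = '{} {}'.format(label, year)
    for fmt in ('%B %d %Y', '%d %B %Y'):
        try:
            return datetime.datetime.strptime(date_string, fmt)
        except ValueError:
            continue
    raise ValueError('unrecognised date label: ' + label)

assert parse_lecture_date('August 02') == datetime.datetime(2016, 8, 2)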
示例#32
0
def test_timestamp_format(self):
    self.assert_query(
        "changed:>=01/20/2020",
        Q(change__timestamp__gte=datetime(2020, 1, 20, 0, 0, tzinfo=utc)),
    )
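
# Quick check of the MM/DD/YYYY reading asserted above: the month field
# comes first, and datetime() itself rejects the swapped argument order,
# since there is no month 20.
from datetime import datetime

assert datetime.strptime('01/20/2020', '%m/%d/%Y') == datetime(2020, 1, 20)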
示例#33
0
def earning_control(request):
    profile = Profile.objects.get(pk=110)
    earning1 = None
    earning = 0
    earnings = earningPayments.objects.filter(profile=profile).order_by('-id')
    if earnings.count() > 0:
        earning1 = earnings[0]
        split = earning1.payedDate.split('/')
        x = datetime.datetime(int(split[1]), int(split[0]), 1)
        days_in_month = calendar.monthrange(x.year, x.month)[1]
        start_date = x + timedelta(days=days_in_month)
        datetime_current = datetime.datetime.today()
        year = datetime_current.year
        month = datetime_current.month

        profileArray = []
        levelDict = dict()
        level = 1
        total_earning = 0

        profileArray.append(profile.id)

        general_methods.returnLevelTreeByDate(profileArray, levelDict, level,
                                              start_date.month,
                                              start_date.year)

        # for i in range(7):
        #   total_earning = float(total_earning) + float(general_methods.calculate_earning(levelDict, i + 1))

        order_total_member = general_methods.monthlyMemberOrderTotalByDate(
            profile, start_date.month, start_date.year)

        for i in range(7):
            for orderPrice in levelDict[str(i + 1)]:
                earning = earning + float(orderPrice.total_order)

        print("sjdghkjsdhkjs")

    else:
        earning = 0

    return render(request, 'kazanclar/deneme.html')
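
# A minimal sketch of the "first day of the month after payedDate" logic
# above, assuming payedDate strings look like 'MM/YYYY' (e.g. '02/2020'),
# which is what the split()/index use in the snippet implies.
import calendar
import datetime
from datetime import timedelta

def month_after(payed_date):
    month, year = (int(part) for part in payed_date.split('/'))
    first = datetime.datetime(year, month, 1)
    days_in_month = calendar.monthrange(first.year, first.month)[1]
    return first + timedelta(days=days_in_month)

assert month_after('02/2020') == datetime.datetime(2020, 3, 1)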
示例#34
0
        (str(cont_ok_total) if cont_ok_total > 0 else '-') + '*' +
        (str(cont_problema_total) if cont_problema_total > 0 else '-'))

print len(elementos)
elementos = list(set(elementos))
# numero de elementos
numero_monitorados = len(elementos)
print numero_monitorados

#
# Grafico Pie: Disponibilidade
#
hoje = datetime.datetime.now()
ano = hoje.year
qtd_dias_mes = monthrange(ano, hoje.month)[1]
date_start = datetime.datetime(hoje.year, hoje.month, 1, 0, 0, 0)
date_end = datetime.datetime(hoje.year, hoje.month, qtd_dias_mes, 23, 59, 59)
print str(date_start)
print str(date_end)
dtt = date_start.timetuple()
date_start = time.mktime(dtt)

dtt = date_end.timetuple()
date_end = time.mktime(dtt)

sql = '''SELECT sum(`UPTimeScheduled`) as UP_TOTAL,       
       sum(`DOWNTimeScheduled`) as DOWN_TOTAL,       
       sum(`UNREACHABLETimeScheduled`) as UNREACHABLE_TOTAL,      
       sum(`UNDETERMINEDTimeScheduled`) as UNDETERMINED_TOTAL,
       sum(UPTimeScheduled)+sum(DOWNTimeScheduled)+sum(UNREACHABLETimeScheduled)+sum(UNDETERMINEDTimeScheduled) as TOTALTIME,
       count(host_name) as num_eventos,
示例#35
0
def showMovies(sSearch=''):
    oGui = cGui()
    if sSearch:
        sUrl = sSearch
    else:
        oInputParameterHandler = cInputParameterHandler()
        sUrl = oInputParameterHandler.getValue('siteUrl')

    oRequestHandler = cRequestHandler(sUrl)
    sHtmlContent = oRequestHandler.request()

    # THUMB ref title desc1 desc2
    sPattern = '<img class=".+?src="([^"]+)".+?href="([^"]+).+?<span>([^<]+)<.+?data-time="(?:([^<]+)|)".+?>([^<]+)'

    oParser = cParser()
    aResult = oParser.parse(sHtmlContent, sPattern)

    if not aResult[0]:
        oGui.addText(SITE_IDENTIFIER)

    if aResult[0]:
        total = len(aResult[1])
        oOutputParameterHandler = cOutputParameterHandler()
        for aEntry in aResult[1]:
            sThumb = aEntry[0]
            sUrl2 = aEntry[1]
            sTitle = aEntry[2].replace(' streaming gratuit',
                                       '').replace(' foot', '')
            sDate = aEntry[3]
            sdesc1 = aEntry[4]

            bChaine = False
            if sUrl != CHAINE_TV[0] and sUrl != SPORT_TV[0]:
                sDisplayTitle = sTitle
                if sdesc1:
                    sDisplayTitle += ' - ' + sdesc1
                if sDate:
                    try:
                        d = datetime(*(time.strptime(
                            sDate, '%Y-%m-%dT%H:%M:%S+02:00')[0:6]))
                        sDate = d.strftime("%d/%m/%y %H:%M")
                    except Exception:
                        pass
                    sDisplayTitle += ' - ' + sDate
            else:
                bChaine = True
                sTitle = sTitle.upper()
                sDisplayTitle = sTitle

            if 'http' not in sUrl2:
                sUrl2 = URL_MAIN[:-1] + sUrl2

            if 'http' not in sThumb:
                sThumb = URL_MAIN[:-1] + sThumb

            oOutputParameterHandler.addParameter('siteUrl', sUrl2)
            oOutputParameterHandler.addParameter('sMovieTitle', sTitle)
            oOutputParameterHandler.addParameter('sDesc', sDisplayTitle)
            oOutputParameterHandler.addParameter('sThumb', sThumb)

            if bChaine:
                oGui.addMisc(SITE_IDENTIFIER, 'showLive', sDisplayTitle,
                             'tv.png', sThumb, sDisplayTitle,
                             oOutputParameterHandler)
            else:
                oGui.addDir(SITE_IDENTIFIER, 'showLive', sDisplayTitle, sThumb,
                            oOutputParameterHandler)

    if not sSearch:
        oGui.setEndOfDirectory()
示例#36
0
from datetime import datetime, timedelta

def getTimer():
    datenow = datetime.utcnow().replace(second=0, microsecond=0)
    datenow = datenow + timedelta(days=1)
    epoch = datetime(1970, 1, 1)
    return (datenow - epoch).total_seconds() // 1
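
# An alternative sketch (not the snippet's approach): timezone-aware
# datetimes expose the same epoch value via .timestamp(), avoiding the
# manual subtraction against a datetime(1970, 1, 1) sentinel.
from datetime import datetime, timedelta, timezone

def getTimer_tz():
    datenow = datetime.now(timezone.utc).replace(second=0, microsecond=0)
    return (datenow + timedelta(days=1)).timestamp() // 1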
示例#37
0
from datetime import datetime

def now_2_epochms():
    #https://stackoverflow.com/questions/27245488/converting-iso-8601-date-time-to-seconds-in-python
    #eg: 1984-06-02T19:05:00.000Z
    utc_dt = datetime.utcnow()
    timestamp = (utc_dt - datetime(1970, 1, 1)).total_seconds()
    return int(timestamp * 1000)
示例#38
0
def __init__(self, *args, **kwargs):
    super(Smarthouse_com_auSpider, self).__init__(*args, **kwargs)
    self.stored_last_date = incremental_utils.get_latest_pro_review_date(
        self.mysql_manager, self.spider_conf["source_id"])
    if not self.stored_last_date:
        self.stored_last_date = datetime(1970, 1, 1)
示例#39
0
import datetime

def human_readable_time(seconds):
    delta = datetime.timedelta(seconds=seconds)
    return (datetime.datetime(1900, 1, 1) +
            delta).strftime("%H:%M:%S").split('.')[0]
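
# A simpler variant (an alternative, not what the snippet does):
# str(timedelta) renders hours:minutes:seconds directly, though hours are
# not zero-padded and spans of 24h or more gain a 'N day(s),' prefix.
import datetime

assert str(datetime.timedelta(seconds=3661)) == '1:01:01'
assert str(datetime.timedelta(seconds=90061)) == '1 day, 1:01:01'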
示例#40
0
print('The result is %s digits long.' % (len(str(prod))))
print('Took %s seconds to calculate.' % (endTime - startTime))

print('\n--------------time.sleep()----------------------')
for i in range(5):
    print('Tick')
    time.sleep(1)
    print('Tock')
    time.sleep(1)
print(time.time())
'''
print('\n---------------datetime Module---------------------')
import datetime

print(datetime.datetime.now())
dt = datetime.datetime(2015, 10, 21, 16, 29, 0)
print(dt)
print(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)

print('\n----------------timedelta --------------------')
delta = datetime.timedelta(days=11, hours=10, minutes=9, seconds=8)
dt = datetime.datetime.now()
dt = dt + delta
print(dt)

print('\n---------------Converting ---------------------')
'''
%Y 	  Year with century, as in '2014'  
%y 	  Year without century, '00' to '99' (1970 to 2069)  
%m 	  Month as a decimal number, '01' to '12'  
%B 	  Full month name, as in 'November'  
示例#41
0
def get_changeset(self, rev):
    assert rev % 3 == 1  # allow only 3n + 1
    return MockChangeset(
        self, rev, 'message-%d' % rev, 'author-%d' % rev,
        datetime(2001, 1, 1, tzinfo=utc) + timedelta(seconds=rev))
示例#42
0
    def insert_ecrit(self, compte, journal, folio, date,
                     libelle, debit=0.0, credit=0.0, 
                     piece="", centre="", echeance=""):
        """
        Insere une nouvelle ligne dans la table ecritures de Quadra.
        Si le compte possède une affectation analytique, une deuxème
        ligne est insérée avec les données analytiques
        """
        sql_ecr = ""
        sql_ana = ""
        sql_ech = ""

        CodeOperateur = "BOT"
        NumLigne = 0
        TypeLigne = "E"
        ClientOuFrn = 0

        # Several variables to set up depending on whether a due date is
        # present; a non-datetime value is treated as no due date so the
        # due-date insert below is not built from undefined ech_* values.
        if echeance:
            if not isinstance(echeance, datetime):
                echeance = ""
                ecr_DtEcheance = datetime(1899, 12, 30)
                ecr_EchSimple = datetime(1899, 12, 30)
            else:
                ecr_DtEcheance = datetime(1899, 12, 30)
                ecr_EchSimple = echeance
                ech_DtEcheance = echeance
                ech_EchSimple = datetime(1899, 12, 30)
        else:
            ecr_DtEcheance = datetime(1899, 12, 30)
            ecr_EchSimple = datetime(1899, 12, 30)

        if piece:
            piece = str(piece)[0:10]
        else:
            piece = ""

        DateSysSaisie = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

        periode = datetime(date.year, date.month, 1)
        jour = date.day
        libelle = libelle[:30]

        uid = self.get_last_uniq() + 1
        lfolio = self.get_last_lignefolio(journal, periode)
        lfolio = int(((lfolio/10)+1)*10)
        Etat = 0
        ModePaiement = "NULL"
        CodeBanque = "NULL"
        NumEditLettrePaiement = "NULL"
        ReferenceTire = "NULL"
        Rib = "NULL"
        DomBanque = "NULL"
        Nature = ""
        PrctRepartition = 0.0
        TypeSaisie = ""

        # Check that the date falls in a period that is not closed
        if periode <= self.dtclot:
            logging.error("entry on a closed period: {}".format(periode))
            return False

        # Check for an analytic allocation
        if not centre:
            if compte in self.affect:
                centre = self.affect[compte]

        # Check the account exists
        if compte not in self.plan:
            # If the insert succeeds, update param_doss
            if self.insert_compte(compte):
                self.plan.update({compte : {"initule": "", "nbrecr": 0, "lettrage": ""}})

        sql_ecr = f"""
            INSERT INTO Ecritures
            (NumUniq, NumeroCompte, 
            CodeJournal, Folio, 
            LigneFolio, PeriodeEcriture, 
            JourEcriture, Libelle, 
            MontantTenuDebit, MontantTenuCredit, 
            NumeroPiece, DateEcheance, CodeOperateur, 
            DateSysSaisie, Etat, 
            ModePaiement, NumLigne, 
            TypeLigne, NumEditLettrePaiement,
            ReferenceTire, Rib, DomBanque,
            Nature, PrctRepartition, TypeSaisie,
            ClientOuFrn, 
            EcheanceSimple, CentreSimple) 
            VALUES 
            ({uid}, '{compte}', 
            '{journal}', {folio}, 
            {lfolio}, #{periode}#, 
            {jour}, '{libelle}', 
            {debit}, {credit}, 
            '{piece}', #{ecr_DtEcheance}#, '{CodeOperateur}', 
            #{DateSysSaisie}#, {Etat}, 
            {ModePaiement}, {NumLigne}, 
            '{TypeLigne}', {NumEditLettrePaiement},
            {ReferenceTire}, {Rib}, {DomBanque},
            '{Nature}', {PrctRepartition}, '{TypeSaisie}',
            {ClientOuFrn},
            #{ecr_EchSimple}#, '{centre}')
            """            
        if centre:
            montant = abs(debit - credit)
            TypeLigne = "A"
            NumLigne = 1    
            Nature = '*'
            PrctRepartition = 100
            TypeSaisie = 'P'                      

            sql_ana = f"""
                INSERT INTO Ecritures
                (NumUniq, NumeroCompte,
                CodeJournal, Folio,
                LigneFolio,PeriodeEcriture,
                JourEcriture, MontantTenuDebit,
                MontantTenuCredit, NumLigne, 
                TypeLigne, Centre, 
                Nature, PrctRepartition, 
                TypeSaisie, MontantAna)
                VALUES
                ({uid+1}, '{compte}',
                '{journal}', {folio},
                {lfolio}, #{periode}#,
                {jour}, {debit}, 
                {credit}, {NumLigne}, 
                '{TypeLigne}', '{centre}',
                '{Nature}', {PrctRepartition},
                '{TypeSaisie}', {montant})
                """           
        if echeance:
            montant = abs(debit - credit)
            Libelle = ""
            Piece = "NULL"
            CodeOperateur = "NULL"
            DateSysSaisie = datetime(1899, 12, 30)
            NumLigne = 1              
            TypeLigne = "T"
            ModePaiement = "''"
            CodeBanque = "''"
            ReferenceTire = "''"
            Rib = "''"
            DomBanque = "''"            
            Centre = "''"
            Nature = "NULL"
            PrctRepartition = "NULL"
            TypeSaisie = "NULL"
            ClientOuFrn = "NULL"
            CentreSimple = "''"
            sql_ech = f"""
                INSERT INTO Ecritures
                (NumUniq, NumeroCompte, 
                CodeJournal, Folio, 
                LigneFolio, PeriodeEcriture, 
                JourEcriture, Libelle, 
                MontantTenuDebit, MontantTenuCredit, 
                NumeroPiece, DateEcheance, CodeOperateur, 
                DateSysSaisie, Etat, 
                ModePaiement, CodeBanque, NumLigne, 
                TypeLigne, NumEditLettrePaiement,
                ReferenceTire, Rib, DomBanque,
                Centre, Nature, 
                PrctRepartition, TypeSaisie,
                ClientOuFrn, MontantAna,
                EcheanceSimple, CentreSimple) 
                VALUES 
                ({uid+1}, '{compte}', 
                '{journal}', {folio}, 
                {lfolio}, #{periode}#, 
                {jour}, '{Libelle}', 
                {debit}, {credit}, 
                {Piece}, #{ech_DtEcheance}#, {CodeOperateur}, 
                #{DateSysSaisie}#, {Etat}, 
                {ModePaiement}, {CodeBanque}, {NumLigne}, 
                '{TypeLigne}', {NumEditLettrePaiement},
                {ReferenceTire}, {Rib}, {DomBanque},
                {Centre}, {Nature}, 
                {PrctRepartition}, {TypeSaisie},
                {ClientOuFrn}, {montant}, 
                #{ech_EchSimple}#, {CentreSimple})
                """   
                
        stat = self.exec_insert(sql_ecr)
        if stat:
            if sql_ana:
                self.exec_insert(sql_ana)
            elif sql_ech:
                self.exec_insert(sql_ech)
        else:
            uid = 0

        return uid
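
# A hedged sketch of the same kind of insert using DB-API parameter
# binding instead of f-string interpolation (qmark placeholders as in
# pyodbc; the cursor argument and the reduced column list are
# illustrative assumptions, not the class's actual API).
def insert_ecrit_params(cursor, uid, compte, journal, folio, libelle,
                        debit=0.0, credit=0.0):
    sql = ("INSERT INTO Ecritures "
           "(NumUniq, NumeroCompte, CodeJournal, Folio, Libelle, "
           "MontantTenuDebit, MontantTenuCredit) "
           "VALUES (?, ?, ?, ?, ?, ?, ?)")
    # the driver quotes and escapes each bound value itself
    cursor.execute(sql, (uid, compte, journal, folio, libelle[:30],
                         debit, credit))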
示例#43
0
def get_functional_toilet_sector_data(domain,
                                      config,
                                      loc_level,
                                      location_id,
                                      show_test=False):
    group_by = ['%s_name' % loc_level]

    config['month'] = datetime(*config['month'])
    data = AggAwcMonthly.objects.filter(**config).values(*group_by).annotate(
        in_month=Sum('infra_functional_toilet'),
        all=Sum('num_awc_infra_last_update'),
    ).order_by('%s_name' % loc_level)

    if not show_test:
        data = apply_exclude(domain, data)

    chart_data = {
        'blue': [],
    }

    tooltips_data = defaultdict(lambda: {'in_month': 0, 'all': 0})

    loc_children = get_child_locations(domain, location_id, show_test)
    result_set = set()

    for row in data:
        valid = row['all']
        name = row['%s_name' % loc_level]
        result_set.add(name)

        in_month = row['in_month']

        row_values = {'in_month': in_month or 0, 'all': valid or 0}

        for prop, value in six.iteritems(row_values):
            tooltips_data[name][prop] += value

        value = (in_month or 0) / float(valid or 1)
        chart_data['blue'].append([name, value])

    for sql_location in loc_children:
        if sql_location.name not in result_set:
            chart_data['blue'].append([sql_location.name, 0])

    chart_data['blue'] = sorted(chart_data['blue'])

    return {
        "tooltips_data":
        dict(tooltips_data),
        "info":
        _(("Percentage of AWCs that reported having a functional toilet")),
        "chart_data": [
            {
                "values": chart_data['blue'],
                "key": "",
                "strokeWidth": 2,
                "classed": "dashed",
                "color": MapColors.BLUE
            },
        ]
    }
示例#44
0
def get_functional_toilet_data_chart(domain,
                                     config,
                                     loc_level,
                                     show_test=False):
    month = datetime(*config['month'])
    three_before = datetime(*config['month']) - relativedelta(months=3)

    config['month__range'] = (three_before, month)
    del config['month']

    chart_data = AggAwcMonthly.objects.filter(**config).values(
        'month', '%s_name' % loc_level).annotate(
            in_month=Sum('infra_functional_toilet'),
            all=Sum('num_awc_infra_last_update'),
        ).order_by('month')

    if not show_test:
        chart_data = apply_exclude(domain, chart_data)

    data = {
        'blue': OrderedDict(),
    }

    dates = [dt for dt in rrule(MONTHLY, dtstart=three_before, until=month)]

    for date in dates:
        milliseconds = int(date.strftime("%s")) * 1000
        data['blue'][milliseconds] = {'y': 0, 'all': 0, 'in_month': 0}

    best_worst = defaultdict(lambda: {'in_month': 0, 'all': 0})
    for row in chart_data:
        date = row['month']
        in_month = (row['in_month'] or 0)
        location = row['%s_name' % loc_level]
        valid = row['all']

        best_worst[location]['in_month'] = in_month
        best_worst[location]['all'] = (valid or 0)

        date_in_milliseconds = int(date.strftime("%s")) * 1000

        data['blue'][date_in_milliseconds]['all'] += (valid or 0)
        data['blue'][date_in_milliseconds]['in_month'] += in_month

    top_locations = sorted(
        [
            dict(loc_name=key,
                 percent=(value['in_month'] * 100) / float(value['all'] or 1))
            for key, value in six.iteritems(best_worst)
        ],
        key=lambda x: x['percent'],
        reverse=True,
    )

    return {
        "chart_data": [{
            "values": [{
                'x': key,
                'y': value['in_month'] / float(value['all'] or 1),
                'in_month': value['in_month']
            } for key, value in six.iteritems(data['blue'])],
            "key":
            "Percentage of AWCs that reported having a functional toilet",
            "strokeWidth":
            2,
            "classed":
            "dashed",
            "color":
            ChartColors.BLUE
        }],
        "all_locations":
        top_locations,
        "top_five":
        top_locations[:5],
        "bottom_five":
        top_locations[-5:],
        "location_type":
        loc_level.title()
        if loc_level != LocationTypes.SUPERVISOR else 'Sector'
    }
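
# strftime("%s") used above is a glibc extension rather than portable
# Python; for naive datetimes meant as UTC (an assumption about these
# month values), calendar.timegm over the timetuple gives the same
# epoch-millisecond value on any platform.
import calendar
from datetime import datetime

def epoch_ms(dt):
    return calendar.timegm(dt.timetuple()) * 1000

assert epoch_ms(datetime(1970, 1, 2)) == 86400000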
示例#45
0
class BettingResourcesTest(unittest.TestCase):

    DATE_TIME_SENT = datetime.datetime(2003, 8, 4, 12, 30, 45)

    def test_event_type_result(self):
        mock_response = create_mock_json('tests/resources/list_event_types.json')
        event_types = mock_response.json().get('result')

        for event_type in event_types:
            resource = resources.EventTypeResult(date_time_sent=self.DATE_TIME_SENT,
                                                 **event_type)

            assert resource._datetime_sent == self.DATE_TIME_SENT
            assert resource.market_count == event_type['marketCount']
            assert resource.event_type.id == event_type['eventType']['id']
            assert resource.event_type.name == event_type['eventType']['name']

    def test_competition_result(self):
        mock_response = create_mock_json('tests/resources/list_competitions.json')
        competitions = mock_response.json().get('result')

        for competition in competitions:
            resource = resources.CompetitionResult(date_time_sent=self.DATE_TIME_SENT,
                                                   **competition)

            assert resource._datetime_sent == self.DATE_TIME_SENT
            assert resource.market_count == competition['marketCount']
            assert resource.competition_region == competition['competitionRegion']
            assert resource.competition.id == competition['competition']['id']
            assert resource.competition.name == competition['competition']['name']

    def test_time_range_result(self):
        mock_response = create_mock_json('tests/resources/list_time_ranges.json')
        time_ranges = mock_response.json().get('result')

        for time_range in time_ranges:
            resource = resources.TimeRangeResult(date_time_sent=self.DATE_TIME_SENT,
                                                 **time_range)

            assert resource._datetime_sent == self.DATE_TIME_SENT
            assert resource.market_count == time_range['marketCount']
            assert resource.time_range._from == datetime.datetime.strptime(time_range['timeRange']['from'],
                                                                           "%Y-%m-%dT%H:%M:%S.%fZ")
            assert resource.time_range.to == datetime.datetime.strptime(time_range['timeRange']['to'],
                                                                        "%Y-%m-%dT%H:%M:%S.%fZ")

    def test_event_result(self):
        mock_response = create_mock_json('tests/resources/list_events.json')
        event_results = mock_response.json().get('result')

        for event_result in event_results:
            resource = resources.EventResult(date_time_sent=self.DATE_TIME_SENT,
                                             **event_result)

            assert resource._datetime_sent == self.DATE_TIME_SENT
            assert resource.market_count == event_result['marketCount']
            assert resource.event.id == event_result['event']['id']
            assert resource.event.open_date == datetime.datetime.strptime(event_result['event']['openDate'],
                                                                          "%Y-%m-%dT%H:%M:%S.%fZ")
            assert resource.event.time_zone == event_result['event']['timezone']
            assert resource.event.country_code == event_result['event']['countryCode']
            assert resource.event.name == event_result['event']['name']
            assert resource.event.venue == event_result['event']['venue']

    def test_market_type_result(self):
        mock_response = create_mock_json('tests/resources/list_market_types.json')
        market_type_results = mock_response.json().get('result')

        for market_type_result in market_type_results:
            resource = resources.MarketTypeResult(date_time_sent=self.DATE_TIME_SENT,
                                                  **market_type_result)

            assert resource._datetime_sent == self.DATE_TIME_SENT
            assert resource.market_count == market_type_result['marketCount']
            assert resource.market_type == market_type_result['marketType']

    def test_country_result(self):
        mock_response = create_mock_json('tests/resources/list_countries.json')
        countries_results = mock_response.json().get('result')

        for countries_result in countries_results:
            resource = resources.CountryResult(date_time_sent=self.DATE_TIME_SENT,
                                               **countries_result)

            assert resource._datetime_sent == self.DATE_TIME_SENT
            assert resource.market_count == countries_result['marketCount']
            assert resource.country_code == countries_result['countryCode']

    def test_venue_result(self):
        mock_response = create_mock_json('tests/resources/list_venues.json')
        venue_results = mock_response.json().get('result')

        for venue_result in venue_results:
            resource = resources.VenueResult(date_time_sent=self.DATE_TIME_SENT,
                                             **venue_result)

            assert resource._datetime_sent == self.DATE_TIME_SENT
            assert resource.market_count == venue_result['marketCount']
            assert resource.venue == venue_result['venue']

    def test_market_catalogue(self):
        mock_response = create_mock_json('tests/resources/list_market_catalogue.json')
        market_catalogues = mock_response.json().get('result')

        for market_catalogue in market_catalogues:
            resource = resources.MarketCatalogue(date_time_sent=self.DATE_TIME_SENT,
                                                 **market_catalogue)

            assert resource._datetime_sent == self.DATE_TIME_SENT
            assert resource.market_id == market_catalogue['marketId']
            assert resource.market_name == market_catalogue['marketName']
            assert resource.total_matched == market_catalogue['totalMatched']
            assert resource.market_start_time == datetime.datetime.strptime(
                    market_catalogue['marketStartTime'], "%Y-%m-%dT%H:%M:%S.%fZ")

            assert resource.competition.id == market_catalogue['competition']['id']
            assert resource.competition.name == market_catalogue['competition']['name']

            assert resource.event.id == market_catalogue['event']['id']
            assert resource.event.open_date == datetime.datetime.strptime(market_catalogue['event']['openDate'],
                                                                          "%Y-%m-%dT%H:%M:%S.%fZ")
            assert resource.event.time_zone == market_catalogue['event']['timezone']
            assert resource.event.country_code == market_catalogue['event']['countryCode']
            assert resource.event.name == market_catalogue['event']['name']
            assert resource.event.venue == market_catalogue['event'].get('venue')

            assert resource.event_type.id == market_catalogue['eventType']['id']
            assert resource.event_type.name == market_catalogue['eventType']['name']

            assert resource.description.betting_type == market_catalogue['description']['bettingType']
            assert resource.description.bsp_market == market_catalogue['description']['bspMarket']
            assert resource.description.discount_allowed == market_catalogue['description']['discountAllowed']
            assert resource.description.market_base_rate == market_catalogue['description']['marketBaseRate']
            assert resource.description.market_time == datetime.datetime.strptime(
                    market_catalogue['description']['marketTime'], "%Y-%m-%dT%H:%M:%S.%fZ")
            assert resource.description.market_type == market_catalogue['description']['marketType']
            assert resource.description.persistence_enabled == market_catalogue['description']['persistenceEnabled']
            assert resource.description.regulator == market_catalogue['description']['regulator']
            assert resource.description.rules == market_catalogue['description']['rules']
            assert resource.description.rules_has_date == market_catalogue['description']['rulesHasDate']
            assert resource.description.suspend_time == datetime.datetime.strptime(
                    market_catalogue['description']['suspendTime'], "%Y-%m-%dT%H:%M:%S.%fZ")
            assert resource.description.turn_in_play_enabled == market_catalogue['description']['turnInPlayEnabled']
            assert resource.description.wallet == market_catalogue['description']['wallet']
            assert resource.description.each_way_divisor == market_catalogue['description'].get('eachWayDivisor')
            assert resource.description.clarifications == market_catalogue['description'].get('clarifications')

            assert len(resource.runners) == 10
            assert resource.runners[6].handicap == 0.0
            assert resource.runners[6].runner_name == 'SCR Altach'
            assert resource.runners[6].selection_id == 872710
            assert resource.runners[6].sort_priority == 7
            assert resource.runners[6].metadata == {'runnerId': '872710'}

    def test_market_book(self):
        mock_response = create_mock_json('tests/resources/list_market_book.json')
        market_books = mock_response.json().get('result')

        for market_book in market_books:
            resource = resources.MarketBook(date_time_sent=self.DATE_TIME_SENT,
                                            **market_book)
            assert resource._datetime_sent == self.DATE_TIME_SENT
            assert resource.market_id == market_book['marketId']
            assert resource.bet_delay == market_book['betDelay']
            assert resource.bsp_reconciled == market_book['bspReconciled']
            assert resource.complete == market_book['complete']
            assert resource.cross_matching == market_book['crossMatching']
            assert resource.inplay == market_book['inplay']
            assert resource.is_market_data_delayed == market_book['isMarketDataDelayed']
            assert resource.last_match_time == datetime.datetime.strptime(
                    market_book['lastMatchTime'], "%Y-%m-%dT%H:%M:%S.%fZ")
            assert resource.number_of_active_runners == market_book['numberOfActiveRunners']
            assert resource.number_of_runners == market_book['numberOfRunners']
            assert resource.number_of_winners == market_book['numberOfWinners']
            assert resource.runners_voidable == market_book['runnersVoidable']
            assert resource.status == market_book['status']
            assert resource.total_available == market_book['totalAvailable']
            assert resource.total_matched == market_book['totalMatched']
            assert resource.version == market_book['version']

            assert len(resource.runners) == len(market_book['runners'])

            for runner in market_book['runners']:
                pass
                # assert resource.runners[runner['selectionId']].selection_id == runner['selectionId']
                # assert resource.runners[runner['selectionId']].status == runner['status']
                # assert resource.runners[runner['selectionId']].total_matched == runner.get('totalMatched')
                # assert resource.runners[runner['selectionId']].adjustment_factor == runner.get('adjustmentFactor')
                # assert resource.runners[runner['selectionId']].handicap == runner['handicap']
                # assert resource.runners[runner['selectionId']].last_price_traded == runner.get('lastPriceTraded')
                #
                # if runner.get('removalDate'):
                #     assert resource.runners[runner['selectionId']].removal_date == datetime.datetime.strptime(
                #         runner['removalDate'], "%Y-%m-%dT%H:%M:%S.%fZ")
                # # else:
                # #     assert resource.runners[runner['selectionId']].sp.near_price == runner['sp']['nearPrice']
                # #     assert resource.runners[runner['selectionId']].sp.far_price == runner['sp']['farPrice']
                #     assert resource.runners[runner['selectionId']].sp.actual_sp == runner['sp']['actualSP']
                # assert resource.runners[runner['selectionId']].sp.back_stake_taken == runner['sp']['backStakeTaken']
                # assert resource.runners[runner['selectionId']].sp.lay_liability_taken == runner['sp']['layLiabilityTaken']
                #
                # assert resource.runners[runner['selectionId']].ex.available_to_back == runner['ex'].get('availableToBack')
                # assert resource.runners[runner['selectionId']].ex.available_to_lay == runner['ex'].get('availableToLay')
                # assert resource.runners[runner['selectionId']].ex.traded_volume == runner['ex'].get('tradedVolume')
                # # print(resource.runners[runner['selectionId']].orders)
                # # print(resource.runners[runner['selectionId']].matches)
                # # todo complete

    def test_current_orders(self):
        mock_response = create_mock_json('tests/resources/list_current_orders.json')
        current_orders = mock_response.json().get('result')
        resource = resources.CurrentOrders(date_time_sent=self.DATE_TIME_SENT,
                                           **current_orders)
        assert resource._datetime_sent == self.DATE_TIME_SENT
        assert len(resource.orders) == len(current_orders.get('currentOrders'))

        for current_order in current_orders.get('currentOrders'):
            assert resource.orders[0].bet_id == current_order['betId']
            # todo complete

    def test_cleared_orders(self):
        mock_response = create_mock_json('tests/resources/list_cleared_orders.json')
        cleared_orders = mock_response.json().get('result')
        resource = resources.ClearedOrders(date_time_sent=self.DATE_TIME_SENT,
                                           **cleared_orders)
        assert resource._datetime_sent == self.DATE_TIME_SENT
        assert len(resource.orders) == len(cleared_orders.get('clearedOrders'))

        for cleared_order in cleared_orders.get('clearedOrders'):
            assert resource.orders[0].bet_id == cleared_order['betId']
            # todo complete

    def test_market_profit_loss(self):
        mock_response = create_mock_json('tests/resources/list_market_profit_and_loss.json')
        market_profits = mock_response.json().get('result')

        for market_profit in market_profits:
            resource = resources.MarketProfitLoss(date_time_sent=self.DATE_TIME_SENT,
                                                  **market_profit)

            assert resource._datetime_sent == self.DATE_TIME_SENT
            assert resource.market_id == market_profit['marketId']
            assert resource.commission_applied == market_profit.get('commissionApplied')

            assert len(resource.profit_and_losses) == len(market_profit['profitAndLosses'])
            # todo complete

    def test_place_orders(self):
        mock_response = create_mock_json('tests/resources/place_orders.json')
        place_orders = mock_response.json().get('result')
        resource = resources.PlaceOrders(date_time_sent=self.DATE_TIME_SENT,
                                         **place_orders)
        assert resource._datetime_sent == self.DATE_TIME_SENT
        assert resource.market_id == place_orders['marketId']
        assert resource.status == place_orders['status']
        assert resource.customer_ref == place_orders.get('customerRef')
        assert resource.error_code == place_orders.get('errorCode')
        assert len(resource.place_instruction_reports) == len(place_orders.get('instructionReports'))

        for order in place_orders.get('instructionReports'):
            assert resource.place_instruction_reports[0].size_matched == order['sizeMatched']
            assert resource.place_instruction_reports[0].status == order['status']
            assert resource.place_instruction_reports[0].bet_id == order['betId']
            assert resource.place_instruction_reports[0].average_price_matched == order['averagePriceMatched']
            assert resource.place_instruction_reports[0].placed_date == datetime.datetime.strptime(
                        order['placedDate'], "%Y-%m-%dT%H:%M:%S.%fZ")
            assert resource.place_instruction_reports[0].error_code == order.get('errorCode')

            assert resource.place_instruction_reports[0].instruction.selection_id == order['instruction']['selectionId']
            assert resource.place_instruction_reports[0].instruction.side == order['instruction']['side']
            assert resource.place_instruction_reports[0].instruction.order_type == order['instruction']['orderType']
            assert resource.place_instruction_reports[0].instruction.handicap == order['instruction']['handicap']

            assert resource.place_instruction_reports[0].instruction.order.persistence_type == order['instruction']['limitOrder']['persistenceType']
            assert resource.place_instruction_reports[0].instruction.order.price == order['instruction']['limitOrder']['price']
            assert resource.place_instruction_reports[0].instruction.order.size == order['instruction']['limitOrder']['size']

    def test_cancel_orders(self):
        mock_response = create_mock_json('tests/resources/cancel_orders.json')
        cancel_orders = mock_response.json().get('result')
        resource = resources.CancelOrders(date_time_sent=self.DATE_TIME_SENT,
                                          **cancel_orders)
        assert resource._datetime_sent == self.DATE_TIME_SENT
        assert resource.market_id == cancel_orders['marketId']
        assert resource.status == cancel_orders['status']
        assert resource.customer_ref == cancel_orders.get('customerRef')
        assert resource.error_code == cancel_orders.get('errorCode')
        assert len(resource.cancel_instruction_reports) == len(cancel_orders.get('instructionReports'))

        for order in cancel_orders.get('instructionReports'):
            assert resource.cancel_instruction_reports[0].size_cancelled == order['sizeCancelled']
            assert resource.cancel_instruction_reports[0].status == order['status']
            assert resource.cancel_instruction_reports[0].cancelled_date == datetime.datetime.strptime(
                        order['cancelledDate'], "%Y-%m-%dT%H:%M:%S.%fZ")

            assert resource.cancel_instruction_reports[0].instruction.bet_id == order['instruction']['betId']
            assert resource.cancel_instruction_reports[0].instruction.size_reduction == order['instruction'].get('sizeReduction')

    def test_update_orders(self):
        mock_response = create_mock_json('tests/resources/update_orders.json')
        update_orders = mock_response.json().get('result')
        resource = resources.UpdateOrders(date_time_sent=self.DATE_TIME_SENT,
                                          **update_orders)
        assert resource._datetime_sent == self.DATE_TIME_SENT
        assert resource.market_id == update_orders['marketId']
        assert resource.status == update_orders['status']
        assert resource.customer_ref == update_orders.get('customerRef')
        assert resource.error_code == update_orders.get('errorCode')
        assert len(resource.update_instruction_reports) == len(update_orders.get('instructionReports'))

        for order in update_orders.get('instructionReports'):
            pass

    def test_replace_orders(self):
        mock_response = create_mock_json('tests/resources/replace_orders.json')
        replace_orders = mock_response.json().get('result')
        resource = resources.ReplaceOrders(date_time_sent=self.DATE_TIME_SENT,
                                           **replace_orders)
        assert resource._datetime_sent == self.DATE_TIME_SENT
        assert resource.market_id == replace_orders['marketId']
        assert resource.status == replace_orders['status']
        assert resource.customer_ref == replace_orders.get('customerRef')
        assert resource.error_code == replace_orders.get('errorCode')
示例#46
0
    def test_datetimes(self, path):

        # Test writing and reading datetimes. For issue #9139. (xref #9185)
        datetimes = [
            datetime(2013, 1, 13, 1, 2, 3),
            datetime(2013, 1, 13, 2, 45, 56),
            datetime(2013, 1, 13, 4, 29, 49),
            datetime(2013, 1, 13, 6, 13, 42),
            datetime(2013, 1, 13, 7, 57, 35),
            datetime(2013, 1, 13, 9, 41, 28),
            datetime(2013, 1, 13, 11, 25, 21),
            datetime(2013, 1, 13, 13, 9, 14),
            datetime(2013, 1, 13, 14, 53, 7),
            datetime(2013, 1, 13, 16, 37, 0),
            datetime(2013, 1, 13, 18, 20, 52),
        ]

        write_frame = DataFrame({"A": datetimes})
        write_frame.to_excel(path, "Sheet1")
        read_frame = pd.read_excel(path, "Sheet1", header=0)

        tm.assert_series_equal(write_frame["A"], read_frame["A"])
示例#47
0
yields['yieldAcum'] = (yields['yield']+1).cumprod()-1

fig, ax = plt.subplots(figsize=(14, 7))
ax.plot(yields.yieldAcum,  lw=1, c='tab:blue', label='Buy&Hold')
ax.plot(best.yieldAcum, lw=1, c='tab:green',
        label='Rolling Best5 previous yield week return')
ax.plot(worst.yieldAcum,  lw=1, c='tab:red',
        label='Rolling Worst5 previous yield week return')
plt.suptitle('Compare Buy&Hold vs Active Portfolio', y=0.95, fontsize=16)
plt.legend(fontsize=14)

columns = 3
rows = años//columns+1
fig2, ax2 = plt.subplots(figsize=(14, 4*rows), nrows=rows, ncols=columns)
for i in range(años+1):
    dtFrom = dt.datetime(end.year-años + i, 1, 1)
    dtTo = dt.datetime(end.year-años + i + 1, 1, 1)
    yieldsYr = (yields.loc[(yields.index > dtFrom)
                & (yields.index < dtTo)]).copy()
    bestYr = (best.loc[(best.index > dtFrom) & (best.index < dtTo)]).copy()
    worstYr = (worst.loc[(worst.index > dtFrom) & (worst.index < dtTo)]).copy()
    bestYr['yieldAcum'] = (bestYr['yield']+1).cumprod()-1
    worstYr['yieldAcum'] = (worstYr['yield']+1).cumprod()-1
    yieldsYr['yieldAcum'] = (yieldsYr['yield']+1).cumprod()-1
    row = i//columns
    col = i % columns
    ax2[row][col].plot(yieldsYr.yieldAcum,  lw=1, c='tab:blue')
    ax2[row][col].plot(bestYr.yieldAcum,  lw=1, c='tab:green')
    ax2[row][col].plot(worstYr.yieldAcum,  lw=1, c='tab:red')
    ax2[row][col].set_title(str(end.year-años + i),
                            y=0.83, fontsize=20, alpha=0.4)
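
# A tiny worked check of the compounding used above: two consecutive
# +10% periods compound to +21%, which (1 + r).cumprod() - 1 reproduces.
import pandas as pd

r = pd.Series([0.10, 0.10])
acum = (r + 1).cumprod() - 1
assert abs(acum.iloc[-1] - 0.21) < 1e-12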
示例#48
0
    def __init__(self):
        self.images = {}
        # NOTE(justinsb): The OpenStack API can't upload an image?
        # So, make sure we've got one..
        timestamp = datetime.datetime(2011, 1, 1, 1, 2, 3)

        image1 = {'id': '155d900f-4e14-4e4c-a73d-069cbf4541e6',
                  'name': 'fakeimage123456',
                  'created_at': timestamp,
                  'updated_at': timestamp,
                  'deleted_at': None,
                  'deleted': False,
                  'status': 'active',
                  'is_public': False,
                  'container_format': 'raw',
                  'disk_format': 'raw',
                  'properties': {'kernel_id': 'nokernel',
                                 'ramdisk_id': 'nokernel',
                                 'architecture': 'x86_64'}}

        image2 = {'id': 'a2459075-d96c-40d5-893e-577ff92e721c',
                  'name': 'fakeimage123456',
                  'created_at': timestamp,
                  'updated_at': timestamp,
                  'deleted_at': None,
                  'deleted': False,
                  'status': 'active',
                  'is_public': True,
                  'container_format': 'ami',
                  'disk_format': 'ami',
                  'properties': {'kernel_id': 'nokernel',
                                 'ramdisk_id': 'nokernel'}}

        image3 = {'id': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                  'name': 'fakeimage123456',
                  'created_at': timestamp,
                  'updated_at': timestamp,
                  'deleted_at': None,
                  'deleted': False,
                  'status': 'active',
                  'is_public': True,
                  'container_format': None,
                  'disk_format': None,
                  'properties': {'kernel_id': 'nokernel',
                                 'ramdisk_id': 'nokernel'}}

        image4 = {'id': 'cedef40a-ed67-4d10-800e-17455edce175',
                  'name': 'fakeimage123456',
                  'created_at': timestamp,
                  'updated_at': timestamp,
                  'deleted_at': None,
                  'deleted': False,
                  'status': 'active',
                  'is_public': True,
                  'container_format': 'ami',
                  'disk_format': 'ami',
                  'properties': {'kernel_id': 'nokernel',
                                 'ramdisk_id': 'nokernel'}}

        image5 = {'id': 'c905cedb-7281-47e4-8a62-f26bc5fc4c77',
                  'name': 'fakeimage123456',
                  'created_at': timestamp,
                  'updated_at': timestamp,
                  'deleted_at': None,
                  'deleted': False,
                  'status': 'active',
                  'is_public': True,
                  'container_format': 'ami',
                  'disk_format': 'ami',
                  'properties': {
                      'kernel_id': '155d900f-4e14-4e4c-a73d-069cbf4541e6',
                      'ramdisk_id': None}}

        image6 = {'id': 'a440c04b-79fa-479c-bed1-0b816eaec379',
                  'name': 'fakeimage6',
                  'created_at': timestamp,
                  'updated_at': timestamp,
                  'deleted_at': None,
                  'deleted': False,
                  'status': 'active',
                  'is_public': False,
                  'container_format': 'ova',
                  'disk_format': 'vhd',
                  'properties': {'kernel_id': 'nokernel',
                                 'ramdisk_id': 'nokernel',
                                 'architecture': 'x86_64',
                                 'auto_disk_config': 'False'}}

        image7 = {'id': '70a599e0-31e7-49b7-b260-868f441e862b',
                  'name': 'fakeimage7',
                  'created_at': timestamp,
                  'updated_at': timestamp,
                  'deleted_at': None,
                  'deleted': False,
                  'status': 'active',
                  'is_public': False,
                  'container_format': 'ova',
                  'disk_format': 'vhd',
                  'properties': {'kernel_id': 'nokernel',
                                 'ramdisk_id': 'nokernel',
                                 'architecture': 'x86_64',
                                 'auto_disk_config': 'True'}}

        self.create(None, image1)
        self.create(None, image2)
        self.create(None, image3)
        self.create(None, image4)
        self.create(None, image5)
        self.create(None, image6)
        self.create(None, image7)
        self._imagedata = {}
        super(_FakeImageService, self).__init__()
示例#49
0
class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):

    def test_init(self):
        """Test MySqlToGoogleCloudStorageOperator instance is properly initialized."""
        op = MySQLToGCSOperator(
            task_id=TASK_ID, sql=SQL, bucket=BUCKET, filename=JSON_FILENAME,
            export_format='CSV', field_delimiter='|')
        self.assertEqual(op.task_id, TASK_ID)
        self.assertEqual(op.sql, SQL)
        self.assertEqual(op.bucket, BUCKET)
        self.assertEqual(op.filename, JSON_FILENAME)
        self.assertEqual(op.export_format, 'csv')
        self.assertEqual(op.field_delimiter, '|')

    @parameterized.expand([
        ("string", None, "string"),
        (datetime.date(1970, 1, 2), None, 86400),
        (datetime.datetime(1970, 1, 1, 1, 0), None, 3600),
        (decimal.Decimal(5), None, 5),
        (b"bytes", "BYTES", "Ynl0ZXM="),
        (None, "BYTES", None)
    ])
    def test_convert_type(self, value, schema_type, expected):
        op = MySQLToGCSOperator(
            task_id=TASK_ID,
            mysql_conn_id=MYSQL_CONN_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=JSON_FILENAME)
        self.assertEqual(
            op.convert_type(value, schema_type),
            expected)

    @mock.patch('airflow.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.operators.sql_to_gcs.GCSHook')
    def test_exec_success_json(self, gcs_hook_mock_class, mysql_hook_mock_class):
        """Test successful run of execute function for JSON"""
        op = MySQLToGCSOperator(
            task_id=TASK_ID,
            mysql_conn_id=MYSQL_CONN_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=JSON_FILENAME)

        mysql_hook_mock = mysql_hook_mock_class.return_value
        mysql_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        mysql_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION

        gcs_hook_mock = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False):
            self.assertEqual(BUCKET, bucket)
            self.assertEqual(JSON_FILENAME.format(0), obj)
            self.assertEqual('application/json', mime_type)
            self.assertFalse(gzip)
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(b''.join(NDJSON_LINES), file.read())

        gcs_hook_mock.upload.side_effect = _assert_upload

        op.execute(None)

        mysql_hook_mock_class.assert_called_once_with(mysql_conn_id=MYSQL_CONN_ID)
        mysql_hook_mock.get_conn().cursor().execute.assert_called_once_with(SQL)

    @mock.patch('airflow.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.operators.sql_to_gcs.GCSHook')
    def test_exec_success_csv(self, gcs_hook_mock_class, mysql_hook_mock_class):
        """Test successful run of execute function for CSV"""
        op = MySQLToGCSOperator(
            task_id=TASK_ID,
            mysql_conn_id=MYSQL_CONN_ID,
            sql=SQL,
            export_format='CSV',
            bucket=BUCKET,
            filename=CSV_FILENAME)

        mysql_hook_mock = mysql_hook_mock_class.return_value
        mysql_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        mysql_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION

        gcs_hook_mock = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False):
            self.assertEqual(BUCKET, bucket)
            self.assertEqual(CSV_FILENAME.format(0), obj)
            self.assertEqual('text/csv', mime_type)
            self.assertFalse(gzip)
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(b''.join(CSV_LINES), file.read())

        gcs_hook_mock.upload.side_effect = _assert_upload

        op.execute(None)

        mysql_hook_mock_class.assert_called_once_with(mysql_conn_id=MYSQL_CONN_ID)
        mysql_hook_mock.get_conn().cursor().execute.assert_called_once_with(SQL)

    @mock.patch('airflow.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.operators.sql_to_gcs.GCSHook')
    def test_exec_success_csv_ensure_utc(self, gcs_hook_mock_class, mysql_hook_mock_class):
        """Test successful run of execute function for CSV"""
        op = MySQLToGCSOperator(
            task_id=TASK_ID,
            mysql_conn_id=MYSQL_CONN_ID,
            sql=SQL,
            export_format='CSV',
            bucket=BUCKET,
            filename=CSV_FILENAME,
            ensure_utc=True)

        mysql_hook_mock = mysql_hook_mock_class.return_value
        mysql_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        mysql_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION

        gcs_hook_mock = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False):
            self.assertEqual(BUCKET, bucket)
            self.assertEqual(CSV_FILENAME.format(0), obj)
            self.assertEqual('text/csv', mime_type)
            self.assertFalse(gzip)
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(b''.join(CSV_LINES), file.read())

        gcs_hook_mock.upload.side_effect = _assert_upload

        op.execute(None)

        mysql_hook_mock_class.assert_called_once_with(mysql_conn_id=MYSQL_CONN_ID)
        mysql_hook_mock.get_conn().cursor().execute.assert_has_calls([mock.call(TZ_QUERY), mock.call(SQL)])

    @mock.patch('airflow.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.operators.sql_to_gcs.GCSHook')
    def test_exec_success_csv_with_delimiter(self, gcs_hook_mock_class, mysql_hook_mock_class):
        """Test successful run of execute function for CSV with a field delimiter"""
        op = MySQLToGCSOperator(
            task_id=TASK_ID,
            mysql_conn_id=MYSQL_CONN_ID,
            sql=SQL,
            export_format='csv',
            field_delimiter='|',
            bucket=BUCKET,
            filename=CSV_FILENAME)

        mysql_hook_mock = mysql_hook_mock_class.return_value
        mysql_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        mysql_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION

        gcs_hook_mock = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False):
            self.assertEqual(BUCKET, bucket)
            self.assertEqual(CSV_FILENAME.format(0), obj)
            self.assertEqual('text/csv', mime_type)
            self.assertFalse(gzip)
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(b''.join(CSV_LINES_PIPE_DELIMITED), file.read())

        gcs_hook_mock.upload.side_effect = _assert_upload

        op.execute(None)

        mysql_hook_mock_class.assert_called_once_with(mysql_conn_id=MYSQL_CONN_ID)
        mysql_hook_mock.get_conn().cursor().execute.assert_called_once_with(SQL)

    @mock.patch('airflow.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.operators.sql_to_gcs.GCSHook')
    def test_file_splitting(self, gcs_hook_mock_class, mysql_hook_mock_class):
        """Test that ndjson is split by approx_max_file_size_bytes param."""
        mysql_hook_mock = mysql_hook_mock_class.return_value
        mysql_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        mysql_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION

        gcs_hook_mock = gcs_hook_mock_class.return_value
        expected_upload = {
            JSON_FILENAME.format(0): b''.join(NDJSON_LINES[:2]),
            JSON_FILENAME.format(1): NDJSON_LINES[2],
        }

        def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False):
            self.assertEqual(BUCKET, bucket)
            self.assertEqual('application/json', mime_type)
            self.assertFalse(gzip)
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(expected_upload[obj], file.read())

        gcs_hook_mock.upload.side_effect = _assert_upload

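        # approx_max_file_size_bytes is set below to the size of the first two
        # rows, so the third row should roll over into a second file.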
        op = MySQLToGCSOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=JSON_FILENAME,
            approx_max_file_size_bytes=len(expected_upload[JSON_FILENAME.format(0)]))
        op.execute(None)

    @mock.patch('airflow.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.operators.sql_to_gcs.GCSHook')
    def test_schema_file(self, gcs_hook_mock_class, mysql_hook_mock_class):
        """Test writing schema files."""
        mysql_hook_mock = mysql_hook_mock_class.return_value
        mysql_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        mysql_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION

        gcs_hook_mock = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip):  # pylint: disable=unused-argument
            if obj == SCHEMA_FILENAME:
                self.assertFalse(gzip)
                with open(tmp_filename, 'rb') as file:
                    self.assertEqual(b''.join(SCHEMA_JSON), file.read())

        gcs_hook_mock.upload.side_effect = _assert_upload

        op = MySQLToGCSOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=JSON_FILENAME,
            schema_filename=SCHEMA_FILENAME)
        op.execute(None)

        # once for the file and once for the schema
        self.assertEqual(2, gcs_hook_mock.upload.call_count)

    @mock.patch('airflow.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.operators.sql_to_gcs.GCSHook')
    def test_query_with_error(self, mock_gcs_hook, mock_mysql_hook):
        mock_mysql_hook.return_value.get_conn.\
            return_value.cursor.return_value.execute.side_effect = ProgrammingError
        op = MySQLToGCSOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=JSON_FILENAME,
            schema_filename=SCHEMA_FILENAME)
        with self.assertRaises(ProgrammingError):
            op.query()

    @mock.patch('airflow.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.operators.sql_to_gcs.GCSHook')
    def test_execute_with_query_error(self, mock_gcs_hook, mock_mysql_hook):
        mock_mysql_hook.return_value.get_conn.\
            return_value.cursor.return_value.execute.side_effect = ProgrammingError
        op = MySQLToGCSOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=JSON_FILENAME,
            schema_filename=SCHEMA_FILENAME)
        with self.assertRaises(ProgrammingError):
            op.execute(None)

import time
import datetime
import calendar

# seconds since the Unix epoch (1970-01-01 UTC)
print(time.time())

# local time 
print(time.localtime(time.time()))

# formatted time
print(time.asctime(time.localtime(time.time())))

for i in range(0,5):
    print(i)
    time.sleep(0.1)


print(datetime.datetime.now())
print(datetime.datetime(2020,12,31,11,59,59))

# comparing two dates
d1 = datetime.datetime(2020, 12, 31, 11, 59, 59)
d2 = datetime.datetime(2020, 1, 2, 11, 59, 59)
# print(type(d1 - d2))  # <class 'datetime.timedelta'>
# print(str(d1 - d2))   # '364 days, 0:00:00'
new_delta_value = str(d1 - d2)
l = new_delta_value.split()
print(l[0])  # day count as a string, e.g. '364'
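
# The string split above isn't actually needed: subtracting two datetimes
# yields a timedelta, which exposes the day count directly.
print((d1 - d2).days)  # 364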


# calendar.prcal() prints the year calendar itself and returns None;
# use calendar.calendar() to get the calendar as a string instead.
twentytwenty = calendar.calendar(2020)
print(twentytwenty)

Example #51
import datetime

def solution(a, b):
    dayOfDate = ["MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN"]

    dateObj = datetime.datetime(2016, a, b)

    return dayOfDate[dateObj.weekday()]
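
# Example usage: 2016-05-24 fell on a Tuesday, so this prints "TUE".
print(solution(5, 24))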
Example #52
 # NOTE: assumes module-level imports: from osgeo import gdal,
 # import numpy as np, import datetime.
 def vrt_loader(self, tile, date, xmin, ymin, xmax, ymax):
     """
     Load the MOD09 VRT band stack for a given tile, date and pixel window.
     """
     vrt_dir = '/home/users/jbrennan01/mod09_vrts/'
     dire = vrt_dir + tile + '/'
     # get the right band
     # load it
     yr = date.year
     xsize = xmax - xmin
     ysize = ymax - ymin
     data = {}
     files = [
         "brdf_%s_%s_b01.vrt" % (yr, tile),
         "brdf_%s_%s_b02.vrt" % (yr, tile),
         "brdf_%s_%s_b03.vrt" % (yr, tile),
         "brdf_%s_%s_b04.vrt" % (yr, tile),
         "brdf_%s_%s_b05.vrt" % (yr, tile),
         "brdf_%s_%s_b06.vrt" % (yr, tile),
         "brdf_%s_%s_b07.vrt" % (yr, tile),
         "statekm_%s_%s.vrt" % (yr, tile),
         "SensorAzimuth_%s_%s.vrt" % (yr, tile),
         "SensorZenith_%s_%s.vrt" % (yr, tile),
         "SolarAzimuth_%s_%s.vrt" % (yr, tile),
         "SolarZenith_%s_%s.vrt" % (yr, tile),
     ]
     dNames = [
         'brdf1',
         'brdf2',
         'brdf3',
         'brdf4',
         'brdf5',
         'brdf6',
         'brdf7',
         'qa',
         'vaa',
         'vza',
         'saa',
         'sza',
     ]
     qainfo = gdal.Open(dire + "statekm_%s_%s.vrt" % (yr, tile))
     doy = np.array([
         int(qainfo.GetRasterBand(b + 1).GetMetadataItem("DoY"))
         for b in range(qainfo.RasterCount)
     ])
     year_doy = np.array([
         int(qainfo.GetRasterBand(b + 1).GetMetadataItem("Year"))
         for b in range(qainfo.RasterCount)
     ])
     dates = np.array([
         datetime.datetime(year, 1, 1) + datetime.timedelta(days - 1)
         for year, days in zip(year_doy, doy)
     ])
     sens = np.array([
         qainfo.GetRasterBand(b + 1).GetMetadataItem("Platform")
         for b in range(qainfo.RasterCount)
     ])
     # select correct date
     #import pdb; pdb.set_trace()
     idx = np.where(dates == date)[0] + 1  # add 1 for GDAL
     # load these bands
     for nm, p in zip(dNames, files):
         datastack = []
         for band in idx:
             pp = gdal.Open(dire + p)
             data_p = pp.GetRasterBand(band)
             data_ = data_p.ReadAsArray(xoff=xmin,
                                        yoff=ymin,
                                        win_xsize=xsize,
                                        win_ysize=ysize)
             datastack.append(data_)
             # print(nm, p, band)
         data[nm] = np.array(datastack)
     data['dates'] = dates[idx - 1]
     data['sensor'] = sens[idx - 1]
     return data
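
 # Hypothetical usage (tile name, date and window are illustrative only,
 # not from the original source):
 #   data = self.vrt_loader('h19v08', datetime.datetime(2008, 7, 4), 0, 0, 512, 512)
 #   data['brdf1'].shape  # -> (n_observations_for_that_date, 512, 512)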
Example #53
 def test_date(self, sup):
     args = {'--date': '2018-10-10'}
     cli.execute_args(args)
     sup.create_file.assert_called_with(datetime(2018, 10, 10))
Example #54

    dts, vals = [], []
    while True:
        line = f.readline()
        if line == '':
            break
        if dstart_reg.search(line) is not None:
            while True:
                line = f.readline()
                if line == '':
                    break
                raw = line.strip().split()
                yr = int(raw[1])
                mn = int(raw[2])
                station = raw[0]
                dy = calendar.monthrange(yr, mn)[1]
                dt = datetime(year=yr, month=mn, day=dy)
                val = float(line[d_start:d_end].replace('[',
                                                        '').replace(']', ''))
                dts.append(dt)
                vals.append(val)

    f.close()
    site_name = station + '(' + dfile.split('.')[0] + ')'
    line_styles[site_name] = '--'
    line_weight[site_name] = 0.1
    line_color[site_name] = line_color_list[i]
    df = pandas.DataFrame({site_name: vals}, index=dts)
    df = df.dropna()
    dfs.append(df)

df = pandas.concat(dfs, axis=1)
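
# The per-site style/weight/colour dicts built above are presumably consumed by
# a plotting step later in the script; a plausible sketch via pandas/matplotlib
# (assumed, not part of the captured source):
import matplotlib.pyplot as plt

df.plot(style=line_styles)  # per-column line styles, e.g. '--'
plt.show()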
Example #55
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from datetime import datetime, timedelta

default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': datetime(2015, 6, 1),
    'email': ['*****@*****.**'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=5),
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}

dag = DAG('tutorial2',
          default_args=default_args,
          schedule_interval=timedelta(days=1))

# t1, t2 and t3 are examples of tasks created by instantiating operators
t1 = BashOperator(task_id='print_date', bash_command='date', dag=dag)

t2 = BashOperator(task_id='sleep', bash_command='sleep 5', retries=3, dag=dag)

templated_command = """
    {% for i in range(5) %}
        echo "{{ ds }}"
Example #56
 def test_last(self, sup):
     args = {'--last': True}
     prev_date = datetime(2018, 10, 10)
     sup.find_last_date.return_value = prev_date
     cli.execute_args(args)
     sup.open_file.assert_called_with(prev_date)
Example #57

# REPL-style excerpt; assumes `import pandas as pd`, `from datetime import date`,
# and DataFrames df / df2 built earlier.
newCol = []
for row in df.itertuples():
    # if(row.Index.date() == date(2018,1,1)):
    #     df2[row.Index.date(),'F'] = 0
    # else:
    #     df2[row.Index.date(),'F'] = 0
    print("A:", row.A, "B :", row.B, "C :", row.C, "D :", row.D)
    print(type(row.Index.date()))
    print(date(2018, 1, 1))
    if (row.Index.date() == date(2018, 1, 1)): print("YES")
pd.to_datetime('20180101')
df2.loc[pd.to_datetime('20180101')]
df2.loc['20180101']
df2.index[0]
pd.to_datetime('20180101')
datetime(2018, 1, 1)
df2
# selection by label
df.loc['20180101']
df.loc['20180101', 'A']
# selection by position
df.iloc[3]
df.iloc[3, 3]
# appending new column
df2 = df.copy()
df2['E'] = ['one', 'two', 'three', 'four']
df
df2.drop(columns='E')  # 'E' is a column; bare drop('E') looks in the index and raises KeyError
df2
df.apply(lambda x: x.max() - x.min())
df2['F'] = df2[['A', 'B', 'C', 'D']].apply(lambda x: x.max() - x.min(), axis=1)  # likely intended row-wise: axis=1, numeric columns only
Example #58
 def test_print_by_date(self, sup):
     args = {'--print': True, '--date': '2018-10-10'}
     cli.execute_args(args)
     sup.print_sup.assert_called_with(datetime(2018, 10, 10))
Example #59
 def test_leaving_off_a_field_with_default_set_the_default_will_be_saved(self):
     a = Article(pub_date=datetime(2005, 7, 31))
     a.save()
     self.assertEqual(a.headline, 'Default headline')
Example #60
import datetime

from django.db import migrations, models
from django.utils.timezone import utc


class Migration(migrations.Migration):

    dependencies = [
        ('OsasSystem', '0005_auto_20210512_1311'),
    ]

    operations = [
        migrations.AlterField(
            model_name='classroom',
            name='room_datecreated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='classroom',
            name='room_dateupdated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='org_accreditation',
            name='acc_datecreated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='org_accreditation',
            name='acc_dateupdated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='org_concept_paper',
            name='con_datecreated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='org_concept_paper',
            name='con_dateupdated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='organization',
            name='org_datecreated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='organization',
            name='org_dateupdated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='organization_chat',
            name='msg_date',
            field=models.DateTimeField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='osas_notif',
            name='notif_datecreated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='osas_r_auth_user',
            name='date_updated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='osas_r_code_title',
            name='ct_datecreated',
            field=models.DateTimeField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='osas_r_course',
            name='course_add_date',
            field=models.DateTimeField(default=datetime.datetime(2021,
                                                                 5,
                                                                 14,
                                                                 2,
                                                                 39,
                                                                 23,
                                                                 495939,
                                                                 tzinfo=utc),
                                       max_length=50),
        ),
        migrations.AlterField(
            model_name='osas_r_course',
            name='course_edit_date',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='osas_r_designation_office',
            name='designation_datecreated',
            field=models.DateTimeField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='osas_r_disciplinary_sanction',
            name='ds_datecreated',
            field=models.DateField(default=datetime.datetime(2021,
                                                             5,
                                                             14,
                                                             2,
                                                             39,
                                                             23,
                                                             495939,
                                                             tzinfo=utc),
                                   null=True),
        ),
        migrations.AlterField(
            model_name='osas_r_personal_info',
            name='date_updated',
            field=models.DateTimeField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='osas_r_personal_info',
            name='stud_birthdate',
            field=models.DateField(default=datetime.datetime(2021,
                                                             5,
                                                             14,
                                                             2,
                                                             39,
                                                             23,
                                                             495939,
                                                             tzinfo=utc),
                                   max_length=12),
        ),
        migrations.AlterField(
            model_name='osas_r_section_and_year',
            name='yas_dateregistered',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='osas_r_userrole',
            name='date_updated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='osas_t_complaint',
            name='comp_datecreated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='osas_t_excuse',
            name='excuse_datecreated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='osas_t_excuse',
            name='excuse_dateupdated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='osas_t_sanction',
            name='sanction_dateupdated',
            field=models.DateField(default=datetime.datetime(
                2021, 5, 14, 2, 39, 23, 495939, tzinfo=utc)),
        ),
    ]
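
# Note on the migration above: it freezes the moment `makemigrations` ran
# (2021-05-14 02:39:23 UTC) as a static column default. If the intent was
# "default to the current time", the model fields should use a callable, which
# Django evaluates at insert time, e.g.:
#
#     from django.utils import timezone
#     msg_date = models.DateTimeField(default=timezone.now)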