def testClientSearch(self):
    """UI snapshot test for the client search dialog.

    Exercises three states -- no filter, a name-substring filter, and a
    birth-date column filter (single day and interval) -- comparing each
    against a stored snapshot via check_search().
    """
    # Start from a clean slate so the snapshots are deterministic.
    self.clean_domain([Commission, SaleItem, Sale, Client])
    client = self.create_client(u'Richard Stallman')
    client.person.individual.birth_date = datetime.date(1989, 3, 4)
    client = self.create_client(u'Junio C. Hamano')
    client.person.individual.birth_date = datetime.date(1972, 10, 15)

    search = ClientSearch(self.store)
    search.search.refresh()
    self.check_search(search, 'client-no-filter')

    # Substring filter; "ham" should match only "Junio C. Hamano".
    search.set_searchbar_search_string(u'ham')
    search.search.refresh()
    self.check_search(search, 'client-string-filter')

    # Attach a date filter to the birth_date column.
    column = search.search.get_column_by_attribute('birth_date')
    search_title = column.get_search_label() + ':'
    search.search.search.add_filter_by_column(column)
    birthday_filter = search.search.search.get_search_filter_by_label(
        search_title)
    search.set_searchbar_search_string('')

    # NOTE(review): the 1987 filter dates match neither fixture birth date
    # (1989-03-04 / 1972-10-15) -- presumably the snapshots expect empty
    # result sets; confirm against the stored snapshot files.
    birthday_filter.select(data=DateSearchFilter.Type.USER_DAY)
    birthday_filter.start_date.update(datetime.date(1987, 3, 4))
    search.search.refresh()
    self.check_search(search, 'client-birthday-date-filter')

    birthday_filter.select(data=DateSearchFilter.Type.USER_INTERVAL)
    birthday_filter.start_date.update(datetime.date(1987, 10, 1))
    birthday_filter.end_date.update(datetime.date(1987, 10, 31))
    search.search.refresh()
    self.check_search(search, 'client-birthday-interval-filter')
def main(): #set up loop start_date = datetime.date(2003,01,01) end_date = datetime.date(2008,12,31) d = start_date delta = datetime.timedelta(days=1) while d <= end_date: #set up url url = 'http://www.cloud-net.org/quicklooks/data/chilbolton/products/classification/' \ + str(d.strftime("%Y")) \ + '/' + str(d.strftime("%Y%m%d")) \ + '_chilbolton_classification.png' #check if exists code = urllib.urlopen(url).code if (code / 100 >= 4): print "No data for ", str(d.strftime("%Y%m%d")), ' continuing...' d += delta continue #check size urllib.urlretrieve(url, str(d.strftime("%Y%m%d")) + '.png') #increment date print 'Succesfully retrieved ', str(d.strftime("%Y%m%d")), ' moving on...' d += delta
def processDataToMapOfDayOfMonthToShares(self, startMonth, startYear, amountPerInvest, dateToPrice):
    """Simulate investing ``amountPerInvest`` every day and bucket the
    purchased shares by day-of-month (1..31).

    From the first of (startYear, startMonth) up to (but excluding) the
    first of the current month, each day buys amountPerInvest / price
    shares.  For months shorter than 31 days the last real day's purchase
    is also credited to the missing trailing day numbers, so every bucket
    1..31 accumulates one purchase per month.
    """
    first_day = date(year=startYear, month=startMonth, day=1)
    now = date.today()
    last_day = date(year=now.year, month=now.month, day=1)
    span = last_day - first_day

    # Shares accumulated per calendar day-of-month.
    shares_by_day = {dom: 0.0 for dom in range(1, 32)}

    one_day = timedelta(days=1)
    current = first_day
    for _ in range(span.days):
        price = dateToPrice[current]
        bought = amountPerInvest / float(price)
        dom = current.day
        shares_by_day[dom] += bought
        current = current + one_day
        # Month rollover: pad day numbers the month didn't have using the
        # final day's purchase.
        if current.day == 1:
            for missing in range(dom + 1, 32):
                shares_by_day[missing] += bought
    return shares_by_day
def test_save(self):
    """signature_date bookkeeping performed by JSignatureTestModel.save().

    Covers: date set on signed creation, kept when the signature is
    unchanged, refreshed when the signature changes, and cleared when the
    signature is set to None.
    """
    # If an object is created signed, signature date must be set
    signature_value = [{"x": [1, 2], "y": [3, 4]}]
    i = JSignatureTestModel(signature=signature_value)
    i.save()
    i = JSignatureTestModel.objects.get(pk=i.pk)
    self.assertEqual(date.today(), i.signature_date.date())
    # If signature doesn't change, signature date must not be updated
    i = JSignatureTestModel(signature=signature_value)
    i.save()
    i.signature_date = date(2013, 1, 1)
    i.signature = signature_value
    i.save()
    i = JSignatureTestModel.objects.get(pk=i.pk)
    self.assertEqual(date(2013, 1, 1), i.signature_date.date())
    # If signature changes, signature date must be updated too
    new_signature_value = [{"x": [5, 6], "y": [7, 8]}]
    i = JSignatureTestModel(signature=signature_value,
                            signature_date=date(2013, 1, 1))
    i.save()
    i.signature_date = date(2013, 1, 1)
    i.signature = new_signature_value
    i.save()
    i = JSignatureTestModel.objects.get(pk=i.pk)
    self.assertEqual(date.today(), i.signature_date.date())
    # If signature is set to None, it must be the same for signature_date
    i = JSignatureTestModel(signature=signature_value)
    i.save()
    i.signature = None
    i.save()
    i = JSignatureTestModel.objects.get(pk=i.pk)
    self.assertIsNone(i.signature_date)
def HolidayDateByYear(year):
    """Return a {"YYYY-MM-DD": holiday-name} map for ``year``.

    Fixed-date holidays are hard-coded; floating holidays come from the
    module-level ``holiday_formulas`` mapping, whose entries appear to be
    (week-ordinal, weekday-name, month[, day-offset]) tuples -- a negative
    ordinal counts back from the end of the month.  TODO: confirm against
    the ``holiday_formulas`` definition (not visible in this chunk).
    """
    holidays = {str(year) + "-01-01": "New Year's Day",
                str(year) + "-12-24": "Christmas Eve",
                str(year) + "-12-25": "Christmas Day",
                str(year) + "-12-31": "New Year's Eve",
                str(year) + "-07-04": "Independence Day",}
    for k in holiday_formulas:
        temp_date = None
        if holiday_formulas[k][0] < 0:
            # Negative ordinal: start at the last day of the month and walk
            # backwards to the wanted weekday.
            if holiday_formulas[k][2] > 11:
                days = 31
            else:
                # Length of the month = first-of-next-month minus first-of-month.
                days = ((date(int(year), holiday_formulas[k][2] + 1, 1)
                         - date(int(year), holiday_formulas[k][2], 1))).days
            temp_date = date(int(year), holiday_formulas[k][2], days)
            delta = -timedelta(days=1)
        else:
            # Positive ordinal: start at day 1/8/15/... and walk forwards.
            start_day = (7 * holiday_formulas[k][0]) + 1
            if start_day == 0:
                # NOTE(review): unreachable -- 7*n+1 is never 0 for int n >= 0.
                start_day = 1
            temp_date = date(int(year), holiday_formulas[k][2], start_day)
            delta = timedelta(days=1)
        # Walk day by day until the weekday name matches.
        while not temp_date.strftime("%A") == holiday_formulas[k][1]:
            temp_date = temp_date + delta
        # NOTE(review): reconstructed from flattened source -- assumed the
        # month check and offset apply after the walk; confirm indentation.
        if not temp_date.month == holiday_formulas[k][2]:
            continue
        if len(holiday_formulas[k]) > 3:
            # Optional extra day offset (4th tuple element).
            temp_date = temp_date + timedelta(days=holiday_formulas[k][3])
        holidays[str(year) + "-" + str(holiday_formulas[k][2]).zfill(2)
                 + "-" + str(temp_date.day).zfill(2)] = k
    return holidays
def test_opinion_manager_between(filter):
    """Ensure date filters are applied by ``between`` manager.

    ``date_start`` must translate to ``created__gte`` and ``date_end`` to
    ``created__lte``.  (The original test repeated the ``date_start`` case
    twice -- an apparent copy-paste slip -- so the ``date_end`` path was
    never exercised.)
    """
    Opinion.objects.between(date_start=date(2011, 1, 1))
    filter.assert_called_with(created__gte=date(2011, 1, 1))
    Opinion.objects.between(date_end=date(2011, 1, 1))
    filter.assert_called_with(created__lte=date(2011, 1, 1))
def addItem(self, item):
    """Append ``item`` -- a (date, value) pair -- to ``self.dateList``.

    The first element may be a date-like object (anything with
    year/month/day attributes) or a "DD-MM-YYYY" style string separated by
    space, '.', '/' or '-'.  The second element must be a real number or a
    callable.

    Raises:
        ValueError: if the date cannot be parsed, if dates are not strictly
            increasing, or if the value is neither a number nor callable.
    """
    dateItem = item[0]
    try:
        year = dateItem.year
        month = dateItem.month
        day = dateItem.day
        item_date = datetime.date(year, month, day)
    except AttributeError:
        try:
            # BUG FIX: the original pattern "[ .-/]" never matched '-'
            # because ".-/" parses as the character range '.'..'/'.  With
            # '-' placed last it is a literal, so "12-10-2021" now splits.
            tmp = re.split(r"[ ./-]", dateItem)
            day = int(tmp[0])
            month = int(tmp[1])
            year = int(tmp[2])
            item_date = datetime.date(year, month, day)
        except Exception:
            raise ValueError("First element was invalid date item")
    if len(self.dateList):
        # Enforce strictly increasing dates against the last stored entry.
        prevItem = self.dateList[-1]
        if prevItem[0] >= item_date:
            raise ValueError("The dates must be in a strictly increasing order")
    value = item[1]
    if isinstance(value, numbers.Real) or callable(value):
        self.dateList.append((item_date, value))
    else:
        raise ValueError("The value argument must be a scalar number or callable")
def test_sqlparams_date(self):
    """Date objects passed via ``sqlparams`` should bind into the WHERE clause."""
    bounds = {'d1': datetime.date(2005, 4, 1),
              'd2': datetime.date(2005, 4, 30)}
    query = self.db.query('video.dvd',
                          columns='$purchasedate',
                          where='$purchasedate BETWEEN :d1 AND :d2',
                          sqlparams=bounds)
    rows = query.selection().output('list')
    # The one DVD purchased in April 2005.
    assert rows[0][0] == datetime.date(2005, 4, 7)
def test_create_entries_from_data_and_source(self):
    """Take the data and source, and create the sitting and entries from it"""
    source = self._create_source_and_load_test_json_to_entries()

    # check source now marked as processed
    source = Source.objects.get(id=source.id)  # reload from db
    self.assertEqual(source.last_processing_success.date(), datetime.date.today())

    # check sitting created
    sitting_qs = Sitting.objects.filter(source=source)
    self.assertEqual(sitting_qs.count(), 1)
    sitting = sitting_qs[0]

    # check start and end date and times correct
    self.assertEqual(sitting.start_date, datetime.date(2011, 9, 1))
    self.assertEqual(sitting.start_time, datetime.time(14, 30, 00))
    self.assertEqual(sitting.end_date, datetime.date(2011, 9, 1))
    self.assertEqual(sitting.end_time, datetime.time(18, 30, 00))

    # check correct venue set
    self.assertEqual(sitting.venue.slug, 'national_assembly')

    # check entries created and that we have the right number
    # (64 matches the fixture JSON loaded by the helper above)
    entries = sitting.entry_set
    self.assertEqual(entries.count(), 64)
def test_builtin_override():
    """A custom encoder registered for ``date`` must override the default rules."""
    # A few date fixtures.
    birthday = date(1979, 10, 12)
    y2k = date(2000, 1, 1)
    plain = date(2012, 1, 1)

    # Built-in rules: ISO-8601 strings.
    assert '"1979-10-12"' in encode(dict(date=birthday))
    assert '"2000-01-01"' in encode(dict(date=y2k))
    assert '"2012-01-01"' in encode(dict(date=plain))

    def jsonify_date(obj):
        if obj.year == 1979 and obj.month == 10 and obj.day == 12:
            return "Jon's Birthday!"
        elif obj.year == 2000 and obj.month == 1 and obj.day == 1:
            return "Its Y2K! Panic!"
        return "%d/%d/%d" % (obj.month, obj.day, obj.year)

    custom_date_encoder = JSONEncoder(custom_encoders={date: jsonify_date})

    # Custom rules take precedence once the encoder is supplied.
    assert '"Jon\'s Birthday!"' in encode(dict(date=birthday),
                                          encoder=custom_date_encoder)
    assert '"Its Y2K! Panic!"' in encode(dict(date=y2k),
                                         encoder=custom_date_encoder)
    assert '"1/1/2012"' in encode(dict(date=plain),
                                  encoder=custom_date_encoder)
def test_clone(self):
    """BaseSearchQuery._clone() must copy every accumulated query option."""
    self.bsq.add_filter(SQ(foo="bar"))
    self.bsq.add_filter(SQ(foo__lt="10"))
    self.bsq.add_filter(~SQ(claris="moof"))
    self.bsq.add_filter(SQ(claris="moof"), use_or=True)
    self.bsq.add_order_by("foo")
    self.bsq.add_model(MockModel)
    self.bsq.add_boost("foo", 2)
    self.bsq.add_highlight()
    self.bsq.add_field_facet("foo")
    self.bsq.add_date_facet("foo", start_date=datetime.date(2009, 1, 1),
                            end_date=datetime.date(2009, 1, 31), gap_by="day")
    self.bsq.add_query_facet("foo", "bar")
    self.bsq.add_stats_query("foo", "bar")
    self.bsq.add_narrow_query("foo:bar")

    clone = self.bsq._clone()
    self.assertTrue(isinstance(clone, BaseSearchQuery))
    # NOTE(review): four add_filter() calls apparently collapse into an SQ
    # tree with two top-level children -- confirm against SQ's combining
    # semantics.
    self.assertEqual(len(clone.query_filter), 2)
    self.assertEqual(len(clone.order_by), 1)
    self.assertEqual(len(clone.models), 1)
    self.assertEqual(len(clone.boost), 1)
    self.assertEqual(clone.highlight, True)
    self.assertEqual(len(clone.facets), 1)
    self.assertEqual(len(clone.date_facets), 1)
    self.assertEqual(len(clone.query_facets), 1)
    self.assertEqual(len(clone.narrow_queries), 1)
    # Offsets and backend are carried over unchanged.
    self.assertEqual(clone.start_offset, self.bsq.start_offset)
    self.assertEqual(clone.end_offset, self.bsq.end_offset)
    self.assertEqual(clone.backend.__class__, self.bsq.backend.__class__)
def test_calculate_partial_dues18(self):
    """
    A test to check if partial dues are calculated the right way.

    "Partial dues" means you have to pay for half a year only,
    for example.
    """
    from c3smembership.presentation.views.dues_2018 import (
        calculate_partial_dues18)
    # Member 1: full year -> Q1 label, 50 EUR.
    member = C3sMember.get_by_id(1)
    res = calculate_partial_dues18(member)
    assert res == (u'q1_2018', D('50'))

    # english member
    member_en = C3sMember.get_by_id(2)
    res = calculate_partial_dues18(member_en)
    assert res == (u'q1_2018', D('50'))

    # Joining in Q2 -> three quarters left: 37.50.
    member_en.membership_date = date(2018, 6, 1)
    res = calculate_partial_dues18(member_en)
    assert res == (u'q2_2018', D('37.50'))

    # Joining in Q3 -> half: 25.
    member_en.membership_date = date(2018, 9, 1)
    res = calculate_partial_dues18(member_en)
    assert res == (u'q3_2018', D('25'))

    # Joining in Q4 -> one quarter: 12.50.
    member_en.membership_date = date(2018, 11, 1)
    res = calculate_partial_dues18(member_en)
    assert res == (u'q4_2018', D('12.50'))
def getAge(self, on_date=None):
    '''
    return the age in form (year(int), months(int), isToday(bool))
    '''
    if on_date is None:
        # use today
        on_date = localsettings.currentDay()
    try:
        nextbirthday = datetime.date(on_date.year, self.dob.month,
                                     self.dob.day)
    except ValueError:
        # catch leap years!!  (a Feb 29 dob in a non-leap year -> Feb 28)
        nextbirthday = datetime.date(on_date.year, self.dob.month,
                                     self.dob.day - 1)
    ageYears = on_date.year - self.dob.year
    if nextbirthday > on_date:
        # this year's birthday hasn't happened yet
        ageYears -= 1
        months = (12 - self.dob.month) + on_date.month
    else:
        months = on_date.month - self.dob.month
    # NOTE(review): reconstructed from flattened source -- this day-of-month
    # adjustment is assumed to apply to both branches above; confirm.
    if self.dob.day > on_date.day:
        months -= 1
    # isToday is deliberately relative to the real current day, not on_date.
    isToday = nextbirthday == localsettings.currentDay()
    return (ageYears, months, isToday)
def test_getting_jsondata(self):
    """Date objects in form data must serialize to ISO strings in json_data."""
    from invenio_deposit import fields
    from invenio_deposit.form import WebDepositForm
    from datetime import date

    class RelatedDatesForm(WebDepositForm):
        date = fields.Date()

    class TestForm(WebDepositForm):
        dates = fields.DynamicFieldList(
            fields.FormField(RelatedDatesForm)
        )

    # Expected Python-side data (date objects) ...
    object_data = {'dates': [
        {'date': date(2002, 1, 1)},
        {'date': date(2013, 1, 1)},
    ]}
    # ... and its JSON counterpart (ISO-8601 strings).
    json_data = {'dates': [
        {'date': '2002-01-01'},
        {'date': '2013-01-01'},
    ]}
    form = TestForm(
        formdata=self.multidict({
            'dates-0-date': '2002-01-01',
            'dates-1-date': '2013-01-01',
        })
    )
    self.assertEqual(form.data, object_data)
    self.assertEqual(form.json_data, json_data)
def create_test_cases(self):
    """Build self.one_time_workflow_1: a one-time workflow fixture with a
    single task group containing two tasks owned by self.user."""
    def person_dict(person_id):
        return {
            "href": "/api/people/%d" % person_id,
            "id": person_id,
            "type": "Person"
        }

    def task_dict(title, description):
        # Every task shares the same contact and the same date window.
        return {
            "title": title,
            "description": description,
            "contact": person_dict(self.user.id),
            "start_date": date(2015, 5, 1),  # friday
            "end_date": date(2015, 5, 5),
        }

    self.one_time_workflow_1 = {
        "title": "one time test workflow",
        "notify_on_change": True,
        "description": "some test workflow",
        "owners": [person_dict(self.user.id)],
        "task_groups": [{
            "title": "one time task group",
            "contact": person_dict(self.user.id),
            "task_group_tasks": [
                task_dict("task 1", "some task"),
                task_dict("task 2", "some task 2"),
            ],
            "task_group_objects": self.random_objects
        }]
    }
def test_unique_for_date(self):
    """full_clean() must enforce unique_for_date/year/month on Post fields."""
    Post.objects.create(title="Django 1.0 is released", slug="Django 1.0",
                        subtitle="Finally", posted=datetime.date(2008, 9, 3))

    # Same title on the same posted date -> unique_for_date violation.
    p = Post(title="Django 1.0 is released", posted=datetime.date(2008, 9, 3))
    with self.assertRaises(ValidationError) as cm:
        p.full_clean()
    self.assertEqual(cm.exception.message_dict,
                     {'title': ['Title must be unique for Posted date.']})

    # Should work without errors (different title, same date)
    p = Post(title="Work on Django 1.1 begins", posted=datetime.date(2008, 9, 3))
    p.full_clean()

    # Should work without errors (same title, different date)
    p = Post(title="Django 1.0 is released", posted=datetime.datetime(2008, 9, 4))
    p.full_clean()

    # Same slug within the same year -> unique_for_year violation.
    p = Post(slug="Django 1.0", posted=datetime.datetime(2008, 1, 1))
    with self.assertRaises(ValidationError) as cm:
        p.full_clean()
    self.assertEqual(cm.exception.message_dict,
                     {'slug': ['Slug must be unique for Posted year.']})

    # Same subtitle within the same month -> unique_for_month violation.
    p = Post(subtitle="Finally", posted=datetime.datetime(2008, 9, 30))
    with self.assertRaises(ValidationError) as cm:
        p.full_clean()
    self.assertEqual(cm.exception.message_dict,
                     {'subtitle': ['Subtitle must be unique for Posted month.']})

    # posted is required, so leaving it out fails validation outright.
    p = Post(title="Django 1.0 is released")
    with self.assertRaises(ValidationError) as cm:
        p.full_clean()
    self.assertEqual(cm.exception.message_dict,
                     {'posted': ['This field cannot be null.']})
def testFromInstance(self):
    """FormData.from_instance() must carry over dates and identifiers.

    Note: the original used octal-style literals ``07`` (a syntax error in
    Python 3); they are replaced by plain decimals with identical values.
    """
    formdata = FormData.from_instance(self.instance)
    self.assertEqual(date(2010, 7, 22), formdata.time_start.date())
    self.assertEqual(date(2010, 7, 23), formdata.time_end.date())
    self.assertEqual("THIS_IS_THE_INSTANCEID", formdata.instance_id)
    self.assertEqual("THIS_IS_THE_DEVICEID", formdata.device_id)
    self.assertEqual("THIS_IS_THE_USERID", formdata.user_id)
def download(years,datasource,dest,skip_existing,datatype,filefmt):
    """ Create date range and downloads file for each date.

    Args:
        years (iterable): years for which to download data
        datasource (str): source ('merra' or 'merra2')
        dest (str): path to destination directory
        skip_existing (bool): skip download if target exists
        datatype (str): choose 'wind' or 'solar' data for presets
        filefmt (str): file format to download
    """
    # Validate the datasource / datatype / file format against PRESETS.
    try:
        options = PRESETS[datasource]
    except KeyError as e:
        raise ArgumentError("Unknown datasource '{}'".format(datasource))
    if datatype not in options['datatypes']:
        raise ArgumentError("Unknown datatype '{}' for source '{}'".format(datatype,datasource))
    if filefmt not in options['fileformats']:
        raise ArgumentError("Unknown file format '{}' for source '{}'".format(filefmt,datasource))

    # One date per day for every requested year (end bound exclusive,
    # so each range stops at Dec 31 of its year).
    dates = chain(*[utils.daterange(start_date=datetime.date(year,1,1),
                                    end_date=datetime.date(year+1,1,1))
                    for year in years])

    # Fan out the per-day downloads over 4 worker threads.
    pool = utils.ThreadPool(4)

    def dl_task(date):
        # One unit of work: fetch a single day's file.
        download_date(date,dest,skip_existing,settings=options,
                      datatype=datatype,filefmt=filefmt)

    for date in dates:
        pool.add_task(dl_task,date)
    pool.wait_completion()
def processOrders(reader, customers, orders):
    """Consume a csv.DictReader of order rows, appending Customer and Order
    objects to the ``customers`` and ``orders`` lists (mutated in place).

    Rows are assumed grouped by customer and then by order: consecutive rows
    with the same name/date/total are line items of one order.  Only rows
    whose status is in ``statuses`` are counted.

    NOTE(review): reconstructed from flattened source -- the nesting of the
    three while-loops and the placement of the trailing appends/returns is
    inferred; verify against the original file.  The function advances the
    reader itself via next(), so the outer for-loop resumes on the row that
    broke the inner grouping.
    """
    statuses = ["Completed", "Shipped", "Partially Shipped"]
    for row in reader:
        if not row["Customer Name"]:
            # Skip continuation/blank rows with no customer.
            continue
        else:
            while row["Order Status"] in statuses:
                # New customer block begins.
                currentCustomer = Customer(row["Customer Name"], row["Customer Phone"])
                customers.append(currentCustomer)
                while row["Customer Name"] == currentCustomer.name:
                    if row["Order Status"] in statuses:
                        # Parse M/D/Y order date into a date object.
                        currentDateList = row["Order Date"].split(sep="/")
                        currentDateList = list(map(lambda s: int(s), currentDateList))
                        currentDateObj = date(currentDateList[2], currentDateList[0], currentDateList[1])
                        currentOrder = Order(currentCustomer, currentDateObj, row["Order Total (inc tax)"])
                        # Accumulate line items while date+total stay the same.
                        while (currentOrder.date == currentDateObj
                               and currentOrder.total == row["Order Total (inc tax)"]):
                            currentOrder.updateItems(row["Product Name"], row["Product Qty"])
                            currentCustomer.updateItems(row["Product Name"], row["Product Qty"])
                            try:
                                row = next(reader)
                                currentDateList = row["Order Date"].split(sep="/")
                                currentDateList = list(map(lambda s: int(s), currentDateList))
                                currentDateObj = date(currentDateList[2], currentDateList[0], currentDateList[1])
                                continue
                            except StopIteration:
                                # Input exhausted: flush the last order.
                                orders.append(currentOrder)
                                return
                        orders.append(currentOrder)
                    else:
                        # Non-countable status: skip ahead to the next row.
                        try:
                            row = next(reader)
                        except StopIteration:
                            return
def archive_month(year, month):
    """Render the month-archive page, clamping the requested month to the
    range [CALENDAR_START's month .. current month]."""
    start_month = datetime.date(CALENDAR_START.year, CALENDAR_START.month, 1)
    to_first_month = redirect(url_for('archive_month',
                                      year=CALENDAR_START.year,
                                      month=CALENDAR_START.month))
    # Zero components are treated as "go to the first archived month".
    if year == 0 or month == 0:
        return to_first_month

    month_start = datetime.date(year, month, 1)
    if month_start < start_month:
        return to_first_month

    today = datetime.datetime.utcnow().date()
    if month_start > today:
        # Future months redirect to the current month.
        return redirect(url_for('archive_month',
                                year=today.year, month=today.month))

    # (day-of-month, weekday) tuples covering full calendar weeks.
    days = list(calendar.Calendar().itermonthdays2(month_start.year,
                                                   month_start.month))
    prev_month = month_start - relativedelta(months=1)
    next_month = month_start + relativedelta(months=1)
    return render_template('pages/archive_month.html',
                           date=month_start,
                           days=days,
                           prev=prev_month,
                           prev_disabled=prev_month < start_month,
                           next=next_month,
                           next_disabled=next_month > today,
                           today=today)
def test_histogram(self):
    """histogram() over 90 days: recent stats stay daily, older ones roll
    up into monthly buckets (keyed by the first of the following month)."""
    session = self.session
    today = util.utcnow().date()
    one_day = today - timedelta(days=1)
    two_days = today - timedelta(days=2)
    one_month = today - timedelta(days=35)
    two_months = today - timedelta(days=70)
    long_ago = today - timedelta(days=100)  # outside the 90-day window
    stats = [
        Stat(key=StatKey.cell, time=long_ago, value=40),
        Stat(key=StatKey.cell, time=two_months, value=50),
        Stat(key=StatKey.cell, time=one_month, value=60),
        Stat(key=StatKey.cell, time=two_days, value=70),
        Stat(key=StatKey.cell, time=one_day, value=80),
        Stat(key=StatKey.cell, time=today, value=90),
    ]
    session.add_all(stats)
    session.commit()
    result = histogram(session, StatKey.cell, days=90)
    # Yesterday's value appears as-is.
    self.assertTrue([unixtime(one_day), 80] in result[0])

    # The two-months-ago value is bucketed under the first day of the
    # FOLLOWING month; roll December over into January of the next year.
    if two_months.month == 12:
        expected = date(two_months.year + 1, 1, 1)
    else:
        expected = date(two_months.year, two_months.month + 1, 1)
    self.assertTrue([unixtime(expected), 50] in result[0])
def setUp(self):
    """Create a test user plus one private and one public objective."""
    super(ObjectiveResourceTest, self).setUp()
    self.username = '******'
    self.password = '******'
    self.user = User.objects.create_user(self.username,
                                         '*****@*****.**',
                                         self.password)
    # Two fixtures differing only in name and privacy.
    fixtures = (
        ("Increase Revenue", True),
        ("Increase Revenue 2", False),
    )
    for name, private in fixtures:
        Objective(
            name=name,
            description="Need more money",
            due_date=datetime.date(2014, 6, 1),
            created=datetime.datetime.now(),
            private=private,
            progress=0,
        ).save()
def test_date_formats(self):
    """Every supported textual date format must parse to 2006-10-25,
    whether used as start_date, on_date (which fills both bounds), or
    end_date."""
    dates = (
        "2006-10-25", "2006/10/25", "25-10-2006", "25/10/2006",
        "Oct 25 2006", "Oct 25, 2006", "25 Oct 2006", "25 Oct, 2006",
        "October 25 2006", "October 25, 2006", "25 October 2006",
        "25 October, 2006"
    )
    project = amcattest.create_test_project()

    # As start_date.
    for date in dates:
        p, c, form = self.get_form(start_date=date, project=project)
        form.full_clean()
        self.assertEqual(datetime.date(2006, 10, 25),
                         form.cleaned_data["start_date"].date())

    # As on_date with datetype="on": both bounds are set to the same day.
    for date in dates:
        p, c, form = self.get_form(on_date=date, project=project, datetype="on")
        form.full_clean()
        self.assertEqual(datetime.date(2006, 10, 25),
                         form.cleaned_data["start_date"].date())
        self.assertEqual(datetime.date(2006, 10, 25),
                         form.cleaned_data["end_date"].date())

    # As end_date.
    for date in dates:
        p, c, form = self.get_form(end_date=date, project=project)
        form.full_clean()
        self.assertEqual(datetime.date(2006, 10, 25),
                         form.cleaned_data["end_date"].date())
def toDateTime(sVal, iDefault=None):
    """Parse an ISO-ish date/datetime string. ("Suponer formato Iso
    OrderingDate" -- assume ISO OrderingDate format.)

    Accepted forms:
        "YYYY-MM-DDTHH:MM:SS" -> datetime.datetime
        "YYYY-MM-DD"          -> datetime.date
        "DD/MM/YYYY HH:MM:SS" -> datetime.datetime
        "DD/MM/YYYY"          -> datetime.date

    Returns ``iDefault`` when ``sVal`` is None or parsing fails, and None
    when the string matches none of the recognised shapes.
    """
    if sVal is None:
        return iDefault
    try:
        if sVal.count("T") > 0:
            # ISO datetime: split date and time halves.
            date_part, time_part = sVal.split("T")
            an, mois, jour = date_part.split('-')
            h, m, s = time_part.split(':')
            return datetime.datetime(int(an), int(mois), int(jour),
                                     int(h), int(m), int(s))
        elif sVal.count("-") == 2:
            # ISO date only.
            an, mois, jour = sVal.split('-')
            return datetime.date(int(an), int(mois), int(jour))
        elif sVal.count("/") == 2:
            if sVal.count(' ') > 0:
                date_part, time_part = sVal.split(" ")
                jour, mois, an = date_part.split('/')
                h, m, s = time_part.split(':')
                return datetime.datetime(int(an), int(mois), int(jour),
                                         int(h), int(m), int(s))
            else:
                # BUG FIX: the original split the undefined name ``date``
                # here; the resulting NameError was swallowed by a bare
                # ``except`` so this branch always returned iDefault.
                jour, mois, an = sVal.split('/')
                return datetime.date(int(an), int(mois), int(jour))
    except Exception:
        return iDefault
def main():
    """Scan the liturgical calendar between argv[1] and argv[2] (both
    YYYY-MM-DD) and print every mass reading whose quote intersects the
    quote given in argv[3].  (Python 2 code: print statement.)"""
    from_date = datetime.date(*map(int, sys.argv[1].split('-')))
    to_date = datetime.date(*map(int, sys.argv[2].split('-')))
    assert from_date <= to_date
    quote = decode_quote(sys.argv[3])
    #print quote
    session = Session()
    lit_years = {}
    date = from_date
    while date <= to_date:
        found = False
        # Discard any pending DB state from the previous day.
        session.rollback()
        lit_date = get_lit_date(date, lit_years, session)
        masses = []
        try:
            masses = lit_date.get_masses(strict=True)
        except SelectingMassException:
            # No unambiguous mass for this day: leave masses empty.
            pass
        for mass in masses:
            if found:
                break
            for reading in mass.readings:
                try:
                    verses = decode_quote(reading.quote)
                except BadQuoteException:
                    # NOTE(review): on a bad quote, ``verses`` keeps its
                    # value from the previous reading (or is unbound on the
                    # very first one), so the intersection test below may
                    # use stale data -- consider ``continue`` here.
                    pass
                if quotes_intersect(quote, verses):
                    # Report only the first matching reading per day.
                    print "%s: %s" % (date, reading.quote)
                    found = True
                    break
        date += datetime.timedelta(days=1)
def test_edit_pair(self):
    """An admin can open the edit form for a scheduled pair and posting
    new values with the existing key overwrites it instead of creating a
    duplicate."""
    # Create the group as an admin user.
    simulate_login(self.testbed, '*****@*****.**', '123', True)
    group = Group(group_id='asgap', name='1', origin='1',
                  admin=[u'*****@*****.**'])
    post_group(group)
    simulate_login(self.testbed)
    group_id = 'asgap'
    simulate_login(self.testbed, '*****@*****.**', '123', True)
    # Seed one pair.
    pair = ScheduledPair(classname='Math',
                         date=datetime.date(2015, 4, 14),
                         start_time=datetime.time(9, 40),
                         duration=90,
                         task='some_task',
                         group_id='asgap')
    response = post_pair(pair)
    added_pair = ScheduledPair.query(
        ScheduledPair.group_id == group_id).fetch(2)[0]
    # The edit form for the stored pair must render.
    response = make_request('/asgap/edit_pair?key=' +
                            added_pair.key.urlsafe(), 'GET')
    self.assertEqual(response.status_int, 200)
    # Post replacement values against the same key.
    pair = ScheduledPair(classname='Math 1',
                         date=datetime.date(2016, 5, 15),
                         start_time=datetime.time(10, 41),
                         duration=120,
                         task='some task\n1',
                         group_id='asgap')
    response = post_pair(pair, added_pair.key.urlsafe())
    self.assertEqual(response.status_int, 302)  # redirect after edit
    # Still exactly one pair, now carrying the new field values.
    pairs_list = ScheduledPair.query(
        ScheduledPair.group_id == group_id).fetch(2)
    self.assertEqual(len(pairs_list), 1)
    added_pair = pairs_list[0]
    self.check_pair_fields(added_pair, pair)
    response = make_request('/asgap/pairs', 'GET')
    self.assertEqual(response.status_int, 200)
def test_insert_and_query(self):
    """Rows inserted into a Table must be found by query(), and multiple
    filter keywords must be ANDed together.

    (Uses ``assertEqual`` -- ``assertEquals`` is a deprecated alias that
    raises DeprecationWarning on modern Python.)
    """
    test_table = pyp_database.table.Table(['id', 'name', 'dob'])
    test_table.insert(1, 'Kevin Bacon', date(1958, 7, 8))
    self.assertEqual(len(test_table.query(name='Kevin Bacon')), 1)
    # A second row with the same name but a different id/dob.
    test_table.insert(2, 'Kevin Bacon', date(1995, 5, 5))
    self.assertEqual(len(test_table.query(name='Kevin Bacon')), 2)
    # Adding id=1 narrows the match back down to one row.
    self.assertEqual(len(test_table.query(name='Kevin Bacon', id=1)), 1)
def get_sale_analysis():
    """Monthly sales counts (one per review) across all phone categories,
    for a bar chart.  (Docstring translated from the original Chinese.)

    Returns a dict with:
        'date_list':  [[year, month], ...], one entry per month since 2012-01
        'count_list': review count per month (used as a sales proxy)
        'change':     percentage growth of the last full month vs the
                      previous one

    NOTE(review): ``c_list`` is read from an enclosing scope not visible in
    this chunk -- confirm where it is defined.
    """
    from datetime import date
    import func
    start_date = date(2012, 1, 1)
    s_date = date(2012, 1, 1)
    date_list = []
    count_list = []
    from dateutil.relativedelta import relativedelta
    # Enumerate month starts from 2012-01 up to (excluding) this month.
    e_date = date(date.today().year, date.today().month, 1)
    while s_date < e_date:
        date_list.append(s_date)
        count_list.append(0)
        s_date = s_date + relativedelta(months=1)
    for c in c_list:
        for r in c['review']:
            # NOTE: rebinds ``date``, shadowing datetime.date from here on.
            date = func.get_review_date(r)
            if date < start_date:
                continue
            else:
                # Linear scan to find the review's month bucket.
                for i in range(len(date_list)):
                    if date_list[i].year == date.year and date_list[i].month == date.month:
                        count_list[i] += 1
                        break
    # NOTE(review): raises ZeroDivisionError if the previous month had zero
    # reviews, and IndexError with fewer than two months of data.
    return {'date_list': [[d.year, d.month] for d in date_list],
            'count_list': count_list,
            'change': round(100 * ((count_list[-1] - count_list[-2])
                                   / float(count_list[-2])), 1)
            }
def test_delete_pair(self):
    """Deleting one of two scheduled pairs removes exactly that pair and
    leaves the other untouched."""
    # Create the group as an admin user.
    simulate_login(self.testbed, '*****@*****.**', '123', True)
    group = Group(group_id='asgap', name='1', origin='1',
                  admin=[u'*****@*****.**'])
    post_group(group)
    simulate_login(self.testbed)
    group_id = 'asgap'
    simulate_login(self.testbed, '*****@*****.**', '123', True)
    # Seed two distinct pairs.
    pair1 = ScheduledPair(classname='Math 1',
                          date=datetime.date(2015, 4, 14),
                          start_time=datetime.time(10, 40),
                          duration=90,
                          task='some_task',
                          group_id='asgap')
    pair2 = ScheduledPair(classname='Math 2',
                          date=datetime.date(2015, 4, 15),
                          start_time=datetime.time(9, 40),
                          duration=120,
                          task='some task',
                          group_id='asgap')
    post_pair(pair1)
    post_pair(pair2)
    pairs_list = ScheduledPair.query(
        ScheduledPair.group_id == group_id).fetch(2)
    added_pair1 = pairs_list[0]
    added_pair2 = pairs_list[1]
    # Delete the first pair via its key.
    response = make_request('/' + group_id + '/delete_pair?key=' +
                            added_pair1.key.urlsafe() +
                            '&return_url=/pairs', 'GET')
    self.assertEqual(response.status_int, 302)  # redirect back to /pairs
    # Only the second pair must remain.
    pairs_list = ScheduledPair.query().fetch(2)
    self.assertEqual(len(pairs_list), 1)
    remained_pair = pairs_list[0]
    self.assertEqual(remained_pair, added_pair2)
def test_get_document(self):
    """The Elasticsearch mapping must serialize the fixture book into the
    expected document structure."""
    # Get document
    document = self.es_mapping.get_document(self.obj)

    # Sort edgengrams so the comparison is order-independent
    if '_edgengrams' in document:
        document['_edgengrams'].sort()

    # Check against the full expected payload (ids, filters, nested authors).
    expected_result = {
        'pk': '4',
        'content_type': ["searchtests.Book"],
        '_edgengrams': ['The Fellowship of the Ring'],
        'title': 'The Fellowship of the Ring',
        'title_filter': 'The Fellowship of the Ring',
        'authors': [
            {
                'name': 'J. R. R. Tolkien',
                'date_of_birth_filter': datetime.date(1892, 1, 3)
            }
        ],
        'publication_date_filter': datetime.date(1954, 7, 29),
        'number_of_pages_filter': 423,
        'tags': []
    }
    self.assertDictEqual(document, expected_result)
def get_previous_month(date):
    """Return the first day of the month preceding ``date``'s month."""
    # Step back from the 1st of this month by one day to land in the
    # previous month, then snap to its first day.
    last_of_previous = date.replace(day=1) - datetime.timedelta(days=1)
    return datetime.date(last_of_previous.year, last_of_previous.month, 1)
    # --- tail of get_data(...) -- its def line is above this chunk ---
    # Turn simulated elapsed days into a visit date, and add the lag to get
    # the conversion date (None when the visit never converts, lag < 0).
    sim['visit_date'] = sim['elapsed'].map(lambda x: end_date - timedelta(x))
    sim['conversion_date'] = sim[['visit_date', 'lag']] \
        .apply(
            lambda x: x[0] + timedelta(x[1])
            if x[1] >= 0 else None, axis=1
        )
    return sim


# simulate test and control data for 28 days
# NOTE(review): the comment above says 28 days but ``days`` is 42 -- confirm
# which is intended.
end_date = date(2022, 2, 28)
days = 42
start_date = end_date - timedelta(days - 1)
data_per_day = 5000

# model parameters for the simulation
# (conversion prob., share converting, conversion-speed rate -- assumed from
# the p/pi/lambda naming; TODO confirm against get_data's definition)
p_t, pi_t, lambda_t = 0.05, 0.6, 0.6
p_c, pi_c, lambda_c = 0.06, 0.2, 0.2

# get the simulated data for treatment and control, then stack them
sim_t = get_data(p_t, pi_t, lambda_t, days, data_per_day, end_date)
sim_c = get_data(p_c, pi_c, lambda_c, days, data_per_day, end_date)
sim_c['is_control'] = 1
sim_t['is_control'] = 0
sim = pd.concat([sim_c, sim_t])
sim = sim.sort_values(['is_control', 'visit_date', 'conversion_date']).reset_index()[['is_control', 'visit_date', 'conversion_date', 'frequency']]
from datetime import date

# Weekday names indexed to match date.weekday() (Monday == 0).
WEEKDAY_NAMES = ['Monday', 'Tuesday', 'Wednesday', 'Thursday',
                 'Friday', 'Saturday', 'Sunday']

# Input: "<day> <month>"; the year is fixed to 2009 by the problem.
day, month = (int(part) for part in input().split())
print(WEEKDAY_NAMES[date(2009, month, day).weekday()])
class Migration(migrations.Migration):
    """Initial schema for the login app: a custom User model plus Class,
    MemberLevel, Member and Staff, and the M2M/FK wiring between them.

    NOTE(review): two long strings (the is_active help_text and the phone
    validator message) spanned a line break in the extracted source and
    were rejoined with a single space -- confirm against the generated
    migration file.
    """

    initial = True

    dependencies = [
        ('auth', '0009_alter_user_last_name_max_length'),
    ]

    operations = [
        # Custom auth user with is_student / is_teacher flags.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
                ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
                ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
                ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('is_student', models.BooleanField(default=False)),
                ('is_teacher', models.BooleanField(default=False)),
            ],
            options={
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
                'abstract': False,
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        # A scheduled class with a weekday slot and start/end times.
        migrations.CreateModel(
            name='Class',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('location', models.CharField(max_length=50)),
                ('time_day', models.CharField(choices=[('MON', 'Monday'), ('TUE', 'Tuesday'), ('WED', 'Wednesday'), ('THU', 'Thursday'), ('FRI', 'Friday'), ('SAT', 'Saturday'), ('SUN', 'Sunday')], default='WED', max_length=3)),
                ('time_start', models.TimeField(default=datetime.time(15, 30))),
                ('time_end', models.TimeField(default=datetime.time(16, 30))),
            ],
        ),
        # Membership tier priced per level; the letter code is the PK.
        migrations.CreateModel(
            name='MemberLevel',
            fields=[
                ('level_status', models.CharField(choices=[('B', 'Bronze'), ('S', 'Silver'), ('G', 'Gold'), ('P', 'Platinum'), ('D', 'Diamond')], default='B', max_length=1, primary_key=True, serialize=False)),
                ('price', models.FloatField()),
            ],
        ),
        # Member profile keyed 1:1 to the auth user.
        migrations.CreateModel(
            name='Member',
            fields=[
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
                ('funds', models.FloatField(default=0)),
                ('expired_date', models.DateField(default=datetime.date(2019, 5, 29))),
                ('name', models.CharField(max_length=100)),
                ('email', models.EmailField(max_length=254)),
                ('phone_number', models.CharField(blank=True, max_length=17, validators=[django.core.validators.RegexValidator(message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", regex='^\\+?1?\\d{9,15}$')])),
                ('classes', models.ManyToManyField(to='login.Class')),
                ('level', models.ForeignKey(default='B', on_delete=django.db.models.deletion.CASCADE, to='login.MemberLevel')),
            ],
        ),
        # Staff profile keyed 1:1 to the auth user.
        migrations.CreateModel(
            name='Staff',
            fields=[
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
                ('name', models.CharField(max_length=100)),
            ],
        ),
        # Deferred M2M fields on User (added after Group/Permission exist).
        migrations.AddField(
            model_name='user',
            name='groups',
            field=models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups'),
        ),
        migrations.AddField(
            model_name='user',
            name='user_permissions',
            field=models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions'),
        ),
        # Each Class is taught by one Staff member.
        migrations.AddField(
            model_name='class',
            name='staff',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='login.Staff'),
        ),
    ]
import datetime

from spindrift.micro import Micro

# Fixture constants shared by the route handler and its callback.
PORT = 12345
PATH = '/test/coerce'
ID = 123
DATE = datetime.date(2018, 12, 13)


def to_date(ts):
    """Parse a 'YYYY-MM-DD' string into a datetime.date."""
    return datetime.datetime.strptime(ts, '%Y-%m-%d').date()


def coerce(request, id, when):
    # Route handler: `id` arrives coerced to int (see the ' ARG int' line
    # in the micro config below); `when` is expected to be a date.
    assert id == ID
    return when.isoformat()


def on_coerce(rc, result):
    # Response callback: rc == 0 signals success.
    assert rc == 0
    assert result == DATE.isoformat()


def test_ping():
    # Inline micro-framework config wiring PATH to the coerce handler.
    # NOTE(review): this function is truncated in the extracted source --
    # the config list and the rest of the test continue beyond this chunk.
    s = [
        'SERVER coerce {}'.format(PORT),
        ' ROUTE {}/(?P<id>\d+)$'.format(PATH),
        ' ARG int',
        ' GET test.test_micro_coerce.coerce',
def _get_partners_followp(self, cr, uid, ids, context=None):
    """Find overdue receivable move lines and the follow-up level each should move to.

    Reads the wizard record (``self.browse``), scans all open receivable
    move lines of the wizard's company, and compares each line's maturity
    (or accounting) date against the trigger date of its current follow-up
    level.

    :return: dict with
        * ``partner_ids``: list of synthetic ids ``partner_id * 10000 + company_id``
          (one per partner/company stat line to refresh)
        * ``to_update``: ``{move_line_id (str): {'level': next_level_id,
          'partner_id': stat_line_id}}``
    """
    # ``context`` defaults to None; guard it before the membership tests below.
    if context is None:
        context = {}
    data = self.browse(cr, uid, ids, context=context)[0]
    company_id = data.company_id.id
    # l.blocked filters out litigation lines so their follow-up level is never
    # advanced; only unreconciled, posted, debit receivable lines are scanned.
    # FIX: the original concatenated "...False)" directly with "ORDER BY",
    # producing invalid SQL ("False)ORDER BY"); a separator space is required.
    cr.execute(
        "SELECT l.partner_id, l.followup_line_id, l.date_maturity, l.date, l.id "
        "FROM account_move_line AS l "
        "LEFT JOIN account_account AS a "
        "ON (l.account_id=a.id) "
        "WHERE (l.reconcile_id IS NULL) "
        "AND (a.type='receivable') "
        "AND (l.state<>'draft') "
        "AND (l.partner_id is NOT NULL) "
        "AND (a.active) "
        "AND (l.debit > 0) "
        "AND (l.company_id = %s) "
        "AND (l.blocked = False) "
        "ORDER BY l.date", (company_id,))
    move_lines = cr.fetchall()

    # Wizard context may override the follow-up definition and reference date.
    fup_id = context.get('followup_id') or data.followup_id.id
    date = context.get('date') or data.date
    current_date = datetime.date(*time.strptime(date, '%Y-%m-%d')[:3])

    cr.execute(
        "SELECT * "
        "FROM account_followup_followup_line "
        "WHERE followup_id=%s "
        "ORDER BY delay", (fup_id,))
    # Map each level id to (trigger_date, next_level_id): a line currently at
    # level ``old`` escalates to ``result['id']`` once it is older than
    # current_date - delay.  The first iteration keys on None, i.e. lines
    # that have no follow-up level yet.
    old = None
    fups = {}
    for result in cr.dictfetchall():
        delay = datetime.timedelta(days=result['delay'])
        fups[old] = (current_date - delay, result['id'])
        old = result['id']

    partner_list = []
    to_update = {}
    # Collect the move lines whose (maturity or accounting) date has passed
    # the trigger date of their current level.
    for partner_id, followup_line_id, date_maturity, date, line_id in move_lines:
        if not partner_id:
            continue
        if followup_line_id not in fups:
            continue
        stat_line_id = partner_id * 10000 + company_id
        # Dates come back from SQL as ISO strings, so compare as strings.
        trigger = fups[followup_line_id][0].strftime('%Y-%m-%d')
        if date_maturity:
            if date_maturity <= trigger:
                if stat_line_id not in partner_list:
                    partner_list.append(stat_line_id)
                to_update[str(line_id)] = {
                    'level': fups[followup_line_id][1],
                    'partner_id': stat_line_id,
                }
        elif date and date <= trigger:
            if stat_line_id not in partner_list:
                partner_list.append(stat_line_id)
            to_update[str(line_id)] = {
                'level': fups[followup_line_id][1],
                'partner_id': stat_line_id,
            }
    return {'partner_ids': partner_list, 'to_update': to_update}
def _receive(options):
    """Receive GDL-90 packets from a UDP socket or a capture file and decode them.

    The source is a binary input file when ``options.inputfile`` is set,
    otherwise a UDP socket bound per the broadcast/interface options.  Each
    chunk read is handed to a gdl90 Decoder; a progress line is printed every
    ``options.reportcount`` packets.  Exits the process if the network
    interface has no usable address info.
    """
    decoder = gdl90.decoder.Decoder()

    # Reference day for the decoder: explicit YYYY-MM-DD option, else today.
    if options.date:
        (year, month, day) = options.date.split("-")
        dayStart = datetime.date(int(year), int(month), int(day))
    else:
        dayStart = datetime.date.today()
    decoder.dayStart = dayStart

    if options.plotflight:
        decoder.format = 'plotflight'
    if options.uat:
        decoder.uatOutput = True

    # Input selection: a capture file takes precedence over the network.
    if options.inputfile:
        useNetwork = False
        s = open(options.inputfile, "rb")
    else:
        useNetwork = True
        try:
            if options.subnetbcast:
                # Listen on the interface's subnet broadcast address.
                listenIP = netifaces.ifaddresses(
                    options.interface)[netifaces.AF_INET][0]['broadcast']
            elif options.bcast:
                listenIP = '<broadcast>'
            else:
                listenIP = ''
        except KeyError as e:
            # Interface exists but lacks IPv4/broadcast details.
            sys.stderr.write(
                "ERROR: error getting network details for '%s' %s\n" %
                (options.interface, e))
            sys.exit(1)
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.bind((listenIP, options.port))

    packetTotal = 0
    while True:
        if useNetwork:
            # Blocking receive; the network loop runs until the process is stopped.
            (data, dataSrc) = s.recvfrom(options.maxsize)
            (saddr, sport) = dataSrc
            sender = "%s:%s" % (saddr, sport)
        else:
            data = s.read(options.maxsize)
            if len(data) == 0:
                # End of capture file.
                break
            sender = "file:%s" % (options.inputfile)
        packetTotal += 1
        if packetTotal % options.reportcount == 0:
            ts = _getTimeStamp()
            print_error("[%s] %s packets received from %s" %
                        (ts, packetTotal, sender))
        decoder.addBytes(data)
    s.close()
def testToJSon(self):
    """ToJSon output must equal json.dumps over the expected structure for
    numeric/string/boolean columns, appended rows, date/time types, and
    columns whose ids/labels need quote escaping.
    """
    # --- number / string / boolean columns, non-ASCII values ---
    json_obj = {"cols": [{"id": "a", "label": "A", "type": "number"},
                         {"id": "b", "label": "b", "type": "string"},
                         {"id": "c", "label": "c", "type": "boolean"}],
                "rows": [{"c": [{"v": 1}, None, None]},
                         {"c": [None, {"v": "z"}, {"v": True}]},
                         {"c": [None, {"v": u"\u05d0"}, None]},
                         {"c": [None, {"v": u"\u05d1"}, None]}]}
    # Last row feeds UTF-8 encoded bytes to verify they decode identically.
    table = DataTable([("a", "number", "A"), "b", ("c", "boolean")],
                      [[1], [None, "z", True], [None, u"\u05d0"],
                       [None, u"\u05d1".encode("utf-8")]])
    self.assertEqual(4, table.NumberOfRows())
    self.assertEqual(json.dumps(json_obj,
                                separators=(",", ":"),
                                ensure_ascii=False).encode("utf-8"),
                     table.ToJSon())
    # AppendData must extend the serialized rows in place.
    table.AppendData([[-1, "w", False]])
    self.assertEqual(5, table.NumberOfRows())
    json_obj["rows"].append({"c": [{"v": -1}, {"v": "w"}, {"v": False}]})
    self.assertEqual(json.dumps(json_obj,
                                separators=(",", ":"),
                                ensure_ascii=False).encode("utf-8"),
                     table.ToJSon())

    # --- timeofday / date / datetime columns ---
    json_obj = {"cols": [{"id": "t", "label": "T", "type": "timeofday"},
                         {"id": "d", "label": "d", "type": "date"},
                         {"id": "dt", "label": "dt", "type": "datetime"}],
                "rows": [{"c": [{"v": [1, 2, 3]}, {"v": "Date(1,1,3)"}, None]}]}
    table = DataTable({("d", "date"): [("t", "timeofday", "T"),
                                       ("dt", "datetime")]})
    table.LoadData({date(1, 2, 3): [time(1, 2, 3)]})
    self.assertEqual(1, table.NumberOfRows())
    self.assertEqual(json.dumps(json_obj, separators=(",", ":")),
                     table.ToJSon(columns_order=["t", "d", "dt"]))

    # LoadData replaces rows; includes a formatted value ("f") and an empty
    # row-value list for the second key.
    json_obj["rows"] = [
        {"c": [{"v": [2, 3, 4], "f": "time 2 3 4"},
               {"v": "Date(2,2,4)"},
               {"v": "Date(1,1,3,4,5,6)"}]},
        {"c": [None, {"v": "Date(3,3,5)"}, None]}]
    table.LoadData({date(2, 3, 4): [(time(2, 3, 4), "time 2 3 4"),
                                    datetime(1, 2, 3, 4, 5, 6)],
                    date(3, 4, 5): []})
    self.assertEqual(2, table.NumberOfRows())
    self.assertEqual(json.dumps(json_obj, separators=(",", ":")),
                     table.ToJSon(columns_order=["t", "d", "dt"]))

    # --- ids and labels containing double quotes must be escaped ---
    json_obj = {"cols": [{"id": "a\"", "label": "a\"", "type": "string"},
                         {"id": "b", "label": "bb\"", "type": "number"}],
                "rows": [{"c": [{"v": "a1"}, {"v": 1}]},
                         {"c": [{"v": "a2"}, {"v": 2}]},
                         {"c": [{"v": "a3"}, {"v": 3}]}]}
    table = DataTable({"a\"": ("b", "number", "bb\"", {})},
                      {"a1": 1, "a2": 2, "a3": 3})
    self.assertEqual(3, table.NumberOfRows())
    self.assertEqual(json.dumps(json_obj, separators=(",", ":")),
                     table.ToJSon())
def test_compose_literal(self, conn):
    """A Literal date placed via SQL.format renders as a quoted ISO string."""
    composed = sql.SQL("select {0};").format(sql.Literal(dt.date(2016, 12, 31)))
    rendered = composed.as_string(conn)
    assert rendered == "select '2016-12-31';"
def get_next_month(date):
    """Return the first day of the month following *date*.

    (Surprisingly, the standard library has no helper for this.)
    """
    year, month = date.year, date.month
    if month == 12:
        # December rolls over to January of the next year.
        year, month = year + 1, 1
    else:
        month += 1
    return datetime.date(year, month, 1)
def test_init(self):
    """Literal wraps text, bytes, numbers and dates alike."""
    for wrapped in ("foo", u"foo", b"foo", 42, dt.date(2016, 12, 31)):
        assert isinstance(sql.Literal(wrapped), sql.Literal)
return True # Define an employee emp_1 = Employee('Soumadiptya', 'Chakraborty', 50000) emp_2 = Employee('Smriti', 'Sukul', 30000) print(Employee.raise_amount, emp_1.raise_amount, emp_2.raise_amount) Employee.set_raise(1.05) print(Employee.raise_amount, emp_1.raise_amount, emp_2.raise_amount) print(emp_1.__dict__) # Class methods as constructors emp_str1 = 'John-Doe-50000' emp_str2 = 'Steve-Smith-60000' emp_str1 = 'Jane-Doe-40000' first, last,pay = emp_str1.split('-') emp_3 = Employee(first, last, pay) print(emp_3.fname, emp_3.lname, emp_3.email) emp_4 = Employee.from_string(emp_str2) print(emp_4.fname, emp_4.lname, emp_4.email) # Check static method import datetime my_date = datetime.date(2019, 12, 26) print(Employee.is_workday(my_date))
# -*- coding: utf-8 -*- from __future__ import unicode_literals from datetime import date from django.db import migrations NEWSLINKS = [ { "title": "Redundant Homepage Link", "link": "http://jambonsw.com", "pub_date": date(2013, 1, 18), "startup": 'jambon-software', }, { "title": "Monkey (Wikipedia)", "link": "https://en.wikipedia.org/wiki/Monkey", "pub_date": date(2012, 7, 22), "startup": "monkey-software", }, { "title": "William Shakespeare (Wikipedia)", "link": "https://en.wikipedia.org/wiki/William_Shakespeare", "pub_date": date(2014, 4, 26), "startup": "monkey-software", }, ] def add_newslink_data(apps, schema_editor): NewsLink = apps.get_model('organizer', 'NewsLink')
def test_auto_literal(self, conn):
    """Plain Python values inside a Composed are adapted to SQL literals."""
    joined = sql.Composed(["fo'o", dt.date(2020, 1, 1)]).join(", ")
    assert isinstance(joined, sql.Composed)
    # noe() normalizes the rendered string (helper defined elsewhere in this module).
    assert noe(joined.as_string(conn)) == "'fo''o', '2020-01-01'"
def isodate(value, format='short'):
    """Render an ISO ``YYYY-MM-DD`` string as a localized date string."""
    parts = [int(piece) for piece in value.split('-')]
    return format_date(date(*parts), format)
def test_auto_literal(self, conn):
    """SQL.format adapts raw strings, ints and dates into SQL literals."""
    query = sql.SQL("select {}, {}, {}").format(
        "he'lo", 10, dt.date(2020, 1, 1)
    )
    rendered = query.as_string(conn)
    assert rendered == "select 'he''lo', 10, '2020-01-01'"
# See templates/oauth2_provider/authorize.html ALLOWED_METRIC_SCRAPE_IPS = [] EMERGENCY_MESSAGE = None # type: str # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts # # In production, Nginx filters requests that are not in this list. If this is # not done, a notification gets sent whenever someone messes with # the HTTP Host header. ALLOWED_HOSTS = ["ion.tjhsst.edu", "198.38.18.250", "localhost", "127.0.0.1"] # When school is scheduled to start SCHOOL_START_DATE = datetime.date(2017, 8, 28) # Dates when hoco starts and ends HOCO_START_DATE = datetime.date(2017, 10, 2) HOCO_END_DATE = datetime.date(2017, 10, 14) PRODUCTION = os.getenv("PRODUCTION", "").upper() == "TRUE" IN_CI = any(os.getenv(key, "").upper() == "TRUE" for key in ["TRAVIS", "GITHUB_ACTIONS"]) # FIXME: figure out a less-hacky way to do this. TESTING = any("test" in arg for arg in sys.argv) LOGGING_VERBOSE = PRODUCTION # Whether to report master password attempts MASTER_NOTIFY = False # DEBUG defaults to off in PRODUCTION, on otherwise.
(pytds.tds_types.BigIntType(), [2 ** 63 - 1, -2 ** 63]), (pytds.tds_types.IntType(), [None, 2 ** 31 - 1]), (pytds.tds_types.IntType(), [None, -2 ** 31]), (pytds.tds_types.RealType(), [0.25, None]), (pytds.tds_types.FloatType(), [0.25, None]), (pytds.tds_types.VarCharType(size=10), [u'', u'testtest12', None, u'foo']), (pytds.tds_types.VarCharType(size=4000), [u'x' * 4000, u'foo']), (pytds.tds_types.VarCharMaxType(), [u'x' * 10000, u'foo', u'', u'testtest', None, u'bar']), (pytds.tds_types.NVarCharType(size=10), [u'', u'testtest12', None, u'foo']), (pytds.tds_types.NVarCharType(size=4000), [u'x' * 4000, u'foo']), (pytds.tds_types.NVarCharMaxType(), [u'x' * 10000, u'foo', u'', u'testtest', None, u'bar']), (pytds.tds_types.VarBinaryType(size=10), [b'testtest12', b'', None]), (pytds.tds_types.VarBinaryType(size=8000), [b'x' * 8000, b'']), (pytds.tds_types.SmallDateTimeType(), [datetime.datetime(1900, 1, 1, 0, 0, 0), None, datetime.datetime(2079, 6, 6, 23, 59, 0)]), (pytds.tds_types.DateTimeType(), [datetime.datetime(1753, 1, 1, 0, 0, 0), None, datetime.datetime(9999, 12, 31, 23, 59, 59, 990000)]), (pytds.tds_types.DateType(), [datetime.date(1, 1, 1), None, datetime.date(9999, 12, 31)]), (pytds.tds_types.TimeType(precision=0), [datetime.time(0, 0, 0), None]), (pytds.tds_types.TimeType(precision=6), [datetime.time(23, 59, 59, 999999), None]), (pytds.tds_types.TimeType(precision=0), [None]), (pytds.tds_types.DateTime2Type(precision=0), [datetime.datetime(1, 1, 1, 0, 0, 0), None]), (pytds.tds_types.DateTime2Type(precision=6), [datetime.datetime(9999, 12, 31, 23, 59, 59, 999999), None]), (pytds.tds_types.DateTime2Type(precision=0), [None]), (pytds.tds_types.DateTimeOffsetType(precision=6), [datetime.datetime(9999, 12, 31, 23, 59, 59, 999999, pytds.tz.utc), None]), (pytds.tds_types.DateTimeOffsetType(precision=6), [datetime.datetime(9999, 12, 31, 23, 59, 59, 999999, pytds.tz.FixedOffsetTimezone(14)), None]), (pytds.tds_types.DateTimeOffsetType(precision=0), 
[datetime.datetime(1, 1, 1, 0, 0, 0, tzinfo=pytds.tz.FixedOffsetTimezone(-14))]), (pytds.tds_types.DateTimeOffsetType(precision=0), [datetime.datetime(1, 1, 1, 0, 14, 0, tzinfo=pytds.tz.FixedOffsetTimezone(14))]), (pytds.tds_types.DateTimeOffsetType(precision=6), [None]), (pytds.tds_types.DecimalType(scale=6, precision=38), [Decimal('123.456789'), None]), (pytds.tds_types.SmallMoneyType(), [Decimal('214748.3647'), None, Decimal('-214748.3648')]), (pytds.tds_types.MoneyType(), [Decimal('922337203685477.5807'), None, Decimal('-922337203685477.5808')]), (pytds.tds_types.SmallMoneyType(), [Decimal('214748.3647')]),
class MySqlToGoogleCloudStorageOperatorTest(unittest.TestCase):
    """Tests for MySqlToGoogleCloudStorageOperator: init, type conversion,
    JSON/CSV export, file splitting and schema-file upload."""

    def test_init(self):
        """Test MySqlToGoogleCloudStorageOperator instance is properly initialized."""
        op = MySqlToGoogleCloudStorageOperator(task_id=TASK_ID,
                                               sql=SQL,
                                               bucket=BUCKET,
                                               filename=JSON_FILENAME,
                                               export_format='CSV',
                                               field_delimiter='|')
        self.assertEqual(op.task_id, TASK_ID)
        self.assertEqual(op.sql, SQL)
        self.assertEqual(op.bucket, BUCKET)
        self.assertEqual(op.filename, JSON_FILENAME)
        # The operator lowercases the export format.
        self.assertEqual(op.export_format, 'csv')
        self.assertEqual(op.field_delimiter, '|')

    # Cases: (input value, BigQuery schema type, expected converted value).
    # Dates/datetimes become epoch seconds; bytes become base64 text.
    @parameterized.expand([
        ("string", None, "string"),
        (datetime.date(1970, 1, 2), None, 86400),
        (datetime.datetime(1970, 1, 1, 1, 0), None, 3600),
        (decimal.Decimal(5), None, 5),
        (b"bytes", "BYTES", "Ynl0ZXM="),
    ])
    def test_convert_type(self, value, schema_type, expected):
        op = MySqlToGoogleCloudStorageOperator(task_id=TASK_ID,
                                               mysql_conn_id=MYSQL_CONN_ID,
                                               sql=SQL,
                                               bucket=BUCKET,
                                               filename=JSON_FILENAME)
        self.assertEqual(op.convert_type(value, schema_type), expected)

    @mock.patch('airflow.contrib.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.contrib.operators.sql_to_gcs.GoogleCloudStorageHook')
    def test_exec_success_json(self, gcs_hook_mock_class, mysql_hook_mock_class):
        """Test successful run of execute function for JSON"""
        op = MySqlToGoogleCloudStorageOperator(task_id=TASK_ID,
                                               mysql_conn_id=MYSQL_CONN_ID,
                                               sql=SQL,
                                               bucket=BUCKET,
                                               filename=JSON_FILENAME)
        # Feed canned rows/description through the mocked MySQL cursor.
        mysql_hook_mock = mysql_hook_mock_class.return_value
        mysql_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        mysql_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION
        gcs_hook_mock = gcs_hook_mock_class.return_value

        # Verify the uploaded temp file's destination, mime type and contents.
        def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False):
            self.assertEqual(BUCKET, bucket)
            self.assertEqual(JSON_FILENAME.format(0), obj)
            self.assertEqual('application/json', mime_type)
            self.assertFalse(gzip)
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(b''.join(NDJSON_LINES), file.read())

        gcs_hook_mock.upload.side_effect = _assert_upload
        op.execute(None)
        mysql_hook_mock_class.assert_called_once_with(
            mysql_conn_id=MYSQL_CONN_ID)
        mysql_hook_mock.get_conn().cursor().execute.assert_called_once_with(
            SQL)

    @mock.patch('airflow.contrib.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.contrib.operators.sql_to_gcs.GoogleCloudStorageHook')
    def test_exec_success_csv(self, gcs_hook_mock_class, mysql_hook_mock_class):
        """Test successful run of execute function for CSV"""
        op = MySqlToGoogleCloudStorageOperator(task_id=TASK_ID,
                                               mysql_conn_id=MYSQL_CONN_ID,
                                               sql=SQL,
                                               export_format='CSV',
                                               bucket=BUCKET,
                                               filename=CSV_FILENAME)
        mysql_hook_mock = mysql_hook_mock_class.return_value
        mysql_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        mysql_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION
        gcs_hook_mock = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False):
            self.assertEqual(BUCKET, bucket)
            self.assertEqual(CSV_FILENAME.format(0), obj)
            self.assertEqual('text/csv', mime_type)
            self.assertFalse(gzip)
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(b''.join(CSV_LINES), file.read())

        gcs_hook_mock.upload.side_effect = _assert_upload
        op.execute(None)
        mysql_hook_mock_class.assert_called_once_with(
            mysql_conn_id=MYSQL_CONN_ID)
        mysql_hook_mock.get_conn().cursor().execute.assert_called_once_with(
            SQL)

    @mock.patch('airflow.contrib.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.contrib.operators.sql_to_gcs.GoogleCloudStorageHook')
    def test_exec_success_csv_ensure_utc(self, gcs_hook_mock_class,
                                         mysql_hook_mock_class):
        """Test successful run of execute function for CSV"""
        op = MySqlToGoogleCloudStorageOperator(task_id=TASK_ID,
                                               mysql_conn_id=MYSQL_CONN_ID,
                                               sql=SQL,
                                               export_format='CSV',
                                               bucket=BUCKET,
                                               filename=CSV_FILENAME,
                                               ensure_utc=True)
        mysql_hook_mock = mysql_hook_mock_class.return_value
        mysql_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        mysql_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION
        gcs_hook_mock = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False):
            self.assertEqual(BUCKET, bucket)
            self.assertEqual(CSV_FILENAME.format(0), obj)
            self.assertEqual('text/csv', mime_type)
            self.assertFalse(gzip)
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(b''.join(CSV_LINES), file.read())

        gcs_hook_mock.upload.side_effect = _assert_upload
        op.execute(None)
        mysql_hook_mock_class.assert_called_once_with(
            mysql_conn_id=MYSQL_CONN_ID)
        # ensure_utc=True must issue the session timezone query before the SQL.
        mysql_hook_mock.get_conn().cursor().execute.assert_has_calls(
            [mock.call(TZ_QUERY), mock.call(SQL)])

    @mock.patch('airflow.contrib.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.contrib.operators.sql_to_gcs.GoogleCloudStorageHook')
    def test_exec_success_csv_with_delimiter(self, gcs_hook_mock_class,
                                             mysql_hook_mock_class):
        """Test successful run of execute function for CSV with a field delimiter"""
        op = MySqlToGoogleCloudStorageOperator(task_id=TASK_ID,
                                               mysql_conn_id=MYSQL_CONN_ID,
                                               sql=SQL,
                                               export_format='csv',
                                               field_delimiter='|',
                                               bucket=BUCKET,
                                               filename=CSV_FILENAME)
        mysql_hook_mock = mysql_hook_mock_class.return_value
        mysql_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        mysql_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION
        gcs_hook_mock = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False):
            self.assertEqual(BUCKET, bucket)
            self.assertEqual(CSV_FILENAME.format(0), obj)
            self.assertEqual('text/csv', mime_type)
            self.assertFalse(gzip)
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(b''.join(CSV_LINES_PIPE_DELIMITED),
                                 file.read())

        gcs_hook_mock.upload.side_effect = _assert_upload
        op.execute(None)
        mysql_hook_mock_class.assert_called_once_with(
            mysql_conn_id=MYSQL_CONN_ID)
        mysql_hook_mock.get_conn().cursor().execute.assert_called_once_with(
            SQL)

    @mock.patch('airflow.contrib.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.contrib.operators.sql_to_gcs.GoogleCloudStorageHook')
    def test_file_splitting(self, gcs_hook_mock_class, mysql_hook_mock_class):
        """Test that ndjson is split by approx_max_file_size_bytes param."""
        mysql_hook_mock = mysql_hook_mock_class.return_value
        mysql_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        mysql_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION
        gcs_hook_mock = gcs_hook_mock_class.return_value
        # Size limit equals the first two lines, forcing the third into file 1.
        expected_upload = {
            JSON_FILENAME.format(0): b''.join(NDJSON_LINES[:2]),
            JSON_FILENAME.format(1): NDJSON_LINES[2],
        }

        def _assert_upload(bucket, obj, tmp_filename, mime_type=None, gzip=False):
            self.assertEqual(BUCKET, bucket)
            self.assertEqual('application/json', mime_type)
            self.assertFalse(gzip)
            with open(tmp_filename, 'rb') as file:
                self.assertEqual(expected_upload[obj], file.read())

        gcs_hook_mock.upload.side_effect = _assert_upload
        op = MySqlToGoogleCloudStorageOperator(
            task_id=TASK_ID,
            sql=SQL,
            bucket=BUCKET,
            filename=JSON_FILENAME,
            approx_max_file_size_bytes=len(
                expected_upload[JSON_FILENAME.format(0)]))
        op.execute(None)

    @mock.patch('airflow.contrib.operators.mysql_to_gcs.MySqlHook')
    @mock.patch('airflow.contrib.operators.sql_to_gcs.GoogleCloudStorageHook')
    def test_schema_file(self, gcs_hook_mock_class, mysql_hook_mock_class):
        """Test writing schema files."""
        mysql_hook_mock = mysql_hook_mock_class.return_value
        mysql_hook_mock.get_conn().cursor().__iter__.return_value = iter(ROWS)
        mysql_hook_mock.get_conn().cursor().description = CURSOR_DESCRIPTION
        gcs_hook_mock = gcs_hook_mock_class.return_value

        def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip):  # pylint: disable=unused-argument
            # Only the schema upload is checked; data uploads pass through.
            if obj == SCHEMA_FILENAME:
                self.assertFalse(gzip)
                with open(tmp_filename, 'rb') as file:
                    self.assertEqual(b''.join(SCHEMA_JSON), file.read())

        gcs_hook_mock.upload.side_effect = _assert_upload
        op = MySqlToGoogleCloudStorageOperator(task_id=TASK_ID,
                                               sql=SQL,
                                               bucket=BUCKET,
                                               filename=JSON_FILENAME,
                                               schema_filename=SCHEMA_FILENAME)
        op.execute(None)

        # once for the file and once for the schema
        self.assertEqual(2, gcs_hook_mock.upload.call_count)
collections = read_namespace_link('collections', filename=parent_dir + '/config/treb_report_page_info.yaml') rpts = {} rpt_dates = [] for collection in collections: if not collections[collection]['nested']: date_col = collections[collection]['date_col'] try: last_report_date = db[collection].find().sort([ (date_col, -1) ]).limit(1)[0][date_col].date() except: logging.info("Could not locate record, starting from 2015") last_report_date = datetime.date(2015, 1, 1) if args.start_date: last_report_date = datetime.datetime.strptime(args.start_date, '%Y-%m-%d').date() if args.end_date: today = datetime.datetime.strptime(args.end_date, '%Y-%m-%d').date() rpts[collection] = find_report_range(today, last_report_date) rpt_dates = rpt_dates + rpts[collection] deduped_rpt_dt = list(set(rpt_dates)) keys = {} keys['base_url'] = read_namespace_link('treb',
def test_get_egn_birth_date_with_valid_date_from_1800_to_1900(self):
    """EGN month codes 21-32 place the birth date in 1800-1899."""
    expected = {
        '002101': datetime.date(1800, 1, 1),
        '993231': datetime.date(1899, 12, 31),
    }
    for egn, birth_date in expected.items():
        self.assertEqual(get_egn_birth_date(egn), birth_date)
def test_date_field_type(api_client, data_fixture):
    """End-to-end check of the 'date' field type over the REST API: creation
    with and without time, validation errors for mismatched formats, correct
    serialization of stored values, and field deletion."""
    user, token = data_fixture.create_user_and_token(
        email='*****@*****.**',
        password='******',
        first_name='Test1')
    table = data_fixture.create_database_table(user=user)

    # Create a plain date field.
    response = api_client.post(reverse('api:database:fields:list',
                                       kwargs={'table_id': table.id}), {
        'name': 'Date',
        'type': 'date'
    }, format='json', HTTP_AUTHORIZATION=f'JWT {token}')
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json['type'] == 'date'
    assert DateField.objects.all().count() == 1
    date_field_id = response_json['id']

    # Create a second date field that also stores a time component.
    response = api_client.post(reverse('api:database:fields:list',
                                       kwargs={'table_id': table.id}), {
        'name': 'Datetime',
        'type': 'date',
        'date_include_time': True
    }, format='json', HTTP_AUTHORIZATION=f'JWT {token}')
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json['type'] == 'date'
    assert DateField.objects.all().count() == 2
    date_time_field_id = response_json['id']

    # Swapped formats (time given to the date-only field and vice versa)
    # must be rejected with per-field validation errors.
    response = api_client.post(reverse(
        'api:database:rows:list', kwargs={'table_id': table.id}), {
        f'field_{date_field_id}': '2020-04-01 12:00',
        f'field_{date_time_field_id}': '2020-04-01'
    }, format='json', HTTP_AUTHORIZATION=f'JWT {token}')
    response_json = response.json()
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response_json['error'] == 'ERROR_REQUEST_BODY_VALIDATION'
    assert response_json['detail'][f'field_{date_field_id}'][0][
        'code'] == 'invalid'
    assert response_json['detail'][f'field_{date_time_field_id}'][0]['code'] == \
        'invalid'

    # Correct formats are accepted and echoed back serialized.
    response = api_client.post(
        reverse('api:database:rows:list', kwargs={'table_id': table.id}), {
            f'field_{date_field_id}': '2020-04-01',
            f'field_{date_time_field_id}': '2020-04-01 14:30:20'
        }, format='json', HTTP_AUTHORIZATION=f'JWT {token}')
    response_json = response.json()
    assert response.status_code == HTTP_200_OK
    assert response_json[f'field_{date_field_id}'] == '2020-04-01'
    assert response_json[
        f'field_{date_time_field_id}'] == '2020-04-01T14:30:20Z'

    # Stored values round-trip to real date/datetime objects (UTC).
    model = table.get_model(attribute_names=True)
    row = model.objects.all().last()
    assert row.date == date(2020, 4, 1)
    assert row.datetime == datetime(2020, 4, 1, 14, 30, 20,
                                    tzinfo=timezone('UTC'))

    # Deleting the datetime field leaves only the date field behind.
    url = reverse('api:database:fields:item',
                  kwargs={'field_id': date_time_field_id})
    response = api_client.delete(url, HTTP_AUTHORIZATION=f'JWT {token}')
    assert response.status_code == HTTP_204_NO_CONTENT
    assert DateField.objects.all().count() == 1
#!/usr/bin/env python3
"""Count the number of days between two dates, including both endpoints."""
from datetime import date
import argparse


def inclusive_days(first, second):
    """Return the day count from *first* to *second*, counting both ends.

    A negative or zero result means *second* precedes *first*.
    """
    return (second - first).days + 1


def main():
    """Parse six Y/M/D components from argv and print the inclusive span."""
    parser = argparse.ArgumentParser()
    parser.add_argument("year1", type=int)
    parser.add_argument("month1", type=int)
    parser.add_argument("day1", type=int)
    parser.add_argument("year2", type=int)
    parser.add_argument("month2", type=int)
    parser.add_argument("day2", type=int)
    args = parser.parse_args()

    d1 = date(args.year1, args.month1, args.day1)
    d2 = date(args.year2, args.month2, args.day2)
    print(inclusive_days(d1, d2), "days")


# Guarding the entry point fixes the original's module-level parse_args(),
# which hijacked sys.argv on import and made the script untestable.
if __name__ == "__main__":
    main()
def test_get_egn_birth_date_with_valid_date_from_2000_to_2100(self):
    """EGN month codes 41-52 place the birth date in 2000-2099."""
    expected = {
        '004101': datetime.date(2000, 1, 1),
        '995231': datetime.date(2099, 12, 31),
    }
    for egn, birth_date in expected.items():
        self.assertEqual(get_egn_birth_date(egn), birth_date)
def _parse_date(d): return datetime.date(*[int(x) for x in d[:10].split('-')])
def test_sqlalchemy_todo_repository_update(session: orm.Session) -> None:
    """Updating one todo by id changes that row and leaves the others intact."""
    # The table starts empty.
    assert session.query(core.TodoDTO).count() == 0

    def make_weekly(todo_id: int, user_id: int,
                    description: str, advance_days: int) -> todo.Weekly:
        # Fixture rows share every field except the ones parameterized here.
        return todo.Weekly(
            advance_days=advance_days,
            category=core.TodoCategory.Todo,
            date_added=datetime.date(2010, 1, 2),
            date_completed=None,
            description=description,
            note="",
            start_date=None,
            todo_id=todo_id,
            user_id=user_id,
            week_day=todo.Weekday.Sunday,
        )

    fixtures = [
        make_weekly(1, 1, "Grocery Shopping", 1),
        make_weekly(2, 2, "Brush Teeth", 0),
        make_weekly(3, 1, "Vacuum", 2),
    ]
    session.add_all([
        src.todo.adapter.sqlalchemy_todo_repository.from_domain(item)
        for item in fixtures
    ])
    session.commit()

    repo = todo.SqlAlchemyTodoRepository(session)
    repo.update(user_id=2, item=make_weekly(2, 2, "Fly a Kite", 0))
    session.commit()

    actual = session.query(core.TodoDTO).filter_by(todo_id=2).first()
    assert actual is not None
    assert actual.description == "Fly a Kite"
import io
import xlsxwriter
from datetime import date
from urllib.request import urlopen

# ================= example: creating a simple workbook =============
workbook = xlsxwriter.Workbook('hello.xlsx')
day_format = workbook.add_format({'num_format': 'yyyy-mm-dd'})
worksheet = workbook.add_worksheet()
worksheet.write('A1', 'Hello world')

# ================= formats =============
# B1: format applied per-cell; C1: format applied to the whole column.
worksheet.write('B1', date(2017, 3, 9), day_format)
worksheet.set_column(1, 1, 15)
worksheet.write('C1', date(2017, 3, 9))
worksheet.set_column(2, 2, 15, day_format)

# ================== inserting an image =============
# worksheet.insert_image('B10', 'logo-icon-big.png')

# ================== inserting an image from a URL =============
# url = 'https://deutscheam.com/globalassets/siteresources/img/logo-icon-big.png'
# image_data = io.BytesIO(urlopen(url).read())
# worksheet.insert_image(
#     'B10',
#     'logo-icon-big.png',
#     {
#         'x_offset': 10,
#         'y_offset': 10,
#         'x_scale': 3,
def setUp(self):
    """Give every test a stub calculator and a fixed reference date."""
    self.FIRST_OF_JAN_2019 = datetime.date(2019, 1, 1)
    self.tax_calculator = DummyTaxCalculator()
def app_load_file():
    """Return a TestApp on Feb 2008 with the 'simple' fixture loaded and the
    transaction view shown."""
    loaded_app = TestApp()
    loaded_app.drsel.set_date_range(MonthRange(date(2008, 2, 1)))
    fixture_path = testdata.filepath('moneyguru', 'simple.moneyguru')
    loaded_app.mw.load_from_xml(fixture_path)
    loaded_app.show_tview()
    return loaded_app
# calculate number of days between the two dates
from datetime import date

start = date(2008, 8, 18)
finish = date(2008, 9, 26)
# Subtracting dates yields a timedelta whose .days is the exclusive gap.
print((finish - start).days)