def test_atm_seller_failed_empty_criteria(brief_response, brief, client, suppliers, supplier_user, supplier_domains, buyer_user, atm_brief, atm_data):
    """A seller response containing an empty criteria value is rejected with 400."""
    # Publish the ATM brief as the buyer.
    response = client.post('/2/login', data=json.dumps({
        'emailAddress': '*****@*****.**', 'password': '******'
    }), content_type='application/json')
    assert response.status_code == 200

    payload = atm_data
    payload['publish'] = True
    payload['closedAt'] = pendulum.today(tz='Australia/Sydney').add(days=2).format('%Y-%m-%d')
    response = client.patch('/2/brief/1', content_type='application/json', data=json.dumps(payload))
    assert response.status_code == 200
    assert brief.delay.called is True

    # Respond as the seller, leaving one criteria answer blank.
    response = client.post('/2/login', data=json.dumps({
        'emailAddress': '*****@*****.**', 'password': '******'
    }), content_type='application/json')
    assert response.status_code == 200

    response = client.post(
        '/2/brief/1/respond',
        data=json.dumps({
            'respondToEmailAddress': '*****@*****.**',
            'respondToPhone': '0263636363',
            'criteria': {
                'TEST': 'bla bla',
                'TEST 2': ''
            }
        }),
        content_type='application/json'
    )
    assert response.status_code == 400
def test_rfx_non_invited_seller_can_not_respond(brief, client, suppliers, supplier_user, supplier_domains, buyer_user, rfx_brief, rfx_data):
    """A seller who is not on the RFX invite list receives 403 when responding."""
    # Buyer publishes the RFX brief, inviting only supplier id 2.
    response = client.post('/2/login', data=json.dumps({
        'emailAddress': '*****@*****.**', 'password': '******'
    }), content_type='application/json')
    assert response.status_code == 200

    payload = rfx_data
    payload['publish'] = True
    payload['closedAt'] = pendulum.today(tz='Australia/Sydney').add(days=2).format('%Y-%m-%d')
    payload['sellers'] = {'2': 'Test Supplier1'}
    response = client.patch('/2/brief/1', content_type='application/json', data=json.dumps(payload))
    assert response.status_code == 200
    assert brief.delay.called is True

    # A different (non-invited) seller attempts to respond.
    response = client.post('/2/login', data=json.dumps({
        'emailAddress': '*****@*****.**', 'password': '******'
    }), content_type='application/json')
    assert response.status_code == 200

    response = client.post(
        '/2/brief/1/respond',
        data=json.dumps({
            'respondToEmailAddress': '*****@*****.**',
            'respondToPhone': '0263636363',
            'attachedDocumentURL': [
                'test.pdf'
            ]
        }),
        content_type='application/json'
    )
    assert response.status_code == 403
def log_query_relative_date(_string, _location, tokens):
    """Generate an SQL where clause for a date expressed via keyword.

    Recognized keywords are "today" and "yesterday".

    The SQL describes a range rather than a fixed day to account
    for timezone differences between the query and the source
    data. For example, "today" in local time is more like "today
    and a bit of tomorrow" in UTC.

    For performance, the query is structured to take advantage of
    an expression-based index. This only works when the query
    expression matches the expression used in the create index
    statement.

    :param _string: unused (pyparsing parse-action signature)
    :param _location: unused (pyparsing parse-action signature)
    :param tokens: parsed tokens; tokens[1] is the keyword
    :returns: SQL BETWEEN clause string over the 'datestamp' column
    :raises ValueError: if tokens[1] is not a recognized keyword
    """
    if tokens[1] == "today":
        reference_date = pendulum.today()
    elif tokens[1] == "yesterday":
        reference_date = pendulum.yesterday()
    else:
        # Previously any other keyword fell through with reference_date
        # unbound, raising UnboundLocalError; fail explicitly instead.
        raise ValueError(
            "Unrecognized relative date keyword: {}".format(tokens[1])
        )

    return "datestamp BETWEEN '{}' AND '{}'".format(
        reference_date.start_of('day').in_timezone('utc').format(
            'YYYY-MM-DD-HH'
        ),
        reference_date.end_of('day').in_timezone('utc').format(
            'YYYY-MM-DD-HH'
        )
    )
def test_diff_for_humans_absolute_seconds():
    """A 59-second offset in either direction renders as "59 seconds" in absolute mode."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        current = pendulum.now().time()
        assert current.diff_for_humans(current.subtract(seconds=59), True) == "59 seconds"
        current = pendulum.now().time()
        assert current.diff_for_humans(current.add(seconds=59), True) == "59 seconds"
def test_diff_for_humans_absolute_minutes():
    """A 30-minute offset in either direction renders as "30 minutes" in absolute mode."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        current = pendulum.now().time()
        assert current.diff_for_humans(current.subtract(minutes=30), True) == "30 minutes"
        current = pendulum.now().time()
        assert current.diff_for_humans(current.add(minutes=30), True) == "30 minutes"
def test_diff_for_humans_absolute_hours():
    """A 3-hour offset in either direction renders as "3 hours" in absolute mode."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        current = pendulum.now().time()
        assert current.diff_for_humans(current.subtract(hours=3), True) == "3 hours"
        current = pendulum.now().time()
        assert current.diff_for_humans(current.add(hours=3), True) == "3 hours"
def parse_date(text, relative_to=None):
    """Converts a date string into a datetime.date

    This is relative to the relative_to date which defaults to today.

    :arg text: the text to parse
    :arg relative_to: (optional) the datetime object to parse dates relative
        to

    :returns: Pendulum (subclass of datetime)

    :raises ValueError: if the text is not parseable

    """
    # First, if it's a date, try parsing it with pendulum--this doesn't require
    # a relative-to date.
    try:
        return pendulum.instance(datetime.datetime.strptime(text, '%Y-%m-%d'))
    except ValueError:
        pass

    if relative_to is None:
        relative_to = pendulum.today()
    else:
        relative_to = pendulum.instance(relative_to)

    # Match on lowercase messages
    text = text.lower()

    # Today and tomorrow
    if text.startswith('tod'):
        return relative_to
    if text.startswith('tom'):
        return relative_to.add(days=1)

    # Day of week; parsed as after today
    # (day of week is 0-based where 0 is a sunday)
    today_index = relative_to.day_of_week
    pairs = [
        ('sunday', 0),
        ('monday', 1),
        ('tuesday', 2),
        ('wednesday', 3),
        ('thursday', 4),
        ('friday', 5),
        ('saturday', 6)
    ]
    for day, offset in pairs:
        if day.startswith(text):
            adjustment = (offset - today_index) % 7
            # BUG FIX: removed the leftover Python-2 debug statement
            # `print today_index, offset, adjustment`, which is a
            # SyntaxError under Python 3 and noise under Python 2.
            return relative_to.add(days=adjustment)

    # FIXME: Other things to support from taskwarrior:
    # http://taskwarrior.org/docs/dates.html#names
    raise ValueError('"%s" is not parseable' % text)
def login(request):
    """Template context for the login view: current year plus OAuth app ids
    read from the deployment settings."""
    config = request.registry.settings
    return {
        'year': pendulum.today().year,
        'google_app_id': config['google.app_id'],
        'windows_app_id': config['windows.app_id'],
    }
def test_days_ago(self):
    """dates.days_ago offsets from today's midnight, with optional h/m/s/us additions."""
    midnight = pendulum.instance(
        datetime.fromordinal(pendulum.today().date().toordinal())
    )
    self.assertTrue(dates.days_ago(0) == midnight)
    self.assertTrue(dates.days_ago(100) == midnight - timedelta(days=100))
    self.assertTrue(dates.days_ago(0, hour=3) == midnight + timedelta(hours=3))
    self.assertTrue(dates.days_ago(0, minute=3) == midnight + timedelta(minutes=3))
    self.assertTrue(dates.days_ago(0, second=3) == midnight + timedelta(seconds=3))
    self.assertTrue(dates.days_ago(0, microsecond=3) == midnight + timedelta(microseconds=3))
def process_closed_briefs():
    """Send a 'brief closed' email for every digital-marketplace brief that
    closed yesterday (Canberra time). Designed to run just after midnight."""
    window_start = pendulum.yesterday(tz='Australia/Canberra')
    window_end = pendulum.today(tz='Australia/Canberra')
    recently_closed = (
        db.session
        .query(Brief)
        .join(Framework)
        .filter(
            Brief.closed_at >= window_start,
            Brief.closed_at < window_end,
            Brief.withdrawn_at.is_(None),
            Framework.slug == 'digital-marketplace'
        )
        .all()
    )
    for item in recently_closed:
        send_brief_closed_email(item)
def get_active_date(log_records, query):
    """Figure out which date the query pertains to.

    This value is used by the calender widget in the UI.
    """
    # If records were found, the first record's timestamp wins outright.
    if log_records:
        return log_records[0]["unix_timestamp"]

    timezone = cherrypy.engine.publish(
        "registry:local_timezone"
    ).pop()

    # Look for an explicit "date YYYY-MM" or "date YYYY-MM-DD" in the query;
    # the full-date form takes precedence over the month-only form.
    date_string = None
    month_match = re.match(r"date\s+(\d{4}-\d{2})", query)
    if month_match:
        date_string = month_match.group(1) + "-01"
    day_match = re.match(r"date\s+(\d{4}-\d{2}-\d{2})", query)
    if day_match:
        date_string = day_match.group(1)

    if date_string:
        active_date = pendulum.parse(date_string, tz=timezone)
    elif re.match(r"date\s+yesterday", query):
        active_date = pendulum.yesterday(tz=timezone)
    else:
        active_date = pendulum.today(tz=timezone)

    return active_date.start_of('day')
def test_month_layout_invalid_start(self, publish_mock, render_mock):
    """An invalid start date for a monthly layout is handled gracefully"""
    month_start = pendulum.today().start_of('month')

    def fake_publish(*args, **_):
        """Serve a month-layout grid config for registry:search lookups."""
        if args[0] == "registry:search":
            grid_config = "Column 4, Column 5, Column 6\nlayout=month"
            return [{"grids:test1": grid_config}]
        return mock.DEFAULT

    publish_mock.side_effect = fake_publish

    # "1234-56" is not a parseable start month; the view should fall back
    # to the current month.
    self.request("/", name="test1", start="1234-56")

    self.assertEqual(
        helpers.html_var(render_mock, "rows")[0][0],
        month_start.strftime("%b %-d, %Y")
    )
def timesheet_cmd(cfg, ctx):
    """Shows timesheet for the week"""
    api = todoist.api.TodoistAPI(cfg['auth_token'])
    api.sync()

    # Walk back to the start of the week (day_of_week == 0).
    day = pendulum.today()
    while day.day_of_week != 0:
        day = day.add(days=-1)

    click.echo('Timesheet week of %s' % day.strftime('%c'))
    click.echo('')

    for _ in range(7):
        window_start = day.start_of('day').in_timezone('UTC')
        window_end = day.end_of('day').in_timezone('UTC')
        activity = api.completed.get_all(
            since=window_start.strftime('%Y-%m-%dT%H:%M'),
            until=window_end.strftime('%Y-%m-%dT%H:%M'),
            limit=50
        )
        click.echo('[%s: %s]' % (day.strftime('%A (%Y-%m-%d)'), len(activity['items'])))
        click.echo('')

        rows = [('id', 'content', 'proj')]
        for event in activity['items']:
            rows.append((
                event['task_id'],
                event['content'],
                display_project(api.projects.get_by_id(event['project_id'])),
            ))

        rendered = prettytable(click.get_terminal_size()[0] - 2, rows)
        for line in rendered.splitlines():
            click.echo(' ' + line)
        click.echo('')
        day = day.add(days=1)
def test_atm_seller_success_with_file(brief_response, brief, client, suppliers, supplier_user, supplier_domains, buyer_user, atm_brief, atm_data):
    """A complete seller response (file + all criteria) is accepted with 201."""
    # Publish the ATM brief as the buyer, requesting more info via case study.
    response = client.post('/2/login', data=json.dumps({
        'emailAddress': '*****@*****.**', 'password': '******'
    }), content_type='application/json')
    assert response.status_code == 200

    payload = atm_data
    payload['publish'] = True
    payload['requestMoreInfo'] = 'yes'
    payload['evaluationType'].append('Case study')
    payload['closedAt'] = pendulum.today(tz='Australia/Sydney').add(days=2).format('%Y-%m-%d')
    response = client.patch('/2/brief/1', content_type='application/json', data=json.dumps(payload))
    assert response.status_code == 200
    assert brief.delay.called is True

    # Respond as the seller with an attachment and every criteria answered.
    response = client.post('/2/login', data=json.dumps({
        'emailAddress': '*****@*****.**', 'password': '******'
    }), content_type='application/json')
    assert response.status_code == 200

    response = client.post(
        '/2/brief/1/respond',
        data=json.dumps({
            'respondToEmailAddress': '*****@*****.**',
            'respondToPhone': '0263636363',
            'attachedDocumentURL': ['TEST.pdf'],
            'criteria': {
                'TEST': 'bla bla',
                'TEST 2': 'bla bla'
            }
        }),
        content_type='application/json'
    )
    assert response.status_code == 201
    assert brief_response.delay.called is True
def test_diff_for_humans_other_and_minute():
    """Diffing against a time one minute later reads "1 minute before"."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        current = pendulum.now().time()
        assert current.diff_for_humans(current.add(minutes=1)) == "1 minute before"
def test_diff_for_humans_now_and_future_hours():
    """A time two hours ahead of now reads "in 2 hours"."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        current = pendulum.now().time()
        assert current.add(hours=2).diff_for_humans() == "in 2 hours"
def test_diff_for_humans_now_and_future_minute():
    """A time one minute ahead of now reads "in 1 minute"."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        current = pendulum.now().time()
        assert current.add(minutes=1).diff_for_humans() == "in 1 minute"
def get_days_remaining(self, examprogress):
    """Whole days from today until the exam plan's end date, floored at 0."""
    today = pendulum.today().date()
    remaining = pendulum.period(
        today, examprogress.plan.end_time.date(), absolute=False
    ).days
    return max(remaining, 0)
def test_dates_clock_events_with_after_argument(self):
    """events(after=...) yields the date only when `after` precedes it.

    BUG FIX: `islice(...)` returns an iterator, so comparing it directly to
    a list is always False (the asserts could never meaningfully pass).
    Materialise the slice with list() before comparing.
    """
    dt = pendulum.today("UTC").add(days=1)
    c = clocks.DatesClock([dt])

    assert list(islice(c.events(after=dt - timedelta(seconds=1)), 1)) == [dt]
    assert list(islice(c.events(after=dt.add(days=-1)), 1)) == [dt]
    # `after` past the date: nothing is yielded.
    assert list(islice(c.events(after=dt.add(days=1)), 1)) == []
def previous_days(n, before=None):
    """Return last n days before specified date."""
    if not before:
        before = pendulum.today()
    span = before - before.subtract(days=n)
    return span.range('days')
def test_diff_for_humans_other_and_future_hours():
    """Diffing against a time two hours earlier reads "2 hours after"."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        current = pendulum.now().time()
        assert current.diff_for_humans(current.subtract(hours=2)) == "2 hours after"
from d3a.models.config import SimulationConfig from d3a.models.strategy import BaseStrategy from d3a.models.strategy.external_strategies import ExternalMixin log = getLogger(__name__) # TODO: As this is only used in the unittests, please remove it here and replace the usages # of this class with d3a-interface.constants_limits.GlobalConfig class: DEFAULT_CONFIG = SimulationConfig( sim_duration=duration(hours=24), market_count=1, slot_length=duration(minutes=15), tick_length=duration(seconds=1), cloud_coverage=ConstSettings.PVSettings.DEFAULT_POWER_PROFILE, start_date=today(tz=d3a.constants.TIME_ZONE), max_panel_power_W=ConstSettings.PVSettings.MAX_PANEL_OUTPUT_W ) def check_area_name_exists_in_parent_area(parent_area, name): """ Check the children of parent area , iterate through its children and check if the name to be appended does not exist Note: this check is to be called before adding a new area of changing its name :param parent_area: Parent Area :param name: New name of area :return: boolean """ for child in parent_area.children: if child.name == name:
import pendulum

# Current moment: local timezone vs. an explicit timezone.
now = pendulum.now()
print("now", now)
london = pendulum.now("Europe/London")
print('----' * 10)
print("london", london)
print('----' * 10)

# Day-level helpers (midnight of the respective day, local timezone).
print("yesterday", pendulum.yesterday())
print("today", pendulum.today())
print("tomorrow", pendulum.tomorrow())
print('----' * 10)

# Difference between two datetimes: humanized string and a subtraction
# result that exposes in_hours/in_months/in_days accessors.
dt1 = pendulum.datetime(2020, 7, 28)
dt2 = pendulum.datetime(2020, 12, 22)
d = dt1.diff_for_humans(dt2)
d2 = dt2 - dt1
print("in_hours", d2.in_hours())
print("in_months", d2.in_months())
print("in_days", d2.in_days())
print("for_humans", d)
print('----' * 10)

# Common string serialisations of a single datetime.
dt3 = pendulum.datetime(2020, 7, 28, 15, 30)
print("cookie", dt3.to_cookie_string())
print("iso", dt3.to_iso8601_string())
print("rfc", dt3.to_rfc822_string())
@click.option('--seed', help="Manually specify random seed") @click.option('--paused', is_flag=True, default=False, show_default=True, help="Start simulation in paused state") @click.option('--pause-at', type=str, default=None, help=f"Automatically pause at a certain time. " f"Accepted Input formats: ({DATE_FORMAT}, {TIME_FORMAT}) [default: disabled]") @click.option('--repl/--no-repl', default=False, show_default=True, help="Start REPL after simulation run.") @click.option('--no-export', is_flag=True, default=False, help="Skip export of simulation data") @click.option('--export-path', type=str, default=None, show_default=False, help="Specify a path for the csv export files (default: ~/d3a-simulation)") @click.option('--enable-bc', is_flag=True, default=False, help="Run simulation on Blockchain") @click.option('--compare-alt-pricing', is_flag=True, default=False, help="Compare alternative pricing schemes") @click.option('--start-date', type=DateType(DATE_FORMAT), default=today(tz=TIME_ZONE).format(DATE_FORMAT), show_default=True, help=f"Start date of the Simulation ({DATE_FORMAT})") def run(setup_module_name, settings_file, slowdown, duration, slot_length, tick_length, market_count, cloud_coverage, compare_alt_pricing, start_date, pause_at, **kwargs): try: if settings_file is not None: simulation_settings, advanced_settings = read_settings_from_file(settings_file) update_advanced_settings(advanced_settings) validate_global_settings(simulation_settings) simulation_settings["external_connection_enabled"] = False simulation_config = SimulationConfig(**simulation_settings) else: global_settings = {"sim_duration": duration, "slot_length": slot_length, "tick_length": tick_length,
def briefs(self, app, users, suppliers):
    """Fixture: seed one ATM, one specialist and one RFX brief, then yield
    all briefs from the session.

    All three briefs are published "now" with questions closing in 3 days
    and the brief closing in 5 days.
    """
    framework = frameworks_service.find(slug='digital-marketplace').one_or_none()
    atm_lot = lots_service.find(slug='atm').one_or_none()
    specialist_lot = lots_service.find(slug='specialist').one_or_none()
    # NOTE(review): rfx_lot is fetched but never used below — rfx_brief is
    # created with lot=specialist_lot. Confirm whether that is intentional.
    rfx_lot = lots_service.find(slug='rfx').one_or_none()
    now = pendulum.now('utc')
    with app.app_context():
        # Brief 1: ATM, open to all sellers.
        atm_brief = Brief(
            id=1,
            data={
                'title': 'ATM title',
                'closedAt': pendulum.today(tz='Australia/Sydney').add(days=14).format('%Y-%m-%d'),
                'organisation': 'ABC',
                'summary': 'My ATM summary',
                'location': [
                    'New South Wales'
                ],
                'sellerCategory': '',
                'openTo': 'all',
                'requestMoreInfo': 'yes',
                'evaluationType': [
                    'References',
                    'Case study',
                ],
                'attachments': [
                    'TEST3.pdf'
                ],
                'industryBriefing': 'TEST',
                'startDate': 'ASAP',
                'includeWeightings': True,
                'evaluationCriteria': [
                    {
                        'criteria': 'TEST',
                        'weighting': '55'
                    },
                    {
                        'criteria': 'TEST 2',
                        'weighting': '45'
                    }
                ],
                'contactNumber': '0263635544',
                'timeframeConstraints': 'TEST',
                'backgroundInformation': 'TEST',
                'outcome': 'TEST',
                'endUsers': 'TEST',
                'workAlreadyDone': 'TEST'
            },
            framework=framework,
            lot=atm_lot,
            users=users,
            published_at=now,
            withdrawn_at=None
        )
        atm_brief.questions_closed_at = now.add(days=3)
        atm_brief.closed_at = now.add(days=5)
        db.session.add(atm_brief)
        # Brief 2: specialist, open to a single selected seller.
        specialist_brief = Brief(
            id=2,
            data={
                'areaOfExpertise': 'Software engineering and Development',
                'attachments': [],
                'budgetRange': '',
                'closedAt': pendulum.today(tz='Australia/Sydney').add(days=14).format('%Y-%m-%d'),
                'contactNumber': '0123456789',
                'contractExtensions': '',
                'contractLength': '1 year',
                'comprehensiveTerms': True,
                'essentialRequirements': [
                    {
                        'criteria': 'TEST',
                        'weighting': '55'
                    },
                    {
                        'criteria': 'TEST 2',
                        'weighting': '45'
                    }
                ],
                'evaluationType': [
                    'Responses to selection criteria',
                    # NOTE(review): str.decode exists only on Python 2 —
                    # this line raises AttributeError on Python 3.
                    'Résumés'.decode('utf-8')
                ],
                'includeWeightingsEssential': False,
                'includeWeightingsNiceToHave': False,
                'internalReference': '',
                'location': [
                    'Australian Capital Territory'
                ],
                'maxRate': '123',
                'niceToHaveRequirements': [
                    {
                        'criteria': 'Code review',
                        'weighting': '0'
                    }
                ],
                'numberOfSuppliers': '3',
                'openTo': 'selected',
                'organisation': 'Digital Transformation Agency',
                'preferredFormatForRates': 'dailyRate',
                'securityClearance': 'noneRequired',
                'securityClearanceCurrent': '',
                'securityClearanceObtain': '',
                'securityClearanceOther': '',
                'sellers': {
                    '1': {
                        'name': 'Seller 1'
                    }
                },
                'sellerCategory': '6',
                'sellerSelector': 'oneSeller',
                'startDate': pendulum.today(tz='Australia/Sydney').add(days=14).format('%Y-%m-%d'),
                'summary': 'My specialist summary',
                'title': 'Specialist title'
            },
            framework=framework,
            lot=specialist_lot,
            users=users,
            published_at=now,
            withdrawn_at=None
        )
        specialist_brief.questions_closed_at = now.add(days=3)
        specialist_brief.closed_at = now.add(days=5)
        db.session.add(specialist_brief)
        # Brief 3: RFX with seller '1' invited.
        rfx_brief = Brief(
            id=3,
            data={
                'title': 'TEST',
                'closedAt': pendulum.today(tz='Australia/Sydney').add(days=14).format('%Y-%m-%d'),
                'organisation': 'ABC',
                'summary': 'TEST',
                'workingArrangements': 'TEST',
                'location': [
                    'New South Wales'
                ],
                'sellerCategory': '1',
                'sellers': {
                    '1': {
                        'name': 'Seller 1'
                    }
                },
                'evaluationType': [
                    'Response template',
                    'Written proposal'
                ],
                'proposalType': [
                    'Breakdown of costs'
                ],
                'requirementsDocument': [
                    'TEST.pdf'
                ],
                'responseTemplate': [
                    'TEST2.pdf'
                ],
                'startDate': 'ASAP',
                'contractLength': 'TEST',
                'includeWeightings': True,
                'essentialRequirements': [
                    {
                        'criteria': 'TEST',
                        'weighting': '55'
                    },
                    {
                        'criteria': 'TEST 2',
                        'weighting': '45'
                    }
                ],
                'niceToHaveRequirements': [],
                'contactNumber': '0263635544'
            },
            framework=framework,
            lot=specialist_lot,
            users=users,
            published_at=now,
            withdrawn_at=None
        )
        rfx_brief.questions_closed_at = now.add(days=3)
        rfx_brief.closed_at = now.add(days=5)
        db.session.add(rfx_brief)
        yield db.session.query(Brief).all()
def __init__(self, aDate=None):
    """Initialise with a pendulum datetime and cache its timestamp.

    :param aDate: pendulum datetime to wrap; defaults to today's date
        (midnight) evaluated at call time.

    BUG FIX: the default was previously ``aDate=pendulum.today()``, which
    is evaluated ONCE at class-definition time — every no-arg instance in a
    long-running process would share the process start date. Use a None
    sentinel and resolve the default per call instead.
    """
    super(Prix, self).__init__()
    self.aDate = pendulum.today() if aDate is None else aDate
    self.timestamp = self.aDate.timestamp()
    print("PENDULUM UTC TODAY", self.aDate.today())
    print("PENDULUM UTC TIMESTAMP TODAY ", self.timestamp)
def get_days_remaining(self, examprogress):
    """Absolute number of whole days between the plan's end date and today.

    NOTE(review): absolute=True means a deadline already in the past also
    yields a positive count — confirm that is the intended semantics.
    """
    today = pendulum.today().date()
    span = pendulum.period(
        examprogress.plan.end_time.date(), today, absolute=True
    )
    return span.days
def test_diff_for_humans_other_and_hours():
    """Diffing against a time two hours later reads "2 hours before"."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        current = pendulum.now().time()
        assert current.diff_for_humans(current.add(hours=2)) == "2 hours before"
def test_diff_for_humans_other_and_future_minute():
    """Diffing against a time one minute earlier reads "1 minute after"."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        current = pendulum.now().time()
        assert current.diff_for_humans(current.subtract(minutes=1)) == "1 minute after"
# TODO: convert the time to another time zone d2 = dt1.in_timezone("Asia/Singapore") print(d2) # TODO: create a new datetime using the now() function dt3 = pendulum.now() print(dt3) # TODO: Use the local function function here = pendulum.local(2020, 12, 31) print(here) print(here.timezone.name) # TODO: Use today, tomorrow, yesterday today = pendulum.today() tomorrow = pendulum.tomorrow() yesterday = pendulum.yesterday("America/New_York") print(today) print(tomorrow) print(yesterday) # TODO: create a datetime from a system timestamp t = time.time() dt4 = pendulum.from_timestamp(t) print(dt4)
def future(ctx):
    """Print the upcoming calendar events (today onwards), humanized."""
    upcoming = [entry for entry in read_events() if entry.dt >= pendulum.today()]
    for entry in upcoming:
        print(entry.dt.diff_for_humans(), entry.summary)
def fn_subscription_start_date(cohort):
    """Return the first day of the current month as a plain datetime.

    NOTE: ``._datetime`` is a private pendulum attribute used to unwrap the
    pendulum instance into a stdlib datetime.
    """
    month_start = pendulum.today().start_of('month')
    return month_start._datetime
def test_dates_clock_events(self):
    """Test that default after is *now*

    BUG FIX: `islice(...)` returns an iterator, so comparing it directly to
    a list is always False (the asserts could never meaningfully pass).
    Materialise the slice with list() before comparing.
    """
    start_date = pendulum.today("UTC").add(days=1)
    c = clocks.DatesClock([start_date])

    assert list(islice(c.events(), 3)) == [start_date]
    assert list(islice(c.events(), 1)) == [start_date]
async def event_timer_check(self):
    """Periodic task: announce guild events that end within the next hour.

    For every guild, reads the event DB, builds an alert message for each
    event whose (timezone-adjusted) end time is 0-60 minutes away,
    de-duplicates alerts via a hash kept in self.records, and sends them
    to the guild's configured 'events' channels.
    """
    loop_time = pendulum.now()
    # Expire alert records older than ~1 hour so recurring events can be
    # announced again.
    # NOTE(review): deleting from self.records while iterating
    # self.records.items() raises RuntimeError on Python 3 — iterate over
    # list(self.records.items()) instead.
    for rec_hash, rec_time in self.records.items():
        rec_diff = loop_time - rec_time
        if rec_diff.hours < 1 or (
                rec_diff.hours == 1 and rec_diff.minutes < 1
                ):
            continue
        else:
            del self.records[rec_hash]
    for guild_id, guild_db in self.guilds.items():
        messages = []
        results = await guild_db.list_all_events()
        if not results:
            continue
        channels = await guild_db.get_channels(
            'events'
        )
        if not channels:
            continue
        # Per-guild offset between server time and guild-local time.
        diff_h, diff_m = await vaivora.common.get_time_diff(guild_id)
        full_diff = timedelta(hours=diff_h, minutes=diff_m)
        # Sort by time - year, month, day, hour, minute
        results.sort(key=itemgetter(5, 6, 7, 8, 9))
        for result in results:
            try:
                # result[1:-1] holds the event's date/time fields.
                entry_time = pendulum.datetime(
                    *[int(t) for t in result[1:-1]],
                    tz=loop_time.timezone_name
                )
            except ValueError as e:
                # NOTE(review): `ctx` is not defined in this coroutine —
                # this logging call would itself raise NameError if reached.
                logger.error(
                    f'Caught {e} in cogs.events: event_timer_check; '
                    f'guild: {guild_id}; '
                    f'user: {ctx.author.id}; '
                    f'command: {ctx.command}'
                )
                continue
            end_date = entry_time.strftime("%Y/%m/%d %H:%M")
            time_diff = entry_time - (loop_time + full_diff)
            # Record is in the past if any of the below conditions match
            if (time_diff.hours < 0
                    or time_diff.minutes < 0
                    or time_diff.days != 0):
                continue
            mins = f'{time_diff.minutes} minute'
            if time_diff.minutes != 1:
                mins = f'{mins}s'
            name = result[0]
            if name in vaivora.db.permanent_events:
                # If a permanent event is disabled, simply skip it
                if result[-1] == 0:
                    continue
                # Compute the next weekly occurrence of this permanent event.
                today = pendulum.today()
                to_add = (
                    vaivora.db.event_days[name]
                    - today.day_of_week
                    + 7
                ) % 7
                next_occurrence = today + timedelta(days=to_add)
                # NOTE(review): `time` is assigned but never used afterwards.
                time = [
                    next_occurrence.year,
                    next_occurrence.month,
                    next_occurrence.day,
                    *vaivora.db.event_times[name]
                ]
                name = f"""{name} {emoji['alert']}"""
            # NOTE(review): `ending` is never assigned in this coroutine —
            # confirm where it is supposed to come from (cf. _list, which
            # derives 'ending'/'ended'/'resets').
            record = cleandoc(
                f"""**{name}** - {ending} at **{end_date}** ({mins})"""
            )
            hashed_record = await vaivora.common.hash_object(
                channels, name, entry_time
            )
            if hashed_record in self.records:
                continue
            # Record is within 1 hour behind the loop time
            if time_diff.seconds <= 3600 and time_diff.seconds > 0:
                for channel in channels:
                    messages.append(
                        {
                            'record': record,
                            'discord_channel': channel
                        }
                    )
                self.records[hashed_record] = entry_time
        if not messages:
            continue
        else:
            await vaivora.common.send_messages(
                self.bot.get_guild(guild_id),
                messages,
                'events'
            )
You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import pytest import sys import pendulum from d3a.models.market.market_structures import Offer, Trade, BalancingOffer from d3a.models.strategy.commercial_producer import CommercialStrategy from d3a.models.area import DEFAULT_CONFIG from d3a.d3a_core.device_registry import DeviceRegistry from d3a_interface.constants_limits import ConstSettings from d3a.constants import TIME_ZONE, TIME_FORMAT from d3a.d3a_core.util import change_global_config TIME = pendulum.today(tz=TIME_ZONE).at(hour=10, minute=45, second=0) def setup_function(): change_global_config(**DEFAULT_CONFIG.__dict__) class FakeArea: def __init__(self, count): self.current_tick = 2 self.appliance = None self.name = 'FakeArea' self.test_market = FakeMarket(0) self.test_balancing_market = FakeMarket(1) self.test_balancing_market_2 = FakeMarket(2) self._past_markets = {}
import pendulum

# Print today's date (midnight, local timezone).
today = pendulum.today()
print(today)
def loom_stats_email():
    """Compile marketplace statistics (daily totals, last-week and
    last-month daily averages, and all-time totals) and email them to all
    superusers via the 'loom-stats' template."""
    # Date utilities
    yesterday = pendulum.yesterday()
    today = pendulum.today()
    week = yesterday.subtract(weeks=1)
    start_week = week.start_of('week')
    end_week = week.end_of('week').add(days=1)
    month = yesterday.subtract(months=1)
    days_in_month = month.days_in_month
    start_month = month.start_of('month')
    end_month = month.end_of('month').add(days=1)

    # Per-field range-filter kwargs (daily window).
    date_joined = calculate_date_ranges('date_joined', yesterday, today)
    date_created = calculate_date_ranges('date_created', yesterday, today)
    sent_at = calculate_date_ranges('sent_at', yesterday, today)
    create_date = calculate_date_ranges('create_date', yesterday, today)
    date_sent = calculate_date_ranges('sent_date', yesterday, today)
    date_paid = calculate_date_ranges('date_paid', yesterday, today)

    # Weekly window.
    week_date_joined = calculate_date_ranges('date_joined', start_week, end_week)
    week_date_created = calculate_date_ranges('date_created', start_week, end_week)
    week_sent_at = calculate_date_ranges('sent_at', start_week, end_week)
    week_create_date = calculate_date_ranges('create_date', start_week, end_week)
    week_date_sent = calculate_date_ranges('sent_date', start_week, end_week)
    week_date_paid = calculate_date_ranges('date_paid', start_week, end_week)

    # Monthly window.
    month_date_joined = calculate_date_ranges('date_joined', start_month, end_month)
    month_date_created = calculate_date_ranges('date_created', start_month, end_month)
    month_sent_at = calculate_date_ranges('sent_at', start_month, end_month)
    month_create_date = calculate_date_ranges('create_date', start_month, end_month)
    month_date_sent = calculate_date_ranges('sent_date', start_month, end_month)
    month_date_paid = calculate_date_ranges('date_paid', start_month, end_month)

    # Base querysets.
    admins = Profile.objects.filter(is_superuser=True)
    developers = Profile.objects.exclude(roles=None)
    entrepreneurs = Profile.objects.filter(roles=None, biography__isnull=False)
    companies = Employee.objects.filter(primary=True)
    projects = Project.objects.filter(published=True, deleted=False)
    projects_mix = Project.objects.filter(mix=True, published=True, deleted=False)
    # "Cash" projects have no equity component; "equity" projects no cash.
    projects_cash = Project.objects.filter(
        (Q(estimated_equity_percentage__isnull=True) | Q(estimated_equity_percentage=0)),
        estimated_cash__isnull=False, published=True, deleted=False)
    projects_equity = Project.objects.filter(
        (Q(estimated_cash__isnull=True) | Q(estimated_cash=0)),
        estimated_equity_percentage__isnull=False, published=True, deleted=False)
    average_mix = projects_mix.aggregate(Avg('estimated_cash'), Avg('estimated_equity_percentage'))
    average_equity = projects_equity.aggregate(
        Avg('estimated_equity_percentage'))
    average_cash = projects_cash.aggregate(Avg('estimated_cash'))
    messages = Message.objects.all()
    proposals = Proposal.objects.all()
    proposals_mix = Proposal.objects.filter(cash=True, equity=True)
    proposals_cash = Proposal.objects.filter(cash=True, equity=False)
    proposals_equity = Proposal.objects.filter(cash=False, equity=True)
    rate = Proposal.objects.filter(hourly_rate__isnull=False).aggregate(
        Avg('hourly_rate'))
    hours = proposals.aggregate(Avg('hours'))
    invoices = Invoice.objects.exclude(status='draft')
    paid_invoices = Invoice.objects.filter(status='paid')
    daily_paid_invoices = paid_invoices.filter(**date_paid)
    week_paid_invoices = paid_invoices.filter(**week_date_paid)
    month_paid_invoices = paid_invoices.filter(**month_date_paid)
    invoices_cash = invoices.aggregate(
        Sum('invoice_items__amount'))['invoice_items__amount__sum']
    invoices_hours = invoices.aggregate(
        Sum('invoice_items__hours'))['invoice_items__hours__sum']
    invoices_fees = sum(
        [invoice.application_fee() for invoice in paid_invoices])

    # Daily totals (yesterday → today).
    daily_developers = developers.filter(**date_joined).count()
    daily_entrepreneurs = entrepreneurs.filter(**date_joined).count()
    daily_company = companies.filter(profile__date_joined__gte=yesterday,
                                     profile__date_joined__lt=today).count()
    daily_projects = projects.filter(**date_created).count()
    daily_projects_cash = projects_cash.filter(**date_created).count()
    daily_projects_equity = projects_equity.filter(**date_created).count()
    daily_projects_mix = projects_mix.filter(**date_created).count()
    daily_messages = messages.filter(**sent_at).count()
    daily_proposals = proposals.filter(**create_date).count()
    daily_proposals_mix = proposals_mix.filter(**create_date).count()
    daily_proposals_cash = proposals_cash.filter(**create_date).count()
    daily_proposals_equity = proposals_equity.filter(**create_date).count()
    daily_invoices = invoices.filter(**date_sent).count()
    daily_invoices_cash = invoices.filter(**date_sent).aggregate(
        Sum('invoice_items__amount'))['invoice_items__amount__sum']
    daily_invoices_hours = invoices.filter(**date_sent).aggregate(
        Sum('invoice_items__hours'))['invoice_items__hours__sum']
    daily_invoices_fees = sum(
        [invoice.application_fee() for invoice in daily_paid_invoices])

    # Last week daily average
    week_developers = developers.filter(**week_date_joined).count() / 7
    week_entrepreneurs = entrepreneurs.filter(**week_date_joined).count() / 7
    week_company = companies.filter(
        profile__date_joined__gte=start_week,
        profile__date_joined__lt=end_week).count() / 7
    week_projects = projects.filter(**week_date_created).count() / 7
    week_projects_cash = projects_cash.filter(**week_date_created).count() / 7
    week_projects_equity = projects_equity.filter(
        **week_date_created).count() / 7
    week_projects_mix = projects_mix.filter(**week_date_created).count() / 7
    week_messages = messages.filter(**week_sent_at).count() / 7
    week_proposals = proposals.filter(**week_create_date).count() / 7
    week_proposals_mix = proposals_mix.filter(**week_create_date).count() / 7
    week_proposals_cash = proposals_cash.filter(**week_create_date).count() / 7
    week_proposals_equity = proposals_equity.filter(
        **week_create_date).count() / 7
    week_invoices = invoices.filter(**week_date_sent).count() / 7
    # Guard against a None sum when no invoices fall in the window.
    week_invoices_cash = invoices.filter(**week_date_sent).aggregate(
        Sum('invoice_items__amount'))
    week_invoices_cash = week_invoices_cash[
        'invoice_items__amount__sum'] / 7 if week_invoices_cash[
        'invoice_items__amount__sum'] else 0
    week_invoices_hours = invoices.filter(**week_date_sent).aggregate(
        Sum('invoice_items__hours'))
    week_invoices_hours = week_invoices_hours[
        'invoice_items__hours__sum'] / 7 if week_invoices_hours[
        'invoice_items__hours__sum'] else 0
    week_invoices_fees = sum(
        [invoice.application_fee() for invoice in week_paid_invoices]) / 7

    # Last month daily average
    month_developers = developers.filter(
        **month_date_joined).count() / days_in_month
    month_entrepreneurs = entrepreneurs.filter(
        **month_date_joined).count() / days_in_month
    month_company = companies.filter(
        profile__date_joined__gte=start_month,
        profile__date_joined__lt=end_month).count() / days_in_month
    month_projects = projects.filter(
        **month_date_created).count() / days_in_month
    month_projects_cash = projects_cash.filter(
        **month_date_created).count() / days_in_month
    month_projects_equity = projects_equity.filter(
        **month_date_created).count() / days_in_month
    month_projects_mix = projects_mix.filter(
        **month_date_created).count() / days_in_month
    month_messages = messages.filter(**month_sent_at).count() / days_in_month
    month_proposals = proposals.filter(
        **month_create_date).count() / days_in_month
    month_proposals_mix = proposals_mix.filter(
        **month_create_date).count() / days_in_month
    month_proposals_cash = proposals_cash.filter(
        **month_create_date).count() / days_in_month
    month_proposals_equity = proposals_equity.filter(
        **month_create_date).count() / days_in_month
    month_invoices = invoices.filter(**month_date_sent).count() / days_in_month
    month_invoices_cash = invoices.filter(**month_date_sent).aggregate(
        Sum('invoice_items__amount'))
    month_invoices_cash = month_invoices_cash[
        'invoice_items__amount__sum'] / days_in_month if month_invoices_cash[
        'invoice_items__amount__sum'] else 0
    month_invoices_hours = invoices.filter(**month_date_sent).aggregate(
        Sum('invoice_items__hours'))
    month_invoices_hours = month_invoices_hours[
        'invoice_items__hours__sum'] / days_in_month if month_invoices_hours[
        'invoice_items__hours__sum'] else 0
    month_invoices_fees = sum(
        [invoice.application_fee() for invoice in month_paid_invoices]) / days_in_month

    # Template context for the stats email.
    context = {
        'DAILY_DEVELOPERS': daily_developers,
        'DAILY_ENTREPRENEURS': daily_entrepreneurs,
        'DAILY_COMPANIES': daily_company,
        'DAILY_PROJECTS': daily_projects,
        'DAILY_CASHPROJECTS': daily_projects_cash,
        'DAILY_EQUITYPROJECTS': daily_projects_equity,
        'DAILY_MIXPROJECTS': daily_projects_mix,
        'DAILY_MESSAGES': daily_messages,
        'DAILY_PROPOSALS': daily_proposals,
        'DAILY_MIXPROPOSALS': daily_proposals_mix,
        'DAILY_CASHPROPOSALS': daily_proposals_cash,
        'DAILY_EQUITYPROPOSALS': daily_proposals_equity,
        'DAILY_INVOICES': daily_invoices,
        'DAILY_INVOICES_CASH': daily_invoices_cash,
        'DAILY_INVOICES_HOURS': daily_invoices_hours,
        'DAILY_INVOICES_FEES': daily_invoices_fees,
        'WOW_DEVELOPERS': round(week_developers, 2),
        'WOW_ENTREPRENEURS': round(week_entrepreneurs, 2),
        'WOW_COMPANIES': round(week_company, 2),
        'WOW_PROJECTS': round(week_projects, 2),
        'WOW_CASHPROJECTS': round(week_projects_cash, 2),
        'WOW_EQUITYPROJECTS': round(week_projects_equity, 2),
        'WOW_MIXPROJECTS': round(week_projects_mix, 2),
        'WOW_MESSAGES': round(week_messages, 2),
        'WOW_PROPOSALS': round(week_proposals, 2),
        'WOW_CASHPROPOSALS': round(week_proposals_cash, 2),
        'WOW_EQUITYPROPOSALS': round(week_proposals_equity, 2),
        'WOW_MIXPROPOSALS': round(week_proposals_mix, 2),
        'WOW_INVOICES': week_invoices,
        'WOW_INVOICES_CASH': week_invoices_cash,
        'WOW_INVOICES_HOURS': week_invoices_hours,
        'WOW_INVOICES_FEES': week_invoices_fees,
        'MOM_DEVELOPERS': round(month_developers, 2),
        'MOM_ENTREPRENEURS': round(month_entrepreneurs, 2),
        'MOM_COMPANIES': round(month_company, 2),
        'MOM_PROJECTS': round(month_projects, 2),
        'MOM_CASHPROJECTS': round(month_projects_cash, 2),
        'MOM_EQUITYPROJECTS': round(month_projects_equity, 2),
        'MOM_MIXPROJECTS': round(month_projects_mix, 2),
        'MOM_MESSAGES': round(month_messages, 2),
        'MOM_PROPOSALS': round(month_proposals, 2),
        'MOM_MIXPROPOSALS': round(month_proposals_mix, 2),
        'MOM_CASHPROPOSALS': round(month_proposals_cash, 2),
        'MOM_EQUITYPROPOSALS': round(month_proposals_equity, 2),
        'MOM_INVOICES': month_invoices,
        'MOM_INVOICES_CASH': month_invoices_cash,
        'MOM_INVOICES_HOURS': month_invoices_hours,
        'MOM_INVOICES_FEES': month_invoices_fees,
        'DEVELOPERS': developers.count(),
        'ENTREPRENEURS': entrepreneurs.count(),
        'COMPANIES': companies.count(),
        'PROJECTS': projects.count(),
        'CASHPROJECTS': projects_cash.count(),
        'EQUITYPROJECTS': projects_equity.count(),
        'MIXPROJECTS': projects_mix.count(),
        'EQUITY': '{0}%'.format(
            round(average_equity['estimated_equity_percentage__avg'], 2)),
        'CASH': '${0}'.format(round(average_cash['estimated_cash__avg'], 2)),
        'MIX': '${0}, {1}%'.format(
            round(average_mix['estimated_cash__avg'], 2),
            round(average_mix['estimated_equity_percentage__avg'], 2)),
        'MESSAGES': messages.count(),
        'PROPOSALS': proposals.count(),
        'MIXPROPOSALS': proposals_mix.count(),
        'CASHPROPOSALS': proposals_cash.count(),
        'EQUITYPROPOSALS': proposals_equity.count(),
        'HOURLYRATE': '${0}/hour'.format(round(rate['hourly_rate__avg'], 2)),
        'HOURS': hours['hours__avg'],
        'INVOICES': invoices.count(),
        'INVOICES_CASH': '${0}'.format(invoices_cash) if invoices_cash else 0,
        'INVOICES_HOURS': invoices_hours,
        'INVOICES_FEES': invoices_fees
    }
    send_mail('loom-stats', [admin for admin in admins], context)
def today_str():
    """Return today's date (start of day) formatted as a datetime string."""
    midnight = pendulum.today()
    return midnight.to_datetime_string()
async def _list(self, ctx):
    """List all events for a Discord guild.

    Args:
        ctx (discord.ext.commands.Context): context of the message

    Returns:
        bool: True if successful; False otherwise

    """
    vdb = vaivora.db.Database(ctx.guild.id)
    results = await vdb.list_all_events()
    if not results:
        await ctx.send(
            cleandoc(
                f"""{ctx.author.mention}

                No events were found. Verify file permissions.
                """
            )
        )
        return False

    output = []
    # Guild-configured offset from server time, applied to every entry.
    diff_h, diff_m = await vaivora.common.get_time_diff(ctx.guild.id)
    full_diff = timedelta(hours=diff_h, minutes=diff_m)
    now = pendulum.now()

    for result in results:
        status = ''
        name = result[0]
        time = result[1:-1]
        if name in vaivora.db.permanent_events:
            # Permanent events recur weekly: compute the next occurrence
            # of the event's configured weekday from today.
            today = pendulum.today()
            to_add = (vaivora.db.event_days[name] - today.day_of_week) % 7
            next_day = today + timedelta(days=to_add)
            time = [
                next_day.year,
                next_day.month,
                next_day.day,
                *vaivora.db.event_times[name]
                ]
            status = '✅' if result[-1] == 1 else '❌'
            name = f'{name} ({status})'

        try:
            entry_time = pendulum.datetime(
                *time, tz=now.timezone_name
                )
        except ValueError as e:
            logger.error(
                f'Caught {e} in cogs.events: _list; '
                f'guild: {ctx.guild.id}; '
                f'user: {ctx.author.id}; '
                f'command: {ctx.command}'
                )
            continue

        time_diff = entry_time - (now + full_diff)
        diff_days = abs(time_diff.days)
        time_as_text = []

        # Print days conditionally
        if diff_days == 1:
            time_as_text.append('1 day')
        elif diff_days > 1:
            time_as_text.append(f'{diff_days} days')

        # Print hours conditionally
        diff_minutes = abs(floor(time_diff.seconds/60))
        if diff_minutes > 119:
            time_as_text.append(f'{floor((diff_minutes % 86400)/60)} hours')
        elif diff_minutes > 59:
            time_as_text.append('1 hour')

        # Print minutes unconditionally
        # e.g. 0 minutes from now
        # e.g. 59 minutes ago
        diff_minutes = floor(diff_minutes % 60)
        minutes = f'{diff_minutes} minute'
        if diff_minutes != 1:
            minutes = f'{minutes}s'
        when = 'from now' if int(time_diff.seconds) >= 0 else 'ago'
        time_since = f'{minutes} {when}'
        if time_as_text:
            # BUG FIX: the original joined `time_since` (a str), which
            # interleaved ', ' between its *characters*; the day/hour
            # fragments accumulated in `time_as_text` are what must be
            # joined here.
            time_since = f"""{', '.join(time_as_text)}, {time_since}"""

        ending = 'ending' if int(time_diff.seconds) >= 0 else 'ended'
        if status:
            ending = 'resets'

        end_date = entry_time.strftime("%Y/%m/%d %H:%M")

        message = cleandoc(
            f"""**{name}** - {ending} at **{end_date}**
            ({time_since})
            """
        )
        output.append(message)

    await ctx.send(
        cleandoc(
            f"""{ctx.author.mention}

            Records:
            """
        )
    )

    # Send in chunks of 5, with a short typing pause between messages.
    for message in await vaivora.common.chunk_messages(output, 5):
        async with ctx.typing():
            await asyncio.sleep(1)
            await ctx.send(message)

    return True
def __init__(self, aDate=None):
    """Initialize the spider for a given scrape date.

    Args:
        aDate: pendulum date/datetime to scrape for. Defaults to today,
            evaluated at construction time.
    """
    super(AirportsSpider, self).__init__()
    # BUG FIX: the original default `aDate=pendulum.today()` was evaluated
    # once, at import time, so spiders constructed on a later day silently
    # kept the stale date. A None sentinel defers the call to construction.
    self.aDate = pendulum.today() if aDate is None else aDate
    self.timestamp = self.aDate.timestamp()
    print("PENDULUM UTC TODAY ", self.aDate.isoformat())
    print("PENDULUM TO TIMESTAMP ", self.timestamp)
def test_diff_for_humans_now_and_future_seconds():
    """A time a couple of seconds ahead of 'now' reads 'in a few seconds'."""
    frozen = pendulum.today().at(12, 34, 56)
    with pendulum.test(frozen):
        current = pendulum.now().time()
        shifted = current.add(seconds=2)
        assert shifted.diff_for_humans() == "in a few seconds"
def _parseGames(self, games, team=None):
    """Flatten ESPN scoreboard JSON into a list of per-game dicts.

    Args:
        games: list of game objects from the ESPN scoreboard API.
        team: optional filter -- a team abbreviation, or one of the
            keywords "all", "today", "inp" (in progress).

    Returns:
        list[dict]: one flattened dict per game that passed the filter.
    """
    new_games = []
    if team:
        # A concrete abbreviation narrows the list to that single game;
        # the "all"/"today"/"inp" keywords are applied per game below.
        if (team.lower() != "all" and team.lower() != "today"
                and team.lower() != "inp"):
            for idx, game in enumerate(games):
                competitors = game["competitions"][0]["competitors"]
                if (team.upper() == competitors[0]["team"]["abbreviation"]
                        or team.upper() ==
                        competitors[1]["team"]["abbreviation"]):
                    # FIX: don't pop() from the list being enumerated;
                    # selecting the matched game directly is equivalent.
                    games = [game]
                    break
    for game in games:
        date = pendulum.parse(game["date"]).in_tz("US/Pacific")
        today = pendulum.today("US/Pacific")
        competition = game["competitions"][0]
        home = competition["competitors"][0]
        away = competition["competitors"][1]
        new_game = {}
        new_game["id"] = game["id"]
        new_game["time"] = (pendulum.parse(
            game["date"]).in_tz("US/Eastern").format("ddd h:mm A zz"))
        new_game["date"] = (pendulum.parse(game["date"]).in_tz(
            "US/Eastern").format("dddd, MMMM Do, h:mm A zz"))
        new_game["home_full"] = home["team"]["location"]
        new_game["home"] = home["team"]["abbreviation"]
        new_game["home_id"] = home["team"]["id"]
        new_game["away_full"] = away["team"]["location"]
        new_game["away"] = away["team"]["abbreviation"]
        new_game["away_id"] = away["team"]["id"]
        new_game["status"] = game["status"]["type"]["state"]
        new_game["shortDetail"] = game["status"]["type"]["shortDetail"]
        new_game["final"] = game["status"]["type"]["completed"]
        new_game["in_progress"] = False
        # Rankings
        new_game["home_team_rank"] = home["curatedRank"].get("current")
        new_game["away_team_rank"] = away["curatedRank"].get("current")
        # Odds (not present for every game; best-effort)
        try:
            new_game["odds"] = "{} (O/U: {:.0f})".format(
                competition["odds"][0]["details"],
                competition["odds"][0]["overUnder"],
            )
        except Exception as e:
            new_game["odds"] = ""
            print(e)
        if new_game["status"] == "in" and not new_game["final"]:
            new_game["in_progress"] = True
            # FIX: narrowed the bare `except:` clauses -- they also
            # swallowed KeyboardInterrupt/SystemExit. `Exception` keeps
            # the original best-effort behaviour for bad/missing data.
            try:
                new_game["last_play"] = (
                    competition["situation"]["lastPlay"]["text"])
            except Exception:
                new_game["last_play"] = ""
            situation = competition["situation"]
            new_game["pos"] = situation.get("possession")
            new_game["rz"] = situation.get("isRedZone")
            new_game["desc"] = situation.get("downDistanceText")
            new_game["clock"] = game["status"]["type"]["shortDetail"]
            try:
                new_game["clock"] = (
                    new_game["clock"].split("-")[0].strip() + " " +
                    self._green(new_game["clock"].split("-")[1].strip()))
            except Exception:
                # FIX: the original `new_game["clock"] = new_game["clock"]`
                # was a no-op; keep the unformatted clock string as-is.
                pass
            if "Delayed" in new_game["clock"]:
                new_game["clock"] = self._orange("DLY")
            if "Halftime" in new_game["clock"]:
                new_game["clock"] = "HT"
                new_game["HT"] = True
            else:
                new_game["HT"] = False
        elif new_game["status"] == "post":
            new_game["in_progress"] = False
        new_game["broadcasts"] = "{}".format(", ".join(
            item["media"]["shortName"]
            for item in competition["geoBroadcasts"]))
        new_game["h_score"] = int(home["score"])
        new_game["a_score"] = int(away["score"])
        if team == "today":
            if date.day == today.day:
                new_games.append(new_game)
        elif team == "inp":
            if new_game["in_progress"]:
                new_games.append(new_game)
        else:
            new_games.append(new_game)
    return new_games
def test_diff_for_humans_now_and_nearly_future_hour():
    """59 minutes ahead of 'now' is still reported in minutes, not hours."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        base_time = pendulum.now().time()
        nearly_an_hour = base_time.add(minutes=59)
        assert nearly_an_hour.diff_for_humans() == "in 59 minutes"
def _read_from_different_sources_todict(
        input_profile: Any) -> Dict[DateTime, float]:
    """
    Reads arbitrary profile.
    Handles csv, dict and string input.
    :param input_profile: Can be either a csv file path,
    or a dict with hourly data (Dict[int, float])
    or a dict with arbitrary time data (Dict[str, float])
    or a string containing a serialized dict of the aforementioned structure
    :return: profile as Dict[DateTime, float]
    """
    if os.path.isfile(str(input_profile)):
        # input is csv file
        profile = _readCSV(input_profile)
    elif isinstance(input_profile, (dict, str)):
        if isinstance(input_profile, str):
            # input in JSON formatting; strip a possible BOM before parsing
            profile = ast.literal_eval(
                input_profile.encode('utf-8').decode("utf-8-sig"))
            # Remove filename entry to support d3a-web profiles
            profile.pop("filename", None)
            profile = _remove_header(profile)
            time_format = _eval_time_format(profile)
            profile = {
                _str_to_datetime(key, time_format): val
                for key, val in profile.items()
            }
        else:
            # Dispatch on the type of the first key. Hoisted: the original
            # recomputed `list(input_profile.keys())[0]` in every branch.
            first_key = next(iter(input_profile))
            if isinstance(first_key, DateTime):
                # already in the target representation
                return input_profile
            if isinstance(first_key, str):
                # dict with string keys that are properly formatted time stamps
                input_profile = _remove_header(input_profile)
                # Remove filename from profile
                input_profile.pop("filename", None)
                time_format = _eval_time_format(input_profile)
                profile = {
                    _str_to_datetime(key, time_format): val
                    for key, val in input_profile.items()
                }
            elif isinstance(first_key, (int, float)):
                # hourly profile: hour-of-day -> value, anchored at today
                midnight = today(tz=TIME_ZONE)
                profile = {
                    midnight.add(hours=hour): val
                    for hour, val in input_profile.items()
                }
            else:
                raise D3AReadProfileException(
                    "Unsupported input type : " + str(first_key))
    elif isinstance(input_profile, (int, float, tuple)):
        # input is single value: constant profile
        profile = default_profile_dict(val=input_profile)
    else:
        raise D3AReadProfileException(
            f"Unsupported input type: {str(input_profile)}")
    return profile
def test_diff_for_humans_other_and_seconds():
    """Against an explicit slightly-later time, reads 'a few seconds before'."""
    frozen_now = pendulum.today().at(12, 34, 56)
    with pendulum.test(frozen_now):
        t = pendulum.now().time()
        slightly_later = t.add(seconds=2)
        assert t.diff_for_humans(slightly_later) == "a few seconds before"
"""Upload today's schedule CSVs for lines 801-806 of the lametro-rail agency."""
from helpers.upload import upload
import pendulum

today = pendulum.today("America/Los_Angeles")
date_formatted = today.format("YYYY-MM-DD")
agency = "lametro-rail"

# Line identifiers 801..806 inclusive (range end is exclusive).
for line in range(801, 807):
    source_path = f"data/schedule/{line}_{agency}/{date_formatted}.csv"
    # Destination mirrors the local path.
    upload(source_path, source_path)
def test_diff_for_humans_other_and_nearly_hour():
    """A 59-minute gap against an explicit other time reads in minutes."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        reference = pendulum.now().time()
        other = reference.add(minutes=59)
        assert reference.diff_for_humans(other) == "59 minutes before"
def test_diff_in_seconds_vs_default_now():
    """diff() with no argument compares to 'now'; one hour back is 3600s."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        current = pendulum.now().time()
        one_hour_ago = current.subtract(hours=1)
        assert one_hour_ago.diff().in_seconds() == 3600
def test_diff_for_humans_other_and_future_second():
    """Against an explicit slightly-earlier time, reads 'a few seconds after'."""
    frozen = pendulum.today().at(12, 34, 56)
    with pendulum.test(frozen):
        t = pendulum.now().time()
        one_second_earlier = t.subtract(seconds=1)
        assert t.diff_for_humans(one_second_earlier) == "a few seconds after"
def GET(self, *_args, **kwargs):
    """Display the list of available grids, or the current grid.

    kwargs:
        name: which grid to display (default '' lists all grids).
        start: "YYYY-MM" month selector, used for month-layout grids.
    """
    name = kwargs.get('name', '')
    start = kwargs.get('start')

    grids = cherrypy.engine.publish(
        "registry:search",
        "grids:*",
        as_dict=True
    ).pop()

    # Unknown option keys resolve to None instead of raising KeyError.
    options = defaultdict(lambda: None)

    try:
        # First matching registry entry: line 0 holds the comma-separated
        # headers, the remaining lines hold key=value options.
        config = next(
            value.split("\n")
            for key, value in grids.items()
            if key.endswith(":" + name)
        )
        headers = [value.strip() for value in config[0].split(",")]
        options.update([value.split('=') for value in config[1:]])
    except StopIteration:
        headers = []

    rows = []
    if options["layout"] == "month":
        today = pendulum.today()
        try:
            start = pendulum.from_format(start, "YYYY-MM")
        except (TypeError, ValueError):
            # Missing or malformed start: default to the current month.
            start = today.start_of("month")
        headers = ["Date", "Day"] + headers
        options["last_month"] = start.subtract(months=1)
        options["next_month"] = start.add(months=1)
        options["this_month"] = today
        period = pendulum.period(start, start.end_of("month"))
        for day in period.range("days"):
            row = [''] * len(headers)
            row[0] = day.format("MMM D, YYYY")
            row[1] = day.format("dddd")
            rows.append(row)
    elif headers:
        # BUG FIX: the original `row = [''] * len(headers);
        # rows = [row for x in range(1, 30)]` made every row alias the SAME
        # list object, so editing one cell would change the whole column.
        # Build an independent list per row instead.
        rows = [[''] * len(headers) for _ in range(1, 30)]

    return {
        "html": ("grids.jinja.html", {
            "headers": headers,
            "name": name,
            "names": [key.split(":")[1] for key in grids.keys()],
            "options": options,
            "rows": rows,
        })
    }
def test_diff_for_humans_other_and_nearly_future_hour():
    """A 59-minute gap against an earlier reference reads '59 minutes after'."""
    with pendulum.test(pendulum.today().at(12, 34, 56)):
        moment = pendulum.now().time()
        fifty_nine_earlier = moment.subtract(minutes=59)
        assert moment.diff_for_humans(fifty_nine_earlier) == "59 minutes after"
from d3a_interface.constants_limits import GlobalConfig from d3a_interface.area_validator import validate_area from d3a.models.area.redis_external_connection import RedisAreaExternalConnection import d3a.constants log = getLogger(__name__) DEFAULT_CONFIG = SimulationConfig( sim_duration=duration(hours=24), market_count=1, slot_length=duration(minutes=15), tick_length=duration(seconds=1), cloud_coverage=ConstSettings.PVSettings.DEFAULT_POWER_PROFILE, iaa_fee=ConstSettings.IAASettings.FEE_PERCENTAGE, iaa_fee_const=ConstSettings.IAASettings.FEE_CONSTANT, start_date=today(tz=TIME_ZONE), max_panel_power_W=ConstSettings.PVSettings.MAX_PANEL_OUTPUT_W) class Area: def __init__(self, name: str = None, children: List["Area"] = None, uuid: str = None, strategy: BaseStrategy = None, appliance: BaseAppliance = None, config: SimulationConfig = None, budget_keeper=None, balancing_spot_trade_ratio=ConstSettings.BalancingSettings. SPOT_TRADE_RATIO, event_list=[],
def get_feed_prices(node):
    """Fetch BTS valuations from the active feed providers and derive the
    feed price for every published asset.

    Builds the BTS/BTC, BTS/USD and BTS/CNY prices from exchange feeds,
    derives the remaining currency feeds from the USD price, then adds the
    special assets (HERO, ALTCAP, GRIDCOIN, STEEM, BTWTY, HERTZ).

    :param node: node handle, forwarded to get_bit20_feed()
    :return: dict mapping asset symbol -> feed price; every price is also
             appended to the module-level price_history
    """
    # Instantiate only the providers enabled in the config file.
    provider_names = {p.lower() for p in cfg['feed_providers']}
    active_providers = set()
    for name, provider in ALL_FEED_PROVIDERS.items():
        if name in provider_names:
            active_providers.add(provider())

    # get currency rates from yahoo
    # do not include:
    # - BTC as we don't get it from yahoo
    # - USD as it is our base currency
    yahoo = YahooFeedProvider()
    yahoo_prices = yahoo.get(YAHOO_ASSETS | {'CNY'}, 'USD')  # still get CNY, we might need it later
    base_usd_price = yahoo_prices

    # CurrencyLayer is kept as an optional second fiat source; disabled here.
    CURRENCYLAYER_ACTIVE = False
    if CURRENCYLAYER_ACTIVE:
        try:
            currency_layer = CurrencyLayerFeedProvider()
            currency_layer_prices = currency_layer.get(YAHOO_ASSETS | {'CNY'}, 'USD')
            base_usd_price = FeedSet(yahoo_prices + currency_layer_prices)
        except Exception as e:
            log.debug('Could not get feeds from CurrencyLayer: {}'.format(e))

    # 1- get the BitShares price in major markets: BTC, USD and CNY
    btcavg = core.config['credentials']['bitcoinaverage']
    bitcoinavg = BitcoinAverageFeedProvider(btcavg['secret_key'], btcavg['public_key'])
    bitfinex = BitfinexFeedProvider()
    bitstamp = BitstampFeedProvider()
    bittrex = BittrexFeedProvider()
    btc38 = Btc38FeedProvider()
    bter = BterFeedProvider()
    cmc = CoinMarketCapFeedProvider()
    coincap = CoinCapFeedProvider()
    poloniex = PoloniexFeedProvider()
    yunbi = YunbiFeedProvider()

    # 1.1- first get the bts/btc valuation
    providers_bts_btc = {poloniex, bittrex} & active_providers
    if not providers_bts_btc:
        log.warning('No feed providers for BTS/BTC feed price')
    all_feeds = get_multi_feeds('get', [('BTS', 'BTC')], providers_bts_btc)

    feeds_bts_btc = all_feeds.filter('BTS', 'BTC')
    if not feeds_bts_btc:
        raise core.NoFeedData('Could not get any BTS/BTC feeds')
    btc_price = feeds_bts_btc.price()

    # 1.2- get the btc/usd (bitcoin avg)
    try:
        feeds_btc_usd = FeedSet([bitcoinavg.get('BTC', 'USD')])
    except Exception:
        # fall back on Bitfinex, Bitstamp if BitcoinAverage is down or not configured - TODO: add Kraken, others? CMC
        log.debug('Could not get BTC/USD using BitcoinAverage, trying other sources')
        feeds_btc_usd = get_multi_feeds('get', [('BTC', 'USD')], {bitfinex, bitstamp})
    btc_usd = feeds_btc_usd.price()

    usd_price = btc_price * btc_usd

    # 1.3- get the bts/cny valuation directly from cny markets. Going from bts/btc and
    # btc/cny to bts/cny introduces a slight difference (2-3%) that doesn't exist on
    # the actual chinese markets
    providers_bts_cny = {bter, btc38, yunbi} & active_providers

    # TODO: should go at the beginning: submit all fetching tasks to an event loop / threaded executor,
    # compute valuations once we have everything
    #all_feeds.append(get_multi_feeds('get', [('BTS', 'CNY')], providers_bts_cny))

    feeds_bts_cny = get_multi_feeds('get', [('BTS', 'CNY')], providers_bts_cny)
    if not feeds_bts_cny:
        # if we couldn't get the feeds for cny, go BTS->BTC, BTC->CNY
        log.warning('Could not get any BTS/CNY feeds, going BTS->BTC, BTC->CNY')
        bts_cny = btc_price * btc_usd / base_usd_price.price('CNY')

        # # if we couldn't get the feeds for cny, try picking up our last value
        # if price_history.get('cny'):
        #     log.warning('Could not get any BTS/CNY feeds, using last feed price')
        #     bts_cny = price_history['cny'][-1]
        # else:
        #     raise core.NoFeedData('Could not get any BTS/CNY feeds')
    else:
        bts_cny = feeds_bts_cny.price()

    cny_price = bts_cny

    feeds = {}  # TODO: do we really want to reset the global var 'feeds' everytime we come here?
    feeds['BTC'] = btc_price
    feeds['USD'] = usd_price
    feeds['CNY'] = cny_price

    # HERO appreciates 5%/year against USD from its 1913-12-23 reference date.
    # NOTE(review): pendulum.Pendulum is the pendulum 1.x constructor -- confirm
    # against the pinned pendulum version before upgrading the dependency.
    feeds['HERO'] = usd_price / (1.05 ** ((pendulum.today() - pendulum.Pendulum(1913, 12, 23)).in_days() / 365.2425))

    log.debug('Got btc/usd price: {}'.format(btc_usd))
    log.debug('Got usd price: {}'.format(usd_price))
    log.debug('Got cny price: {}'.format(cny_price))

    # 2- now get the BitShares price in all other required currencies
    for asset in YAHOO_ASSETS:
        feeds[asset] = usd_price / base_usd_price.price(asset)

    # 2.1- RUBLE is used temporarily by RUDEX instead of bitRUB (black swan)
    #      see https://bitsharestalk.org/index.php/topic,24004.0/all.html
    feeds['RUBLE'] = feeds['RUB']

    # 3- get the feeds for major composite indices
    # REMOVED, was using yahoo, GoogleFeedProvider, BloombergFeedProvider

    # 4- get other assets
    altcap = get_multi_feeds('get', [('ALTCAP', 'BTC')], {coincap, cmc})
    altcap = altcap.price(stddev_tolerance=0.08)
    feeds['ALTCAP'] = altcap

    gridcoin = get_multi_feeds('get', [('GRIDCOIN', 'BTC')], {poloniex, bittrex})
    feeds['GRIDCOIN'] = btc_price / gridcoin.price(stddev_tolerance=0.1)

    steem_btc = get_multi_feeds('get', [('STEEM', 'BTC')], {poloniex, bittrex})
    steem_usd = steem_btc.price() * btc_usd
    feeds['STEEM'] = steem_usd

    # 5- Bit20 asset
    if 'BTWTY' not in get_disabled_assets():
        bit20 = get_bit20_feed(node, usd_price)
        if bit20 is not None:
            feeds['BTWTY'] = bit20

    # 6- HERTZ asset
    if 'HERTZ' not in get_disabled_assets():
        hertz_reference_timestamp = "2015-10-13T14:12:24+00:00"  # Bitshares 2.0 genesis block timestamp
        hertz_current_timestamp = pendulum.now().timestamp()  # Current timestamp for reference within the hertz script
        hertz_amplitude = 0.14  # 14% fluctuation (1% per day)
        hertz_period_days = 28  # 28 days
        hertz_phase_days = 0.908056  # Time offset from genesis till the first wednesday, to set wednesday as the primary Hz day.
        hertz_reference_asset_price = usd_price

        hertz = get_hertz_feed(hertz_reference_timestamp, hertz_current_timestamp, hertz_period_days, hertz_phase_days, hertz_reference_asset_price, hertz_amplitude)
        if hertz is not None:
            feeds['HERTZ'] = hertz

    # 7- update price history for all feeds
    for cur, price in feeds.items():
        price_history[cur].append(price)

    return feeds
Transformer, _transform_datetime) from singer.catalog import Catalog, CatalogEntry from functools import partial from facebook_business import FacebookAdsApi import facebook_business.adobjects.adcreative as adcreative import facebook_business.adobjects.ad as fb_ad import facebook_business.adobjects.adset as adset import facebook_business.adobjects.campaign as fb_campaign import facebook_business.adobjects.adsinsights as adsinsights import facebook_business.adobjects.user as fb_user from facebook_business.exceptions import FacebookRequestError TODAY = pendulum.today() API = None INSIGHTS_MAX_WAIT_TO_START_SECONDS = 2 * 60 INSIGHTS_MAX_WAIT_TO_FINISH_SECONDS = 30 * 60 INSIGHTS_MAX_ASYNC_SLEEP_SECONDS = 5 * 60 RESULT_RETURN_LIMIT = 100 STREAMS = [ 'adcreative', 'ads', 'adsets', 'campaigns', 'ads_insights',
def today():
    """Thin wrapper around ``pendulum.today()`` (start of the current day)."""
    current_day = pendulum.today()
    return current_day
def today(cls):
    """Return today's date as a plain ``datetime.date``."""
    start_of_day = pendulum.today()
    return start_of_day.date()
def test_diff_for_humans_now_and_second():
    """With no argument, a time equal to 'now' reads 'a few seconds ago'."""
    frozen = pendulum.today().at(12, 34, 56)
    with pendulum.test(frozen):
        current = pendulum.now().time()
        assert current.diff_for_humans() == "a few seconds ago"
def get_today_is_checkin_by_user_id(cls, user_id):
    """Return True if the given user has a check-in record dated today."""
    todays_checkins = cls.objects.filter(user_id=user_id, date=pendulum.today())
    return todays_checkins.exists()
'other_circumstances': 'no', 'other_circumstances_details': '', 'structual_changes': 'no', 'structual_changes_details': '' }, 'methodologies': 'abc', 'tools': 'abc', 'recruiter': 'no', 'documents': { 'financial': { 'application_id': 1, 'filename': '1.pdf' }, 'liability': { 'application_id': 1, 'expiry': pendulum.today().add(years=1).format('%Y-%m-%d'), 'filename': '2.pdf' }, 'workers': { 'application_id': 1, 'expiry': pendulum.today().add(years=1).format('%Y-%m-%d'), 'filename': '3.pdf' } }, 'services': { 'Content and Publishing': True, 'User research and Design': True }, 'pricing': { 'Content and Publishing': { 'maxPrice': '555'
def get_today_register_user(cls):
    """Return the users who registered today.

    Filters on ``date_joined`` strictly greater than ``pendulum.today()``
    (which is today's midnight), i.e. everyone who joined since the start
    of the current day.
    """
    return cls.objects.filter(date_joined__gt=pendulum.today())