Example #1
 def _invalidate_meetup_update(self):
     """ Invalidate the MeetupUpdate by making more than an hour ago
     """
     settings.REDIS.set(settings.MEETUPS_LAST_CHECKED, Delorean().datetime - timedelta(hours=1))
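Note: a stock redis-py client only accepts bytes, strings, and numbers as values, so if settings.REDIS is not a wrapper that serializes for you, an explicit conversion is needed. A minimal sketch of the same idea with explicit serialization (the client and key name below are illustrative assumptions):

import redis
from datetime import timedelta
from delorean import Delorean

REDIS = redis.Redis()                          # assumed client
MEETUPS_LAST_CHECKED = "meetups:last_checked"  # assumed key name

stale = Delorean().datetime - timedelta(hours=1)
REDIS.set(MEETUPS_LAST_CHECKED, stale.isoformat())  # store as ISO-8601 text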
Example #2
    def __init__(self):
        self._time = Delorean()
        self._timezone = Timezones()

        self._address: Address = Address()
        self._coordinates: Optional[Coordinate] = None
Example #3
 def exchange_dt_in_utc(self, dt):
     delorean = Delorean(dt, self.exchange_tz)
     return delorean.shift(pytz.utc.zone).datetime
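A quick sketch of the same conversion outside the class, assuming the exchange runs on US/Eastern:

import pytz
from datetime import datetime
from delorean import Delorean

exchange_tz = 'US/Eastern'          # assumed value of self.exchange_tz
dt = datetime(2014, 6, 5, 9, 30)    # 09:30 local to the exchange
print(Delorean(dt, exchange_tz).shift(pytz.utc.zone).datetime)
# 2014-06-05 13:30:00+00:00 (EDT is UTC-4 in June)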
Example #4
 def is_stuck(self):
     return self.amount.currency == "BTC" and \
         self.transaction_status == self.IN_TRANSIT and \
         Delorean(self.time_created, "UTC") < Delorean().last_hour(3)
Example #5
def end_of_day_in_utc(input_datetime):
    return Delorean(input_datetime, timezone="utc").end_of_day
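Delorean's end_of_day is a property that pins the time to the last microsecond of the day, so the helper above behaves like this:

from datetime import datetime
print(end_of_day_in_utc(datetime(2021, 5, 15, 9, 30)))
# 2021-05-15 23:59:59.999999+00:00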
Example #6
print ("\n\tLooks good to us.")
time_clear_headerEND()

designatedSubReddit = input('\n\t At last, what subreddit would you like to scrape? \n\n\t www.reddit.com/r/')

print("\n\t \t   Downloading images from /r/" + str(designatedSubReddit) + "... \n\n\t    ___________________________________________\n\n \t\t\t\t*** \n")
time.sleep(2.0)
########################
##### INPUT PROCESSING #
########################

# Y / M / D / H / Mi / S / microseconds
# (2017, 3, 9, 23, 49, 20, 000000), timezone='GMT')

bd_dt = Delorean(datetime=datetime(year_b, month_b, day_b, 0, 0, 0, 000000),  timezone='GMT')
bd_epoch = bd_dt.epoch
print(bd_epoch)

ed_dt = Delorean(datetime=datetime(year_e, month_e, day_e, 23, 59, 0, 000000),  timezone='GMT')
ed_epoch = ed_dt.epoch
print(ed_epoch)

# Messages for the download_file() function
MSG_START = 'Downloading file: {}.'
MSG_END = '\t{} downloaded in {} seconds.\n'

# Returns a list of urls posted to the subreddit_name
# between start_date and end_date.
# The list is in the form:
# ((url, date_string), (url, date_string), (url, date_string) ...)
Example #7
def now(timezone: Optional[str] = None) -> datetime:
    tz = timezone or settings.TIME_ZONE
    return Delorean().shift(tz).datetime
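settings.TIME_ZONE here is a Django setting; outside Django the helper works the same with an explicit IANA zone name:

print(now('Europe/Dublin'))  # current wall-clock time in Dublin, tz-aware
print(now('Asia/Tokyo'))     # current wall-clock time in Tokyo, tz-aware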
Example #8
def int_to_datetime(value):
    time = datetime.utcfromtimestamp(value / 1000)
    return Delorean(time, timezone="UTC").shift("Europe/Dublin").datetime
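The division by 1000 assumes a millisecond Unix timestamp, as JavaScript and many APIs emit. A quick check:

print(int_to_datetime(1489017600000))
# 2017-03-09 00:00:00+00:00 -- Dublin is on GMT in March, so it matches UTC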
Example #9
 def _serialize(self, value, attr, obj, **kwargs):
     del_obj = Delorean(value, timezone=BaseConfig.TIMEZONE)
     return self._convert_to_timezone(del_obj).isoformat()
Example #10
 def notify_at_full(self) -> str:
     if not self.notify_at:
         return "never"
     return (Delorean(self.notify_at).shift(
         settings.TIME_ZONE).datetime.strftime("%H:%M at %A, %B %d, %Y"))
Example #11
 def notify_at_humanized(self) -> str:
     if not self.notify_at:
         return "never"
     return Delorean(self.notify_at).humanize()
Example #12
 def _get_delorean(self, ref_date):
     if isinstance(ref_date, date):
         ref_date = datetime.combine(ref_date, datetime.min.time())
     d = Delorean(ref_date, self.time_zone)
     return d
Example #13
def live_run(configuration):
    strategy_name = configuration['platform']['strategy']
    is_builtin_strategy = configuration['platform']['builtin']
    execute = configuration['platform']['execute']

    logger.info('live_run(%s, %s)' % (strategy_name, execute))

    db = session.get_a_trading_db_mysql_session()

    harness = Harness(db, configuration)

    strategy_class = get_strategy_class(strategy_name, is_builtin_strategy)
    strategy = strategy_class(db, harness, configuration['strategy'])
    strategy.set_up()

    harness.strategy = strategy

    if execute:
        EventRecorder().create(db=db)
        DatumRecorder().create(db=db)
    else:
        EventRecorder().create()
        DatumRecorder().create()

    sentry = Sentry(configuration['platform']['sentry'])
    heartbeat = Heartbeat(configuration['platform']['heartbeat'])

    try:
        tick_count = 0

        while True:
            try:
                tick_start = Delorean().epoch
                print('\n\n%s' % strategy.name)

                if warm_shutdown_flag:
                    return  # This takes us into the finally block.

                # Initial audit. This is done inside the main loop so that our robust
                # exception catching kicks in on initial audit failures.
                if harness.audit is True and tick_count == 0:
                    # We try a fast audit (no wind down) since the bots usually start
                    # from a clean slate.
                    try:
                        harness.full_audit(wind_down=False)
                    except AuditException:
                        logger.info(
                            'Bot was not cleanly shut down, winding down and auditing',
                        )

                        harness.full_audit(wind_down=True)

                # Regular audits.
                if (harness.audit is True and tick_count > 0
                        and tick_count % harness.audit_tick == 0):
                    harness.full_audit()
                else:
                    harness.tick()

                    harness.post_tick(tick_count)

                tick_profiling.record_tick_data(tick_start, strategy.name)
                tick_profiling.record_tick_block_data(
                    strategy,
                    tick_count,
                    strategy.name,
                )

                heartbeat.heartbeat(strategy.name)

            except Exception as e:
                sentry.captureException()

                logger.exception(
                    tc.colored(
                        '[%s] %s' % (e.__class__.__name__, e),
                        'red',
                    ))

                exception_retry_loop(harness, sentry, db)
            finally:
                session.commit_mysql_session(db)
                tick_count += 1

            if harness.strategy_complete() is True:
                break
            else:
                gentle_sleep(harness.sleep_time_to_next_tick())
    finally:
        warm_shutdown(harness, db, sentry, execute)
        session.commit_mysql_session(db)
        db.remove()

        if restart_flag:
            restart()
Example #14
def epoch(date):
    dt = Delorean(datetime=date, timezone='US/Eastern')
    return int(dt.epoch)  # epoch is a property, not a method
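Since epoch returns float seconds since 1970-01-01 UTC, a quick check: midnight US/Eastern on 2017-03-09 is 05:00 UTC (EST is UTC-5 before the March DST switch):

from datetime import datetime
print(epoch(datetime(2017, 3, 9)))  # 1489035600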
Example #15
#alarm.py

from delorean import Delorean
from pydub import AudioSegment
from pydub.playback import play

alarm = False
while not alarm:
    now = str(Delorean().datetime.time())
    print("The time is ", now)
    if now >= "22:41:00.000000":
        alarm = True
        sound = AudioSegment.from_wav('Echo.wav')
        play(sound)
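This loop polls as fast as the CPU allows; a sketch of a gentler variant that sleeps between checks (same idea, one comparison per second):

import time
from delorean import Delorean

while str(Delorean().datetime.time()) < "22:41:00.000000":
    time.sleep(1)  # zero-padded time strings compare correctly as text
print("Alarm!")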
Example #16
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pandas as pd
import pytz

from datetime import datetime, timedelta
from dateutil import rrule
from delorean import Delorean

start = datetime(1990, 1, 1, tzinfo=pytz.utc)
end_dln = Delorean(datetime.utcnow(), pytz.utc.zone)
end_dln.shift('US/Eastern').truncate('day').shift(pytz.utc.zone)
end = end_dln.datetime - timedelta(days=1)


def canonicalize_datetime(dt):
    # Strip out any HHMMSS or timezone info in the user's datetime, so that
    # all the datetimes we return will be 00:00:00 UTC.
    return datetime(dt.year, dt.month, dt.day, tzinfo=pytz.utc)


def get_non_trading_days(start, end):
    non_trading_rules = []

    start = canonicalize_datetime(start)
    end = canonicalize_datetime(end)
Example #17
from delorean import Delorean  #importing the library we installed previously

dt_tm = Delorean()  #Delorean object carrying all the date and time properties

print("date = ", dt_tm)

tm = Delorean().datetime.time()  #new
print("time = ", tm)  #new

#display naive datetime
naive_dt_tm = dt_tm.naive  #new
print("Naive datetime is", naive_dt_tm)  #new

change_tz = dt_tm.shift('Africa/Lagos')
print("date and time after changing timezone = ", change_tz)

nxt_fri = dt_tm.next_friday()
print("date for next Friday is = ", nxt_fri)

# Date for two Fridays ago
last_2_fris = dt_tm.last_friday(2)
print("date for 2 Fridays ago", last_2_fris)

# Get Two Fridays from now at midnight
next_2_fris_midnight = dt_tm.next_friday(2).midnight
print("Two Fridays from now at midnight is ", next_2_fris_midnight)
Example #18
 def utc_dt_in_exchange(self, dt):
     delorean = Delorean(dt, pytz.utc.zone)
     return delorean.shift(self.exchange_tz).datetime
Example #19
def main():
    year_range = range(2005, int(datetime.now().year) + 1)
    month_range = range(1, 13)
    year_start_msg = '\n\tEnter the year you would like to START your range: \n\t'
    year_end_msg = '\n\tEnter the year you would like to END your range: \n\t'
    month_msg = '\n\tNow, how about a month: '
    day_msg = '\n\tLastly, enter the day: '

    ########################
    ### START DATE LOGIC ###
    ########################

    sleep(0.2)
    time_clear_header1()
    year_b = validate_date(year_range, year_start_msg)

    print(f'\n\tExcellent! {year_b} is a great year.')
    time_clear_header1()

    month_b = validate_date(month_range, month_msg)

    print("\n\tWe'll accept that")
    time_clear_header1()

    date_range = set_date_range(month_b)
    day_b = validate_date(date_range, day_msg)

    print("\n\tLooks good to us.")
    print(
        f'\n\n\tYou have selected a start date of: {month_b}-{day_b}-{year_b}')
    print("\n\n\tIs this correct? We hope so!")
    sleep(2.0)

    ########################
    ##### END DATE LOGIC ###
    ########################

    time_clear_headerSTART(month_b=month_b, day_b=day_b, year_b=year_b)
    year_e = validate_date(year_range, year_end_msg)

    print(f'\n\tExcellent! {year_e} is a great year. Just like {year_b}!')

    time_clear_headerSTART(month_b=month_b, day_b=day_b, year_b=year_b)
    month_e = validate_date(month_range, month_msg)

    print("\n\tWe'll accept that")

    time_clear_header1()
    time_clear_headerSTART(month_b=month_b, day_b=day_b, year_b=year_b)

    date_range = set_date_range(month_e)
    day_e = validate_date(date_range, day_msg)

    print("\n\tLooks good to us.")
    time_clear_headerEND(month_b=month_b,
                         day_b=day_b,
                         year_b=year_b,
                         month_e=month_e,
                         day_e=day_e,
                         year_e=year_e)

    input_prompt = '\n\t At last, what subreddit would you like to scrape? \n\n\t www.reddit.com/r/'
    designatedSubReddit = input(input_prompt)

    print(f'\n\t \t   Downloading images from /r/{designatedSubReddit}...')
    print(
        "\n\n\t    ___________________________________________\n\n \t\t\t\t*** \n"
    )
    sleep(2.0)

    ########################
    ##### INPUT PROCESSING #
    ########################

    # Y / M / D / H / Mi / S / microseconds
    # (2017, 3, 9, 23, 49, 20, 000000), timezone='GMT')

    bd_dt = Delorean(datetime=datetime(year_b, month_b, day_b, 0, 0, 0,
                                       000000),
                     timezone='GMT')
    bd_epoch = bd_dt.epoch
    ed_dt = Delorean(datetime=datetime(year_e, month_e, day_e, 23, 59, 0,
                                       000000),
                     timezone='GMT')
    ed_epoch = ed_dt.epoch
    urls = urls_by_period(designatedSubReddit, bd_epoch, ed_epoch)

    total_downloaded = 0

    ########################
    ##### DOWNLOADING ######
    ########################

    for url_list in urls:
        try:
            url = url_list[0]  # -- the actual url
            date_created = url_list[1]  # -- the date the image was posted
            post_title_untamed = url_list[2]  # -- post title without parsing
            post_title = sub(r'([^\s\w]|_)+', '',
                             post_title_untamed)  # -- post title with parsing

            def file_namer(splitter):
                file_name = f'{date_created} {post_title} {splitter}'

                return file_name

            def soup_parse(parse_string):
                soup = bs(get(url).text, parse_string)

                return soup

            def counter_namer(filename):
                # start is -1 when download_file() did not record a start time.
                if start != -1:
                    seconds = (datetime.now() - start).total_seconds()
                    print(f'\t{filename} downloaded in {seconds} seconds.\n')

            # == DIRECT DOWNLOAD
            if url.endswith(config.extensions):
                filename = file_namer(url.split("/")[-1])
                start = download_file(url, date_created, filename,
                                      designatedSubReddit)

                print(f"\nDownloading from\n {post_title_untamed} \n")
                print(f'Downloading file: {filename}.')

                counter_namer(filename)
                total_downloaded += 1

            # ----- ADDED FEATURES

            # == REDDITBOORU
            elif url.startswith('https://redditbooru.com/gallery/'):
                soup = soup_parse('lxml')
                gallery_links = soup.find_all('script')

                print(
                    f"\nDownloading redditbooru gallery items from: {post_title_untamed}\n"
                )

                for script in gallery_links:
                    cdn_urls = findall(r'"cdnUrl":(".*?")', script.text)

                    if len(cdn_urls) > 0:
                        # Strip the surrounding quotes and escaped slashes.
                        redditbooru_list = [
                            u.replace('"', '').replace("\\", '')
                            for u in cdn_urls
                        ]

                        for i in redditbooru_list:
                            filename = file_namer(i.split("/")[-1])
                            start = download_file(i, date_created, filename,
                                                  designatedSubReddit)

                            counter_namer(filename)
                            total_downloaded += 1

            # == IMGUR GALLERIES
            elif url.startswith('https://imgur.com/a/'):
                slug = url.rsplit('/', 1)[-1]
                imgur_source = get(
                    f'https://imgur.com/ajaxalbums/getimages/{slug}').text
                parse_for_images = findall(r'"images":\[.*\]', imgur_source)

                if not parse_for_images:
                    print("-" * 60)
                    print(
                        "\nLooks like this image was removed! Let's move on...\n"
                    )
                    print("-" * 60)
                    continue

                else:
                    convert_to_json = loads(parse_for_images[0][9:])
                    imgur_list = [
                        f'https://i.imgur.com/{i["hash"]}{i["ext"]}'
                        for i in convert_to_json
                    ]

                    print(
                        f"\nDownloading imgur gallery items from: {post_title_untamed}\n"
                    )

                    for i in imgur_list:
                        filename = file_namer(i.split("/")[-1])
                        start = download_file(i, date_created, filename,
                                              designatedSubReddit)

                        counter_namer(filename)
                        total_downloaded += 1

            # == PIXV
            elif url.startswith('https://pixv.net'):
                soup = soup_parse('html.parser')
                pixv_image_containers = soup.find_all(
                    "div", {"class": "img-container"})
                pixv_images = [
                    i.a.img.get("src") for i in pixv_image_containers
                ]
                print(
                    f"\nDownloading pixv item(s) from: {post_title_untamed}\n")

                for i in pixv_images:
                    filename = file_namer(i.split("/")[-1])
                    start = download_file(i, date_created, filename,
                                          designatedSubReddit)

                    counter_namer(filename)
                    total_downloaded += 1

            # == INSTAGRAM
            elif url.startswith('https://www.instagram.com/p/'):
                source = get(url).text
                img_link = findall('display_url":(\".*?\")',
                                   source)[0][1:][:-1]

                print(
                    f"\nDownloading an Instagram image from: {post_title_untamed}\n"
                )
                filename = file_namer(img_link.split("/")[-1])
                start = download_file(img_link, date_created, filename,
                                      designatedSubReddit)

                counter_namer(filename)
                total_downloaded += 1

            # == TUMBLR
            elif url.split('/')[2].split('.')[1] == 'tumblr':
                soup = soup_parse('lxml')
                gallery_links = soup.find_all("iframe", {"class": "photoset"})

                for i in gallery_links:
                    request_link = i.get("src")
                    link = url.split('/')[2]
                    full_request_link = f'http://{link}{request_link}'
                    new_soup = bs(get(full_request_link).text, 'lxml')
                    tumblr_img_links = [
                        i.get('src') for i in new_soup.find_all('img')
                    ]

                    for i in tumblr_img_links:
                        print(
                            f"\nDownloading tumblr image(s) from: {post_title_untamed}\n"
                        )
                        filename = file_namer(i.split("/")[-1])
                        start = download_file(i, date_created, filename,
                                              designatedSubReddit)

                        counter_namer(filename)
                        total_downloaded += 1

            # == TWITTER
            elif url.startswith('https://twitter.com'):
                soup = soup_parse('lxml')
                twitter_img_tags = soup.find_all('img')

                for i in twitter_img_tags:

                    try:
                        img_source = i.get('src')
                        if img_source.startswith(
                                'https://pbs.twimg.com/media/'):
                            print(
                                f"\nDownloading a Twitter image from: {post_title_untamed}\n"
                            )
                            filename = file_namer(img_source.split("/")[-1])
                            start = download_file(img_source, date_created,
                                                  filename, designatedSubReddit)

                            counter_namer(filename)
                            total_downloaded += 1

                    except AttributeError:
                        continue

        except OSError:

            print("-" * 60)
            print("\nThat image causes an error! Let's move on...\n")
            print("-" * 60)
            continue

    #Closing statements
    print("-" * 60)
    print(f"\n You downloaded a total of {total_downloaded} images.\n")
    print("\n Thanks for using Reddit Image Scraper 1.0 -SPECIAL EDITION- \n")
Example #20
from delorean import Delorean
import json

d = Delorean().datetime
print("Normal datetime value", d, "Type: ", type(d))

d_string = d.isoformat()
print("serialized datetime value", d_string, "Type: ", type(d_string))

d_str = str(d)
print("serialized datetime value with str method", d_str, "Type: ",
      type(d_str))
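The ISO string, unlike the raw datetime, is JSON-serializable, which is presumably why json is imported above:

print(json.dumps({"timestamp": d_string}))  # works
# json.dumps({"timestamp": d}) would raise TypeError: not JSON serializable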
Example #21
def utcnow() -> datetime:
    return Delorean().datetime
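Unlike the stdlib's datetime.utcnow(), this returns a timezone-aware value; Delorean's naive property gives the naive-UTC equivalent:

from delorean import Delorean
print(Delorean().datetime.tzinfo)  # UTC
print(Delorean().naive.tzinfo)     # None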
Example #22
    def test_datedelta(self):
        y20m1d1 = date(year=2020, month=1, day=1)
        y20m1d2 = date(year=2020, month=1, day=2)
        y20m1d3 = date(year=2020, month=1, day=3)

        y20m2d1 = date(year=2020, month=2, day=1)
        y20m3d1 = date(year=2020, month=3, day=1)
        y20m4d1 = date(year=2020, month=4, day=1)

        y21m1d1 = date(year=2021, month=1, day=1)
        y21m2d1 = date(year=2021, month=2, day=1)
        y21m3d1 = date(year=2021, month=3, day=1)
        y22m1d1 = date(year=2022, month=1, day=1)
        y22m2d1 = date(year=2022, month=2, day=1)
        y22m3d1 = date(year=2022, month=3, day=1)

        delta = DateDelta.build(start=y20m1d1, finish=y20m1d1)
        self.assertEqual(delta.years, 0)
        self.assertEqual(delta.months, 0)
        self.assertEqual(str(delta), "<1 mo")

        delta = DateDelta.build(start=y20m1d1, finish=y20m1d2)
        self.assertEqual(delta.years, 0)
        self.assertEqual(delta.months, 0)
        self.assertEqual(str(delta), "<1 mo")

        delta = DateDelta.build(start=y20m1d1, finish=y20m1d3)
        self.assertEqual(delta.years, 0)
        self.assertEqual(delta.months, 0)
        self.assertEqual(str(delta), "<1 mo")

        delta = DateDelta.build(start=y20m1d1, finish=y20m2d1)
        self.assertEqual(delta.years, 0)
        self.assertEqual(delta.months, 1)
        self.assertEqual(str(delta), "1 mo")

        delta = DateDelta.build(start=y20m1d1, finish=y20m3d1)
        self.assertEqual(delta.years, 0)
        self.assertEqual(delta.months, 2)
        self.assertEqual(str(delta), "2 mos")

        delta = DateDelta.build(start=y20m1d1, finish=y20m4d1)
        self.assertEqual(delta.years, 0)
        self.assertEqual(delta.months, 3)
        self.assertEqual(str(delta), "3 mos")

        delta = DateDelta.build(start=y20m1d1, finish=y21m1d1)
        self.assertEqual(delta.years, 1)
        self.assertEqual(delta.months, 0)
        self.assertEqual(str(delta), "1 y")

        delta = DateDelta.build(start=y20m1d1, finish=y21m2d1)
        self.assertEqual(delta.years, 1)
        self.assertEqual(delta.months, 1)
        self.assertEqual(str(delta), "1 y 1 mo")

        delta = DateDelta.build(start=y20m1d1, finish=y21m3d1)
        self.assertEqual(delta.years, 1)
        self.assertEqual(delta.months, 2)
        self.assertEqual(str(delta), "1 y 2 mos")

        delta = DateDelta.build(start=y20m1d1, finish=y22m1d1)
        self.assertEqual(delta.years, 2)
        self.assertEqual(delta.months, 0)
        self.assertEqual(str(delta), "2 ys")

        delta = DateDelta.build(start=y20m1d1, finish=y22m2d1)
        self.assertEqual(delta.years, 2)
        self.assertEqual(delta.months, 1)
        self.assertEqual(str(delta), "2 ys 1 mo")

        delta = DateDelta.build(start=y20m1d1, finish=y22m3d1)
        self.assertEqual(delta.years, 2)
        self.assertEqual(delta.months, 2)
        self.assertEqual(str(delta), "2 ys 2 mos")

        delta = DateDelta.build(start=y20m1d1)
        delta_expected = Delorean().date - y20m1d1
        years_expected, _days = divmod(delta_expected.days, 365)
        months_expected = _days // 30
        self.assertEqual(delta.years, years_expected)
        self.assertEqual(delta.months, months_expected)
Example #23
def tick():
    now = Delorean().datetime
    settings.REDIS.set(settings.MEETUPS_LAST_CHECKED, now)
Example #24
    def audit_orderbook(self, orderbook, orderbook_timestamp):
        orderbook_timestamp_early = orderbook_timestamp - timedelta(seconds=5)
        orderbook_timestamp_late = orderbook_timestamp + timedelta(seconds=5)

        result = yield self.engine.execute(
            orderbook_table.select(orderbook_table).where(
                and_(
                    orderbook_table.c.exchange == self.exchange_name,
                    orderbook_table.c.timestamp.between(
                        orderbook_timestamp_early, orderbook_timestamp_late))))

        our_orderbooks = yield result.fetchall()

        # Non Blocking ^^^
        # Potentially Blocking vvv

        start_time = Delorean().epoch
        audit_successful = 'SOFT'
        change_dict = {}
        fundamental_values = {}

        exchange_object = exchange_factory.make_exchange_from_key(
            self.exchange_name)

        price_limit = Money(ORDERBOOK_PRICE_LIMIT, exchange_object.currency)

        http_orderbook = exchange_object.parse_orderbook(
            orderbook,
            price_limit=price_limit,
        )

        http_fundamental_value = self.fundamental_value(http_orderbook)
        indexed_http_ob = self.index_orderbook(http_orderbook)

        for our_ob in our_orderbooks:
            raw_db_orderbook = {
                'bids': json.loads(our_ob.bids),
                'asks': json.loads(our_ob.asks),
            }

            db_orderbook = exchange_object.parse_orderbook(
                raw_db_orderbook,
                price_limit=price_limit,
                cached_orders=True,
            )

            # Check for soft failures.
            db_fundamental_value = self.fundamental_value(db_orderbook)

            fund_value_closeness = (
                abs(db_fundamental_value - http_fundamental_value) /
                http_fundamental_value)

            indexed_db_ob = self.index_orderbook(db_orderbook)

            ask_diffs = DictDiffer(indexed_db_ob['asks'],
                                   indexed_http_ob['asks'])
            bid_diffs = DictDiffer(indexed_db_ob['bids'],
                                   indexed_http_ob['bids'])

            changes = (list(ask_diffs.added()) + list(ask_diffs.removed()) +
                       list(ask_diffs.changed()) + list(bid_diffs.added()) +
                       list(bid_diffs.removed()) + list(bid_diffs.changed()))

            total_changes = len(changes)

            change_dict[total_changes] = changes
            fundamental_values[fund_value_closeness] = {
                'db_fundamental_value': db_fundamental_value,
                'http_fundamental_value': http_fundamental_value,
            }

            hard_failure_fund_value_closeness = 10

            if (total_changes < self.acceptable_changes_threshold
                    and fund_value_closeness <
                    self.acceptable_fund_value_threshold):
                audit_successful = 'SUCCESSFUL'

            # Check for hard failures.
            if self.detect_orderbook_cross(db_orderbook):
                audit_successful = 'HARD'

                log.err('%s Auditor Detects Cross -  Bids:%s, Asks:%s' %
                        (self.exchange_name, db_orderbook['bids'][:3],
                         db_orderbook['asks'][:3]))

                break

            if fund_value_closeness > hard_failure_fund_value_closeness:
                audit_successful = 'HARD'

                log.err(
                    'Fundamental Value difference is more than %s%%, it is: %s' %
                    (hard_failure_fund_value_closeness, fund_value_closeness))

                break

        if audit_successful != 'SUCCESSFUL':
            log.msg('%s Orderbook Auditor Soft Failure Report:' %
                    self.exchange_name)

            if not our_orderbooks:
                log.msg('No orderbooks to audit against')

            for key, value in fundamental_values.items():
                log.msg(
                    '------ Fundamental Value Closeness:%.6f, DBfv:%s, HTTPfv:%s'
                    % (key, value['db_fundamental_value'],
                       value['http_fundamental_value']))

            for key, value in change_dict.items():
                log.msg('------ Change Count: %s' % key)

        log.msg('Time Elapsed Auditing %s Orderbook: %s' % (
            self.exchange_name,
            Delorean().epoch - start_time,
        ))

        defer.returnValue(audit_successful)
Example #25
def start_of_day_in_utc(input_datetime):
    return Delorean(input_datetime, timezone="utc").start_of_day
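Mirroring end_of_day in Example #5, start_of_day clamps the time back to midnight:

from datetime import datetime
print(start_of_day_in_utc(datetime(2021, 5, 15, 13, 45)))
# 2021-05-15 00:00:00+00:00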
Example #26
from delorean import Delorean
import datetime

naive_d1 = Delorean(datetime=datetime.datetime(2021, 5, 15),
                    timezone='UTC').naive
naive_d2 = Delorean(datetime=datetime.datetime(2021, 6, 15, 5),
                    timezone='UTC').naive
naive_d3 = Delorean(datetime=datetime.datetime(2021, 5, 15),
                    timezone='US/Pacific').naive
naive_d4 = Delorean(datetime=datetime.datetime(2021, 5, 15, 7),
                    timezone='UTC').naive
print(
    "naive_d1 == naive_d2 -",
    naive_d1 == naive_d2)  # False: naive_d2 is a month and five hours later
print("naive_d2 > naive_d1 -",
      naive_d2 > naive_d1)  # True: naive_d2 is the later datetime
print(
    "naive_d3 == naive_d4 -", naive_d3 == naive_d4
)  # True: midnight US/Pacific and 07:00 UTC reduce to the same naive UTC time

d1 = Delorean(datetime=datetime.datetime(2021, 5, 15), timezone='UTC')
d2 = Delorean(datetime=datetime.datetime(2021, 5, 15, 5), timezone='UTC')
d3 = Delorean(datetime=datetime.datetime(2021, 5, 15, 1),
              timezone='Africa/Lagos')
d4 = Delorean(datetime=datetime.datetime(2021, 5, 15), timezone='UTC')
print(
    "d1 == d3 -", d1 == d3
)  # True: although the timezones differ, 01:00 in Africa/Lagos is 00:00 UTC
print("d2 > d4 -",
      d2 > d4)  # True: d2 is 5 hours later than d4
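As the d1 == d3 case shows, aware Delorean objects compare by the instant they denote, not by their wall-clock digits; the same holds for any pair of zones, e.g.:

tokyo = Delorean(datetime=datetime.datetime(2021, 5, 15, 9),
                 timezone='Asia/Tokyo')
utc = Delorean(datetime=datetime.datetime(2021, 5, 15), timezone='UTC')
print(tokyo == utc)  # True: 09:00 in Tokyo (UTC+9) is 00:00 UTC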
Example #27
def record_tick_data(tick_start, strategy_name):
    tick_end = Delorean().epoch
    tick_length = tick_end - tick_start

    datum_name = '%s_TICK_TIME' % strategy_name
    DatumRecorder().record_mean(datum_name, tick_length, TICK_SAMPLE_SIZE)
Example #28
__author__ = 'nickyuan'

# class SimpleItemContainer(object):
#     def __init__(self, id, item_container):
#         self.id = id
#         self.data = {}
#         for item in item_container:
#             self.data[item.id] = item
#
# containerA = SimpleItemContainer(2,['a','b','c'])
# print(containerA)

from delorean import Delorean


d = Delorean()
d = d.shift("Asia/Shanghai")


from requests import request

print(d)
print(d.datetime, d.date)

import wget

print(wget.download("http://music.163.com/#/song?id=27678693"))
Example #29
 def serialize(self, value):
     """
     Takes a datetime object and returns a string
     """
     fmt = Delorean(value, timezone=UTC).datetime.strftime(DATETIME_FORMAT)
     return six.u(fmt)
Example #30
def test_compute_time_diff():
    t = Delorean(datetime(2014, 6, 5, 13), timezone='UTC')
    start_time = Delorean(datetime(2014, 6, 5, 7), timezone='US/Eastern')
    t.shift('US/Eastern')
    hour_diff = hrn.compute_hour_diff(start_time.datetime, t.datetime)
    assert hour_diff == 2
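Worth noting: shift() mutates the Delorean in place (and also returns it), which is why the test can call t.shift('US/Eastern') without reassigning:

from datetime import datetime
from delorean import Delorean

t = Delorean(datetime(2014, 6, 5, 13), timezone='UTC')
t.shift('US/Eastern')   # same object, now rendered in Eastern time
print(t.datetime)       # 2014-06-05 09:00:00-04:00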