def start(city_name="Los Angeles", folder="images"):
    dawn, dusk = get_interesting_times(city_name)
    delay = get_timelapse_delay(dawn, dusk)
    tzinfo = dawn.tzinfo

    # Get the datetime so we can make a folder for it.
    raw_time = arrow.now(tzinfo)
    formatted_time = raw_time.format("YYYY_MM_DD-HH_mm_ss")

    # Build the timestamped folder path and create it if needed.
    image_folder = os.path.join(folder, formatted_time)
    if not os.path.exists(image_folder):
        os.makedirs(image_folder)

    # Initialize camera so we don't have to get a new handle to it
    # each time we want to use it.
    camera = picamera.PiCamera()

    shot = 0
    while arrow.now(tzinfo) < dusk:
        print("Taking picture {0} at {1}".format(shot, formatted_time))
        filename = "timelapse_{0:04d}.jpeg".format(shot)
        imgpath = os.path.join(image_folder, filename)
        take_picture(imgpath, camera)
        shot += 1
        time.sleep(delay)
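
get_interesting_times and get_timelapse_delay are helpers from the surrounding project and are not shown. A minimal sketch of the delay calculation, assuming the goal is to spread a fixed number of frames evenly across the dawn-to-dusk window (the frame count is a made-up parameter):

def get_timelapse_delay(dawn, dusk, frames=480):
    # Hypothetical: evenly space `frames` shots between dawn and dusk.
    total_seconds = (dusk - dawn).total_seconds()
    return max(total_seconds / frames, 1)  # never sleep for less than a second
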
Example #2
    def predict_from_rf(self, lat, lon, date, time, k):
        '''
        :param lat: latitude
        :param lon: longitude
        :param date: the date, e.g. "2016-12-10"; defaults to the current date
        :param time: the time, e.g. "13:00"; defaults to the current time
        :param k: the number of top results to return
        :return: the top k predictions from the random forest and their probabilities

        Notice:
            The features are [lat, lon, one-hot-hour-feature, one-hot-day-feature, one-hot-month-feature]
        '''
        if not date:
            date_obj = ar.now().date()
        else:
            date_obj = ar.get(date).date()
        if not time:
            time = str(ar.now().datetime)[11:16]

        # get the one hot for time and date
        one_hot_time = self.get_one_hot_time(time)
        one_hot_month_day = self.get_one_hot_month_day(date_obj)

        # form the features and normalize them (the scaler expects a 2-D array)
        features_unnormed = [lat, lon]
        features_unnormed.extend(one_hot_time)
        features_unnormed.extend(one_hot_month_day)

        features_normed = self.scaler.transform(np.array(features_unnormed).reshape(1, -1))
        predict_proba_list = self.rf_model.predict_proba(features_normed)[0]

        # form the result: pick the top k classes and their probabilities
        return self.form_result_from_proba(predict_proba_list, k)
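
get_one_hot_time and get_one_hot_month_day are helpers defined elsewhere on this class; a sketch of what such encoders might look like, given the feature layout named in the docstring (the exact dimensions are assumptions):

    def get_one_hot_time(self, time_str):
        # 24-dim hour indicator, e.g. "13:00" -> index 13 set to 1 (assumed layout).
        hour = int(time_str.split(":")[0])
        return [1 if h == hour else 0 for h in range(24)]

    def get_one_hot_month_day(self, date_obj):
        # 7-dim ISO weekday indicator followed by a 12-dim month indicator (assumed layout).
        day = [1 if d == date_obj.isoweekday() else 0 for d in range(1, 8)]
        month = [1 if m == date_obj.month else 0 for m in range(1, 13)]
        return day + month
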
Example #3
    def get_bounces(self):
        r = requests.get('https://api.mailgun.net/v2/oeconsortium.org/bounces',
                auth=('api', settings.MAILGUN_APIKEY))
        data = json.loads(r.content)
        for bounce in data['items']:
            for contact in Contact.objects.filter(email__iexact=bounce.get('address'), bouncing=False):
                contact.bouncing = True
                contact.save()

                self.stdout.write(u"Marking {contact.email} as bouncing - {contact.first_name} {contact.last_name} from {contact.organization.display_name}".format(contact=contact))

        r = requests.get("https://api.mailgun.net/v2/oeconsortium.org/events",
                        auth=('api', settings.MAILGUN_APIKEY),
                        params={'begin': arrow.utcnow().replace(weeks=-1).strftime('%a, %d %b %Y %H:%M:%S +0000'),
                                'end': arrow.utcnow().strftime('%a, %d %b %Y %H:%M:%S +0000'),
                                'event': 'failed'
                                })
        data = json.loads(r.content)
        for bounce in data['items']:
            if bounce.get('severity') == 'permanent':
                for contact in Contact.objects.filter(email__iexact=bounce.get('recipient'), bouncing=False):
                    contact.bouncing = True
                    contact.save()

                    self.stdout.write(u"Marking {contact.email} as bouncing - {contact.first_name} {contact.last_name} from {contact.organization.display_name}".format(contact=contact))
Example #4
    def get_metadata(self):
        metadata = self.feed['metadata']
        metadata['created'] = arrow.now().format('YYYY-MM-DD HH:mm:ss')
        metadata['modified'] = arrow.now().format('YYYY-MM-DD HH:mm:ss')
        metadata['feed_id'] = self.feed_metadata['identifier']
        self.metadata = metadata
        return [metadata]
Example #5
def sort_downloads():
    downloads = os.path.expanduser('~/Downloads')

    os.chdir(downloads)

    link_week(downloads, arrow.now(), 'This week')
    link_week(downloads, arrow.now().replace(weeks=-1), 'Last week')

    one_day_ago = arrow.now().replace(days=-1)

    for filename in os.listdir(downloads):
        if filename in IGNORE_FILES:
            continue

        file_date = date_added(filename)

        if file_date > one_day_ago:
            continue

        destination = os.path.join(downloads, *format_week(file_date))

        if not os.path.exists(destination):
            mkdirp(destination)

        if os.path.exists(os.path.join(destination, filename)):
            print(f'Skipping "{filename}" because it already exists')

            continue

        shutil.move(os.path.join(downloads, filename), destination)
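
date_added, format_week, link_week, mkdirp, and IGNORE_FILES come from the surrounding script. A sketch of the two date helpers, under the assumption that "date added" can be approximated by the file's modification time and that files are binned into year/week folders:

def date_added(filename):
    # Assumption: approximate "date added" with the file's mtime.
    return arrow.get(os.path.getmtime(filename))

def format_week(file_date):
    # Year/week folder components, e.g. ('2016', 'Week 23') (assumed naming).
    return file_date.format('YYYY'), 'Week {}'.format(file_date.date().isocalendar()[1])
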
Example #6
def start(city_name = 'Brussels', folder = 'images'):
    dawn, dusk = get_interesting_times(city_name)
    delay = get_timelapse_delay(dawn, dusk)
    time_zone_info = dawn.tzinfo

    #Get the datetime so we can make a folder for it.
    raw_time = arrow.now(time_zone_info)
    formatted_time = raw_time.format('YYYY_MM_DD-HH_mm_ss')

    #Build the timestamped folder path and create it if needed.
    image_folder = os.path.join(folder, formatted_time)
    if not os.path.exists(image_folder):
        os.makedirs(image_folder)

    #Initialize camera so we don't have to get a new handle every time
    camera = picamera.PiCamera()

    shot = 0
    while arrow.now(time_zone_info) < dusk:
        print('Taking picture {} at {}'.format(shot, formatted_time))
        filename = 'timelapse_{:04d}.jpeg'.format(shot)
        image_path = os.path.join(image_folder, filename)
        take_picture(image_path, camera)
        shot += 1
        time.sleep(delay)
    else:
        print('Finished the time lapse')
Example #7
    def run_job(self, repo):
        repo_name = repo.repo_name

        bj = BuildRepo2(username=self.gh_username,
                password=self.gh_password,
                repo_name=repo_name,
                repo_owner='GITenberg',
                update_travis_commit_msg='build using gitenberg.travis',
                tag_commit_message='build using gitenberg.travis',
                access_token=self.access_token)

        #self.results[repo_name] = (bj, bj.run())

        buildable = bj.buildable()

        repo.buildable = buildable.buildable
        repo.has_source = buildable.has_source
        repo.has_metadata = buildable.has_metadata

        if buildable.buildable:
            self.results[repo_name] = (bj, bj.run())
            repo.datebuilt = arrow.now().isoformat()
        else:
            self.results[repo_name] = (bj, False)

        # just mark as started

        repo.updated = arrow.now().isoformat()

        self.session().commit()
Example #8
    def countdown(self, n, delta=0.5):
        now = arrow.now()
        then = now + datetime.timedelta(seconds=n)
        while now < then:
            print("\r{}".format(then - now), end="", flush=True)
            time.sleep(delta)
            now = arrow.now()
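
Arrow objects support plain datetime arithmetic, which is what the loop above leans on; a quick illustration:

import datetime
import arrow

now = arrow.now()
print(now + datetime.timedelta(seconds=3) - now)  # 0:00:03
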
Example #9
    def _signal(self, qry, locale, profile, country=None):
        start_time = time.time()
        html = self._html(qry, 1, locale, country)
        listings = self._listings(html)
        last_page = html.find('ul', {'class': 'paginationNumbers'})
        last_page = last_page.find_all('li') if last_page else None
        last_page = int(last_page[-1].text.strip()) if last_page else 1
        conn = rethink_conn.conn()
        for page in range(1, last_page + 1):  # result pages are 1-based
            html = self._html(qry, page, locale, country)
            #listings = listings.append(self._listings(html))
            listings = self._listings(html)
            listings['source'] = 'Zip Recruiter'
            listings["keyword"] = qry
            listings["profile"] = profile
            companies = listings
            keys = [row.company_name.lower().replace(" ", "") + "_" + profile for i, row in companies.iterrows()]
            companies["company_key"] = keys
            companies["createdAt"] = arrow.now().timestamp
            #r.table("hiring_signals").insert(companies.to_dict('r')).run(conn)
            r.table("triggers").insert(companies.to_dict('r')).run(conn)
        bitmapist.mark_event("function:time:ziprecruiter_job_scrape",
                             int((time.time() - start_time) * 10**6))
        #HiringSignal()._persist(listings, profile, report)
        rd.zadd("function:time:ziprecruiter_job_scrape",
                str((time.time() - start_time) * 10**6),
                arrow.now().timestamp)
Example #10
def _get_tz_aware_start_date_and_end_date(subscriber, start_date, end_date):
    offset = arrow.now(subscriber.timezone).utcoffset()
    if offset.seconds == 0:
        # the subscriber timezone is UTC
        aware_start_date = start_date
        aware_end_date = end_date
    else:
        if start_date == end_date:
            if offset.days == 0:
                aware_start_date = start_date
                aware_end_date = end_date
#                 aware_end_date = end_date + timedelta(hours=24)
            else:
                aware_start_date = start_date + timedelta(hours=-24)
                aware_end_date = end_date
        else:
            if offset.days != 0:
                aware_start_date = start_date + timedelta(hours=-24)
                aware_end_date = end_date
            else:
                aware_start_date = start_date
                aware_end_date = end_date + timedelta(hours=24)

    return aware_start_date, aware_end_date
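
The days/seconds checks above rely on how datetime.timedelta normalizes negative offsets; a small illustration of what arrow.now(tz).utcoffset() returns:

import arrow

print(arrow.now('UTC').utcoffset())               # 0:00:00 -> seconds == 0, days == 0
print(arrow.now('Asia/Tokyo').utcoffset())        # 9:00:00 -> days == 0
print(arrow.now('America/New_York').utcoffset())  # -1 day, 19:00:00 during EST -> days == -1
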
Example #11
def make_backup(cleanup=False, scheduler=False):
    """ Makes a backup of config file, removes all but the last 5 backups """

    if scheduler:
        backup_file = 'config.backup-%s.sched.ini' % arrow.now().format('YYYYMMDDHHmmss')
    else:
        backup_file = 'config.backup-%s.ini' % arrow.now().format('YYYYMMDDHHmmss')
    backup_folder = plexpy.CONFIG.BACKUP_DIR
    backup_file_fp = os.path.join(backup_folder, backup_file)

    # In case the user has deleted it manually
    if not os.path.exists(backup_folder):
        os.makedirs(backup_folder)

    plexpy.CONFIG.write()
    shutil.copyfile(plexpy.CONFIG_FILE, backup_file_fp)

    if cleanup:
        now = time.time()
        # Delete all scheduled backup older than BACKUP_DAYS.
        for root, dirs, files in os.walk(backup_folder):
            ini_files = [os.path.join(root, f) for f in files if f.endswith('.sched.ini')]
            for file_ in ini_files:
                if os.stat(file_).st_mtime < now - plexpy.CONFIG.BACKUP_DAYS * 86400:
                    try:
                        os.remove(file_)
                    except OSError as e:
                        logger.error(u"PlexPy Config :: Failed to delete %s from the backup folder: %s" % (file_, e))

    if backup_file in os.listdir(backup_folder):
        logger.debug(u"PlexPy Config :: Successfully backed up %s to %s" % (plexpy.CONFIG_FILE, backup_file))
        return True
    else:
        logger.warn(u"PlexPy Config :: Failed to backup %s to %s" % (plexpy.CONFIG_FILE, backup_file))
        return False
Example #12
def test_commish_search(args, result_order):
    # searcher
    u1 = db_utils.create_user(username="******")

    # user open for commissions
    create_commish_searchable_user("u2", submittime=arrow.now())

    # user sometimes open for commissions (should behave same as above)
    create_commish_searchable_user("u3", commish_status='s',
                                   submittime=arrow.now() - datetime.timedelta(days=1))

    # user open for commissions, with blacklisted tags
    u4 = create_commish_searchable_user("u4", submittime=arrow.now() - datetime.timedelta(days=2))
    searchtag.associate(u4, optout_tags_userid=u4, tags={'cat'})

    # user with a different commish class and a preference tag
    u5 = create_commish_searchable_user("u5", commishclass="sketch",
                                        submittime=arrow.now() - datetime.timedelta(days=3))
    searchtag.associate(u5, preferred_tags_userid=u5, tags={'cat'})

    # user with a different price
    create_commish_searchable_user("u6", minprice="100.0", maxprice="100.0",
                                   submittime=arrow.now() - datetime.timedelta(days=4))

    results = commishinfo.select_commissionable(userid=u1,
                                                limit=10,
                                                offset=0,
                                                **args)
    rids = [r['username'] for r in results]
    assert rids == result_order
Example #13
    def _signal(self, qry, locale, profile, country=None):
        page = 1
        start_time = time.time()
        print "Simply Hired"
        html = self._html(qry, page, locale, country)
        listings = self._listings(html)
        #print listings
        if listings.empty: return "none found"
        while 'day' not in listings.date.tolist()[-1]:
            page = page + 1
            html = self._html(qry, page, locale, country)
            listings = listings.append(self._listings(html))
            print(page)
        listings = listings[~listings.date.str.contains('day')]
        listings["keyword"] = qry
        listings = listings.drop_duplicates('company_name')
        listings['source'] = 'Simply Hired'
        listings["profile"] = profile
        #print listings
        companies = listings

        keys = [row.company_name.lower().replace(" ","")+"_"+profile for i, row in companies.iterrows()]
        companies["company_key"] = keys
        companies["createdAt"] = arrow.now().timestamp

        conn = rethink_conn.conn()
        #r.table("hiring_signals").insert(companies.to_dict('r')).run(conn)
        r.table("triggers").insert(companies.to_dict('r')).run(conn)
        bitmapist.mark_event("function:time:simplyhired_job_scrape", 
                             int((time.time() - start_time)*10**6))
        rd.zadd("function:time:simplyhired_job_scrape", 
                           str((time.time() - start_time)*10**6), 
                           arrow.now().timestamp)
Example #14
    def parse_date(self, value):
        """
        Parses a date and returns the value as an arrow type

        :returns: arrow object
        :raises TaskServiceParseException: on parse error
        """
        value = value.strip()

        # try to parse a formatted date
        try:
            return arrow.get(value)
        except arrow.parser.ParserError:
            pass

        shortcuts = {
            'today': arrow.now(),
            'yesterday': arrow.now().replace(days=-1),
            'tomorrow': arrow.now().replace(days=1),
        }
        shortcut_value = value.lower()
        if shortcut_value in shortcuts:
            return shortcuts[shortcut_value]

        weekday = value.lower()
        now = arrow.now()
        next_week = now.replace(days=8)
        while now <= next_week:
            if now.format('dddd').lower().startswith(weekday):
                return now
            now = now.replace(days=1)

        raise TaskServiceParseException(
            "Invalid date format: {}".format(value))
Example #15
def test_from_isodatetime(app):
    """Test the from_isodatetime filter."""
    with app.test_request_context():
        assert render_template_string(
            "{{ 'yes' if dt|from_isodatetime < now else 'no'}}",
            dt='2002-01-01T00:01:00', now=arrow.now()) == 'yes'

        assert render_template_string("{{ dt|from_isodatetime }}", dt='') \
            == 'None'
        assert render_template_string(
            "{{ dt|from_isodatetime }}", dt=datetime(2002, 1, 1, 1, 1)) == \
            '2002-01-01 01:01:00+00:00'
        pytest.raises(
            TypeError, render_template_string,
            "{{ 'yes' if dt < now else 'no'}}",
            dt='2002-01-01T00:01', now=arrow.now())
        pytest.raises(
            ParserError,
            render_template_string, "{{ dt|from_isodatetime }}",
            dt='abcd-01-01T00:00:00')
        pytest.raises(
            ParserError, render_template_string,
            "{{ dt|from_isodatetime(strict=True) }}", dt='')

        # Test pre-1900 centuries.
        assert render_template_string(
            "{{ '0001-01-01T00:00:00'|from_isodatetime"
            " > '1500-01-01'|from_isodatetime }}") == \
            "False"
Example #16
def test_kakou_post():
    url = 'http://{0}:{1}/kakou'.format(IP, PORT)
    headers = {'content-type': 'application/json'}
    data = [
        {
            'jgsj': arrow.now().format('YYYY-MM-DD HH:mm:ss'),
            'hphm': '粤L70939',
            'kkdd_id': '441302004',
            'hpys_id': '0',
            'fxbh': 'IN',
            'cdbh': 4,
            'img_path': 'http:///img/123.jpg'
        },
        {
            'jgsj': arrow.now().format('YYYY-MM-DD HH:mm:ss'),
            'hphm': '粤L12345',
            'kkdd_id': '441302004',
            'hpys_id': '0',
            'fxbh': 'IN',
            'cdbh': 4,
            'img_path': 'http:///img/123.jpg',
            'cllx': 'K41'
        }
    ]

    return requests.post(url, headers=headers, data=json.dumps(data))
Example #17
    def testFeatureGenWithOnePoint(self):
        # ensure that the start and end datetimes are the same, since the average calculation uses
        # the total distance and the total duration
        ts = esta.TimeSeries.get_time_series(self.testUUID)
        trackpoint1 = ecwlo.Location({u'coordinates': [0, 0], 'type': 'Point'})
        ts.insert_data(self.testUUID, "analysis/recreated_location", trackpoint1)
        testSeg = ecws.Section({"start_loc": trackpoint1,
                                "end_loc": trackpoint1,
                                "distance": 500,
                                "sensed_mode": 1,
                                "duration": 150,
                                "start_ts": arrow.now().timestamp,
                                "end_ts": arrow.now().timestamp,
                                "_id": 2,
                                "speeds": [],
                                "distances": [],
                                })
        testSegEntry = ecwe.Entry.create_entry(self.testUUID, "analysis/cleaned_section", testSeg)
        d = testSegEntry.data
        m = testSegEntry.metadata
        enufc.expand_start_end_data_times(d, m)
        testSegEntry["data"] = d
        testSegEntry["metadata"] = m
        inserted_id = ts.insert(testSegEntry)
        featureMatrix = np.zeros([1, len(self.pipeline.featureLabels)])
        resultVector = np.zeros(1)
        self.pipeline.updateFeatureMatrixRowWithSection(featureMatrix, 0, testSegEntry)
        logging.debug("featureMatrix = %s" % featureMatrix)
        self.assertEqual(np.count_nonzero(featureMatrix[0][5:16]), 0)
        self.assertEqual(np.count_nonzero(featureMatrix[0][19:21]), 0)
Example #18
def make_backup(cleanup=False, scheduler=False):
    """ Makes a backup of config file, removes all but the last 5 backups """

    if scheduler:
        backup_file = 'config.backup-%s.sched.ini' % arrow.now().format('YYYYMMDDHHmmss')
    else:
        backup_file = 'config.backup-%s.ini' % arrow.now().format('YYYYMMDDHHmmss')
    backup_folder = plexpy.CONFIG.BACKUP_DIR
    backup_file_fp = os.path.join(backup_folder, backup_file)

    # In case the user has deleted it manually
    if not os.path.exists(backup_folder):
        os.makedirs(backup_folder)

    plexpy.CONFIG.write()
    shutil.copyfile(plexpy.CONFIG_FILE, backup_file_fp)

    if cleanup:
        # Delete all scheduled backup files except from the last 5.
        for root, dirs, files in os.walk(backup_folder):
            db_files = [os.path.join(root, f) for f in files if f.endswith('.sched.ini')]
            if len(db_files) > 5:
                backups_sorted_on_age = sorted(db_files, key=os.path.getctime, reverse=True)
                for file_ in backups_sorted_on_age[5:]:
                    try:
                        os.remove(file_)
                    except OSError as e:
                        logger.error(u"PlexPy Config :: Failed to delete %s from the backup folder: %s" % (file_, e))

    if backup_file in os.listdir(backup_folder):
        logger.debug(u"PlexPy Config :: Successfully backed up %s to %s" % (plexpy.CONFIG_FILE, backup_file))
        return True
    else:
        logger.warn(u"PlexPy Config :: Failed to backup %s to %s" % (plexpy.CONFIG_FILE, backup_file))
        return False
Example #19
def fixQuoteTime(bot, trigger):
    requireDb()
    if not trigger.group(2):
        bot.say("Usage: !fixquotetime <quote id> <quote time>")
        return

    split = trigger.group(2).split(' ', 1)
    if len(split) != 2:
        bot.say("Please provide the fixed quote time!")
        return

    quote = getQuote(split[0])
    if quote is None:
        bot.say("Could not find quote id%s" % (split[0]))
        return

    pTime, flag = dateParser.parseDT(split[1], sourceTime=arrow.now(defaultTz))  # use the beginning of today as the source day to ensure a datetime is returned.
    # Anything not explicitly dated will go to the next occurrence of that date,
    # so check whether the given date was pushed more than 6 months into the future;
    # if it was, assume the issuer meant the past. This breaks setting dates manually with years.
    pTime = arrow.get(pTime, defaultTz)  # avoid Python's awful datetimes.
    if pTime > arrow.now(defaultTz).replace(months=+6):
        pTime = pTime.replace(years=-1)

    quote.timestamp = pTime.to('UTC').timestamp
    bot.say("Quote #%d moved to date: %s" % (quote.id, quote.time.strftime("%b %d")))
Example #20
def validate_date_format(value):
    try:
        value = value.replace('yyyy', 'YYYY').\
                      replace('dd', 'DD')
        arrow.now().format(value)
    except Exception:
        raise ValidationError(u'不正な日時フォーマットです。')  # "Invalid date/time format."
Example #21
def setUp(self):
  tomorrow = arrow.now().replace(days=1, hour=13, minute=0).datetime
  next_day = arrow.now().replace(days=2, hour=13, minute=0).datetime
  end = "14:00"

  # fee = 45 because it tests that discounts are including fractional dollars
  # Session 1 has class tomorrow and the next day from 1-2pm
  self.session1 = Session.objects.create(
    course=Course.objects.filter(active=True,fee=45).order_by("?")[0],
    user_id=1
  )
  ClassTime.objects.create(session=self.session1,start=tomorrow,end_time=end)
  ClassTime.objects.create(session=self.session1,start=next_day,end_time=end)

  # Session 2 has class tomorrow evening from 6-7pm
  self.session2 = Session.objects.create(
    course=Course.objects.filter(active=True,fee__gt=0).order_by("?")[0],
    user_id=1
  )
  ClassTime.objects.create(session=self.session2,start=tomorrow.replace(hour=18),end_time="19:00")

  # conflict_session1 is the same time as session1. currently unused
  self.conflict_session1 = Session.objects.create(
    course=Course.objects.filter(active=True,fee__gt=0).order_by("?")[0],
    user_id=1
  )
  ClassTime.objects.create(session=self.conflict_session1,start=next_day,end_time=end)
Example #22
def parse_disp(dispatch):
    
    # Conserves the original page by assigning it to the variable "raw_page" before any modifications are performed
    raw_page = dispatch
    
    # Returns the page without ' - From ' suffix OR returns '0' if not a legitimate VC page or if just a message page     
    page = mods.from_vc(dispatch)
    
    # Gets rid of 'message only' pages
    if not page:
        return None
    
    # Fixes the three letter city codes in the VC pages
    page = mods.fix_cities(page, mods.replace_dict)
    
    # Returns the call type
    call_type = mods.get_type(page)
    aid_calls = ['AID', 'ACC', 'SHOT', 'STAB', 'CPR', 'DOA', 'OBV', 'DROWN', 'MEDIC', 'ODF', 'ODMDF', 'RESCUE', 'SERVIC']
    if any(x in call_type for x in aid_calls):
        cat = 'Aid'
    else:
        cat = 'Fire'
    # Returns the address, place name (if any), and apt/ste number (if any)
    address_whole = mods.get_address(page)
    place = address_whole['Place']
    unit_num = address_whole['Unit']
    address = address_whole['Address']
    
    # Maintain the raw address (everything before the first comma) for dispatching purposes as 'raw_add'
    raw_add = address.split(', ')[0]
    
    # Returns the units
    units = mods.get_units(page)
    units = mods.fix_units(units)
    
    # Returns the department
    dept = mods.get_dept(page)
    nh = ['E18', 'E19', 'E191', 'A18']
    if set(nh) & set(units):
        dept = 'Highline'
    
    # Get latitude and longitude
    g = geocoder.google(address + ', WA')
    latlng = [g.lat, g.lng]
    postal = g.postal
    city = g.city
    
    # Append the zip code to the address, but only if geocoder is confident of the results
    
    # Assign the current date/time to the variable 'timestamp'
    isotime = str(arrow.now('US/Pacific'))
    i = str(arrow.now('US/Pacific').format('HH:mm'))
    hourago = str(arrow.now('US/Pacific').replace(hours=-1))

    client = MongoClient()
    db = client.mydb
    collection = db.dispatch

    collection.find_and_modify(
        query={'Address': address, 'TimestampISO': {'$gte': hourago, '$lte': isotime}},
        update={'$addToSet': {'Units': {'$each': units}},
                '$set': {'Address': address, 'Type': call_type, 'Category': cat,
                         'Department': dept, 'Coordinates': latlng, 'Timestamp': i,
                         'TimestampISO': isotime, 'raw_add': raw_add,
                         'place_name': place, 'unit_num': unit_num}},
        upsert=True)
Example #23
    def _cron(self, role, locale, profile, country=None):
        ''' Get today's jobs for job postings '''

        page = 0
        #companies = self._indeed_page(role, locale, page, country)
        start_time = time.time()

        indeed_results = self._search(role, page, locale, country)
        companies = self._search_results_html_to_df(indeed_results)
        #print companies
        companies = companies[companies.date.str.contains("hour|minute|Just")]
        companies = self._date_phrase_to_timestamp(companies)
        companies = companies.drop_duplicates('company_name')
        companies["source"] = "Indeed"
        companies["keyword"] = role
        companies["profile"] = profile

        keys = [row.company_name.lower().replace(" ","")+"_"+profile for i, row in companies.iterrows()]
        companies["company_key"] = keys
        companies["createdAt"] = arrow.now().timestamp

        conn = rethink_conn.conn()

        #r.table("hiring_signals").insert(companies.to_dict('r')).run(conn)
        r.table("triggers").insert(companies.to_dict('r')).run(conn)

        print "function:time:indeed_job_scrape", str((time.time() - start_time)*10**6), arrow.now().timestamp
        rd.zadd("function:time:indeed_job_scrape", 
                           #"{0}:{1}".format(profile, str((time.time() - start_time)*10**6)), 
                           str((time.time() - start_time)*10**6), 
                           arrow.now().timestamp)
Example #24
def copy_dir(source_dir, target_dir):
    local_files = get_localFileList(local_path('file_list.txt'))
    print(arrow.now().format('YYYY-MM-DD HH:mm:ss') + " source path:" + source_dir)
    wqar_files = []
    for dir_name in os.listdir(source_dir):
        dir_path = os.path.join(source_dir, dir_name)
        if os.path.isdir(dir_path):
            #print dir_path
            if re_wqar_dir_name(dir_name):
                if os.path.exists(os.path.join(source_dir, dir_name, 'raw.dat')):
                    #print arrow.now().format('YYYY-MM-DD HH:mm:ss')
                    wqar_files.append(dir_name)
                    #print dir_name
    append_files = []
    for file in wqar_files:
        if file in local_files:
            continue
        append_files.append(file)

    for file in append_files:
        dir_source_path = os.path.join(source_dir, file)
        dir_target_path = os.path.join(target_dir, file)
        shutil.copytree(dir_source_path, dir_target_path)

    update_localFileList(local_path('file_list.txt'), append_files)

    return wqar_files
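
get_localFileList and update_localFileList are assumed to keep a plain-text list of already-copied directory names; a minimal sketch under that assumption:

def get_localFileList(list_path):
    # Read the known directory names, one per line (assumed format).
    if not os.path.exists(list_path):
        return set()
    with open(list_path) as f:
        return {line.strip() for line in f if line.strip()}

def update_localFileList(list_path, new_files):
    # Append the newly copied directory names.
    with open(list_path, 'a') as f:
        for name in new_files:
            f.write(name + '\n')
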
Example #25
    def __sleep_score(self):
        self.__get_sleep_time_with_fitbit()

        activity_data = self.data_handler.read_record().get("activity", {})
        sleep_data = activity_data.get("sleep", [])

        # default to "now" so the times are bound even when no main sleep record exists
        sleep_start_time = arrow.now()
        sleep_end_time = arrow.now()
        for s in sleep_data:
            if s["is_main"]:
                sleep_start_time = arrow.get(s.get("start_time", None))
                sleep_end_time = arrow.get(s.get("end_time", None))

        sleep_time = (sleep_end_time - sleep_start_time).seconds / 60 / 60
        sleep_time = sleep_time * 100

        if sleep_time > 800:
            sleep_time -= sleep_time - 800

        if sleep_time > 700:
            sleep_time = 700

        return Score.percent(sleep_time, 100, 700)
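
Score.percent is a helper from the surrounding project; a plausible sketch that maps a value against a ceiling onto a 0-100 scale (the saturation behavior is an assumption):

class Score:
    @staticmethod
    def percent(value, max_score, max_value):
        # Map value onto [0, max_score], saturating at max_value.
        return min(value / float(max_value), 1.0) * max_score
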
Example #26
    def test_virtual_tags(self, ts):
        arguments = u'hello world +test due:yesterday'
        task = ts.from_arguments(arguments.split(' '))
        assert task is not None
        assert task.id is not None
        assert task.due < arrow.now()
        assert Task.query.count() > 0

        arg_sets = [
            u'+overdue',
            u'+test +overdue',
            u'+yesterday',
            u'+test due:yesterday',
        ]
        for arg in arg_sets:
            query = ts.filter_by_arguments(arg.split(' '))
            assert query is not None
            assert query.count() > 0
            assert task in query.all()

        task.due = arrow.now().replace(days=1)
        db.session.add(task)
        db.session.commit()

        arg_sets = [
            u'+tomorrow',
            u'+test +tomorrow',
        ]
        for arg in arg_sets:
            query = ts.filter_by_arguments(arg.split(' '))
            assert query is not None
            assert query.count() > 0
            assert task in query.all()
Example #27
def lod_dump(ctx, dataset_args, step, stage, reset, full_data, data, archive=True):
    """
    :param ctx:
    :param path:
    :param step:
    :param stage:
    :param first_only:
    :return:
    """
    lod_db = _get_db(ctx, stage, dataset_args)

    step_table = _get_step_table_in_stage(
        ctx, step, stage, reset, dataset_args)
    counter = Counter()
    counter.set_parent(ctx.gbc)

    stamped_data = make_stamping_gen(ctx, data, full_data, counter)
    rows = []
    if 'DS_CHUNKSIZE' in dataset_args:
        chunk_size = dataset_args['DS_CHUNKSIZE']
    else:
        chunk_size = 10
    ctx.gbc.say('dumping chunksize: %d' % chunk_size, verbosity=40)

    start_ts = arrow.now()
    for ditem in stamped_data:
        jitem = {u'json_str': json.dumps(ditem)}
        rows.append(jitem)
        if len(rows) == chunk_size:
            # lod_db._release()
            lod_db.begin()
            step_table.insert_many(rows, chunk_size=chunk_size)
            lod_db.commit()
            rows = []
            curr_time = arrow.now() - start_ts
            ctx.gbc.say('dumping chunk took: ' + str(curr_time), verbosity=40)
    # take care of the rest
    if len(rows) > 0:
        step_table.insert_many(rows)

    ret = {}
    ret['total'] = counter.get_total()
    ret['count_in_table'] = step_table.count()

    totals_equal = ret['total'] == ret['count_in_table']
    ret['totals_equal'] = totals_equal

    ctx.gbc.say('lod_dump:return', stuff=ret, verbosity=100)
    freeze_file = './remember/dataset_freeze_' + step + '_' + stage + '.json'

    dataset.freeze(step_table, format='json', filename=freeze_file)
    zipped = False
    if archive:
        zipped = zipit(ctx, freeze_file)
    if zipped and dataset_args['DS_TYPE'] != 'freeze_json':
        os.remove(freeze_file)
    total_time = arrow.now() - start_ts
    ctx.gbc.say('dumping took:' + str(total_time), verbosity=10)
    return ret
Example #28
    def test_add(self):
        p = Package(ip='127.0.0.1', path='c:\\test\\test.zip',
                    date_created=arrow.now().datetime,
                    expired=arrow.now().timestamp + 600, banned=0)
        db.session.add(p)
        db.session.commit()
        self.assertTrue(isinstance(p.id, int))
Example #29
    def test_parse_date_option(self, ts):
        # test weekday shorthand
        options = ts.parse_date_option({}, 'test', 'Th')
        assert 'test' in options
        assert options['test'].format('dddd').lower() == 'thursday'
        assert options['test'] > arrow.now().floor('day')
        assert options['test'] < arrow.now().replace(weeks=1)

        # test formatted date
        date = arrow.get('2015-07-22')
        options = ts.parse_date_option({}, 'test', date.format('YYYY-MM-DD'))
        assert 'test' in options
        assert options['test'].date() == date.date()

        # test shortcuts
        now = arrow.now()
        shortcuts = {
            'today': now,
            'yesterday': now.replace(days=-1),
            'tomorrow': now.replace(days=1),
        }
        for shortcut, day in shortcuts.items():
            options = ts.parse_date_option({}, 'test', shortcut)
            assert options['test'].date() == day.date()

        # test duplicate
        with pytest.raises(TaskServiceParseException):
            ts.parse_date_option(options, 'test', 'Mo')

        # test invalid date
        with pytest.raises(TaskServiceParseException):
            ts.parse_date_option({}, 'val', 'Mayday')
Example #30
def get_entry():
  last_year = arrow.now().replace(years=-1).format('YYYY-MM-DD')
  last_month = arrow.now().replace(months=-1).format('YYYY-MM-DD')
  last_week = arrow.now().replace(weeks=-1).format('YYYY-MM-DD')

  day_query = 'select entry from entries where day = ? order by random()'
  random_query = 'select day, entry from entries order by random()'

  with sqlite3.connect('ohlife.db') as db:
    result = db.execute(day_query, (last_year,)).fetchone()
    if result:
      return ("One year", result[0])

    result = db.execute(day_query, (last_month,)).fetchone()
    if result:
      return ("One month", result[0])

    result = db.execute(day_query, (last_week,)).fetchone()
    if result:
      return ("One week", result[0])

    result = db.execute(random_query).fetchone()
    if result:
      num_days_ago = (arrow.now() - arrow.get(result[0])).days - 1
      return ("%d days" % num_days_ago, result[1])

    return (None, None)
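
A sketch of the SQLite schema this assumes, inferred from the queries above (hypothetical):

import sqlite3

with sqlite3.connect('ohlife.db') as db:
    db.execute('create table if not exists entries (day text, entry text)')
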
Example #31
def grub(symbol):
    # ... (the code that builds `prelim_data` for `symbol` is omitted in the source) ...
    try:
        symbol = prelim_data['symbol']
        data = pd.DataFrame.from_dict(prelim_data['candles'])
        return True, symbol, data
    except KeyError:
        print('symbol {} is invalid'.format(symbol))
        return False, None, None


if __name__ == '__main__':
    # symbol, data = grub(symbol='SPY')

    grub_targets = ['VIX', 'VIX9D']

    '''File Handling'''
    filename = 'D:/StockData/S&P_500_VIX_{}'.format(str(arrow.now('America/New_York').date()))

    if not os.path.exists(filename):
        print('creating datafile:')
        print(filename)
        datafile = h5py.File(filename, 'w')
    else:
        print('Data file already exists!')
        print('exiting program')
        sys.exit()

    successful_grubs = 0
    for grubbie in grub_targets:
        success, symbol, data = grub(symbol=grubbie)
        if success:
            successful_grubs += 1
Example #32
def _get_day_text():
    return arrow.now('+08:00').replace(days=+3).format('YYYY-MM-DD')
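
Note this uses the pre-1.0 arrow API, where replace() accepted plural shift arguments (as do many snippets on this page); on newer arrow releases the equivalent is shift():

arrow.now('+08:00').shift(days=+3).format('YYYY-MM-DD')
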
Example #33
    def screen(self,
               frame,
               end_dt=None,
               adv_lim=25,
               win=7,
               a5=0.02,
               a10=0.001):
        records = []
        fired = []
        if end_dt is None:
            end_dt = arrow.now().datetime

        for i, code in enumerate(stocks.all_stocks()):
            try:
                name = stocks.name_of(code)
                if name.endswith("退"):  # skip delisted stocks
                    continue
                if name.find("ST") != -1:  # skip special-treatment (ST) stocks
                    continue

                bars = stocks.get_bars(code, 30, frame, end_dt=end_dt)
                if len(bars) == 0:
                    print("get 0 bars", code)
                    continue

                if arrow.get(bars['date'].iat[-1]).date() != arrow.get(
                        end_dt).date():
                    continue

                # the 30-day advance must be below adv_lim
                if bars['close'].iat[-1] / bars['close'].min(
                ) >= 1 + adv_lim / 100:
                    print(f"{code} advanced more than", adv_lim)
                    continue

                ma5 = np.array(moving_average(bars['close'], 5))
                ma10 = np.array(moving_average(bars['close'], 10))

                err5, coef5, vertex5 = polyfit(ma5[-win:])
                err10, coef10, vertex10 = polyfit(ma10[-win:])

                vx5, _ = vertex5
                vx10, _ = vertex10
                _a5 = coef5[0]
                _a10 = coef10[0]
                records.append([code, _a5, _a10, vx5, vx10, err5, err10])

                # print(code, round_list([err5, vx, pred_up, y5, ma5[-1], y10, ma10[-1]], 3))
                # if the curve fits well, the next day can rise more than up%, the 10-day
                # moving average also points up, and the vertex lies within win/2
                t1 = err5 <= 0.003 and err10 <= 0.003
                t2 = _a5 > a5 and _a10 > a10
                t3 = (win - 1 > vx5 >= win / 2 - 1) and (vx10 < win / 2 - 1)
                if t1 and t2 and t3:
                    c1, c0 = bars['close'].iat[-2], bars['close'].iat[-1]
                    if stocks.check_buy_limit(c1, c0, name):  # skip stocks that hit the daily limit-up
                        continue

                    print(f"{stocks.name_of(code)} {code}",
                          [_a5, _a10, vx5, vx10, err5, err10])
                    fired.append([code, _a5, _a10, vx5, vx10, err5, err10])
            except Exception as e:
                print(i, e)
                continue
        return DataFrame(
            data=records,
            columns=['code', 'a5', 'a10', 'vx5', 'vx10', 'err_5', 'err_10'])
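
moving_average and polyfit are helpers from the surrounding project; a sketch of the moving average under the usual convolution approach (assumed semantics):

import numpy as np

def moving_average(series, win):
    # Trailing moving average over `win` points.
    return np.convolve(series, np.ones(win) / win, mode='valid')
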
Example #34
	except Exception as e:
		# create a default learning plan
		number = 50
		level = 1
		LearningPlan.objects.create(
				level=level,
				num=number,
				user=request.user,
			)
	# First, try to draw `number` words from the ones currently being learned.
	# Conditions:	1. reviewed fewer than 8 times
	#		2. the next review time has been reached (review frequency decreases over time)
	review_words = LearningList.objects.filter(word__level=level,
												author=user,
												repeat_times__lt=8,
												review_time__lte=arrow.now().datetime).order_by('id')[:number]
	
	
	# determine how many words were actually drawn
	review_count = review_words.count()
	# query the words that have already been studied
	learning_words = LearningList.objects.filter(word__level=level, author=user).all()

	word_list = {}
	for index,review_word in enumerate(review_words):
		word_list[review_word.word.english.strip('\n')] = review_word.word.toDict()

	# If fewer than `number` were drawn, take the remaining words from the word table as new words.
	if review_count < number:
		# words that have already been studied must be filtered out
		words = Words.objects.filter(level=level).exclude(id__in=[word.word_id for word in learning_words]).order_by('id')[:(number-review_count)]
Example #35
 def test_sw_now(self):
     import arrow
     now = arrow.now()
     begin_date = str(now.date())
     parse_sw_history(begin_date)
Example #36
def f_addLog(dss):
    if zsys.logFN != '':
        timStr = arrow.now().format('YYYY:MM:DD HH:mm:ss')
        tss = timStr + '-->  ' + dss
        #print('log,',tss)
        f_add(zsys.logFN, tss)
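
f_add and zsys.logFN come from the surrounding module; f_add presumably appends one line to the log file, along these lines:

def f_add(filename, text):
    # Append one line to the log file (assumed behavior).
    with open(filename, 'a', encoding='utf-8') as f:
        f.write(text + '\n')
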
Example #37
def fetch_exchange(zone_key1,
                   zone_key2,
                   session=None,
                   target_datetime=None,
                   logger=None) -> list:
    """Requests the last known power exchange (in MW) between two zones."""
    url = (
        "http://mis.nyiso.com/public/csv/ExternalLimitsFlows/{}ExternalLimitsFlows.csv"
    )

    sorted_zone_keys = "->".join(sorted([zone_key1, zone_key2]))

    # In the source CSV, positive is flow into NY, negative is flow out of NY.
    # In Electricity Map, A->B means flow to B is positive.
    if (sorted_zone_keys == "US-NEISO->US-NY"
            or sorted_zone_keys == "US-NE-ISNE->US-NY-NYIS"):
        direction = 1
        relevant_exchanges = [
            "SCH - NE - NY", "SCH - NPX_1385", "SCH - NPX_CSC"
        ]
    elif sorted_zone_keys == "US-NY->US-PJM":
        direction = -1
        relevant_exchanges = [
            "SCH - PJ - NY",
            "SCH - PJM_HTP",
            "SCH - PJM_NEPTUNE",
            "SCH - PJM_VFT",
        ]
    elif sorted_zone_keys == "US-MIDA-PJM->US-NY-NYIS":
        direction = 1
        relevant_exchanges = [
            "SCH - PJ - NY",
            "SCH - PJM_HTP",
            "SCH - PJM_NEPTUNE",
            "SCH - PJM_VFT",
        ]
    elif sorted_zone_keys == "CA-ON->US-NY" or sorted_zone_keys == "CA-ON->US-NY-NYIS":
        direction = 1
        relevant_exchanges = ["SCH - OH - NY"]
    elif sorted_zone_keys == "CA-QC->US-NY" or sorted_zone_keys == "CA-QC->US-NY-NYIS":
        direction = 1
        relevant_exchanges = ["SCH - HQ_CEDARS", "SCH - HQ - NY"]
    else:
        raise NotImplementedError(
            "Exchange pair not supported: {}".format(sorted_zone_keys))

    if target_datetime:
        # ensure we have an arrow object
        target_datetime = arrow.get(target_datetime)
    else:
        target_datetime = arrow.now("America/New_York")
    ny_date = target_datetime.format("YYYYMMDD")
    exchange_url = url.format(ny_date)

    try:
        exchange_data = read_csv_data(exchange_url)
    except HTTPError:
        # this can happen when target_datetime has no data available
        return None

    new_england_exs = exchange_data.loc[exchange_data["Interface Name"].isin(
        relevant_exchanges)]
    consolidated_flows = new_england_exs.reset_index().groupby(
        "Timestamp").sum()

    now = arrow.utcnow()

    exchange_5min = []
    for row in consolidated_flows.itertuples():
        flow = float(row[3]) * direction
        # Timestamp for exchange does not include seconds.
        dt = timestamp_converter(row[0] + ":00")

        if (dt > now) and ((dt - now) < timedelta(seconds=300)):
            # NYISO exchanges CSV (and only the exchanges CSV) includes data
            # up to 5 minutes in the future (but only 5 minutes in the future).
            # This also happens on their official website.
            # Electricity Map raises error with data in the future, so skip
            # that datapoint. If it's more than 5 minutes in the future,
            # it's weird/unexpected and thus worthy of failure and logging.
            continue

        exchange = {
            "sortedZoneKeys": sorted_zone_keys,
            "datetime": dt,
            "netFlow": flow,
            "source": "nyiso.com",
        }

        exchange_5min.append(exchange)

    return exchange_5min
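
read_csv_data and timestamp_converter are project helpers not shown here; a plausible sketch of the latter, assuming NYISO stamps look like 'MM/DD/YYYY HH:mm' in US Eastern time (the caller appends ':00' for seconds):

def timestamp_converter(timestamp_string):
    # Parse 'MM/DD/YYYY HH:mm:ss' as US Eastern time (assumption).
    return arrow.get(timestamp_string, 'MM/DD/YYYY HH:mm:ss', tzinfo='America/New_York')
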
Example #38
def setting():
    form = SettingForm()
    promo_form = PromoCodeForm()
    change_email_form = ChangeEmailForm()

    email_change = EmailChange.get_by(user_id=current_user.id)
    if email_change:
        pending_email = email_change.new_email
    else:
        pending_email = None

    if request.method == "POST":
        if request.form.get("form-name") == "update-email":
            if change_email_form.validate():
                # whether user can proceed with the email update
                new_email_valid = True
                if (sanitize_email(change_email_form.email.data) !=
                        current_user.email and not pending_email):
                    new_email = sanitize_email(change_email_form.email.data)

                    # check if this email is not already used
                    if personal_email_already_used(new_email) or Alias.get_by(
                            email=new_email):
                        flash(f"Email {new_email} already used", "error")
                        new_email_valid = False
                    elif not email_can_be_used_as_mailbox(new_email):
                        flash(
                            "You cannot use this email address as your personal inbox.",
                            "error",
                        )
                        new_email_valid = False
                    # a pending email change with the same email exists from another user
                    elif EmailChange.get_by(new_email=new_email):
                        other_email_change: EmailChange = EmailChange.get_by(
                            new_email=new_email)
                        LOG.warning(
                            "Another user has a pending %s with the same email address. Current user:%s",
                            other_email_change,
                            current_user,
                        )

                        if other_email_change.is_expired():
                            LOG.d("delete the expired email change %s",
                                  other_email_change)
                            EmailChange.delete(other_email_change.id)
                            db.session.commit()
                        else:
                            flash(
                                "You cannot use this email address as your personal inbox.",
                                "error",
                            )
                            new_email_valid = False

                    if new_email_valid:
                        email_change = EmailChange.create(
                            user_id=current_user.id,
                            code=random_string(
                                60),  # todo: make sure the code is unique
                            new_email=new_email,
                        )
                        db.session.commit()
                        send_change_email_confirmation(current_user,
                                                       email_change)
                        flash(
                            "A confirmation email is on the way, please check your inbox",
                            "success",
                        )
                        return redirect(url_for("dashboard.setting"))
        if request.form.get("form-name") == "update-profile":
            if form.validate():
                profile_updated = False
                # update user info
                if form.name.data != current_user.name:
                    current_user.name = form.name.data
                    db.session.commit()
                    profile_updated = True

                if form.profile_picture.data:
                    file_path = random_string(30)
                    file = File.create(user_id=current_user.id, path=file_path)

                    s3.upload_from_bytesio(
                        file_path, BytesIO(form.profile_picture.data.read()))

                    db.session.flush()
                    LOG.d("upload file %s to s3", file)

                    current_user.profile_picture_id = file.id
                    db.session.commit()
                    profile_updated = True

                if profile_updated:
                    flash("Your profile has been updated", "success")
                    return redirect(url_for("dashboard.setting"))

        elif request.form.get("form-name") == "change-password":
            flash(
                "You are going to receive an email containing instructions to change your password",
                "success",
            )
            send_reset_password_email(current_user)
            return redirect(url_for("dashboard.setting"))

        elif request.form.get("form-name") == "notification-preference":
            choose = request.form.get("notification")
            if choose == "on":
                current_user.notification = True
            else:
                current_user.notification = False
            db.session.commit()
            flash("Your notification preference has been updated", "success")
            return redirect(url_for("dashboard.setting"))

        elif request.form.get("form-name") == "delete-account":
            # Schedule delete account job
            LOG.warning("schedule delete account job for %s", current_user)
            Job.create(
                name=JOB_DELETE_ACCOUNT,
                payload={"user_id": current_user.id},
                run_at=arrow.now(),
                commit=True,
            )

            flash(
                "Your account deletion has been scheduled. "
                "You'll receive an email when the deletion is finished",
                "success",
            )
            return redirect(url_for("dashboard.setting"))

        elif request.form.get("form-name") == "change-alias-generator":
            scheme = int(request.form.get("alias-generator-scheme"))
            if AliasGeneratorEnum.has_value(scheme):
                current_user.alias_generator = scheme
                db.session.commit()
            flash("Your preference has been updated", "success")
            return redirect(url_for("dashboard.setting"))

        elif request.form.get(
                "form-name") == "change-random-alias-default-domain":
            default_domain = request.form.get("random-alias-default-domain")

            if default_domain:
                sl_domain: SLDomain = SLDomain.get_by(domain=default_domain)
                if sl_domain:
                    if sl_domain.premium_only and not current_user.is_premium(
                    ):
                        flash("You cannot use this domain", "error")
                        return redirect(url_for("dashboard.setting"))

                    current_user.default_alias_public_domain_id = sl_domain.id
                    current_user.default_alias_custom_domain_id = None
                else:
                    custom_domain = CustomDomain.get_by(domain=default_domain)
                    if custom_domain:
                        # sanity check
                        if (custom_domain.user_id != current_user.id
                                or not custom_domain.verified):
                            LOG.exception("%s cannot use domain %s",
                                          current_user, default_domain)
                        else:
                            current_user.default_alias_custom_domain_id = (
                                custom_domain.id)
                            current_user.default_alias_public_domain_id = None

            else:
                current_user.default_alias_custom_domain_id = None
                current_user.default_alias_public_domain_id = None

            db.session.commit()
            flash("Your preference has been updated", "success")
            return redirect(url_for("dashboard.setting"))

        elif request.form.get("form-name") == "random-alias-suffix":
            scheme = int(request.form.get("random-alias-suffix-generator"))
            if AliasSuffixEnum.has_value(scheme):
                current_user.random_alias_suffix = scheme
                db.session.commit()
            flash("Your preference has been updated", "success")
            return redirect(url_for("dashboard.setting"))

        elif request.form.get("form-name") == "change-sender-format":
            sender_format = int(request.form.get("sender-format"))
            if SenderFormatEnum.has_value(sender_format):
                current_user.sender_format = sender_format
                current_user.sender_format_updated_at = arrow.now()
                db.session.commit()
                flash("Your sender format preference has been updated",
                      "success")
            return redirect(url_for("dashboard.setting"))

        elif request.form.get("form-name") == "replace-ra":
            choose = request.form.get("replace-ra")
            if choose == "on":
                current_user.replace_reverse_alias = True
            else:
                current_user.replace_reverse_alias = False
            db.session.commit()
            flash("Your preference has been updated", "success")
            return redirect(url_for("dashboard.setting"))

        elif request.form.get("form-name") == "sender-in-ra":
            choose = request.form.get("enable")
            if choose == "on":
                current_user.include_sender_in_reverse_alias = True
            else:
                current_user.include_sender_in_reverse_alias = False
            db.session.commit()
            flash("Your preference has been updated", "success")
            return redirect(url_for("dashboard.setting"))

        elif request.form.get("form-name") == "expand-alias-info":
            choose = request.form.get("enable")
            if choose == "on":
                current_user.expand_alias_info = True
            else:
                current_user.expand_alias_info = False
            db.session.commit()
            flash("Your preference has been updated", "success")
            return redirect(url_for("dashboard.setting"))

        elif request.form.get("form-name") == "export-data":
            return redirect(url_for("api.export_data"))
        elif request.form.get("form-name") == "export-alias":
            return redirect(url_for("api.export_aliases"))

    manual_sub = ManualSubscription.get_by(user_id=current_user.id)
    apple_sub = AppleSubscription.get_by(user_id=current_user.id)
    coinbase_sub = CoinbaseSubscription.get_by(user_id=current_user.id)

    return render_template(
        "dashboard/setting.html",
        form=form,
        PlanEnum=PlanEnum,
        SenderFormatEnum=SenderFormatEnum,
        promo_form=promo_form,
        change_email_form=change_email_form,
        pending_email=pending_email,
        AliasGeneratorEnum=AliasGeneratorEnum,
        manual_sub=manual_sub,
        apple_sub=apple_sub,
        coinbase_sub=coinbase_sub,
        FIRST_ALIAS_DOMAIN=FIRST_ALIAS_DOMAIN,
        ALIAS_RAND_SUFFIX_LENGTH=ALIAS_RANDOM_SUFFIX_LENGTH,
    )
Example #39
def domain_detail(custom_domain_id):
    custom_domain: CustomDomain = CustomDomain.get(custom_domain_id)
    mailboxes = current_user.mailboxes()

    if not custom_domain or custom_domain.user_id != current_user.id:
        flash("You cannot see this page", "warning")
        return redirect(url_for("dashboard.index"))

    if request.method == "POST":
        if request.form.get("form-name") == "switch-catch-all":
            custom_domain.catch_all = not custom_domain.catch_all
            Session.commit()

            if custom_domain.catch_all:
                flash(
                    f"The catch-all has been enabled for {custom_domain.domain}",
                    "success",
                )
            else:
                flash(
                    f"The catch-all has been disabled for {custom_domain.domain}",
                    "warning",
                )
            return redirect(
                url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
            )
        elif request.form.get("form-name") == "set-name":
            if request.form.get("action") == "save":
                custom_domain.name = request.form.get("alias-name").replace("\n", "")
                Session.commit()
                flash(
                    f"Default alias name for Domain {custom_domain.domain} has been set",
                    "success",
                )
            else:
                custom_domain.name = None
                Session.commit()
                flash(
                    f"Default alias name for Domain {custom_domain.domain} has been removed",
                    "info",
                )

            return redirect(
                url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
            )
        elif request.form.get("form-name") == "switch-random-prefix-generation":
            custom_domain.random_prefix_generation = (
                not custom_domain.random_prefix_generation
            )
            Session.commit()

            if custom_domain.random_prefix_generation:
                flash(
                    f"Random prefix generation has been enabled for {custom_domain.domain}",
                    "success",
                )
            else:
                flash(
                    f"Random prefix generation has been disabled for {custom_domain.domain}",
                    "warning",
                )
            return redirect(
                url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
            )
        elif request.form.get("form-name") == "update":
            mailbox_ids = request.form.getlist("mailbox_ids")
            # check that the mailboxes have not been tampered with
            mailboxes = []
            for mailbox_id in mailbox_ids:
                mailbox = Mailbox.get(mailbox_id)
                if (
                    not mailbox
                    or mailbox.user_id != current_user.id
                    or not mailbox.verified
                ):
                    flash("Something went wrong, please retry", "warning")
                    return redirect(
                        url_for(
                            "dashboard.domain_detail", custom_domain_id=custom_domain.id
                        )
                    )
                mailboxes.append(mailbox)

            if not mailboxes:
                flash("You must select at least 1 mailbox", "warning")
                return redirect(
                    url_for(
                        "dashboard.domain_detail", custom_domain_id=custom_domain.id
                    )
                )

            # first remove all existing domain-mailboxes links
            DomainMailbox.filter_by(domain_id=custom_domain.id).delete()
            Session.flush()

            for mailbox in mailboxes:
                DomainMailbox.create(domain_id=custom_domain.id, mailbox_id=mailbox.id)

            Session.commit()
            flash(f"{custom_domain.domain} mailboxes has been updated", "success")

            return redirect(
                url_for("dashboard.domain_detail", custom_domain_id=custom_domain.id)
            )

        elif request.form.get("form-name") == "delete":
            name = custom_domain.domain
            LOG.d("Schedule deleting %s", custom_domain)

            # Schedule delete domain job
            LOG.w("schedule delete domain job for %s", custom_domain)
            Job.create(
                name=JOB_DELETE_DOMAIN,
                payload={"custom_domain_id": custom_domain.id},
                run_at=arrow.now(),
                commit=True,
            )

            flash(
                f"{name} scheduled for deletion. "
                f"You will receive a confirmation email when the deletion is finished",
                "success",
            )

            if custom_domain.is_sl_subdomain:
                return redirect(url_for("dashboard.subdomain_route"))
            else:
                return redirect(url_for("dashboard.custom_domain"))

    nb_alias = Alias.filter_by(custom_domain_id=custom_domain.id).count()

    return render_template("dashboard/domain_detail/info.html", **locals())
Exemple #40
    def get_data(self, user, password, dn_no, emp_no):
        ora = ReadConf().ora()
        login = Login_Postgres(user=user, password=password)
        is_login = json.loads(login.login().decode('utf-8'))
        if is_login['login'] == 'True' and (
                '|csdplan|hrconnect|hr|line'.find(user) > 0):
            conn = None
            data = {}
            try:

                # ---- 1 Check whether the DN already exists in postgresql12 dn_timestamp -------------------------------------
                # ------   1.1 If it does, read dn_timestamp.md5 (every row must hold the same value)
                # ------   1.2 If it has never existed, compute md5 from dn+':'+dn_date, e.g. 2003110093:11/03/2020
                # ------ pg = ReadConf().postgres12()  # new server postgresql v 12

                # ---- 0 Find the related DN_CHAIN via tonkm_package.find_dn_chain(dn_no) and cut the string to take the first dn
                dsn_tns = cx_Oracle.makedsn(ora['server'], ora['port'],
                                            ora['service'])
                ora_conn = cx_Oracle.connect(ora['user'],
                                             ora['password'],
                                             dsn_tns,
                                             encoding="UTF-8")
                ora_cursor = ora_conn.cursor()
                ora_qryStr = ReadConf().find_dn_dhain()['Query']
                parameters = {'dn_no': dn_no}
                ora_cursor.execute(ora_qryStr, parameters)
                row = ora_cursor.fetchone()
                first_dn = row[0] if row and row[0] is not None else ' '
                # print('first_dn {}'.format(first_dn))
                pg = ReadConf().postgres()
                conn = psycopg2.connect(host=pg['server'],
                                        port=pg['port'],
                                        database=pg['database'],
                                        user=user,
                                        password=password)
                cursor = conn.cursor()
                qryStr = ReadConf().qry_in_out()['Query']
                qryStr = qryStr.replace('{{dn_no}}', first_dn)
                qryStr = qryStr.replace('{{emp_no}}', emp_no)
                cursor.execute(qryStr)
                result = cursor.fetchone()
                if result is None:
                    t = arrow.now()
                    dn_no_date = dn_no + ':' + t.format('DD/MM/YYYY')
                    m = hashlib.md5(dn_no_date.encode())
                    md5 = m.hexdigest()
                    last_dn_order = 5
                    last_in_out = '0'
                else:
                    md5 = result[0]
                    last_dn_order = result[1]
                    last_in_out = result[2]

                # ------ 2 Find the route sequence for time-stamping goods loading and unloading --------------------------------------
                dsn_tns = cx_Oracle.makedsn(ora['server'], ora['port'],
                                            ora['service'])
                # NOTE: this rebinds `conn` from the Postgres connection to Oracle;
                # only this Oracle connection is closed in the finally block below.
                conn = cx_Oracle.connect(ora['user'],
                                         ora['password'],
                                         dsn_tns,
                                         encoding="UTF-8")
                conn.autocommit = False
                cursor = conn.cursor()
                cursor.execute(
                    "ALTER SESSION SET NLS_DATE_FORMAT = 'DD/MM/YYYY'")
                cursor.callproc('tonkm_package.QRY_ALLOWANCE_BY_DN', [dn_no])
                qryStr = ReadConf().qryForTimestamp()['Query']
                cursor.execute(qryStr)
                data = collections.OrderedDict()
                emp = collections.OrderedDict()
                dn = []
                i = 0
                records = cursor.fetchall()
                for row in records:
                    emp['EMP_NO'] = row[0]
                    emp['ID_CARD'] = row[1]
                    emp['EMP_NAME'] = row[2]
                    t = collections.OrderedDict()
                    t['TRUCK_NO'] = row[3]
                    t['ENERGY_TYPE'] = row[4]
                    t['JOB_NO'] = row[5]
                    t['DN_NO'] = row[6]
                    t['DN_ORDER'] = row[7]
                    t['DN_DATE'] = row[8]
                    t['MD5'] = md5
                    t['SOURCE_POINT'] = row[9]
                    t['SOURCE_NAME'] = row[10]
                    t['SOURCE_ADDR'] = row[15]
                    t['SOURCE_ADDR3'] = row[11]
                    t['LAT_LNG'] = row[12]
                    t['REMARK'] = row[13]
                    # ---- Find max(IN_OUT) of (postgres12) dn_timestamp.IN_OUT where dn_no and source_point match
                    #  -- row[7] is DN_ORDER
                    if last_in_out == '0':
                        if row[7] == last_dn_order:
                            t['IN'] = True
                            t['OUT'] = False
                        else:
                            t['IN'] = False
                            t['OUT'] = False
                    elif last_in_out == '1':
                        if row[7] < last_dn_order:
                            t['IN'] = False
                            t['OUT'] = False
                        elif row[7] == last_dn_order:
                            t['IN'] = False
                            t['OUT'] = True
                        else:
                            t['IN'] = False
                            t['OUT'] = False
                    elif last_in_out == '2':
                        if row[7] < last_dn_order:
                            t['IN'] = False
                            t['OUT'] = False
                            i = i + 1
                        elif row[7] == last_dn_order:
                            t['IN'] = False
                            t['OUT'] = False
                            i = i + 1
                        elif row[7] > last_dn_order:
                            if i > 0:
                                t['IN'] = True
                                t['OUT'] = False
                                i = 0
                            else:
                                t['IN'] = False
                                t['OUT'] = False
                    else:
                        t['IN'] = False
                        t['OUT'] = False
                    dn.append(t)
                emp['DN'] = dn
                cursor.close()
                return json.dumps(emp, indent=" ",
                                  ensure_ascii=False).encode('utf-8')
            except cx_Oracle.DatabaseError as e:
                data['driver'] = e.args[0].message
                # 'ไม่พบข้อมูล' = 'no data found'
                data['truck_no'] = 'ไม่พบข้อมูล'
                data['dn_chain'] = 'ไม่พบข้อมูล'
                data['source_point'] = 'ไม่พบข้อมูล'
                data['receiver'] = 'ไม่พบข้อมูล'
                data['ton_km'] = 0
                data['fuel_quan'] = 0
                return json.dumps(data, indent=" ",
                                  ensure_ascii=False).encode('utf-8')
            finally:
                if conn is not None:
                    conn.commit()
                    conn.close()
        else:
            return json.dumps({
                'login': '******'
            }).encode('utf-8')
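
The fallback keying above (taken when no dn_timestamp row exists) reduces to hashing dn_no plus the current date; a minimal sketch of just that step (dn_md5 is an illustrative name, not from the source):

import hashlib
import arrow

def dn_md5(dn_no, when=None):
    # md5 over "dn_no:DD/MM/YYYY", e.g. "2003110093:11/03/2020"
    t = when or arrow.now()
    return hashlib.md5('{}:{}'.format(dn_no, t.format('DD/MM/YYYY')).encode()).hexdigest()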
Exemple #41
# Requires: arrow, datetime.timedelta and random.randrange to be imported.
def randtime():
    # A random moment within the past 24 hours (86400 seconds).
    return arrow.now() - timedelta(seconds=randrange(86400))
Exemple #42
    def __init__(self, email, host='localhost', port=27017, dbn='slavem', username=None, password=None, serverChan=None,
                 loggingconf=None, ):
        """
        :param host:
        :param port:
        :param dbn:
        :param username:
        :param password:
        :param serverChan:
        :param loggingconf: logging configuration Dict()
        """
        now = arrow.now()
        self.mongoSetting = {
            'host': host,
            'port': port,
            'dbn': dbn,
            'username': username,
            'password': password,
        }

        self.log = logging.getLogger()
        self.initLog(loggingconf)

        # serverChan reporting URLs
        # self.serverChan = serverChan or {}
        # if self.serverChan:
        #     for account, url in self.serverChan.items():
        #         serverChanUrl = requests.get(url).text
        #         self.serverChan[account] = serverChanUrl
        # else:
        #     self.log.warning(u'no serverChan url configured')

        self.email = EMail(serverChan=serverChan, **email)

        self.mongourl = 'mongodb://{username}:{password}@{host}:{port}/{dbn}?authMechanism=SCRAM-SHA-1'.format(
            **self.mongoSetting)

        self.__active = False
        self._inited = False

        # Next time to check whether tasks have completed
        self.nextWatchTime = now

        # Next heartbeat check time
        self.nextCheckHeartBeatTime = now
        self.nextRemoveOutdateReportTime = now

        # Signals that shut the service down
        for sig in [signal.SIGINT,  # Ctrl-C keyboard interrupt
                    signal.SIGHUP,  # shutdown signal from a nohup-daemonized session
                    signal.SIGTERM,  # signal sent by `kill pid` on the command line
                    ]:
            signal.signal(sig, self.shutdown)

        self.authed = False

        # Periodically check the logs for LEVEL >= WARNING
        self.threadWarningLog = Thread(target=self.logWarning, name='logWarning')
        self.lastWarningLogTime = now

        logMongoConf = loggingconf['handlers']['mongo']
        self.logDB = MongoClient(
            logMongoConf['host'],
            logMongoConf['port'],
        )[logMongoConf['database_name']]
        self.logDB.authenticate(logMongoConf['username'], logMongoConf['password'])

        # Initialize the log collection
        self.initLogCollection()
Exemple #43
    def checkTask(self):
        """
        Tasks have reached their check time; start checking them.
        :return:
        """
        # Collect all task instances whose deadline has arrived

        taskList = []
        firstLanuchTime = None
        now = arrow.now()
        for t in self.tasks:
            assert isinstance(t, Task)
            if now >= t.deadline:
                taskList.append(t)
                try:
                    # keep the earliest launch time among the due tasks
                    if t.lanuchTime < firstLanuchTime:
                        firstLanuchTime = t.lanuchTime
                except TypeError:
                    firstLanuchTime = t.lanuchTime

        self.log.info(u'querying launch reports with time > {}'.format(firstLanuchTime))

        # Query launch reports newer than firstLanuchTime
        sql = {
            'datetime': {
                '$gte': firstLanuchTime,
            }
        }

        reportCol = self.reportCollection
        cursor = reportCol.find(sql)

        if __debug__:
            self.log.debug(u'found {} reports'.format(cursor.count()))

        # Match the launch reports against the due tasks
        for report in cursor:
            try:
                for t in taskList:
                    assert isinstance(t, Task)
                    if t.isReport(report):
                        # Done; refresh the deadline
                        self.log.info(u'{} service launch completed {}'.format(t.name, t.lanuchTime))
                        if t.isLate:
                            # a late launch report still warrants a notification
                            self.noticeDealyReport(t)
                        t.finishAndRefresh()
                        taskList.remove(t)
                        break
            except Exception:
                self.log.error(traceback.format_exc())

        # Services that failed to launch on time
        for t in taskList:
            if t.isTimeToNoticeDelay():
                self.noticeUnreport(t)
                t.refreshLastDelayNoticeTime()

            # mark the task as having launched late
            t.setLate()
            # not reported yet: push the deadline back one minute
            t.delayDeadline()
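
checkTask leans on a Task API that this fragment does not include; the following is a minimal sketch of the methods it calls, reconstructed from the call sites above (field types, the matching rule and the shift amounts are assumptions):

import arrow

class Task(object):
    def __init__(self, name, lanuchTime, deadline):
        self.name = name
        self.lanuchTime = lanuchTime  # when the service was (re)started
        self.deadline = deadline      # when its launch report is due
        self.isLate = False

    def isReport(self, report):
        # placeholder matching rule; the real check compares report fields
        return report.get('name') == self.name

    def setLate(self):
        self.isLate = True

    def delayDeadline(self):
        # not reported yet: check again one minute later
        self.deadline = arrow.now().shift(minutes=1)

    def finishAndRefresh(self):
        # reported: reset state and schedule the next check
        self.isLate = False
        self.deadline = arrow.now().shift(days=1)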
Exemple #44
    ('2018-123', '2018-05-03 00:00:00'),  # ordinal dates
    ('2018-04-10 12:30:43', '2018-04-10 12:30:43'),
    ('2018-04-10T12:30:43', '2018-04-10 12:30:43'),
    ('2018-04-10 12:30:43Z', '2018-04-10 12:30:43'),
    ('2018-04-10 12:30:43.1233', '2018-04-10 12:30:43'),
    ('2018-04-10 12:30:43+03:00', '2018-04-10 12:30:43'),
    ('2018-04-10 12:30:43-07:00', '2018-04-10 12:30:43'),
    ('2018-04-10T12:30:43-07:00', '2018-04-10 12:30:43'),
    ('2018-04-10 12:30', '2018-04-10 12:30:00'),
    ('2018-04-10T12:30', '2018-04-10 12:30:00'),
    ('2018-04-10 12', '2018-04-10 12:00:00'),
    ('2018-04-10T12', '2018-04-10 12:00:00'),
    (
        '14:05:12',
        arrow.now()
        .replace(hour=14, minute=5, second=12)
        .format('YYYY-MM-DD HH:mm:ss')
    ),
    (
        '14:05',
        arrow.now()
        .replace(hour=14, minute=5, second=0)
        .format('YYYY-MM-DD HH:mm:ss')
    ),
    ('2018-W08', '2018-02-19 00:00:00'),  # week dates
    ('2018W08', '2018-02-19 00:00:00'),
    ('2018-W08-2', '2018-02-20 00:00:00'),
    ('2018W082', '2018-02-20 00:00:00'),
]
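
These pairs read like fixtures for a parametrized parser test; a minimal sketch of how they would be consumed, assuming pytest, that the list above is bound to a name like VALID_DATES_DATA, and a hypothetical parse_datetime under test:

import pytest

@pytest.mark.parametrize('given, expected', VALID_DATES_DATA)
def test_parses_valid_dates(given, expected):
    assert parse_datetime(given).format('YYYY-MM-DD HH:mm:ss') == expected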

INVALID_DATES_DATA = [
Exemple #45
def should_disable(alias: Alias) -> bool:
    """Disable an alias if it has too many bounces recently"""
    # Bypass the bounce rule
    if alias.cannot_be_disabled:
        LOG.warning("%s cannot be disabled", alias)
        return False

    yesterday = arrow.now().shift(days=-1)
    nb_bounced_last_24h = (
        db.session.query(EmailLog)
        .join(Contact, EmailLog.contact_id == Contact.id)
        .filter(
            EmailLog.bounced.is_(True),
            EmailLog.is_reply.is_(False),
            EmailLog.created_at > yesterday,
        )
        .filter(Contact.alias_id == alias.id)
        .count()
    )
    # if more than 12 bounces in 24h -> disable alias
    if nb_bounced_last_24h > 12:
        LOG.d("more than 12 bounces in the last 24h, disable alias %s", alias)
        return True

    # if more than 5 bounces but has bounces last week -> disable alias
    elif nb_bounced_last_24h > 5:
        one_week_ago = arrow.now().shift(days=-8)
        nb_bounced_7d_1d = (
            db.session.query(EmailLog)
            .join(Contact, EmailLog.contact_id == Contact.id)
            .filter(
                EmailLog.bounced.is_(True),
                EmailLog.is_reply.is_(False),
                EmailLog.created_at > one_week_ago,
                EmailLog.created_at < yesterday,
            )
            .filter(Contact.alias_id == alias.id)
            .count()
        )
        if nb_bounced_7d_1d > 1:
            LOG.debug(
                "more than 5 bounces in the last 24h and more than 1 bounces in the last 7 days, "
                "disable alias %s",
                alias,
            )
            return True
    else:
        # if bounces at least 9 days in the last 10 days -> disable alias
        query = (
            db.session.query(
                func.date(EmailLog.created_at).label("date"),
                func.count(EmailLog.id).label("count"),
            )
            .join(Contact, EmailLog.contact_id == Contact.id)
            .filter(Contact.alias_id == alias.id)
            .filter(
                EmailLog.created_at > arrow.now().shift(days=-10),
                EmailLog.bounced.is_(True),
                EmailLog.is_reply.is_(False),
            )
            .group_by("date")
        )

        if query.count() >= 9:
            LOG.d(
                "Bounces every day for at least 9 days in the last 10 days, disable alias %s",
                alias,
            )
            return True

    return False
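
A hedged sketch of a call site for this predicate (handle_bounce and the enabled flag are illustrative; the source only shows should_disable itself):

def handle_bounce(alias: Alias) -> None:
    # flip the alias off once the bounce thresholds above are crossed
    if should_disable(alias):
        alias.enabled = False
        db.session.commit()
        LOG.warning("disable alias %s after repeated bounces", alias)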
Exemple #46
    def paddle():
        LOG.debug(
            "paddle callback %s %s %s %s %s",
            request.form.get("alert_name"),
            request.form.get("email"),
            request.form.get("customer_name"),
            request.form.get("subscription_id"),
            request.form.get("subscription_plan_id"),
        )

        # make sure the request comes from Paddle
        if not paddle_utils.verify_incoming_request(dict(request.form)):
            LOG.error("request not coming from paddle. Request data:%s",
                      dict(request.form))
            return "KO", 400

        if (request.form.get("alert_name") == "subscription_created"
            ):  # new user subscribes
            user_email = request.form.get("email")
            user = User.get_by(email=user_email)

            if (int(request.form.get("subscription_plan_id")) ==
                    PADDLE_MONTHLY_PRODUCT_ID):
                plan = PlanEnum.monthly
            else:
                plan = PlanEnum.yearly

            sub = Subscription.get_by(user_id=user.id)

            if not sub:
                LOG.d("create a new sub")
                Subscription.create(
                    user_id=user.id,
                    cancel_url=request.form.get("cancel_url"),
                    update_url=request.form.get("update_url"),
                    subscription_id=request.form.get("subscription_id"),
                    event_time=arrow.now(),
                    next_bill_date=arrow.get(
                        request.form.get("next_bill_date"),
                        "YYYY-MM-DD").date(),
                    plan=plan,
                )
            else:
                LOG.d("update existing sub %s", sub)
                sub.cancel_url = request.form.get("cancel_url")
                sub.update_url = request.form.get("update_url")
                sub.subscription_id = request.form.get("subscription_id")
                sub.event_time = arrow.now()
                sub.next_bill_date = arrow.get(
                    request.form.get("next_bill_date"), "YYYY-MM-DD").date()
                sub.plan = plan

            LOG.debug("User %s upgrades!", user)

            db.session.commit()

        elif request.form.get("alert_name") == "subscription_updated":
            subscription_id = request.form.get("subscription_id")
            LOG.debug("Update subscription %s", subscription_id)

            sub: Subscription = Subscription.get_by(
                subscription_id=subscription_id)
            sub.event_time = arrow.now()
            sub.next_bill_date = arrow.get(request.form.get("next_bill_date"),
                                           "YYYY-MM-DD").date()

            db.session.commit()

        elif request.form.get("alert_name") == "subscription_cancelled":
            subscription_id = request.form.get("subscription_id")
            LOG.debug("Cancel subscription %s", subscription_id)

            sub: Subscription = Subscription.get_by(
                subscription_id=subscription_id)
            sub.cancelled = True

            db.session.commit()

        return "OK"
Exemple #47
def fake_data():
    LOG.d("create fake data")
    # Remove db if exist
    if os.path.exists("db.sqlite"):
        LOG.d("remove existing db file")
        os.remove("db.sqlite")

    # Create all tables
    db.create_all()

    # Create a user
    user = User.create(
        email="*****@*****.**",
        name="John Wick",
        password="******",
        activated=True,
        is_admin=True,
        otp_secret="base32secret3232",
    )
    db.session.commit()

    # Create a subscription for user
    Subscription.create(
        user_id=user.id,
        cancel_url="https://checkout.paddle.com/subscription/cancel?user=1234",
        update_url="https://checkout.paddle.com/subscription/update?user=1234",
        subscription_id="123",
        event_time=arrow.now(),
        next_bill_date=arrow.now().shift(days=10).date(),
        plan=PlanEnum.monthly,
    )
    db.session.commit()

    api_key = ApiKey.create(user_id=user.id, name="Chrome")
    api_key.code = "code"

    GenEmail.create_new(user.id, "e1@")
    GenEmail.create_new(user.id, "e2@")
    GenEmail.create_new(user.id, "e3@")

    CustomDomain.create(user_id=user.id, domain="ab.cd", verified=True)
    CustomDomain.create(user_id=user.id,
                        domain="very-long-domain.com.net.org",
                        verified=True)
    db.session.commit()

    # Create a client
    client1 = Client.create_new(name="Demo", user_id=user.id)
    client1.oauth_client_id = "client-id"
    client1.oauth_client_secret = "client-secret"
    client1.published = True
    db.session.commit()

    RedirectUri.create(client_id=client1.id, uri="https://ab.com")

    client2 = Client.create_new(name="Demo 2", user_id=user.id)
    client2.oauth_client_id = "client-id2"
    client2.oauth_client_secret = "client-secret2"
    client2.published = True
    db.session.commit()

    db.session.commit()
Exemple #48
    buffer.append({
        'rank': src['data_result_rank'],
        'image': src['image_main'],
        'node_id': src['node'],
        'asin': src['asin'],
        'title': src['title'],
        'price': src['price'],
        'offers_count': src['offers_count'],
        'review_count': src['review_count'],
        'brand': src['brand'],
        'seller': src['seller'],
        'fulfillment_channel': src['fulfillment_channel'],
        'best_sellers_rank': best_sellers_rank,
        'best_sellers_category': best_sellers_category,
        'image_main': src['image_main'],
        'scraped_time': arrow.now().datetime,
        'country': src['country']
    })
    if len(buffer) == size:
        session.bulk_insert_mappings(AmazonProduct.__mapper__, buffer)
        session.commit()
        written += len(buffer)
        buffer = []
        print('written %s entries' % written)
session.bulk_insert_mappings(AmazonProduct.__mapper__, buffer)
session.commit()
written += len(buffer)
buffer = []
print('written %s entries' % written)
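
The buffer-and-flush pattern above batches rows into bulk_insert_mappings once the buffer reaches size; the same technique as a self-contained helper (names are placeholders):

def bulk_write(session, mapper, rows, size=500):
    # insert rows in chunks of `size` to bound memory and round-trips
    buffer = []
    written = 0
    for row in rows:
        buffer.append(row)
        if len(buffer) >= size:
            session.bulk_insert_mappings(mapper, buffer)
            session.commit()
            written += len(buffer)
            buffer = []
    if buffer:  # flush the final partial chunk
        session.bulk_insert_mappings(mapper, buffer)
        session.commit()
        written += len(buffer)
    return written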
Exemple #49
def inject_stage_and_region():
    return dict(YEAR=arrow.now().year,
                URL=URL,
                SENTRY_DSN=SENTRY_DSN,
                VERSION=SHA1)
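
This reads like a Flask context processor that exposes YEAR, URL, SENTRY_DSN and VERSION to every template; a hedged sketch of the wiring (the app object is an assumption):

app.context_processor(inject_stage_and_region)
# Jinja templates can then reference {{ YEAR }}, {{ VERSION }}, etc.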
Exemple #50
def tim_now_str():
    dss = arrow.now().format('YYYY-MM-DD HH:mm:ss')
    return dss
Exemple #51
    def __init__(self,
                 newsletter_id=None,
                 newsletter_id_name=None,
                 config=None,
                 email_config=None,
                 start_date=None,
                 end_date=None,
                 subject=None,
                 body=None,
                 message=None,
                 email_msg_id=None,
                 email_reply_msg_id=None):
        self.config = self.set_config(config=config,
                                      default=self._DEFAULT_CONFIG)
        self.email_config = self.set_config(config=email_config,
                                            default=self._DEFAULT_EMAIL_CONFIG)
        self.uuid = generate_newsletter_uuid()

        self.email_msg_id = email_msg_id
        self.email_reply_msg_id = email_reply_msg_id

        self.newsletter_id = newsletter_id
        self.newsletter_id_name = newsletter_id_name or ''
        self.start_date = None
        self.end_date = None

        if end_date:
            try:
                self.end_date = arrow.get(end_date,
                                          'YYYY-MM-DD',
                                          tzinfo='local').ceil('day')
            except ValueError:
                pass

        if self.end_date is None:
            self.end_date = arrow.now()

        if start_date:
            try:
                self.start_date = arrow.get(start_date,
                                            'YYYY-MM-DD',
                                            tzinfo='local').floor('day')
            except ValueError:
                pass

        if self.start_date is None:
            if self.config['time_frame_units'] == 'days':
                self.start_date = self.end_date.shift(
                    days=-self.config['time_frame'] + 1).floor('day')
            else:
                self.start_date = self.end_date.shift(
                    hours=-self.config['time_frame']).floor('hour')

        self.end_time = self.end_date.timestamp
        self.start_time = self.start_date.timestamp

        self.parameters = self.build_params()
        self.subject = subject or self._DEFAULT_SUBJECT
        self.body = body or self._DEFAULT_BODY
        self.message = message or self._DEFAULT_MESSAGE
        self.filename = self.config['filename'] or self._DEFAULT_FILENAME

        if not self.filename.endswith('.html'):
            self.filename += '.html'

        self.subject_formatted, self.body_formatted, self.message_formatted = self.build_text()
        self.filename_formatted = self.build_filename()

        self.data = {}
        self.newsletter = None

        self.is_preview = False
        self.template_error = None
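
A quick illustration of the start/end window logic above, assuming no explicit dates, time_frame=7 and time_frame_units='days':

end_date = arrow.now()                                 # fallback when end_date is None
start_date = end_date.shift(days=-7 + 1).floor('day')  # 7 calendar days, inclusive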
Exemple #52
    def _timestamp(self):
        # Unix timestamps for midnight yesterday and midnight today (arrow.get builds UTC midnights)
        d1 = arrow.now().shift(days=-1)
        d2 = arrow.now()
        d1 = arrow.get(d1.year, d1.month, d1.day).timestamp
        d2 = arrow.get(d2.year, d2.month, d2.day).timestamp
        return d1, d2
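
The same day-boundary timestamps can be had more directly with arrow's floor; an equivalent sketch, except that floor keeps the local timezone while arrow.get(year, month, day) above builds UTC midnights:

def _timestamp(self):
    # midnight yesterday and midnight today as Unix timestamps
    today = arrow.now().floor('day')
    return today.shift(days=-1).timestamp, today.timestamp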
Exemple #53
def is_timezone_ahead_of_utc(tz):
    offset = arrow.now(tz).utcoffset()
    # Zones at or ahead of UTC have offset.days == 0; zones behind UTC, e.g. US/Pacific, have -1
    return offset.days == 0
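
Usage, assuming standard tz database names:

assert is_timezone_ahead_of_utc('Asia/Tokyo') is True    # UTC+9 -> offset.days == 0
assert is_timezone_ahead_of_utc('US/Pacific') is False   # UTC-8/-7 -> offset.days == -1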
Exemple #54
import pipelines
import simplejson as json
from models import FeedSources, Feeds
import feedparser
import time
from sqlalchemy.sql.expression import func
from cleanr import cleanr
import arrow

newsFeed = pipelines.NewsFeedPipeline()
session = newsFeed.Session()

srcs = session.query(Feeds).all()

for x in srcs:
    try:
        date = arrow.get(x.published, 'ddd, DD MMM YYYY HH:mm:ss Z')
    except arrow.parser.ParserError:
        try:
            date = arrow.get(x.published.replace('GMT', ''),
                             'ddd, DD MMM YYYY HH:mm:ss')
        except arrow.parser.ParserError:
            date = arrow.get(x.published)

    if date.format('DD-MM-YYYY') == arrow.now().format('DD-MM-YYYY'):
        print(date.format('DD-MM-YYYY'), '| [{0}] --'.format(x.source), x.title)
Exemple #55
CONCAT_KEYS = ['RANK_ROUND_MO', 'RANK_ROUND_YR']
RANK_KEY = 'EVAL_RANK'

KNACK_PARAMS = {
    'REFERENCE_OBJECTS': [REFERENCE_OBJECT],
    'FIELD_NAMES': [
        PRIMARY_KEY, RANK_KEY, STATUS_KEY, SCORE_KEY, 'RANK_ROUND_MO',
        'RANK_ROUND_YR', 'EXCLUDE_FROM_RANKING'
    ],
    'APPLICATION_ID': secrets.KNACK_CREDENTIALS['APP_ID'],
    'API_KEY': secrets.KNACK_CREDENTIALS['API_KEY']
}

now = arrow.now()


def main(date_time):

    try:
        field_dict = knack_helpers.get_fields(KNACK_PARAMS)

        field_lookup = knack_helpers.create_field_lookup(field_dict,
                                                         parse_raw=True)

        knack_data = knack_helpers.get_object_data(REFERENCE_OBJECT,
                                                   KNACK_PARAMS)

        knack_data = knack_helpers.parse_data(knack_data,
                                              field_dict,