def add_features(self):
    """Add the date-derived columns needed for the csv file output.

    Adds pub_week / pub_year (from the ISO calendar), pub_month, and the
    Monday of the publication week both as a date (pub_week_date) and as a
    'YYYY-MM-DD' string (pub_week_date_str).
    """
    def _week_monday(ts):
        # Monday of the week containing ts (weekday() == 0 is Monday).
        d = ts.date()
        return d - tdel(days=d.weekday())

    self.df['pub_date'] = pd.to_datetime(self.df['pub_date'])
    self.df['pub_week'] = self.df.pub_date.map(lambda ts: ts.isocalendar()[1])
    self.df['pub_year'] = self.df.pub_date.map(lambda ts: ts.isocalendar()[0])
    self.df['pub_month'] = self.df.pub_date.map(lambda ts: ts.month)
    self.df['pub_week_date'] = self.df.pub_date.map(_week_monday)
    self.df['pub_week_date_str'] = self.df.pub_date.map(
        lambda ts: _week_monday(ts).strftime("%Y-%m-%d"))
def read_date_lat_lon_file(acinfo_filename, date_fmt='str'):
    """Read an aircraft info file of dates, latitudes and longitudes.

    :param acinfo_filename: path to a comma-separated file whose first line
     is a header and whose data lines are "YYYY-MM-DD,lat,lon". Full-line
     '#' comments, trailing '#' comments, and blank lines are ignored.
    :param date_fmt: 'str' to return each date as a 'YYYYMMDD-YYYYMMDD'
     range covering that day and the next; 'datetime' to return
     datetime objects.
    :return: lists of longitudes, latitudes, and dates (in that order).
    :raises ValueError: if date_fmt is not 'str' or 'datetime'.
    """
    # Validate once up front rather than failing on the first data line.
    if date_fmt not in ('str', 'datetime'):
        raise ValueError('date_fmt must be either "str" or "datetime"')

    acdates = []
    aclats = []
    aclons = []
    with open(acinfo_filename, 'r') as acfile:
        # Skip line 1 - header
        acfile.readline()
        for line in acfile:
            # Drop any '#' comment (full-line or trailing) and whitespace;
            # skipping empty results also makes blank/trailing newlines safe.
            line = line.split('#')[0].strip()
            if not line:
                continue
            line_parts = line.split(',')
            date1 = dtime.strptime(line_parts[0], '%Y-%m-%d')
            if date_fmt == 'str':
                date2 = date1 + tdel(days=1)
                acdates.append(
                    date1.strftime('%Y%m%d') + '-' + date2.strftime('%Y%m%d'))
            else:
                acdates.append(date1)
            aclats.append(float(line_parts[1]))
            aclons.append(float(line_parts[2]))

    return aclons, aclats, acdates
def _prior_helper(ph_f, ph_out_dir, gas_rec):
    """Generate a single TCCON prior for one .mod file.

    :param ph_f: path to the input .mod file.
    :param ph_out_dir: directory the prior map is written to.
    :param gas_rec: the gas record passed through to the prior generator.
    """
    print('Processing {}, saving to {}'.format(os.path.basename(ph_f), ph_out_dir))
    # tdel(hours=0): no time offset applied to the profile.
    tccon_priors.generate_single_tccon_prior(
        ph_f,
        tdel(hours=0),
        gas_rec,
        write_map=ph_out_dir,
        use_eqlat_strat=True)
def iter_file_list(region, profile_mode, start_date, end_date):
    """Yield one BEHR file name per day from start_date to end_date inclusive.

    :param region: region code, upper-cased into the file name.
    :param profile_mode: profile mode, upper-cased into the file name.
    :param start_date: first date, as a 'YYYY-MM-DD' string.
    :param end_date: last date (inclusive), as a 'YYYY-MM-DD' string.
    """
    first_day = dtime.strptime(start_date, '%Y-%m-%d')
    last_day = dtime.strptime(end_date, '%Y-%m-%d')
    behr_version = get_current_behr_version()
    # Inclusive day count; a negative span yields nothing.
    for offset in range((last_day - first_day).days + 1):
        day = first_day + tdel(days=offset)
        yield name_fmt_string.format(profile_mode.upper(), region.upper(),
                                     behr_version, day.strftime('%Y%m%d'))
def _parse_time_string_dhms(time_str): """ Parse a time string in the format [Nd][Nh][Nm][Ns] into a timedelta. :param time_str: the time string to parse :type time_str: str :return: the corresponding time delta :rtype: `datetime.timedelta` """ parts = { 'days': re.compile(r'\d+(?=d)'), 'hours': re.compile(r'\d+(?=h)'), 'minutes': re.compile(r'\d+(?=m)'), 'seconds': re.compile(r'\d+(?=s)') } durations = dict() for part, regex in parts.items(): user_dur = regex.search(time_str) if user_dur is not None: durations[part] = int(user_dur.group()) return tdel(**durations)
def make_mod_files(acdates, aclons, aclats, geos_dir, out_dir, chem_dir=None, nprocs=0):
    """Generate GGG .mod files for a list of aircraft dates/locations.

    :param acdates: iterable of 'YYYYMMDD-YYYYMMDD' date-range strings, one per site.
    :param aclons: iterable of longitudes, parallel to acdates.
    :param aclats: iterable of latitudes, parallel to acdates.
    :param geos_dir: directory containing the GEOS met files (under an 'Nv' subdir).
    :param out_dir: directory the .mod files are written under.
    :param chem_dir: directory containing the GEOS chem files; defaults to geos_dir.
    :param nprocs: number of worker processes; 0 runs everything serially.
    """
    if chem_dir is None:
        chem_dir = geos_dir
    print('Will save to', out_dir)
    mod_dir = make_full_mod_dir(out_dir, 'fpit')
    print(' (Listing GEOS files...)')
    # Available dates are taken from the first 8-digit sequence in each file name.
    geos_files = sorted(glob(os.path.join(geos_dir, 'Nv', 'GEOS*.nc4')))
    geos_dates = set([
        dtime.strptime(re.search(r'\d{8}', f).group(), '%Y%m%d')
        for f in geos_files
    ])
    geos_chm_files = sorted(glob(os.path.join(chem_dir, 'Nv', 'GEOS*.nc4')))
    geos_chm_dates = set([
        dtime.strptime(re.search(r'\d{8}', f).group(), '%Y%m%d')
        for f in geos_chm_files
    ])
    mm_args = []
    print(' (Making list of .mod files to generate...)')
    for (dates, lon, lat) in zip(acdates, aclons, aclats):
        start_date, end_date = [
            dtime.strptime(d, '%Y%m%d') for d in dates.split('-')
        ]
        # Require both met and chem GEOS data for the start date before queueing.
        if start_date not in geos_dates or start_date not in geos_chm_dates:
            print('Cannot run {}, missing either met or chem GEOS data'.format(
                start_date))
            continue
        # Check the 8 expected 3-hourly files (00..21 UTC) for this day/location;
        # skip the whole site if every one already exists.
        files_complete = []
        for hr in range(0, 24, 3):
            date = start_date.replace(hour=hr)
            mod_file = mod_utils.mod_file_name('FPIT', date, tdel(hours=3), lat,
                                               lon, 'E' if lon > 0 else 'W',
                                               'N' if lat > 0 else 'S', out_dir,
                                               round_latlon=False, in_utc=True)
            files_complete.append(
                os.path.isfile(os.path.join(mod_dir, mod_file)))
        if all(files_complete) and len(files_complete) == 8:
            print('All files for {} at {}/{} complete, skipping'.format(
                dates, lon, lat))
            continue
        else:
            print('One or more files for {} at {}/{} needs generated'.format(
                dates, lon, lat))
        these_args = ([start_date, end_date], lon, lat, geos_dir, chem_dir,
                      out_dir, nprocs)
        mm_args.append(these_args)
    if nprocs == 0:
        print('Making .mod files in serial mode')
        for args in mm_args:
            mm_helper(*args)
    else:
        print('Making .mod file in parallel mode with {} processors'.format(
            nprocs))
        with Pool(processes=nprocs) as pool:
            pool.starmap(mm_helper, mm_args)
async def post():
    """Post newly-verified speedrun.com runs to Discord and close the client.

    Scans every board in ``boards`` for runs verified within the last
    15 minutes, builds a message for each (player, category with optional
    sub-category label, time, and leaderboard place for full-game runs) and
    sends it as an embed to the active channel. Per-board and per-run
    failures are reported to the test channel instead of aborting the scan.
    """
    channel = client.get_channel("******")   # active channel
    channel2 = client.get_channel("******")  # test channel
    # Only runs verified within the last 15 minutes are reported.
    t2 = dtime.utcnow() - tdel(minutes=15)
    new_runs = []
    for board in boards:
        try:
            cat = api.get("runs?category=" + board + "&max=500")
            for j in cat:
                if (j['status']['status'] == 'verified'
                        and j['status']['verify-date'] is not None):
                    vtime = dtime.strptime(j['status']['verify-date'],
                                           '%Y-%m-%dT%H:%M:%SZ')
                    if (vtime >= t2):
                        new_runs.append(j['id'])
        # Narrowed from a bare except so CancelledError/KeyboardInterrupt
        # still propagate out of this async task.
        except Exception:
            # Fixed missing space before "has" in the original message.
            await channel2.send("category " + board + " has been deleted")
    if len(new_runs) > 0:
        for i in new_runs:
            p = 0  # leaderboard place; 0 means not found on the board
            r = api.get("runs/" + i)
            n = api.get('users/' + r['players'][0]['id'])['names']['international']
            g = api.get('games/' + r['game'])['names']['international']
            c = g + " " + api.get('categories/' + r['category'])['name']
            if r['category'] in subcat:
                try:
                    # Append the sub-category label; the variable ids below
                    # are hard-coded per game family on speedrun.com.
                    if r['category'] in multiruns:
                        c = c + " " + api.get("variables/789k439l")['values'][
                            'values'][r['values']['789k439l']]['label']
                    if r['category'] in HP4:
                        c = c + " " + api.get("variables/j84k0x2n")['values'][
                            'values'][r['values']['j84k0x2n']]['label']
                    if r['category'] in HP5:
                        c = c + " " + api.get("variables/0nw7gk8q")['values'][
                            'values'][r['values']['0nw7gk8q']]['label']
                    if r['category'] in lego:
                        c = c + " " + api.get(
                            "variables/" + str(list(r['values'].keys())[0])
                        )['values']['values'][r['values'][str(
                            list(r['values'].keys())[0])]]['label']
                except Exception:
                    await channel2.send("run " + i + " has broken subcategories")
            t = convert(int(r['times']['primary_t'])).strftime('%H:%M:%S')
            if r['level'] is None:
                # Full-game run: find its place on the category leaderboard.
                b = api.get('leaderboards/' + r['game'] + '/category/' +
                            r['category'])
                for j in range(len(b['runs'])):
                    if b['runs'][j]['run']['id'] == i:
                        p = b['runs'][j]['place']
                if p == 1:
                    m = "<:GoldScar:619662499381379072> [" + n + "](" + api.get(
                        'users/' + r['players'][0]['id']
                    )['weblink'] + ") got a new WR in [" + c + "](" + api.get(
                        'categories/' + r['category']
                    )['weblink'] + ") with a time of [" + t + "](" + api.get(
                        "runs/" + i)['weblink'] + ")"
                elif p == 0:
                    m = "[" + n + "](" + api.get(
                        'users/' + r['players'][0]['id']
                    )['weblink'] + ") got a new PB in [" + c + "](" + api.get(
                        'categories/' + r['category']
                    )['weblink'] + ") with a time of [" + t + "](" + api.get(
                        "runs/" + i)['weblink'] + ")"
                else:
                    m = "[" + n + "](" + api.get(
                        'users/' + r['players'][0]['id']
                    )['weblink'] + ") got a new PB in [" + c + "](" + api.get(
                        'categories/' + r['category']
                    )['weblink'] + ") with a time of [" + t + "](" + api.get(
                        "runs/" + i)['weblink'] + ") [" + ordinal(p) + "]"
                # NOTE(review): the embed is only sent for full-game runs
                # (r['level'] is None); individual-level runs are skipped and
                # `m` is never bound for them — confirm this matches intent.
                em = discord.Embed(colour=discord.Colour(0xffd700),
                                   url="https://discordapp.com",
                                   description=m)
                await channel.send(embed=em)
    await client.close()
Timesheet logging Script Created on Mon Oct 8 16:47:29 2018 @author: j_osborne """
##Head
#required imports
from datetime import datetime as dt
from datetime import timedelta as tdel
#functional definitions
##Body
#Sets up day specific variables
# Log file is named after the Monday of the current week, e.g. "08-10-18.txt".
path = "C:/Users/joseph.osborne/Logging/" + (
    dt.now() - tdel(days=dt.now().weekday())).strftime("%d-%m-%y") + ".txt"
morn = input("Day Begin? y for yes ")
if morn == "y":
    # Start a new day section in the log, headed by the weekday name.
    with open(path, "a") as f:
        f.write('\n' + '\n' + '--' + dt.now().strftime("%A") + '--' + '\n')
#Sets up defaults
jn = 'JN00000'
inline = "Ready"
#Takes input from user
# Read timesheet entries until the user types "x".
while inline != "x":
    inline = input("Entry: ")
    if inline == "x":
        break
    # Entry prefixed with the current HH:MM time.
    # NOTE(review): timedline is not written to the log within this chunk —
    # presumably consumed further down the file; confirm.
    timedline = dt.now().strftime("%H:%M") + ' ' + inline
def shift_back(t, h=1, m=0):
    """Return *t* shifted earlier by *h* hours and *m* minutes (default 1h)."""
    offset = tdel(hours=h, minutes=m)
    return t - offset
def shift_fwd(t, h=1, m=0):
    """Return *t* shifted later by *h* hours and *m* minutes (default 1h)."""
    offset = tdel(hours=h, minutes=m)
    return t + offset