def index():
    """Home view: on POST, store the submitted date; always render every logged date."""
    db = get_db()
    if request.method == 'POST':
        parsed = datetime.strptime(request.form['date'], '%Y-%m-%d')
        try:
            db.execute('INSERT INTO log_date (entry_date) values (?)',
                       [datetime.strftime(parsed, '%Y%m%d')])
            db.commit()
        except Exception as e:
            close_db(e)
    rows = db.execute(
        "SELECT entry_date FROM log_date ORDER BY entry_date DESC ").fetchall()
    # Re-format each stored YYYYMMDD value as a human-readable date.
    pretty_results = [
        {'entry_date': datetime.strftime(
            datetime.strptime(str(row['entry_date']), '%Y%m%d'), '%B %d, %Y')}
        for row in rows
    ]
    print(pretty_results)
    return render_template('home.html', results=pretty_results)
def adjust_args_format(arg):
    """Rewrite placeholder tokens in a space-separated batch argument string.

    Tokens containing the markers for "system date", "end of this month" and
    "end of last month" become concrete YYYY/MM/DD dates; "***" tokens become
    "<code>/<code>"; purely decimal tokens are normalised (leading zeros
    stripped).  All other tokens pass through unchanged.
    """
    logger.info("バッチの引数を修正開始。" + arg)

    def _convert(token):
        if "システム日付" in token:
            # Today's date.
            return datetime.now().strftime("%Y/%m/%d")
        if "当月末" in token:
            # Last day of this month: first day of next month minus one day.
            first_of_next = (datetime.today() +
                             relativedelta(months=1)).replace(day=1)
            return datetime.strftime(first_of_next - timedelta(days=1),
                                     '%Y/%m/%d')
        if "前月末" in token:
            # Last day of the previous month: first of this month minus a day.
            return datetime.strftime(
                datetime.today().replace(day=1) - timedelta(days=1),
                '%Y/%m/%d')
        if "***" in token:
            # Duplicate the part before the first "/" on both sides.
            code = token[:token.index("/")]
            return code + "/" + code
        if token.isdecimal():
            # Normalise numeric tokens (drops leading zeros).
            return str(int(token))
        return token

    return " ".join(_convert(token) for token in arg.split(" "))
def __init__(self, logger):
    """Configure a named logger writing to a dated file and to the console.

    Args:
        logger: Name passed to ``logging.getLogger``.
    """
    self.logger = logging.getLogger(logger)
    self.logger.setLevel(logging.INFO)
    # Log file named after the current date; rotated daily, 3 backups kept.
    stamp = datetime.strftime(datetime.now(), "%Y%m%d")
    # os.path.join instead of hard-coded "\\" so the path works off Windows.
    file_path = os.path.join(os.getcwd(), "Log", stamp + '.logs')
    dd = logging.handlers.TimedRotatingFileHandler(file_path,
                                                   when='d',
                                                   interval=1,
                                                   backupCount=3)
    # The original also created a plain FileHandler on the same path that was
    # never added to the logger -- a leaked duplicate file handle; removed.
    ch = logging.StreamHandler()
    # `fmt` (not `format`) to avoid shadowing the builtin.
    fmt = logging.Formatter(
        "%(asctime)s-%(name)s-%(lineno)d-%(filename)s-%(funcName)s-%(message)s"
    )
    ch.setFormatter(fmt)
    dd.setFormatter(fmt)
    self.logger.addHandler(dd)
    self.logger.addHandler(ch)
def get_expire(self, domain):
    """Fetch TLS certificate dates for *domain* by scraping curl's output.

    Returns:
        (True, 200, {'expire_time', 'expire_days'}) on success, or
        (False, 500, error-message) if curl, the regex match or the date
        parsing fails (any exception is caught).
    """
    try:
        f = StringIO()
        # -I: headers only, -v: verbose -- the certificate details appear in
        # the verbose output, which getstatusoutput captures merged.
        comm = f"curl -Ivs https://{domain} --connect-timeout 10"
        result = subprocess.getstatusoutput(comm)
        f.write(result[1])
        # Extract start/expire dates (and name/issuer, unused) from the dump.
        m = re.search(
            'start date: (.*?)\n.*?expire date: (.*?)\n.*?common name: (.*?)\n.*?issuer: CN=(.*?)\n',
            f.getvalue(), re.S)
        start_date = m.group(1)
        expire_date = m.group(2)
        # time: parse the date string into a struct_time, then re-format it
        start_date = time.strptime(start_date, "%b %d %H:%M:%S %Y GMT")
        start_date_st = time.strftime("%Y-%m-%d %H:%M:%S", start_date)
        # datetime: parse the date string into a datetime, then re-format it
        expire_date = datetime.strptime(expire_date, "%b %d %H:%M:%S %Y GMT")
        expire_date_st = datetime.strftime(expire_date, "%Y-%m-%d %H:%M:%S")
        # Days until expiry.  NOTE(review): compares the cert's GMT timestamp
        # against the naive local clock -- confirm the offset is acceptable.
        remaining = (expire_date - datetime.now()).days
        print('开始时间:', start_date_st)
        print('到期时间:', expire_date_st)
        print(f'剩余时间: {remaining}天')
        return True, 200, {
            'expire_time': str(expire_date_st),
            'expire_days': remaining
        }
    except Exception as e:
        return False, 500, str(e)
def click_incluir_disco(self):
    """Read the "add album" form, insert the record and reset the window."""
    # Container for the Album attributes.
    disco = Album()
    # Load the values the user entered into the form.
    disco.banda = self.ui_incl_disco.combo_disco_nom_banda.currentText()
    # Replace the band name with its numeric ID.
    disco.banda = Obtener_ID_Banda(disco.banda)
    disco.nombre_disco = self.ui_incl_disco.txt_disco_nom_album.text()
    disco.genero = self.ui_incl_disco.combo_disco_genero.currentText()
    # Replace the genre name with its numeric ID.
    disco.genero = Obtener_ID_Genero(disco.genero)
    disco.num_pistas = self.ui_incl_disco.txt_disco_num_pistas.text()
    disco.year = self.ui_incl_disco.date_disco_anho_publicacion.text()
    # Normalise the date from dd/mm/yyyy to yyyy-mm-dd.
    disco.year = datetime.strptime(disco.year, '%d/%m/%Y')
    disco.year = datetime.strftime(disco.year, '%Y-%m-%d')
    # Insert only when every field is non-empty.
    if disco.nombre_disco != "" and disco.banda != "" and disco.genero != "" and disco.num_pistas != "" and disco.year != "":
        Insertar_Album(disco)
    #end if
    # NOTE(review): the confirmation dialog shows even when nothing was
    # inserted -- confirm this is intended.
    QMessageBox.about(self, "Información", "Registro realizado.")
    # Reset the window.
    self.incluir_disco()
def save(self, entries: list): serializable = [] for entry in entries: serializable.append(entry.copy()) for entry in serializable: # Save it in local timezone format, because who cares entry["tolab"] = datetime.strftime(entry["tolab"], "%Y-%m-%d %H:%M") self.oc.put_file_contents(self.tolab_path, json.dumps(serializable, indent=2).encode('utf-8'))
def __special_seconds(bday, s_age, chat_data, add=0, repdigit=False):
    """Return (message, datetime) for the next "special" seconds milestone.

    Args:
        bday: Birth datetime the milestone is counted from.
        s_age: Current age in seconds.
        chat_data: Per-chat settings; ``chat_data['lang']`` selects strings.
        add: Offset forwarded to ``__next_big``.
        repdigit: If True, target the next repdigit instead of the next
            round number.

    Returns:
        Tuple of the formatted announcement text and the milestone datetime.
    """
    number = __next_big(
        s_age, add=add) if not repdigit else __calculate_repdigit(int(s_age))
    # Smallest multiple of `number` at or above s_age.  Renamed from `next`,
    # which shadowed the builtin.
    next_seconds = number * int(math.ceil(s_age / number))
    next_d = bday + timedelta(seconds=next_seconds)
    result = '\n\n*' + str('{0:,}'.format(next_seconds)) + ' ' + strings[
        chat_data["lang"]]["seconds"] + ':\n*' + datetime.strftime(
            next_d, "%d.%m.%Y %H:%M:%S")
    # Some locales use "." as the thousands separator instead of ",".
    if strings[chat_data['lang']]['seperator'] == "dot":
        result = result.replace(',', '.')
    return result, next_d
def __init__(self):
    """Snapshot the current time and expose its components as strings.

    All *current_* string attributes are derived from the single
    ``current_time`` snapshot, so they are mutually consistent.
    """
    # Take the timestamp once: the original called datetime.now() twice, so
    # the string fields could disagree with current_time across a second
    # (or day) boundary.
    self.current_time = datetime.now()
    # format dd.mm.yyyy hh:mm:ss
    self.current_time_string = datetime.strftime(self.current_time,
                                                 "%d.%m.%Y %H:%M:%S")
    # Components via strftime instead of fragile fixed-index slicing of the
    # formatted string (same values: %d.%m.%Y / %H / %M / %S).
    self.current_date = self.current_time.strftime("%d.%m.%Y")
    self.current_year = self.current_time.strftime("%Y")
    self.current_month = self.current_time.strftime("%m")
    self.current_day = self.current_time.strftime("%d")
    self.current_hour = self.current_time.strftime("%H")
    self.current_minute = self.current_time.strftime("%M")
    self.current_second = self.current_time.strftime("%S")
def view(date):
    """Day view for a YYYYMMDD *date*: add a food on POST, then render the
    day's foods and nutrition totals."""
    db = get_db()
    qry_date = db.execute(
        "SELECT id, entry_date FROM log_date WHERE entry_date = ?", [date])
    result_date = qry_date.fetchone()
    # NOTE(review): result_date is None when the date was never logged; the
    # indexing below would then raise -- confirm callers guarantee the row.
    if request.method == 'POST':
        try:
            value = request.form['food-select']
            db.execute(
                'INSERT INTO food_date (food_id, log_date_id) values ( ?,?)',
                [value, result_date['id']])
            db.commit()
        except Exception as e:
            print(e)
            db.rollback()
    # All foods, for the add-food dropdown.
    qry_foods = db.execute("SELECT id, name FROM food")
    list_foods = qry_foods.fetchall()
    print(list_foods)
    # Human-readable form of the stored YYYYMMDD date.
    d = datetime.strptime(str(result_date['entry_date']), '%Y%m%d')
    pretty_date = datetime.strftime(d, '%B %d, %Y')
    # Foods already logged for this day, with their nutrition columns.
    log_cur = db.execute(
        '\
SELECT food.name, food.protein, food.carbohydrates, food.fat, food.calories \
FROM log_date JOIN food_date ON food_date.log_date_id = log_date.id JOIN food ON food.id = food_date.food_id \
WHERE log_date.entry_date = ?', [date])
    log_result = log_cur.fetchall()
    print(log_result)
    # Nutrition totals across the day's foods.
    totals = {}
    totals['protein'] = 0
    totals['carbohydrates'] = 0
    totals['fat'] = 0
    totals['calories'] = 0
    for t in log_result:
        totals['protein'] += t['protein']
        totals['carbohydrates'] += t['carbohydrates']
        totals['fat'] += t['fat']
        totals['calories'] += t['calories']
    print(totals)
    return render_template('day.html',
                           dates=pretty_date,
                           foods=list_foods,
                           log_results=log_result,
                           total=totals)
def populate_treeview(self, argument=None):
    """Parse filenames and send to view.display_treeview.

    For each (initial, changed) filename pair, builds a dict of old/new
    names, size, timestamps and location, then hands the list to the view.
    """
    data = list()
    for initial, changed in zip(self.initial_filenames,
                                self.changed_filenames):
        # BUG FIX: the original swapped the calls -- getmtime is the content
        # *modification* time and getctime the creation/metadata-change time
        # (platform-dependent: true creation time on Windows only).
        date_creation = datetime.fromtimestamp(os.path.getctime(initial))
        date_modified = datetime.fromtimestamp(os.path.getmtime(initial))
        new_name, ext = os.path.splitext(os.path.basename(changed))
        name_modified = arguments_parsing(argument, new_name, ext)
        _data = dict()
        _data["old_name"] = os.path.basename(initial)
        _data["new_name"] = name_modified
        _data["size"] = get_human_readable_size(os.path.getsize(initial))
        _data["created_at"] = datetime.strftime(date_creation,
                                                "%Y/%m/%d %H:%M:%S")
        _data["modified_at"] = datetime.strftime(date_modified,
                                                 "%Y/%m/%d %H:%M:%S")
        _data["location"] = os.path.abspath(initial)
        data.append(_data)
    self.view.display_treeview(data)
def write_csv(new_data, volt, consumption_max, consumption_min,
              electricity_price, filepath, ten_min_counter, push_timedelta,
              push_temp_time, logger):
    """Append the current sensor reading to the CSV log at *filepath*.

    Estimates energy consumption from the existing log, sends a push
    notification when it falls outside [consumption_min, consumption_max],
    and returns the (possibly updated) time of the last push notification.
    """
    logger.info('Writing data....')
    data = pd.read_csv(filepath)
    # calculating consumption
    seconds_duration = (dt.now() - starttime)
    # checking the notification time restrictions
    if push_timedelta >= ten_min_counter:
        if seconds_duration > datetime.timedelta(seconds=25):
            # NOTE(review): rows with heater_onOff == 0 are counted as
            # "heating on" -- confirm the flag's polarity.
            heating_on = data.loc[data['heater_onOff'] == 0]
            # how long the heating was on (one row per 5 s sample)
            t_heating = heating_on.shape[0] * 5  # time in seconds
            consumption = (t_heating * int(electricity_price) * int(volt) * 5 *
                           3600) / 1000
            # missing baseline for arduino
            new_data['consumption'] = consumption
            if float(new_data['consumption']) > int(consumption_max) or float(
                    new_data['consumption']) < int(consumption_min):
                push_message = cons_push_messages(consumption)
                notification_station(push_message, logger)
                logger.info(str(push_message))
                push_temp_time = dt.now()
            # debug option
            # model_graph(filepath, logger)
    else:
        consumption = 0
        new_data['consumption'] = consumption
    # Ensure 'consumption' exists even on the path that skipped both branches
    # above (push window open but duration <= 25 s), which previously raised
    # a KeyError when building `line`.
    if 'consumption' not in new_data:
        new_data['consumption'] = 0
    line = [
        # BUG FIX: month is %m -- the original '%d.%M.%Y' wrote the *minute*
        # in the month position.
        dt.strftime(dt.now(), '%d.%m.%Y %H:%M:%S'), new_data['t_mean'],
        new_data['Temperature'], new_data['Humidity'],
        new_data['heater_onOff'], new_data['T_set'], new_data['is_sitting'],
        new_data['T00_is_chair'], new_data['T01_is_chair'],
        new_data['T02_is_chair'], new_data['PIDOutput'],
        new_data['consumption']
    ]
    with open(filepath, 'a') as data_file:
        writer = csv.writer(data_file)
        writer.writerow(line)
    return push_temp_time
def mostrar_discos(self, nom_banda=0):
    """Fill the album table with every album of the given band.

    Args:
        nom_banda: Band name to list; 0 (default) means "use whatever is
            currently selected in the band combo box".
    """
    _translate = QtCore.QCoreApplication.translate
    # Take the user's selection when no band name was passed in.
    if nom_banda == 0:
        nom_banda = self.combo_list_banda_album.currentText()
    else:
        pass
    # Look up the band's numeric ID.
    id_banda = Obtener_ID_Banda(nom_banda)
    # Tuple containing the band's complete album list.
    discos = Listar_Discos(id_banda)
    # One table row per album.
    self.tbl_listado_album.setRowCount(len(discos))
    # i = row index, j = column index.
    i = 0
    j = 0
    # Outer loop: one iteration per album.
    for tupla in discos:
        # Row header for this album.
        item = QtWidgets.QTableWidgetItem()
        self.tbl_listado_album.setVerticalHeaderItem(i, item)
        # Copy the tuple into a list so its values can be edited.
        lista_valor = []
        lista_valor = list(tupla)
        # Index 1 is an integer; convert to string for display.
        lista_valor[1] = str(lista_valor[1])
        # Index 2 is a date; format it as dd/mm/yyyy.
        lista_valor[2] = datetime.strftime(lista_valor[2], '%d/%m/%Y')
        # Inner loop: one cell per value.
        for valor in lista_valor:
            item = QtWidgets.QTableWidgetItem()
            self.tbl_listado_album.setItem(i, j, item)
            item = self.tbl_listado_album.item(i, j)
            item.setText(_translate("MainWindow", valor))
            j += 1
        # Next row, first column.
        i += 1
        j = 0
def get_sample(self):
    """Randomly generate an ID-card number; sex = 0 is female, sex = 1 male."""
    # Random region code (6 digits).
    id_number = str(random.randint(110000, 659001))
    # Birth-date range (8 digits).
    start, end = datetime.strptime("1960-01-01",
                                   "%Y-%m-%d"), datetime.strptime(
                                       "2050-12-30", "%Y-%m-%d")
    # BUG FIX: randint is inclusive on both ends, so the offset must cap at
    # (end - start).days; the original "+ 1" could land one day past `end`.
    birth_days = datetime.strftime(
        start + timedelta(random.randint(0, (end - start).days)), "%Y%m%d")
    id_number += str(birth_days)
    # Sequence code (2 digits).
    id_number += str(random.randint(10, 99))
    # Sex digit (1 digit): odd for male, even for female.
    if random.random() < 0.5:
        sex = 0  # female
    else:
        sex = 1
    id_number += str(random.randrange(sex, 10, step=2))
    # Check digit (1 digit).
    return id_number + str(self.get_check_digit(id_number))
def querybooking(request):
    """Return today's bookings for a room as a JSON array.

    The GET parameter "roomid" packs "<companyid>,<storecode>,<roomid>".
    """
    parts = request.GET['roomid'].split(',')
    company, storecode, roomid = parts[0], parts[1], parts[2]
    print(company, storecode, roomid)
    # Today's date in the stored YYYYMMDD format.
    thisdate = datetime.strftime(datetime.now(), '%Y%m%d')
    print(thisdate)
    booking_objs = Bookingevent.objects.filter(
        companyid=company,
        storecode=storecode,
        roomid=roomid,
        bookingstartdate=thisdate).values('bookingeventid', 'vcode',
                                          'instrumentid', 'bookingstarttime',
                                          'bookingendtime', 'ecode')
    return HttpResponse(json.dumps(list(booking_objs)),
                        content_type="application/json")
def savebid(request, offset):
    """Place a bid on auction *offset*, extending/closing the auction as needed.

    On a valid POST from a non-staff user the bid is saved, the auction's
    latest_bid is updated, and all involved bidders plus the seller are
    e-mailed; redirects home on success, otherwise re-renders the bid page.
    """
    auction = get_object_or_404(Auction, id=offset)
    if request.method == "POST":
        form = AddBid(request.POST)
        latest_bid = request.POST["latest_bid"].strip()
        if form.is_valid():
            if not request.user.is_staff:
                cleandata = form.cleaned_data
                a_bid_value = cleandata["bid"]
                # BUG FIX: Decimal(0.01) is built from a binary float
                # (0.01000000000000000020816...), making the minimum-increment
                # check inexact; construct it from a string instead.
                if Decimal(a_bid_value) < Decimal('0.01') + Decimal(latest_bid):
                    messages.add_message(
                        request, messages.ERROR,
                        _("bid value must be at least 0.01 higher than previous bid."
                          ))
                    return render(request, 'bid.html', {
                        'form': form,
                        'auction': auction
                    })
                # A bid inside the final 5 minutes extends the auction.
                if (check_endingtime(auction.endtime - timedelta(minutes=5))):
                    auction.endtime = auction.endtime + timedelta(minutes=5)
                    auction.save()
                # Auction already over: close it and refuse the bid.
                if check_endingtime(auction.endtime):
                    messages.add_message(request, messages.ERROR,
                                         _("biding time has ended."))
                    auction.auction_status = 'C'
                    auction.save()
                    return render(request, 'bid.html', {
                        'form': form,
                        'auction': auction
                    })
                a_bidder = request.user
                a_bid_datetime = datetime.now()
                print(a_bid_value)
                bid = Bid(bidder=a_bidder,
                          auction_id=auction,
                          bid_value=a_bid_value,
                          bid_datetime=a_bid_datetime)
                bid.save()
                auction.latest_bid = a_bid_value
                auction.lockedby = "#"
                auction.save()
                messages.add_message(request, messages.INFO,
                                     _("bid successfully saved."))
                # Notify every distinct bidder plus the seller.
                mail_subject = _(
                    "A new bid was placed in auction were you are involved.")
                msg = _("New bid with value of " + str(a_bid_value) +
                        " was placed on auction " + auction.title +
                        ". bidding is endind " +
                        datetime.strftime(auction.endtime, '%Y-%m-%d %H:%M'))
                bids = Bid.objects.filter(auction_id=auction).distinct()
                bidders = [p.bidder for p in bids]
                emails_addresses = list(set([p.email for p in bidders]))
                emails_addresses.append((auction.seller).email)
                sendEmail(mail_subject, msg, emails_addresses)
                return HttpResponseRedirect(reverse("home"))
        else:
            messages.add_message(request, messages.ERROR,
                                 _("Data in form is not valid"))
            auction = Auction.objects.filter(id=offset)
            return render(request, 'bid.html', {
                'form': form,
                'auction': auction
            })
def prac_16(self):
    """Demo: print a month calendar and the current date/time in several formats."""
    outputs = (
        calendar.month(2016, 3),
        time.strftime("%d-%m-%y", time.localtime()),
        datetime.now(),
        datetime(2016, 3, 6).strftime("%d--%m--%Y"),
    )
    for value in outputs:
        print(value)
def main():  # encapsulated into main otherwise entrypoint is not working
    """CLI entry point: convert exported ABAP CSV/ZIP files, optionally
    saving the records to a HANA DB and logging regex dropouts."""
    ### Command line
    parser = ArgumentParser(description='Converts tmx-files')
    parser.add_argument(
        '--log',
        '-l',
        help='Setting logging level \'warning\' (default), \'info\', \'debug\''
    )
    parser.add_argument(
        '--exportdate',
        '-e',
        help=
        'Provide the export date of the ABAP files with format: \'YYYY-MM-DD\''
    )
    args = parser.parse_args()
    loglevelmap = {
        'warning': logging.WARNING,
        'debug': logging.DEBUG,
        'info': logging.INFO
    }
    loglevel = logging.WARNING if args.log == None else loglevelmap[args.log]
    # Export date defaults to today when not supplied on the command line.
    if not args.exportdate:
        exportdate = datetime.now()
        logging.warning(
            'No Export date given. For default taking todays date: {}'.format(
                datetime.strftime(exportdate, '%Y-%m-%d')))
    else:
        exportdate = datetime.strptime(args.exportdate, '%Y-%m-%d')
    ### Logging
    logging.basicConfig(format='%(levelname)s: %(asctime)s %(message)s',
                        level=loglevel)
    start_timestamp = datetime.now()
    logging.info('Conversion started: {}'.format(
        start_timestamp.strftime('%H:%M:%S%f')))
    ###
    # configuration
    ###
    logging.info('Open configuraion file {}'.format('config.yaml'))
    with open('config.yaml') as yamls:
        params = yaml.safe_load(yamls)
    # language mapping file
    langmapcodes = read_code_mapping(params['LANGUAGE_CODE_MAPPING'])
    # db config
    if params['OUTPUT_HDB']:
        logging.info('Setting DB connection parameter.')
        db = {
            'host': params['HDB_HOST'],
            'user': params['HDB_USER'],
            'pwd': params['HDB_PWD'],
            'port': params['HDB_PORT']
        }
    batchsize = int(params['BATCHSIZE'])
    text_max_len = 100000 if params['MAX_LEN'] == 0 else params['MAX_LEN']
    # regex
    regex_pattern = list()
    regex_dropouts = list()
    if params['REGEX']:
        logging.info('Reading regex pattern file {}'.format(
            params['INPUT_REGEX']))
        regex_pattern = read_regex(params['INPUT_REGEX'],
                                   params['OUTPUT_REGEX_LOG'])
    # files to be processed
    files = listdir(params['ABAP_INPUT_FOLDER'])
    # ABAP CSV header
    headers = [
        'transl_system', 'abap_package', 'central_system', 'objtype',
        'objname', 'orig_lang', 'domain', 'max_length', 'ach_comp', 'sw_comp',
        'sw_comp_version', 'source_text', 'source_lang', 'target_text',
        'target_lang', 'pp_type', 'pp_qstatus', 'last_usage', 'changed'
    ]
    # test parameters
    max_number_files = 0
    if params['TEST']:
        logging.info('Run in TEST-mode')
        exclusive_file = params['EXCLUSIVE_FILE']
        #exclusive_filename = None
        max_number_files = params['MAX_NUMBER_FILES']
        if exclusive_file:
            max_number_files = 0
        max_number_files = len(
            files) if max_number_files == 0 or max_number_files > len(
                files) else max_number_files
    all_records = 0
    # NOTE(review): all_records is reported below but never incremented in
    # this excerpt -- confirm whether counting was lost.
    for i, filename in enumerate(files):
        b = re.match('.+\.zip$', filename)
        # Only .zip and .csv inputs are processed.
        if not (re.match('.+\.zip$', filename)
                or re.match('.+\.csv$', filename)):
            continue
        if params['TEST']:
            # for development only
            if exclusive_file and not filename == exclusive_file:
                continue
            if i > max_number_files:
                break
        df = pd.read_csv(path.join(params['ABAP_INPUT_FOLDER'], filename),
                         names=headers,
                         escapechar='\\',
                         encoding='utf-8')
        # Provenance columns added to every record of this file.
        df['origin'] = filename.split('.')[0]
        df['source'] = 'ABAP'
        df['exported'] = exportdate
        if params['OUTPUT_HDB']:
            save_db(source='ABAP', records=df, db=db, batchsize=batchsize)
            logging.info('ABAP data saved in DB: {}'.format(filename))
    # Append any regex dropouts collected during the run to the log file.
    if params['REGEX']:
        with open(params['OUTPUT_REGEX_LOG'], 'a') as file:
            csvwriter = csv.writer(file)
            for line in regex_dropouts:
                csvwriter.writerow(line)
    # time calculation
    end_timestamp = datetime.now()
    duration = end_timestamp - start_timestamp
    logging.info('Number of all records: {}'.format(all_records))
    logging.info('Conversion ended: {} (Time: {})'.format(
        end_timestamp, str(duration)))
def changestatus(request):
    """Update a booking event's status and stamp the matching time field.

    GET params: "roomid" packs "<companyid>,<storecode>,<roomid>", plus
    "bookingeventid" and "status".  Status '300' resets all time fields;
    every other known status stamps one field with the current HHMMSS.
    Always returns HTTP 200 JSON.
    """
    params = request.GET['roomid']
    company = params.split(',')[0]
    storecode = params.split(',')[1]
    roomid = params.split(',')[2]
    print(company, storecode, roomid)
    bookingevnentid = request.GET['bookingeventid']
    status = request.GET['status']
    if status == '10':
        print('10')
    # Each status code maps to the timestamp attribute it sets.
    time_field_by_status = {
        '200': 'comeintime',
        '210': 'roomstarttime',
        '220': 'emplstarttime',
        '224': 'instrumentstarttime',
        '227': 'instrumentendtime',
        '230': 'emplendtime',
        '240': 'roomendtime',
        '250': 'callcleantime',
        '260': 'cleanstarttime',
        '270': 'cleanendtime',
        '290': 'leavetime',
        '310': 'instrumentstarttime',
        '320': 'instrumentendtime',
        '390': 'canceltime',
    }
    # Fields cleared when a booking is reset (status '300').
    reset_fields = ('roomstarttime', 'roomendtime', 'emplstarttime',
                    'emplendtime', 'callcleantime', 'cleanstarttime',
                    'cleanendtime', 'instrumentstarttime',
                    'instrumentendtime', 'canceltime')
    if status in time_field_by_status or status == '300':
        bookingevent = Bookingevent.objects.get(
            companyid=company,
            storecode=storecode,
            bookingeventid=bookingevnentid)
        bookingevent.bookingstatus = status
        if status == '300':
            for field in reset_fields:
                setattr(bookingevent, field, '')
        else:
            setattr(bookingevent, time_field_by_status[status],
                    datetime.strftime(datetime.now(), '%H%M%S'))
        # BUG FIX: only save when a known status matched -- the original
        # called bookingevent.save() unconditionally, raising NameError for
        # status '10' or any unrecognised code.
        bookingevent.save()
    return HttpResponse(200, content_type="application/json")
def queryroom(request):
    """Return every room's id and name as a JSON array."""
    rooms = Room.objects.all().values('roomid', 'roomname')
    payload = json.dumps(list(rooms[:]))
    print(datetime.strftime(datetime.now(), '%H%M%S'))
    return HttpResponse(payload, content_type="application/json")
def getCurrentTime():
    # Return the current system time as "YYYY-MM-DD HH:MM:SS".
    return datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        )
        # fh.setFormatter(format)
        ch.setFormatter(format)
        dd.setFormatter(format)
        self.logger.addHandler(dd)
        self.logger.addHandler(ch)

    def get_logger(self):
        """Return the configured logging.Logger instance."""
        return self.logger


# Module-level singleton logger.
log = Logger("mylog").get_logger()

if __name__ == '__main__':
    # Ad-hoc exploration of the path helpers used when building the log path.
    print(os.getcwd())
    rp = datetime.strftime(datetime.now(), "%Y%m%d%H%M")
    print(os.path.dirname(os.getcwd()))
    print(rp)
    print(os.path.abspath(__file__))
    print(os.path.dirname(os.path.abspath(__file__)))
    print("*" * 20)
    print(os.path.abspath('.'))
    print(os.path.dirname(os.path.abspath('.')))
    # NOTE(review): Windows-only "\\" separator -- confirm target platform.
    file_path = os.path.join(os.path.dirname(os.getcwd()),
                             "Log") + "\\" + str(rp) + '.logs'
    print(file_path)
    print('111')
    print(os.path.abspath(os.curdir))
    print(os.path.abspath('..'))
# BUG FIX: the original called datetime.strftime() with a *string* as the
# instance argument, which raises TypeError; parsing a string is strptime.
# Also import from the public `datetime` module, not the private C
# accelerator `_datetime`.
from datetime import datetime

current_day = datetime.strptime("21/11/06 16:30", "%d/%m/%y %H:%M")
print(current_day)
def date_to_string(date):
    """Format *date* as "dd.mm.yyyy", falling back to a two-digit year."""
    try:
        formatted = date.strftime("%d.%m.%Y")
    except ValueError:
        # Retry with a two-digit year if %Y is rejected for this value.
        formatted = date.strftime("%d.%m.%y")
    return formatted
def write_log(*args):
    """Append one timestamped line recording *args* (as a tuple) to log.txt."""
    timestamp = datetime.now().strftime('%d.%m.%Y %H:%M:%S')
    with open('log.txt', 'a') as log_file:
        log_file.write(f'{timestamp} {args}\n')
# Hourly precipitation accumulation from radar CAPPI binary files.
dir_input = "/dados/radar/saoroque/cappi/prec_tamanduatei/2019/03"
dir_ouput = "/dados/radar/saoroque/cappi/prec_tamanduatei_hourly/2019/03"
out = open('acum_prec_hourly.csv', 'w')
#start = datetime.strptime(sys.argv[1], "%Y%m%d%H")
#end = datetime.strptime(sys.argv[2], "%Y%m%d%H")
start = datetime.strptime("2019031001", "%Y%m%d%H")
end = datetime.strptime("2019032000", "%Y%m%d%H")
dict_hourly = {}
dict_daily = {}
datehour = start
#acum = np.full((NY, NX), 0, dtype=np.float32)
# Walk hour by hour, averaging all files found for each hour.
while datehour <= end:
    # NY/NX are the grid dimensions, defined elsewhere in the script.
    acum = np.full((NY, NX), 0, dtype=np.float32)
    # Files stamped exactly on this hour, plus files from the previous hour.
    pattern1 = datetime.strftime(datehour, "*%Y%m%d%H00*.*")
    pattern2 = datetime.strftime(datehour - timedelta(hours=1), "*%Y%m%d%H*.*")
    files = glob.glob(os.path.join(dir_input, pattern1)) + glob.glob(
        os.path.join(dir_input, pattern2))
    nfiles = len(files)
    for file in sorted(files):
        prec = np.fromfile(file.strip(), dtype=np.float32).reshape(NY, NX)
        # -99 marks missing data; values below 1 are treated as no rain.
        np.place(prec, prec == -99, 0.0)
        np.place(prec, prec < 1, 0.0)
        acum = np.add(prec, acum)
    if nfiles == 0:
        datehour = datehour + timedelta(hours=1)
        continue
    # Mean over the files found for this hour.
    # NOTE(review): the loop body appears to continue beyond this excerpt.
    acum = acum / nfiles
from _datetime import datetime
import locale  # added so day and month names appear in Turkish
locale.setlocale(locale.LC_ALL, "")  # activate the user's default locale
#print(datetime.now())  # would print the current date and time
suan = datetime.now()  # "suan" = "now"
print(suan.year)  # year only
print(suan.month)  # month only
print(suan.day)  # day only
print(datetime.ctime(suan))  # nicer, human-readable output
print(datetime.strftime(suan, "%Y"))  # year component only
print(datetime.strftime(suan, "%B"))  # month name only
print(datetime.strftime(suan, "%A"))  # weekday name
print(datetime.strftime(suan, "%X"))  # time of day
print(datetime.strftime(suan, "%D"))  # today's date
tarih = datetime(1998, 10, 11)
tarih2 = datetime(1998, 7, 31)
print(tarih2 - tarih)  # difference between the two dates
    cnx.close()
    remove('small_db')
    exit()
else:
    # Ask which organization's depth chart to build.
    org_param = input(
        "Enter the ID of the organization you want for your depth chart\n(Or 'quit' to exit) : "
    )
# Get org name
# NOTE(review): org_param is concatenated straight into the SQL text --
# injection risk if it is ever non-numeric; consider a parameterised query.
cursor.execute("SELECT name || ' ' || nickname FROM teams WHERE team_id=" +
               org_param)
result = cursor.fetchall()
team_name = result[0][0]
# Copy New xlsx with the Date and Org Name
str_date = datetime.strftime(small_db.game_date, '%m-%d-%Y')
new_file_name = team_name + "-" + str_date + '_depth_chart.xlsx'
# src_file = Path.cwd() / 'depth_chart_template.xlsx'
# Not needed because xlswriter will create new workbook
dest_path = Path.cwd() / 'output'
# copy(src_file, dest_path)
# Not needed because xlswriter will create new workbook
###############################
# xlsxwriter create workbook #
###############################
wb = xlsxwriter.Workbook(dest_path / new_file_name)
#####################
# xlsxwriter styles #
#####################
visible_header = wb.add_format({
elif dt == (year_val+'-06-01T00:00:00'):
    # June 1st: restart the 4-month window and seed the two "past" dates.
    count_months = 4
    past_dates = [year_val+'-05-29T00:00:00', year_val+'-05-30T00:00:00']
    # past_dates=[year_val+'-05-28',year_val+'-05-29']
elif dt == (year_val+'-07-01T00:00:00') or dt == (year_val+'-08-01T00:00:00') or dt == (year_val+'-09-01T00:00:00'):
    count_months -= 1
elif dt == (year_val+'-10-01T00:00:00'):
    # Stop at October 1st.
    break
# Advance the rolling two-day window: shift in yesterday, compute tomorrow.
if dt != (year_val+'-06-01T00:00:00') and count_months != 0:
    past_dates[0] = past_dates[1]
    prev_date = str(past_dates[0]).replace('T', '')
    #print(prev_date)
    next_date = datetime.strptime(prev_date, '%Y-%m-%d%H:%M:%S')
    next_date = next_date + timedelta(1)
    next_date = datetime.strftime(next_date, '%Y-%m-%d')
    next_date = next_date + 'T00:00:00'
    past_dates[1] = next_date
# Grid origin for this timestep.
lat = 5.0
longitude = 65.0
print(dt)
dict_grid[dt] = {}
list_values.append(dt)
list_values_prev1.append(past_dates[0])
list_values_prev2.append(past_dates[1])
# Emit the CSV header row once, on the first processed timestep.
if dt == '2013-06-01T00:00:00':
    list_header.append('time')
    list_header_prev1.append('past_day1_time')
    list_header_prev2.append('past_day2_time')
def create(self, validated_data):
    """Create a Reports row, suffixing its name with a timestamp for uniqueness."""
    stamp = datetime.strftime(datetime.now(), '%Y%m%d%H%M%S')
    validated_data['name'] = validated_data['name'] + '_' + stamp
    return Reports.objects.create(**validated_data)
def formatPdf2Txt(filepath, sensitivity='private', id=""): file_name = './pdf/examples_latin_1/' + filepath # file_name = filepath now = datetime.utcnow() fileType = "PR - Project Proposal" source = "http://sec.iadb.org/Site/Documents/ListDocBySeries.aspx?pCLS_ROWID=15&pOrgCode=IDB&pCode=PR" sensitivity = "public" df_date_lookup = pd.read_csv("enlaces_sec_cleaned_date.csv", keep_default_na=False) df_date_lookup['code'] = df_date_lookup['code'].map( lambda x: x.strip()[0:7]) sourceDate = "" generatedDate = now.strftime("%Y-%m-%dT%H:%M:%S%z") + now.strftime( '.%f')[:4] + 'Z' PR_code = "" regex_PR_code = re.compile('(PR-\\d{1,4})') try: parsed = parser.from_file(file_name, xmlContent=False) parsed_txt = parsed["content"] str_len = len(parsed_txt) # Trying to remove Table of Contents if (parsed_txt.lower().find("contents") != -1): str_start = parsed_txt.lower().find("contents") parsed_txt = parsed_txt[str_start:str_len] # Search and removing References starting from the half of the document if (parsed_txt.find("References", int(str_len * .5), str_len) != -1): str_end = parsed_txt.find("References", int(str_len * .5), str_len) to_cut = str_len - str_end parsed_txt = parsed_txt[0:-to_cut] parsed_txt = re.sub( r"(?:https?|ftp)://[\w_-]+(?:\.[\w_-]+)+(?:[\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?", "", parsed_txt) parsed_txt = re.sub(r"-\n\n", "", parsed_txt) parsed_txt = re.sub(r"-\n", "", parsed_txt) # transforming all the text into a chunk of text by removing all breaks. 
# We will afterwards try to re-create the paragraphs parsed_txt = re.sub(r"\n", " ", parsed_txt) # removing extra spaces parsed_txt = re.sub(r"\s{2,}", " ", parsed_txt) #removing excesive punctuation parsed_txt = re.sub(r"\n(\.{3,})", "\\1", parsed_txt) # trying to remove content of table of contents parsed_txt = re.sub(r"(\.{2,} \d{1,}) ([^.!?]*[.!?])", "", parsed_txt) # creating paragraphs of 6 sentences - 6 was a random number parsed_txt = re.sub(r"(([^.!?]*[.!?]){1,6})", "\\1\n", parsed_txt) PR_code = regex_PR_code.search(parsed_txt).group(1) row = df_date_lookup.loc[df_date_lookup['code'] == PR_code] sourceDate = row['date'].values[0] if sourceDate != '': sourceDate = datetime.strptime(sourceDate, '%y/%m/%d') sourceDate = datetime.strftime(sourceDate, '%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z' lang_det = LanguageDetector() lang = lang_det.detect(parsed_txt) data = { "type": fileType, "employeeId": '', "source": source, "sensitivity": sensitivity, "sourceDate": sourceDate, "generatedDate": generatedDate, "tags": [PR_code], "content": parsed_txt, "language": lang } if not os.path.isdir('output'): os.mkdir('output') with open('output/PR/%s.json' % (PR_code + "_" + lang), 'w', encoding='utf-8') as f: json.dump(data, f, ensure_ascii=False) except Exception as e: print("An exception occurred: ", e)