def make_key(envx):
    """Build a report key for one environment record, bucketing profile age.

    Interprets envx["profile_creation"] as days since the Unix epoch,
    validates the derived date, maps the day count to a human-readable age
    bucket, and joins the key fields with the module-level separator.

    Raises:
        ValueError: if the derived profile-creation date is not valid.
    """
    d = datetime.datetime.fromtimestamp(0) + datetime.timedelta(
        days=envx["profile_creation"])
    if not util.is_valid_date(d):
        raise ValueError
    buckets = {
        7: "week 1",
        14: "week 2",
        21: "week 3",
        28: "week 4",
        91: "1 - 3 months",
        182: "3 - 6 months",
        365: "6 - 12 months",
        730: "1 - 2 years",
        1095: "2 - 3 years",
        sys.maxsize: "3+ years",  # was sys.maxint, which Python 3 removed
    }
    # dict.keys() is a view in Python 3 and has no .sort(); build the
    # sorted limit list explicitly.
    bucket_limits = sorted(buckets)
    # NOTE(review): bisect_right places an exact boundary value (e.g. day 7)
    # into the NEXT bucket; confirm that boundary semantics are intended.
    bi = bisect_right(bucket_limits, envx["profile_creation"])
    bkt = buckets[bucket_limits[bi]]
    key_fields = [snapshot_date, '3',
                  envx["product"], envx["version"], envx["channel"],
                  envx.get("locale", '#unknown'),
                  envx["country"]] + \
                 envx["os"] + \
                 [bkt]
    return sep.join(map(util.strip_invalid_chars, key_fields))
def set(self, data):
    """Populate the widget from *data*.

    A string is inserted only if it passes is_valid_date (the current
    contents are cleared first); a datetime.datetime is formatted as
    dd/mm/YYYY; any other type inserts an empty string.
    """
    # BUG FIX: use isinstance instead of type(...) == ... so subclasses
    # (e.g. of datetime.datetime) are handled correctly.
    if isinstance(data, str):
        if is_valid_date(data):
            self.delete(0, END)
            self.__insert(data)
        # invalid date strings are silently ignored (original behavior)
    elif isinstance(data, datetime.datetime):
        self.__insert(formatar_data(data, format='%d/%m/%Y'))
    else:
        self.insert("")
def convert_data_to_bd(self, data):
    '''
    Convert each item of the given dict (data) to the specific type of its
    corresponding column in the database table.

    The keys of the dict must match the field names registered for the
    form controls / database columns.

    Parameters:
        data (dict): values to convert to each corresponding
            database-column type.

    Returns:
        dict: column name -> converted value (None when conversion fails).

    Raises:
        Exception: for column type affinities that are not handled.
    '''
    datum = {}
    for key in data.keys():
        col = self.data_table.c.get(self.controls[key].field_name)
        if col.type._type_affinity in [sqltypes.Date, sqltypes.DateTime]:
            if type(data[key]) == datetime.datetime:
                # already a datetime.datetime — store as-is
                datum[col.name] = data[key]
            else:
                if is_valid_date(data[key]):
                    datum[col.name] = string_to_date(data[key])
                else:
                    # Try the two accepted datetime string formats.
                    # BUG FIX: the second strptime was not guarded, so an
                    # unparseable value raised an uncaught ValueError and
                    # the intended "store None" branch was unreachable
                    # (datetime_valid could never be False).
                    dt_time = None
                    datetime_valid = False
                    try:
                        dt_time = datetime.datetime.strptime(
                            data[key], '%d/%m/%Y %H:%M')
                        datetime_valid = True
                    except ValueError:
                        try:
                            dt_time = datetime.datetime.strptime(
                                data[key], '%Y-%m-%d %H:%M:%S')
                            datetime_valid = True
                        except ValueError:
                            datetime_valid = False
                    if datetime_valid:
                        datum[col.name] = dt_time
                    else:
                        datum[col.name] = None
        elif col.type._type_affinity in [sqltypes.Integer]:
            try:
                datum[col.name] = int(data[key])
            except ValueError:
                datum[col.name] = None
        elif col.type._type_affinity in [sqltypes.Float, sqltypes.Numeric]:
            try:
                datum[col.name] = float(data[key])
            except ValueError:
                datum[col.name] = None
        elif col.type._type_affinity in [sqltypes.String]:
            datum[col.name] = data[key]
        else:
            raise (Exception(
                f'Tipo do campo tratado: {col.type._type_affinity}'))
    return datum
def make_key(envx):
    """Build the record key: snapshot date, schema version '3', the
    profile-creation date, product/version/channel/locale/country, the OS
    fields, and the profile age in days, joined with the module separator.

    Raises:
        ValueError: if the derived profile-creation date is not valid.
    """
    creation = datetime.datetime.fromtimestamp(0) + datetime.timedelta(
        days=envx["profile_creation"])
    if not util.is_valid_date(creation):
        raise ValueError
    snapshot = datetime.datetime.strptime(snapshot_date, "%Y-%m-%d")
    age_days = (snapshot - creation).days
    fields = [
        snapshot_date,
        '3',
        creation.strftime("%Y-%m-%d"),
        envx["product"],
        envx["version"],
        envx["channel"],
        envx.get("locale", '#unknown'),
        envx["country"],
    ]
    fields.extend(envx["os"])
    fields.append(str(age_days))
    return sep.join(util.strip_invalid_chars(f) for f in fields)
def make_key(envx, used_on_dt):
    """Build the record key for one usage day.

    Parses used_on_dt ("YYYY-MM-DD"); an unparseable or invalid date is
    counted via mark_invalid_date() and the ValueError is re-raised.
    """
    try:
        used_day = datetime.datetime.strptime(used_on_dt, "%Y-%m-%d")
        if not util.is_valid_date(used_day):
            raise ValueError
    except ValueError:
        # record the bad date before propagating to the caller
        mark_invalid_date()
        raise
    fields = [
        snapshot_date,
        '3',
        used_day.strftime("%Y-%m-%d"),
        envx["product"],
        envx["version"],
        envx["channel"],
        envx.get("locale", '#unknown'),
        envx["country"],
    ] + envx["os"] + [envx["arch"]]
    return sep.join(map(util.strip_invalid_chars, fields))
def make_key(envx):
    """Build the record key including the profile-creation date and the
    profile age in days relative to the module-level snapshot_date.

    Raises:
        ValueError: if the derived profile-creation date is not valid.
    """
    epoch = datetime.datetime.fromtimestamp(0)
    created = epoch + datetime.timedelta(days=envx["profile_creation"])
    if not util.is_valid_date(created):
        raise ValueError
    age = (datetime.datetime.strptime(snapshot_date, "%Y-%m-%d")
           - created).days
    head = [snapshot_date, '3', created.strftime("%Y-%m-%d"),
            envx["product"], envx["version"], envx["channel"],
            envx.get("locale", '#unknown'), envx["country"]]
    key_fields = head + envx["os"] + [str(age)]
    return sep.join([util.strip_invalid_chars(field)
                     for field in key_fields])
def check_form_for_update(self, insert=False):
    '''
    Check whether the form data is fit to be saved, according to the
    database structure.

    Parameters:
        insert (bool): when True, non-nullable autoincrement columns are
            allowed to be empty.

    Returns:
        tuple: (field_key, error_code) for the first failing field, where
        error_code is 'null' or 'invalid_data'; ('', '') when all fields
        validate.

    Raises:
        Exception: for column type affinities that are not handled.
    '''
    for key in self.controls.keys():
        col = self.data_table.c.get(self.controls[key].field_name)
        data = self.get_widget_data(self.controls[key].widget)
        # A non-nullable column must have data; on insert, autoincrement
        # columns are exempt from that requirement.
        if (not col.nullable and not data and not insert) or \
                (insert and not col.nullable and not data
                 and not col.autoincrement):
            return (key, 'null')
        elif col.type._type_affinity in [sqltypes.Date, sqltypes.DateTime]:
            if data:
                # BUG FIX: isinstance instead of type(...) != ...
                if not isinstance(data, datetime.datetime):
                    if not is_valid_date(data):
                        return (key, 'invalid_data')
        elif col.type._type_affinity in [sqltypes.Integer]:
            if data:
                try:
                    int(data)
                except ValueError:
                    return (key, 'invalid_data')
        elif col.type._type_affinity in [sqltypes.Float, sqltypes.Numeric]:
            # BUG FIX: this branch only contained a leftover debug print
            # and validated nothing; validate like the Integer branch.
            if data:
                try:
                    float(data)
                except ValueError:
                    return (key, 'invalid_data')
        elif col.type._type_affinity in [sqltypes.String]:
            if not data:
                pass
        else:
            raise (Exception(
                f'Tipo do campo tratado: {col.type._type_affinity}'))
    return ('', '')
def make_key(envx):
    """Build a report key for one environment record with a bucketed
    profile age.

    envx["profile_creation"] is days since the Unix epoch; the derived
    date is validated, the day count is mapped to an age-bucket label, and
    the key fields are joined with the module-level separator.

    Raises:
        ValueError: if the derived profile-creation date is not valid.
    """
    d = datetime.datetime.fromtimestamp(0) + datetime.timedelta(
        days=envx["profile_creation"])
    if not util.is_valid_date(d):
        raise ValueError
    buckets = {
        7: "week 1",
        14: "week 2",
        21: "week 3",
        28: "week 4",
        91: "1 - 3 months",
        182: "3 - 6 months",
        365: "6 - 12 months",
        730: "1 - 2 years",
        1095: "2 - 3 years",
        # BUG FIX: sys.maxint does not exist in Python 3; sys.maxsize
        # serves the same "catch-all upper limit" purpose here.
        sys.maxsize: "3+ years"
    }
    # BUG FIX: in Python 3, dict.keys() returns a view with no .sort();
    # use sorted() to get the ordered limit list.
    bucket_limits = sorted(buckets)
    # NOTE(review): bisect_right sends an exact boundary value (e.g. day 7)
    # to the NEXT bucket; confirm the intended boundary semantics.
    bi = bisect_right(bucket_limits, envx["profile_creation"])
    bkt = buckets[bucket_limits[bi]]
    key_fields = [snapshot_date, '3',
                  envx["product"], envx["version"], envx["channel"],
                  envx.get("locale", '#unknown'),
                  envx["country"]] + \
                 envx["os"] + \
                 [bkt]
    return sep.join(map(util.strip_invalid_chars, key_fields))
def test_dates(self):
    """Round-trip and validation checks for the util date helpers."""
    sample = date(1999, 12, 3)
    self.assertEqual("1999/12/03", util.get_date_string(sample))
    self.assertEqual(sample, util.get_date("1999/12/03"))
    # both zero-padded and unpadded components are accepted
    for valid in ("2016/10/26", "2016/1/5"):
        self.assertTrue(util.is_valid_date(valid))
    # a trailing time component makes the string invalid
    self.assertFalse(util.is_valid_date("2016/5/5 10:23"))
def is_transaction_start(line):
    """Return True when *line* opens a ledger transaction: it matches the
    top-line pattern and its first capture group is a valid date."""
    m = re.match(LedgerThing.TOP_LINE_REGEX, line)
    return util.is_valid_date(m.group(1)) if m else False
def getStudentData(StudentId, schoolKey, selectedDate=''):
    # Collect one student's practice and theory activity for a school,
    # derive Start/Finish/Difference timing columns and HTML description
    # fields, optionally filter to a single day, and return the combined
    # DataFrame. Each source is loaded best-effort: failures are printed
    # and that source is skipped.
    # NOTE(review): both blocks assign into slices of a grouped frame, so
    # pandas may emit SettingWithCopyWarning — behavior kept as-is here.
    print('getStudentData')
    studentData = pd.DataFrame()
    try:
        # --- practice tasks ---
        school = dfGroupedOriginal.get_group(schoolKey)
        studentData = school[school['StudentId'] == StudentId]
        studentData['Finish'] = studentData['CreatedAt']
        # Start is back-computed from the session duration (seconds).
        studentData['Start'] = studentData['Finish'] - pd.to_timedelta(
            studentData[featureSessionDuration], unit='s')
        studentData['CodeDesc'] = studentData['Code'].str.replace('\n', '<br>')
        studentData[featureDescription] = getPracticeDescription(
            studentData, False)
        # Build an HTML hover description: title, base description, code.
        studentData[featureDescription] = '<b>Title</b>:' + studentData[
            'Title'].astype(
                str) + '<br>' + studentData[featureDescription].astype(str)
        studentData[
            featureDescription] = studentData[featureDescription].astype(
                str) + '<br><b>Code</b>:' + studentData['CodeDesc'].astype(str)
        studentData = studentData.sort_values(by='Start')
        studentData[
            'GroupBy'] = constants.TaskTypePractice + '-' + studentData[
                'PracticeTaskId'].astype(str)
        studentData['Task'] = constants.TaskTypePractice + '-' + studentData[
            'PracticeTaskId'].astype(str)
        studentData['IndexCol'] = studentData['Task'] + '-' + studentData[
            'Result'].astype('Int64').astype(str)
        # Missing Finish values fall back to the next row's Start
        # (rows are already sorted by Start).
        studentData['Finish'] = np.where(studentData['Finish'].isnull(),
                                         studentData['Start'].shift(-1),
                                         studentData['Finish'])
        # Duration of each row in whole seconds.
        studentData['Difference'] = (
            studentData['Finish'] -
            studentData['Start']).astype('timedelta64[s]')
        studentData[constants.featureTaskType] = constants.TaskTypePractice
    except Exception as e:
        # best-effort: a missing group or column leaves studentData empty
        print(e)
    try:
        # --- theory tasks ---
        schoolTheory = dfGroupedPlayerStrategyTheory.get_group(schoolKey)
        schoolTheoryStudent = schoolTheory[schoolTheory['StudentId'] ==
                                           StudentId]
        schoolTheoryStudent['Finish'] = schoolTheoryStudent['CreatedAt']
        schoolTheoryStudent[
            'Start'] = schoolTheoryStudent['Finish'] - pd.to_timedelta(
                schoolTheoryStudent[featureSessionDuration], unit='s')
        schoolTheoryStudent = schoolTheoryStudent.sort_values(by='Start')
        schoolTheoryStudent['Difference'] = (
            schoolTheoryStudent['Finish'] -
            schoolTheoryStudent['Start']).astype('timedelta64[s]')
        # Cap Difference at the recorded session duration.
        schoolTheoryStudent.loc[
            schoolTheoryStudent['Difference'] >
            schoolTheoryStudent[featureSessionDuration],
            'Difference'] = schoolTheoryStudent[
                schoolTheoryStudent['Difference'] > schoolTheoryStudent[
                    featureSessionDuration]][featureSessionDuration]
        # Attach task title/description from the theory-task details table.
        schoolTheoryStudent = schoolTheoryStudent.merge(
            right=dfTheoryTaskDetails[['TheoryTaskId', 'Title',
                                       'Description']],
            left_on='TheoryTaskId',
            right_on='TheoryTaskId',
            left_index=False,
            right_index=False,
            how='inner')
        schoolTheoryStudent.rename(
            columns={'Description': 'TheoryTaskDescription'}, inplace=True)
        schoolTheoryStudent[featureDescription] = getTheoryDescription(
            schoolTheoryStudent, False)
        schoolTheoryStudent[
            featureDescription] = '<b>Title</b>:' + schoolTheoryStudent[
                'Title'].astype(str) + '<br>' + schoolTheoryStudent[
                    featureDescription].astype(str)
        schoolTheoryStudent[
            'GroupBy'] = constants.TaskTypeTheory + '-' + schoolTheoryStudent[
                'TheoryTaskId'].astype(str)
        schoolTheoryStudent[
            'Task'] = constants.TaskTypeTheory + '-' + schoolTheoryStudent[
                'TheoryTaskId'].astype(str)
        schoolTheoryStudent['IndexCol'] = schoolTheoryStudent[
            'Task'] + '-' + schoolTheoryStudent['Result'].astype(str)
        schoolTheoryStudent[
            constants.featureTaskType] = constants.TaskTypeTheory
        # Append theory rows to the practice rows when any were found.
        if schoolTheoryStudent is not None and schoolTheoryStudent.empty == False:
            studentData = pd.concat([studentData, schoolTheoryStudent],
                                    ignore_index=True)
    except Exception as e:
        # best-effort: theory data is optional
        print(e)
    if studentData is None or studentData.empty:
        return studentData
    # Optional day filter: keep only rows whose Start falls on selectedDate.
    if None is not selectedDate and not selectedDate == '' and util.is_valid_date(
            selectedDate):
        studentDataGroupedDate = studentData.groupby(
            [studentData['Start'].dt.date])
        studentData = studentDataGroupedDate.get_group(selectedDate)
    # Unique, human-readable label per row for plotting/indexing.
    studentData['StartStr'] = '@' + studentData['Start'].dt.strftime(
        '%Y-%m-%d %H:%M:%S') + '-' + studentData['IndexCol'].astype(str)
    return studentData