def __init__(self, name, value, expires=None, path=None, domain=None, secure=None):
    """ Create a Netscape cookie for name with the given value.

        If expires is given, the cookie will be a temporary cookie
        which expires after a certain amount of time. expires may be
        given as integer (seconds relative to the current time),
        DateTime instance (absolute date/time) or RelativeDateTime
        instance (relative date/time to current time).

        path, domain, secure work according to the Netscape
        specification.
    """
    self.name = name
    self.value = value
    if expires is not None:
        # Long living cookie
        if isinstance(expires, DateTime.DateTimeType):
            # Absolute expiry: normalize to GMT as the cookie spec requires.
            self.expires = expires.gmtime()
        elif isinstance(expires, DateTime.RelativeDateTime):
            # Relative offset from the current GMT time.
            self.expires = DateTime.gmtime() + expires
        else:
            # Plain number: interpreted as seconds from now.
            self.expires = DateTime.gmtime() + \
                           expires * DateTime.oneSecond
    # Only set these when given; otherwise the (presumable) class-level
    # defaults remain in effect -- TODO confirm against the class body.
    if path:
        self.path = path
    if domain:
        self.domain = domain
    if secure:
        self.secure = 1
def _time_to_expire(self, cr, uid, ids, field_name, arg, context={}):
    """Function field: whole days from midnight today until each fleet's
    expire_time.

    Returns {fleet_id: int days} -- False when no expire_time is set.
    """
    res = {}
    now = DateTime.now()
    # Truncate to midnight so the difference is a whole number of days.
    date = DateTime.DateTime(now.year, now.month, now.day)
    for fleet in self.browse(cr, uid, ids, context):
        res[fleet.id] = fleet.expire_time and int((DateTime.strptime(fleet.expire_time, '%Y-%m-%d') - date).days) or False
    return res
def test_get_quarantine(entity, initial_account, quar_x, quar_y):
    """Exercise adding, listing, filtering and disabling entity quarantines."""
    # quar_x starts in the future -> present but not yet active.
    entity.add_entity_quarantine(quar_x, initial_account.entity_id,
                                 start=dt.now() + 1)
    quars = entity.get_entity_quarantine()
    assert len(quars) == 1
    # Filtering on a type that was never added yields nothing.
    quars = entity.get_entity_quarantine(qtype=quar_y)
    assert len(quars) == 0
    # quar_y started in the past, then is disabled until tomorrow.
    entity.add_entity_quarantine(quar_y, initial_account.entity_id,
                                 start=dt.now() - 1)
    entity.disable_entity_quarantine(quar_y, dt.now() + 1)
    quars = entity.get_entity_quarantine()
    assert len(quars) == 2
    quars = entity.get_entity_quarantine(qtype=quar_x)
    assert len(quars) == 1
    assert quars[0]['quarantine_type'] == quar_x
    # quar_x is not active yet, so only_active filters it out.
    quars = entity.get_entity_quarantine(qtype=quar_x, only_active=True)
    assert len(quars) == 0
    # quar_y is disabled, so filter_disable_until filters it out.
    quars = entity.get_entity_quarantine(qtype=quar_y,
                                         filter_disable_until=True)
    assert len(quars) == 0
def _dateConvertFromDB(d):
    """Convert a date/time string from the DB into a DateTime value.

    Tries, in order: plain date, time with/without seconds and
    year-month formats.  Failing those, assumes a time value with a
    trailing timezone offset (and optional fractional seconds) and
    returns a (DateTime, tz-string) tuple.  Finally falls back to a full
    'YYYY-MM-DD HH:MM:SS' timestamp.  Returns None for None input.
    """
    if d == None:
        return None
    for format in ('%Y-%m-%d',   # Y/M/D
                   '%H:%M:%S',   # hh:mm:ss
                   '%H:%M',      # hh:mm
                   '%Y-%m'):     # Y-M
        try:
            return DateTime.strptime(d, format)
        except Exception:
            # Not this format; try the next one.
            pass
    # Split off a timezone suffix ('+hh...' / '-hh...') from a time value.
    dashind = max(d.rfind('-'), d.rfind('+'))
    tz = d[dashind:]
    d = d[:dashind]
    # Maybe it has milliseconds?  Strip the fractional part.
    dotind = string.rfind(d, '.')
    if dotind > 0:
        d = d[:dotind]
    try:
        # timetz -- note the tz string is only returned for this format.
        return DateTime.strptime(d, '%H:%M:%S'), tz
    except Exception:
        pass
    # Full date; a parse error now propagates to the caller.
    # (Replaces the old dead "if 1:#try:" scaffolding.)
    return DateTime.strptime(d, '%Y-%m-%d %H:%M:%S')
def Ymd(date=None):
    """Bidirectional 'YYYY-MM-DD' converter.

    - None               -> today's DateTime
    - str/unicode        -> parsed DateTime
    - DateTime/datetime  -> formatted 'YYYY-MM-DD' string

    NOTE(review): the return type depends on the input type, and any
    other input type falls through and implicitly returns None -- confirm
    callers expect this.
    """
    if date is None:
        return DateTime.today()
    elif type(date) in (str, unicode):
        return DateTime.strptime(date, '%Y-%m-%d')
    elif type(date) in (type(DateTime.today()), datetime.datetime):
        return date.strftime('%Y-%m-%d')
def collect_expired_roles(age):
    """Collect and return all roles belonging to people whose last SAP
    affiliation was deleted *age* days ago or earlier.

    Returns a dict mapping person_id -> list of role rows.
    """
    now = DateTime.now()
    # mx.DateTime: subtracting an int subtracts that many days.
    oldness = now - int(age)
    logger.debug('Collecting expired roles')
    logger.debug('Selecting by affiliation deleted before %s' % str(oldness))
    expired_person_ids = []
    roles = {}
    for row in er.list_roles():
        roles.setdefault(row['person_id'], []).extend([row])
    for p_id in roles.keys():
        # A person counts as expired only if every SAP affiliation
        # (including deleted ones) was deleted before the cutoff.
        exp = True
        for aff in pe.list_affiliations(person_id=p_id,
                                        source_system=co.system_sap,
                                        include_deleted=True):
            if not aff['deleted_date'] or aff['deleted_date'] > oldness:
                exp = False
        if exp:
            expired_person_ids.append(p_id)
    # Keep only the roles of expired persons.  (Python 2: keys() returns
    # a list, so deleting while iterating is safe.)
    for key in roles.keys():
        if not key in expired_person_ids:
            del roles[key]
    logger.debug('%d roles collected in %s' % (len(roles),
                                               str(DateTime.now() - now)))
    return roles
def _calc_dates(self, cr, uid, data, context):
    """Build the five aged-balance period ranges from the wizard form.

    Starting at date1, steps period_length days backwards ("past") or
    forwards (otherwise).  Returns {'0'..'4': {name, start, stop}}.

    Raises a wizard error when period_length is zero or negative.
    """
    res = {}
    period_length = data["form"]["period_length"]
    if period_length <= 0:
        raise wizard.except_wizard(_("UserError"), _("You must enter a period length that cannot be 0 or below !"))
    start = datetime.date.fromtimestamp(time.mktime(time.strptime(data["form"]["date1"], "%Y-%m-%d")))
    start = DateTime(int(start.year), int(start.month), int(start.day))
    if data["form"]["direction_selection"] == "past":
        # Periods are numbered 4..0 going back in time; each period's
        # label shows the day-offsets relative to date1.
        for i in range(5)[::-1]:
            stop = start - RelativeDateTime(days=period_length)
            res[str(i)] = {
                "name": str((5 - (i + 1)) * period_length) + "-" + str((5 - i) * period_length),
                "stop": start.strftime("%Y-%m-%d"),
                "start": stop.strftime("%Y-%m-%d"),
            }
            # Next period ends the day before this one starts.
            start = stop - RelativeDateTime(days=1)
    else:
        for i in range(5):
            stop = start + RelativeDateTime(days=period_length)
            res[str(5 - (i + 1))] = {
                "name": str((i) * period_length) + "-" + str((i + 1) * period_length),
                "start": start.strftime("%Y-%m-%d"),
                "stop": stop.strftime("%Y-%m-%d"),
            }
            start = stop + RelativeDateTime(days=1)
    return res
def test_reldat(self):
    """Exercise reldate() filter expressions: argument validation,
    day/month/year offsets, and week/month/year alignment options."""
    # Picking values for relative datetime tests is difficult - the clock
    # is running, and first-of-month (etc) change the result.
    ds = self._get_reldate_ds()
    # Invalid argument combinations must be rejected up front.
    self.assertRaises(ValueError, Filter.DatasetFilter, ds, 'test_filter',
                      'a >= reldate(days=-1, months=-1)')
    self.assertRaises(TypeError, Filter.DatasetFilter, ds, 'test_filter',
                      'a >= reldate(poo=1)')
    self.assertRaises(ValueError, Filter.DatasetFilter, ds, 'test_filter',
                      'a >= reldate(align="xxx")')
    self._test(ds, 'a >= reldate(days=+1)', [0])
    self._test(ds, 'a >= reldate()', [0, 1])
    self._test(ds, 'a >= reldate(days=0)', [0, 1])
    self._test(ds, 'a >= reldate(days=-1)', [0, 1, 2])
    # On Mondays (day_of_week == 0) the week-aligned row falls inside the
    # plain -7 day window too.
    if DateTime.now().day_of_week == 0:
        self._test(ds, 'a >= reldate(days=-7)', [0, 1, 2, 3, 4])  # this fails on Mondays!
    else:
        self._test(ds, 'a >= reldate(days=-7)', [0, 1, 2, 3])  # this fails on Mondays!
    self._test(ds, 'a >= reldate(days=-7, align="monday")', [0, 1, 2, 3, 4])
    # First of the month: month-aligned row is inside the plain window.
    if DateTime.now().day == 1:
        expect = [0, 1, 2, 3, 4, 5, 6]
    else:
        expect = [0, 1, 2, 3, 4, 5]
    self._test(ds, 'a >= reldate(months=-1)', expect)
    self._test(ds, 'a >= reldate(months=-1, align="bom")', [0, 1, 2, 3, 4, 5, 6])
    # New Year's Day: year-aligned row is inside the plain window.
    if DateTime.now().day == 1 and DateTime.now().month == DateTime.January:
        expect = [0, 1, 2, 3, 4, 5, 6, 7, 8]
    else:
        expect = [0, 1, 2, 3, 4, 5, 6, 7]
    self._test(ds, 'a >= reldate(years=-1)', expect)
    self._test(ds, 'a >= reldate(years=-1, align="boy")', [0, 1, 2, 3, 4, 5, 6, 7, 8])
    if DateTime.now().day_of_week == 0:
        self._test(ds, 'a between(reldate(days=-7), reldate(days=-2))', [3, 4])  # also fails on Mondays
    else:
        self._test(ds, 'a between(reldate(days=-7), reldate(days=-2))', [3])
def _get_prod_stock_before(self, cr, uid, ids, name, arg, context={}):
    """Function field: qty_available of each line's product at location 11,
    from January 1st of the line's year up to one second before the
    line's own timestamp (line.name, 'YYYY-MM-DD:HH:MM:SS')."""
    res = {}
    prod_obj = self.pool.get('product.product')
    loc_ids = 11  # NOTE(review): hard-coded location id -- confirm intent
    for line in self.browse(cr, uid, ids, context=context):
        startf = datetime.datetime.fromtimestamp(time.mktime(
            time.strptime(line.name, "%Y-%m-%d:%H:%M:%S")))
        # Window start: January 1st of the line's year...
        start = DateTime(int(startf.year), 1, 1)
        # ...up to one second before the line's own timestamp.
        end = startf - datetime.timedelta(seconds=1)
        d1 = start.strftime('%Y-%m-%d %H:%M:%S')
        d2 = end.strftime('%Y-%m-%d %H:%M:%S')
        # The stock window is passed to the ORM via context keys.
        c = context.copy()
        c.update({'location': loc_ids, 'from_date': d1, 'to_date': d2})
        res.setdefault(line.id, 0.0)
        if line.product_id and line.product_id.id:
            prd = prod_obj.browse(cr, uid, line.product_id.id, context=c)
            res[line.id] = prd.qty_available
    return res
def __verify_detect_packet(self, packet): lines = string.split(packet, cClinitek50.EOL) # product ID: 6510 = Clinitek 50 tmp = lines[1][:4] if tmp != cClinitek50.dev_id: _log.Log(gmLog.lErr, 'device does not seem to be a Clinitek 50, product ID is [%s], expected [%s]' % (tmp, cClinitek50.dev_id)) _log.Log(gmLog.lData, lines) return None # product revision tmp = lines[1][4:6] if tmp not in cClinitek50.known_good_dev_revs: _log.Log(gmLog.lWarn, 'product revision [%s] untested, trying to continue anyways' % tmp) # software version tmp = lines[1][6:11] if tmp not in cClinitek50.known_good_sw_versions: _log.Log(gmLog.lWarn, 'software version [%s] untested, trying to continue anyways' % tmp) # date/time timestamp = mxDT.strptime(lines[1][12:22], self.__date_format + cClinitek50.time_format) _log.Log(gmLog.lInfo, 'device timestamp: %s' % timestamp) _log.Log(gmLog.lInfo, 'system timestamp: %s' % mxDT.now()) age = mxDT.Age(mxDT.now(), timestamp) if age.hours > 6: _log.Log(gmLog.lErr, 'device time is off by %s, please correct that' % age) return None # language-unit profile (lang, units) = string.split(lines[2], ' - ') _log.Log(gmLog.lInfo, 'language: %s' % lang) _log.Log(gmLog.lInfo, 'unit system: %s' % units) # STIX type stix_type = string.strip(lines[3]) if not stix_type in cClinitek50.known_stix_types: _log.Log(gmLog.lErr, "don't know how to handle stix of type %s" % stix_type) return None # seems valid return 1
def GetValue(self, manager, value, value_type = None):
    """Return a canned (simulated) value for a manager/parameter query.

    Unknown queries fall back to "<value>'s value"; every result is
    echoed through self.Comment for tracing.
    """
    retval = None
    if manager == "ScanCoordinator":
        if value == "receiver":
            retval = "Rcvr1_2"
        elif value == "nextScanNumber":
            retval = "2"
        elif value == "startTime,MJD":
            # Current GMT converted via Loci into an (MJD, seconds) pair.
            gmt = DateTime.gmt()
            loci = Loci()
            mjd, _ = loci.DateTime2TimeStamp(gmt)
            retval = str(mjd)
        elif value == "startTime,seconds":
            gmt = DateTime.gmt()
            loci = Loci()
            _, secs = loci.DateTime2TimeStamp(gmt)
            retval = str(secs)
        elif value == "projectId":
            retval = 'TAPI_FRANK'
    elif manager == "DCR":
        # Any channel / Tsys query returns "1".
        if value.find("Channel,") == 0 or value.find("CH1_16,Tsys") == 0:
            retval = "1"
    elif manager == "Antenna":
        if value == "azWrapMode":
            retval = 'Auto'
    elif manager == "Antenna,AntennaManager":
        if value == "ccuData,Az,indicated":
            retval = "180.0"
    if retval is None:
        retval = "%s's value" % value
    self.Comment("GetValue(%s, %s) returning %s\n" % (manager, value, retval))
    return retval
def guest_remove(self, operator, username):
    """ Set a new expire-quarantine that starts now.

    The guest account will be blocked from export to any system.
    """
    account = self._get_account(username)
    self.ba.can_remove_personal_guest(operator.get_entity_id(), guest=account)
    # Deactivate the account (expedite quarantine) and adjust expire_date
    try:
        end_date = account.get_entity_quarantine(
            self.const.quarantine_guest_old)[0]['start_date']
        # A quarantine already in effect means the account was already
        # deactivated earlier.
        if end_date < DateTime.now():
            raise CerebrumError("Account '%s' is already deactivated" %
                                account.account_name)
        # Remove the old quarantine so it can be re-added starting now.
        account.delete_entity_quarantine(self.const.quarantine_guest_old)
    except IndexError:
        # No existing quarantine row; deactivate anyway.
        self.logger.warn('Guest %s didn\'t have expire quarantine, '
                         'deactivated anyway.', account.account_name)
    account.add_entity_quarantine(qtype=self.const.quarantine_guest_old,
                                  creator=operator.get_entity_id(),
                                  description='New guest account',
                                  start=DateTime.now())
    account.expire_date = DateTime.now()
    account.write_db()
    return 'Ok, %s quarantined, will be removed' % account.account_name
def IsScriptValid(self, script):
    """Validate an observing script via the document, reporting progress
    into the output pane.

    UI access is temporarily dropped while validation runs; the status
    is set to ILLICIT or VALID accordingly.  Returns the boolean result.
    """
    hasAccess = self.access
    if hasAccess:
        self.SetAccessToFunctionality(0)
    self.output.AppendText("*** Begin Validation - %s ***\n" % str(DateTime.now()))
    isValid, error = self.GetDocument().IsScriptValid(script, self.output)
    if not isValid:
        self.SetStatus(ILLICIT)
        # Show only the last few error lines.
        for e in error[-4:]:
            self.output.AppendText(e + "\n")
    else:
        self.SetStatus(VALID)
    if isValid:
        self.output.AppendText("\nYour observing script is syntactically correct!\n\n")
    self.output.AppendText("*** End Validation - %s ***\n" % str(DateTime.now()))
    self.output.AppendText("\n\n")
    if hasAccess:
        # Restore the access we dropped above.
        self.SetAccessToFunctionality(1)
    return isValid
def button_dummy(self, cr, uid, ids, context):
    """Clamp each sheet's date_current into its [date_from, date_to] range."""
    fmt = "%Y-%m-%d"
    for sheet in self.browse(cr, uid, ids, context):
        current = DateTime.strptime(sheet.date_current, fmt)
        if current <= DateTime.strptime(sheet.date_from, fmt):
            # Too early: snap to the period start.
            self.write(cr, uid, [sheet.id], {"date_current": sheet.date_from})
        elif current >= DateTime.strptime(sheet.date_to, fmt):
            # Too late: snap to the period end.
            self.write(cr, uid, [sheet.id], {"date_current": sheet.date_to})
    return True
def format_report(roles, names, ansatt_nr, stedkoder):
    """Generate a plain-text report of ePhorte roles per person.

    One header line, then one entry per person: the first role shares
    the name line, further roles go on indented continuation lines.
    Returns the list of formatted lines.
    """
    start_time = DateTime.now()
    logger.debug('Generating report')
    lines = ['%-35s %-10s %-8s %-10s %-8s %s\n' % ('Navn', 'AnsattNr', 'Rolle',
                                                   'Journal', 'Stedkode',
                                                   'Standardrolle')]
    for p_id in roles.keys():
        # Not everyone has an employee number.
        try:
            l = '%-35s %-10s ' % (names[p_id], ansatt_nr[p_id])
        except KeyError:
            l = '%-35s %-10s ' % (names[p_id], '')
        rs = roles[p_id]
        # First role goes on the same line as the name...
        role = rs.pop(0)
        l += '%-8s %-10s %-8s %s\n' % (str(co.EphorteRole(role['role_type'])),
                                       str(co.EphorteJournalenhet(role['journalenhet'])),
                                       stedkoder[role['adm_enhet']],
                                       role['standard_role'])
        # ...remaining roles on indented continuation lines.
        for role in rs:
            l += '%-47s%-8s %-10s %-8s %s\n' % ('',
                                                str(co.EphorteRole(role['role_type'])),
                                                str(co.EphorteJournalenhet(role['journalenhet'])),
                                                stedkoder[role['adm_enhet']],
                                                role['standard_role'])
        lines.append(l)
    logger.debug('Report generated in %s' % str(DateTime.now() - start_time))
    return lines
def get_internal_seniority(self,cr,uid,ids,*args):
    """Compute and store the employee's internal seniority (in years),
    summed over all contracts and weighted by each contract's
    seniority_rate."""
    start_date = datetime.date.today()
    end_date = datetime.date.today()  # if the last contract has no en date, en date = today
    internal_seniority = 0.0
    internal_year_seniority = 0.0
    internal_month_seniority = 0.0
    # Get contracts for employee
    contract_pool = self.pool.get('hr.contract')
    # contracts from today to first based on start date
    contract_ids = contract_pool.search(cr,uid,[('employee_id','=',ids[0])],order='date_start desc')
    contracts = contract_pool.browse(cr, uid, contract_ids)
    # Get seniority for each contract
    for contract in contracts:
        seniority_rate = 1  # default seniority
        start_date = DateTime.strptime(contract.date_start,'%Y-%m-%d')
        if contract.seniority_rate:
            seniority_rate = contract.seniority_rate
        if contract.date_end:
            end_date = DateTime.strptime(contract.date_end,'%Y-%m-%d')
        internal_year_seniority += (end_date.year - start_date.year)*seniority_rate*1.0  # *1.0 to get a float
        internal_month_seniority += (end_date.month - start_date.month + 1)*seniority_rate*1.0  # +1 : a started month is counted as a full month
        # if previous contract (in time scale) has no end date, its
        # supposed end date is the current contract start date
        end_date = start_date
    # set seniority in years
    # NOTE(review): months are added both as /12 and //12, which
    # double-counts whole years' worth of months -- confirm the intended
    # formula before relying on this value.
    internal_seniority = internal_year_seniority + internal_month_seniority/12 + internal_month_seniority//12
    # Update internal seniority field
    self.write(cr,uid,ids,{'internal_seniority':internal_seniority})
    return True
def test_list_permission(self):
    """Testing permission listing: empty at first, then two permissions
    added for one person must list with today's start date and no end."""
    person_id = self.db_tools.create_person(self.person_ds().next())
    account_id = self.db_tools.create_account(self.account_ds().next())
    ou_id = self.db_tools.create_ou(
        {'name': 'ephorte-test',
         'acronym': 'ET',
         'short_name': 'ePhorte-test',
         'display_name': 'Test OU for ePhorte'})
    # No permissions exist yet.
    self.assertFalse(self._ep.list_permission(),
                     'Listed permission, should be none')
    self._ep.add_permission(
        person_id, self._co.ephorte_perm_ar, ou_id, account_id)
    self._ep.add_permission(
        person_id, self._co.ephorte_perm_ua, ou_id, account_id)
    # Both rows start today with an open (None) end date.
    self.assertEqual(
        self._ep.list_permission(person_id),
        [(person_id, self._co.ephorte_perm_ar, ou_id, account_id,
          DateTime.today(), None),
         (person_id, self._co.ephorte_perm_ua, ou_id, account_id,
          DateTime.today(), None)],
        'Failed listing added roles for person')
    self.assertEqual(len(self._ep.list_permission()), 2,
                     'Number of permissions listed not equal')
def get_sales_lines(self,cr,uid,ids,contaxt=None):
    """Create product.line records for every customer invoice line of the
    selected products invoiced between date_debut and date_fin.

    NOTE(review): parameter name 'contaxt' (sic) kept for interface
    compatibility.
    """
    product_obj=self.pool.get('product.product')
    invoice_line=self.pool.get('account.invoice.line')
    invoice_obj=self.pool.get('account.invoice')
    for sl in self.browse(cr, uid, ids):
        prods = []
        date_fin=DateTime.strptime(sl.date_fin, '%Y-%m-%d')
        date_debut=DateTime.strptime(sl.date_debut, '%Y-%m-%d')
        for p in sl.product_id:
            prods.append(p.id)
        prods=tuple(prods)
        # Customer invoices/refunds inside the date window...
        invoice_ids=invoice_obj.search(cr,uid,[('date_invoice','>=',date_debut),('date_invoice','<=',date_fin),('type','in',['out_invoice','out_refund'])])
        # ...restricted to lines for the selected products.
        invoice_lines=invoice_line.search(cr,uid,[('invoice_id','in',invoice_ids),('product_id','in',prods)])
        invoice_lines=invoice_line.browse(cr,uid,invoice_lines)
        stock_line_obj=self.pool.get('product.line')
        for line in invoice_lines:
            prod=product_obj.browse(cr, uid, line.product_id.id)
            val = {
                'date':line.invoice_id.date_invoice,
                'quantite':line.quantity,
                'name' : line.product_id.id,
                'sale_price':line.price_unit,
                'amount':line.price_subtotal,
                'cost':line.product_id.standard_price*line.quantity,
                'cost_unit':line.product_id.standard_price,
                'virtual_quantity':prod.virtual_available,
                'pub_id':sl.id
            }
            stock_line_obj.create(cr,uid,val)
    return True
def _create_ou(self, input):
    """Create the project OU based on given input.

    Stores the project names, the initial quarantines (not-approved,
    project end, project start) and the institution/legal/VM traits.
    Returns the populated OU object.  Raises BadInputError when the
    project end date has already passed.
    """
    pname = input['p_id']
    ou.clear()
    pid = ou.create_project(pname)
    logger.debug("New project %s named: %s", pid, pname)
    # The gateway should not be informed about new projects before they're
    # approved, so if we should create the project in the GW, we must also
    # execute: gateway.freeze_project(pid)
    longname = input['p_name']
    logger.debug("Storing project name: %s", longname)
    ou.add_name_with_language(name_variant=co.ou_name_long,
                              name_language=co.language_en,
                              name=longname)
    shortname = input['p_shortname']
    logger.debug("Storing project short name: %s", shortname)
    # NOTE(review): the short name is also stored under ou_name_long --
    # looks like a copy-paste slip (co.ou_name_short expected?); confirm
    # before changing.
    ou.add_name_with_language(name_variant=co.ou_name_long,
                              name_language=co.language_en,
                              name=shortname)
    ou.write_db()
    # Always start projects quarantined, needs to be approved first!
    logger.debug("Project %s starting in quarantine 'not_approved'", pid)
    ou.add_entity_quarantine(qtype=co.quarantine_not_approved,
                             creator=systemaccount_id,
                             description='Project not approved yet',
                             start=DateTime.now())
    ou.write_db()
    # Storing the start and end date:
    endtime = input['project_end']
    if endtime < DateTime.now():
        raise BadInputError("End date of project has passed: %s" % endtime)
    ou.add_entity_quarantine(qtype=co.quarantine_project_end,
                             creator=systemaccount_id,
                             description='Initial requested lifetime for project',
                             start=endtime)
    ou.write_db()
    starttime = input['project_start']
    # We always set the start time quarantine, even if the start time has
    # passed. This is to let the administrators see the start time in bofh.
    ou.add_entity_quarantine(qtype=co.quarantine_project_start,
                             creator=systemaccount_id,
                             description='Initial requested starttime for project',
                             start=DateTime.now() - 1000,
                             end=starttime)
    ou.write_db()
    ou.populate_trait(co.trait_project_institution, target_id=ou.entity_id,
                      strval=input['inst_address'])
    ou.populate_trait(co.trait_project_rek, target_id=ou.entity_id,
                      strval=input['legal_notice'])
    # TODO: Should we have a mapping of vm_descr?
    ou.populate_trait(co.trait_project_vm_type, target_id=ou.entity_id,
                      strval=input['vm_descr'])
    ou.write_db()
    logger.debug("New project created successfully: %s", pid)
    # The project will not be properly set up before it gets approved. The
    # gateway will for instance not hear about it before it's approved.
    return ou
def create(self, cr, uid, ids, datas, context={}):
    """Render the project burndown chart as a PDF.

    Derives missing date_start/date_stop from the tasks, computes the
    burndown series and plots it with PyChart into an in-memory PDF.
    Returns (pdf_bytes, 'pdf').
    """
    io = StringIO.StringIO()
    # Default the chart window from the tasks' min start / max close dates.
    if 'date_start' not in datas:
        cr.execute('select min(date_start) from project_task where id in %s', (tuple(ids),))
        dt = cr.fetchone()[0]
        if dt:
            datas['date_start'] = dt[:10]
        else:
            datas['date_start'] = time.strftime('%Y-%m-%d')
    if 'date_stop' not in datas:
        cr.execute('select max(date_start),max(date_close) from project_task where id in %s', (tuple(ids),))
        res = cr.fetchone()
        datas['date_stop'] = (res[0] and res[0][:10]) or time.strftime('%Y-%m-%d')
        if res[1] and datas['date_stop']<res[1]:
            datas['date_stop'] = res[1][:10]
    # NOTE(review): these two lambdas are redefined (with a slightly
    # different date format) a few lines below; only the later pair is
    # actually used.
    date_to_int = lambda x: int(x.ticks())
    int_to_date = lambda x: '/a60{}'+DateTime.localtime(x).strftime('%d/%m/%Y')
    datas = _burndown.compute_burndown(cr, uid, ids, datas['date_start'], datas['date_stop'])
    canv = canvas.init(fname=io, format='pdf')
    canv.set_author("Open ERP")
    max_hour = reduce(lambda x,y: max(y[1],x), datas, 0)
    date_to_int = lambda x: int(x.ticks())
    # '/a60{}' is a PyChart escape: rotate the tick label 60 degrees.
    int_to_date = lambda x: '/a60{}'+DateTime.localtime(x).strftime('%d %m %Y')
    def _interval_get(*args):
        # Pick up to 20 evenly spaced, day-aligned x-axis tick positions.
        result = set()
        for i in range(20):
            d = DateTime.localtime(datas[0][0] + (((datas[-1][0]-datas[0][0])/20)*(i+1)))
            res = DateTime.DateTime(d.year, d.month, d.day).ticks()
            result.add(res)
        return list(result)
    # Avoid a zero-width x range when all points share one timestamp.
    if datas[-1][0] == datas[0][0]:
        x_range = (datas[0][0],datas[-1][0]+1)
    else:
        x_range = (datas[0][0],datas[-1][0])
    ar = area.T(x_grid_style=line_style.gray50_dash1,
                x_axis=axis.X(label="Date", format=int_to_date),
                y_axis=axis.Y(label="Burndown Chart - Planned Hours"),
                x_grid_interval=_interval_get,
                x_range = x_range,
                y_range = (0,max_hour),
                legend = None,
                size = (680,450))
    ar.add_plot(line_plot.T(data=datas))
    ar.draw(canv)
    canv.close()
    self.obj = external_pdf(io.getvalue())
    self.obj.render()
    return (self.obj.pdf,'pdf')
def _get_records(self, cr, uid, data, context={}):
    """Collect invoice line ids for the article statistics report.

    Builds the date window from the form (month 'm' / custom span 's' /
    whole year), selects customer invoices in that window and keeps the
    non-zero lines, optionally restricted to one shop.  Returns the
    report context (ids, title, period, detail flag, year).
    """
    inv_obj = pooler.get_pool(cr.dbname).get('account.invoice')
    shop_obj = pooler.get_pool(cr.dbname).get('sale.shop')
    ids = []
    # Draft invoices are only included when requested.
    if data['form']['draft']==True:
        states = ['draft','open','paid']
    else:
        states = ['open','paid']
    title = _("Statistiques Article - ")
    if data['form']['period'] == 'm':
        # Whole month: [first day, last day of month].
        if int(data['form']['month']) < 10:
            title += "0"
        title += data['form']['month'] + "/" + data['form']['year']
        day_min = datetime.date(int(data['form']['year']), int(data['form']['month']), 1)
        nextmonth = int(data['form']['month']) + 1
        year = int(data['form']['year'])
        if nextmonth == 13:
            # December rolls over into January of the next year.
            nextmonth = 1
            year += 1
        day_max = datetime.date(year, nextmonth, 1) - datetime.timedelta(days=1)
    elif data['form']['period'] == 's':
        # Free date span from the form.
        day_min = DateTime.strptime(data['form']['date_from'], '%Y-%m-%d')
        day_max = DateTime.strptime(data['form']['date_to'], '%Y-%m-%d')
        title += _("De ") + day_min.strftime('%d/%m/%Y') + _(" A ") + day_max.strftime('%d/%m/%Y')
    else:
        # Whole year.
        day_min = datetime.date(int(data['form']['year']), 1, 1)
        day_max = datetime.date(int(data['form']['year']), 12, 31)
        title += data['form']['year']
    inv_ids = inv_obj.search(cr,uid,[
        ('date_invoice','>=',day_min.strftime('%Y-%m-%d')),
        ('date_invoice','<=',day_max.strftime('%Y-%m-%d')),
        ('type','in',['out_invoice','out_refund','sale_refund']),
        ('state','in',states),
    ])
    ids = []
    shop_id=data['form']['shop_id']
    if shop_id:
        title += _(" - Souche ") + shop_obj.browse(cr,uid,shop_id).name
    if inv_ids:
        lines=inv_obj.browse(cr,uid,inv_ids)
        for x in lines:
            for inv_line in x.invoice_line:
                # put other tests here if you want to filter lines
                if (inv_line.price_subtotal <> 0.0):
                    if shop_id:
                        # Keep only lines sold via the selected shop.
                        if inv_line.invoice_id.user_id.shop.id==shop_id:
                            ids.append(inv_line.id)
                    else:
                        ids.append(inv_line.id)
    return {'ids' : ids , 'title' : title, 'period' : data['form']['period'],
            'detail' : not data['form']['hideproducts'],
            'year':data['form']['year']}
def _project_compute(cr, uid, project_id):
    """Schedule the tasks of a project.

    Planning starts at the project's date_start (falling back to now)
    and returns the (tasks, last_date) pair from _compute_project.
    """
    pool = pooler.get_pool(cr.dbname)
    project = pool.get('project.project').browse(cr, uid, project_id)
    if project.date_start:
        begin = DateTime.strptime(project.date_start, '%Y-%m-%d')
    else:
        begin = DateTime.now()
    return _compute_project(cr, uid, project, begin)
def setTime(self, target, from_or_till):
    """Write the current time into the (target, from_or_till) cell of
    today's row, encoded as an Excel fraction-of-a-day number."""
    today = DateTime.today()
    col = self._template.getColumn((target, from_or_till))
    row = self.getRowForDate(today)
    now = DateTime.now()
    # Excel stores times of day as fractions of 24 hours.
    value = now.abstime / 60 / 60 / 24
    print "+ %s %s: Setting time at (%s, %s) to %s" % (from_or_till, target, row, col, now)
    self._xlSheet.setCell(row, col, value)
def __init__(self, db, const, id=None):
    """Set up the request scheduler: compute the next 22:00 batch run
    time and the table of mutually conflicting bofh operations."""
    self._db = db
    self.co = const
    midnight = DateTime.today()
    now = DateTime.now()
    # if we are past 22:00 this day, schedule for tomorrow evening
    if (now - midnight) > DateTime.TimeDeltaFrom(hours=22):
        self.batch_time = midnight + DateTime.TimeDeltaFrom(days=1, hours=22)
    # ... otherwise, schedule for this day
    else:
        self.batch_time = midnight + DateTime.TimeDeltaFrom(hours=22)
    self.now = now
    # "None" means _no_ conflicts, there can even be several of
    # that op pending. All other ops implicitly conflict with
    # themselves, so there can only be one of each op.
    self.conflicts = {
        int(const.bofh_move_user): [const.bofh_move_student,
                                    const.bofh_move_user_now,
                                    const.bofh_move_request,
                                    const.bofh_delete_user],
        int(const.bofh_move_student): [const.bofh_move_user,
                                       const.bofh_move_user_now,
                                       const.bofh_move_request,
                                       const.bofh_delete_user],
        int(const.bofh_move_user_now): [const.bofh_move_student,
                                        const.bofh_move_user,
                                        const.bofh_move_request,
                                        const.bofh_delete_user],
        int(const.bofh_move_request): [const.bofh_move_user,
                                       const.bofh_move_user_now,
                                       const.bofh_move_student,
                                       const.bofh_delete_user],
        int(const.bofh_move_give): None,
        int(const.bofh_archive_user): [const.bofh_move_user,
                                       const.bofh_move_user_now,
                                       const.bofh_move_student,
                                       const.bofh_delete_user],
        int(const.bofh_delete_user): [const.bofh_move_user,
                                      const.bofh_move_user_now,
                                      const.bofh_move_student,
                                      const.bofh_email_create],
        int(const.bofh_email_create): [const.bofh_email_delete,
                                       const.bofh_delete_user],
        int(const.bofh_email_delete): [const.bofh_email_create],
        int(const.bofh_email_convert): [const.bofh_email_delete],
        int(const.bofh_sympa_create): [const.bofh_sympa_remove],
        int(const.bofh_sympa_remove): [const.bofh_sympa_create],
        int(const.bofh_quarantine_refresh): None,
        int(const.bofh_email_restore): [const.bofh_email_create],
        int(const.bofh_homedir_restore): [const.bofh_move_user,
                                          const.bofh_move_user_now,
                                          const.bofh_move_student,
                                          const.bofh_delete_user]
    }
def date_today(self, cr, uid, ids, context):
    """Set each sheet's date_current to today, clamped to its period."""
    for sheet in self.browse(cr, uid, ids, context):
        if DateTime.now() <= DateTime.strptime(sheet.date_from, "%Y-%m-%d"):
            # Today is before the period: use the period start.
            new_date = sheet.date_from
        elif DateTime.now() >= DateTime.strptime(sheet.date_to, "%Y-%m-%d"):
            # Today is after the period: use the period end.
            new_date = sheet.date_to
        else:
            new_date = time.strftime("%Y-%m-%d")
        self.write(cr, uid, [sheet.id], {"date_current": new_date})
    return True
def __init__(self, id):
    """Initialise the authenticator for the entity identified by *id*.

    Records the creation/access timestamps and marks the session as
    authenticated.  Raises AuthenticationError for a falsy identifier.
    """
    if not id:
        raise AuthenticationError('Empty identitifer is not allowed')
    self.id = id
    self.authenticated = True
    self.started = DateTime.now()
    self.lastAccessed = DateTime.now()
def _get_records(self, cr, uid, data, context={}): inv_obj = pooler.get_pool(cr.dbname).get('account.invoice.line') ids = [] states = ['posted','valid'] title = _("Impayes clients - ") ids = [] # print inv_ids partner_id=data['form']['partner_id'] moveline_obj = pooler.get_pool(cr.dbname).get('account.invoice.line') #conditions de date if data['form']['invoice_date']=='': invoice_date= DateTime.strptime(data['form']['invoice_date'], '%Y-%m-%d') else: invoice_date=date.today().strftime('%Y-%m-%d') print 'invoice_date',invoice_date if data['form']['maturity_date']=='': maturity_date= DateTime.strptime(data['form']['maturity_date'], '%Y-%m-%d') else: maturity_date=date.today().strftime('%Y-%m-%d') print 'maturity date',maturity_date # print partners if data['form']['partner_id']: movelines = moveline_obj.search(cr, uid, [('partner_id', '=', partner_id), ]) elif data['form']['code']: partners=pooler.get_pool(cr.dbname).get('res.partner').search(cr,uid,[('ref','like','%'+data['form']['code']+'%')]) movelines = moveline_obj.search(cr, uid, [('partner_id', 'in', partners), ]) else: movelines = moveline_obj.search(cr, uid, [#('partner_id', '=', partner.id), ]) movelines = moveline_obj.browse(cr, uid, movelines) if movelines: lines=movelines for x in lines: # for inv_line in x.move_line_id: # print inv_line.product_id.name # put other tests here if you want to filter lines ids.append(x.id) # print ids title+=_(" ") if data['form']['detail']==0: detail=False else: detail=True return {'ids' : ids , 'title' : title, 'period' : data['form']['period'], 'detail' : detail, 'year':data['form']['year']}
def _is_expired(self, cr, uid, ids, field_name, arg, context={}):
    """Function field: whether each fleet's maintenance cover has expired."""
    result = {}
    now = DateTime.now()
    midnight = DateTime.DateTime(now.year, now.month, now.day, 0, 0, 0.0)
    for fleet in self.browse(cr, uid, ids, context):
        if not fleet.expire_time:
            # No maintenance expire terms recorded means no coverage.
            result[fleet.id] = True
        else:
            expiry = DateTime.strptime(fleet.expire_time, '%Y-%m-%d')
            result[fleet.id] = midnight > expiry
    return result
def _default_date_to(self, cr, uid, context={}):
    """Default end date for a timesheet, from the company's range setting."""
    user = self.pool.get("res.users").browse(cr, uid, uid, context)
    timesheet_range = user.company_id and user.company_id.timesheet_range or "month"
    if timesheet_range == "month":
        # Last day of the current month.
        end = DateTime.now() + DateTime.RelativeDateTime(months=+1, day=1, days=-1)
        return end.strftime("%Y-%m-%d")
    if timesheet_range == "week":
        # Next (or current) Sunday.
        end = DateTime.now() + DateTime.RelativeDateTime(weekday=(DateTime.Sunday, 0))
        return end.strftime("%Y-%m-%d")
    if timesheet_range == "year":
        return time.strftime("%Y-12-31")
    return time.strftime("%Y-%m-%d")
def __xform_8303(self, request_data):
    """8303: Berichtszeit (time the results were reported).

    Parses the HHMM value from field 8303 into results_reported_when,
    first defaulting it to now so the date part is preserved.
    """
    if self.__request['results_reported_when'] is None:
        self.__request['results_reported_when'] = mxDT.now()
    # mx strptime's third argument supplies defaults for components
    # missing from the '%H%M' format (i.e. the date part).
    self.__request['results_reported_when'] = mxDT.strptime(
        request_data['8303'][0].strip(),
        '%H%M',
        self.__request['results_reported_when']
    )
    return None
def filedate(path, stat=os.stat):
    """Return the modification date/time of *path* as a DateTime instance.

    Needs mxDateTime to be installed.
    """
    from mx import DateTime
    mtime = stat(path)[8]  # index 8 == st_mtime in the stat tuple
    return DateTime.localtime(mtime)
def check_date(option, opt, value):
    """optparse type-checker: parse *value* as a 'yyyy/mm/dd' date.

    Returns the parsed DateTime instance, or raises OptionValueError
    when the value does not match the expected format.
    """
    try:
        return DateTime.strptime(value, "%Y/%m/%d")
    except DateTime.Error:
        raise OptionValueError("expected format of %s is yyyy/mm/dd" % opt)
def ac_type_add(self, account_id, affiliation, ou_id):
    """Adds an account to special groups which represent an affiliation
    at an OU. Make the group if it's not present."""
    self._make_str2const()
    if self._ac is None:
        self._ac = Factory.get('Account')(self.db)
    self._ac.clear()
    ou = Factory.get("OU")(self.db)
    ou.find(ou_id)
    # Affiliation -> group-name suffix (Norwegian: employees / pupils).
    aff2txt = {
        int(self._co.affiliation_ansatt): 'Tilsette',
        int(self._co.affiliation_teacher): 'Tilsette',
        int(self._co.affiliation_elev): 'Elevar'
    }
    # Look up the group
    grp_name = "%s %s" % (self._get_ou_acronym(ou), aff2txt[int(affiliation)])
    if not self._group:
        self._group = Factory.get('Group')(self.db)
    if not self.default_creator_id:
        # Fall back to the bootstrap account as group creator.
        if self._ac is None:
            self._ac = Factory.get('Account')(self.db)
        self._ac.clear()
        self._ac.find_by_name(cereconf.INITIAL_ACCOUNTNAME)
        self.default_creator_id = self._ac.entity_id
    try:
        self._group.clear()
        self._group.find_by_name(grp_name)
        self.logger.debug("ac_type_add: Group '%s' found." % grp_name)
    except Errors.NotFoundError:
        # Group is missing: create it with the configured spreads.
        self._group.populate(
            creator_id=self.default_creator_id,
            visibility=self._co.group_visibility_all,
            name=grp_name,
            description=grp_name,
            # TODO: Should probably have a custom type!
            group_type=self._co.group_type_unknown,
        )
        self._group.write_db()
        for spread in procconf.AC_TYPE_GROUP_SPREAD:
            if not self._group.has_spread(int(self.str2const[spread])):
                self._group.add_spread(int(self.str2const[spread]))
        self._group.write_db()
        self.logger.info("ac_type_add: Group '%s' created." % grp_name)
    # Tag the group as an affiliation group if not already tagged.
    if not self._group.get_trait(self._co.trait_group_affiliation):
        self._group.populate_trait(self._co.trait_group_affiliation,
                                   date=DateTime.now())
        self._group.write_db()
    if not self._group.has_member(account_id):
        self._group.add_member(account_id)
        self._group.write_db()
        self.logger.info("ac_type_add: Account '%s' added to group '%s'." %
                         (account_id, grp_name))
def get_file_to_play(zeit):
    """Find the audio file scheduled for *zeit* in the player config.

    Config columns: year month day hour type [y m d h | filename].
    Type 'V' plays a pre-produced file directly; type 'W' replays a
    recording, searching backwards second by second for the newest
    existing file.  Falls back to the endless tape when nothing matches.
    """
    # log("Looking for a file for " + str(zeit))
    f = open(player_config, "r")
    lines = f.readlines()
    f.close()
    for x in lines:
        cfg = string.split(x)
        if cfg[0] == "%04d" % zeit.year:
            if cfg[1] == "%02d" % zeit.month:
                if cfg[2] == "%02d" % zeit.day:
                    if cfg[3] == "%02d" % zeit.hour:
                        if cfg[4] == "W":
                            break
                        if cfg[4] == "V":
                            break
    else:
        # for/else: no matching entry found at all.
        return get_endlosband()
    # log("Entry found: %s %s %s %s" % (cfg[5], cfg[6], cfg[7], cfg[8]))
    # Pre-production: play the file directly.
    if cfg[4] == "V":
        # log("Playing pre-produced file: " + cfg[5])
        return "/opt/dserv/data/vorprod/%s" % (cfg[5])
    else:
        # Repeat: look for the recording to replay.
        # Find the newest matching file:
        # pdt = DateTime(2000, 8, 14, 18, 0, 0)
        pdt = DateTime(string.atoi(cfg[5], 10), string.atoi(cfg[6], 10),
                       string.atoi(cfg[7], 10), string.atoi(cfg[8], 10),
                       zeit.minute, zeit.second)
        # log("Looking for a file for: " + str(pdt))
        # Walk backwards one second at a time until the source hour begins.
        while pdt >= DateTime(string.atoi(cfg[5], 10), string.atoi(cfg[6], 10),
                              string.atoi(cfg[7], 10), string.atoi(cfg[8], 10),
                              0, 0):
            filename = "/opt/dserv/data/%s/%s/%s" % (cfg[5], cfg[6], cfg[7])
            filename += "/%s-%02d-%02d.mp3" % (cfg[8], pdt.minute, pdt.second)
            if os.path.exists(filename):
                # log("File found: " + filename)
                return filename
            pdt = pdt - RelativeDateTime(seconds=1)
    # log("No file found for the entry! Playing the endless tape")
    return get_endlosband()
def _get_records(self, cr, uid, data, context={}):
    """Collect non-zero customer invoice line ids for the 'Statistique
    Clients' report.

    :param data: wizard payload; data['form'] carries the period kind
        ('m' = one month, 's' = explicit date span, otherwise the whole
        year), plus month/year/date_from/date_to and the 'draft' flag
        (whether draft invoices are included).
    :returns: dict with the matching invoice-line ids, a human-readable
        title, and the selected period/year.
    """
    inv_obj = pooler.get_pool(cr.dbname).get('account.invoice')
    if data['form']['draft'] == True:
        states = ['draft', 'open', 'paid']
    else:
        states = ['open', 'paid']
    title = _("Statistique Clients - ")
    if data['form']['period'] == 'm':
        # Zero-pad the month in the title, e.g. "03/2012".
        if int(data['form']['month']) < 10:
            title += "0"
        title += data['form']['month'] + "/" + data['form']['year']
        day_min = datetime.date(int(data['form']['year']),
                                int(data['form']['month']), 1)
        # Last day of the month = first day of next month minus one day.
        nextmonth = int(data['form']['month']) + 1
        year = int(data['form']['year'])
        if nextmonth == 13:
            nextmonth = 1
            year += 1
        day_max = datetime.date(year, nextmonth, 1) - datetime.timedelta(days=1)
    elif data['form']['period'] == 's':
        day_min = DateTime.strptime(data['form']['date_from'], '%Y-%m-%d')
        day_max = DateTime.strptime(data['form']['date_to'], '%Y-%m-%d')
        title += _("De ") + day_min.strftime('%d/%m/%Y') + _(" A ") + day_max.strftime('%d/%m/%Y')
    else:
        # Whole calendar year.
        day_min = datetime.date(int(data['form']['year']), 1, 1)
        day_max = datetime.date(int(data['form']['year']), 12, 31)
        title += data['form']['year']
    inv_ids = inv_obj.search(cr, uid, [
        ('date_invoice', '>=', day_min.strftime('%Y-%m-%d')),
        ('date_invoice', '<=', day_max.strftime('%Y-%m-%d')),
        ('type', 'in', ['out_invoice', 'out_refund', 'sale_refund']),
        ('state', 'in', states),
    ])
    # Keep only invoice lines with a non-zero subtotal.
    ids = []
    if inv_ids:
        for inv in inv_obj.browse(cr, uid, inv_ids):
            for inv_line in inv.invoice_line:
                if inv_line.price_subtotal != 0.0:
                    ids.append(inv_line.id)
    return {'ids': ids, 'title': title,
            'period': data['form']['period'],
            'year': data['form']['year']}
def testGetLatestData(self):
    """Check that GetLogFilesInRange over the last hour picks up the
    newest log file present in the sampler's log directory.

    NOTE(review): depends on live data under /home/gbtlogs and on a log
    having been written within the last hour — this is an integration
    test, not a pure unit test.
    """
    sd = SamplerData("Weather-Weather2-weather2")
    # Query window: [one hour ago, now], converted to mx.DateTime.
    now = datetime.utcnow()
    start = now - timedelta(hours = 1)
    s, e = (start.utctimetuple()[:6], now.utctimetuple()[:6])
    startDateTime = DateTime.DateTime(s[0],s[1],s[2],s[3],s[4],s[5])
    endDateTime = DateTime.DateTime(e[0],e[1],e[2],e[3],e[4],e[5])
    latestKeys = sd.GetLogFilesInRange(startDateTime, endDateTime)
    self.assertTrue(len(latestKeys) > 0)
    # now just look there and see what the latest are
    dir = "/home/gbtlogs/Weather-Weather2-weather2"
    files = os.listdir(dir)
    files.sort()
    latestFile = files[-1]
    # we better have picked up the latest file
    self.assertEquals(latestFile, latestKeys[-1])
def _get_guest_info(self, entity_id):
    """ Get info about a given guest user.

    @type entity_id: int
    @param entity_id: The guest account entity_id

    @rtype: dict
    @return: A dictionary with relevant information about a guest user.
        Keys: 'username': <string>, 'created': <DateTime>,
              'expires': <DateTime>, 'name': <string>,
              'responsible': <int>, 'status': <string>,
              'contact': <string>'
    """
    account = self.Account_class(self.db)
    account.clear()
    account.find(entity_id)
    # Guest accounts are identified by their guest traits; a missing
    # trait makes get_trait() return None and the subscript raise
    # TypeError, which we translate to a user-facing error.
    try:
        guest_name = account.get_trait(
            self.const.trait_guest_name)['strval']
        responsible_id = account.get_trait(
            self.const.trait_guest_owner)['target_id']
    except TypeError:
        self.logger.debug('Not a guest user: %s', account.account_name)
        raise CerebrumError('%s is not a guest user'
                            % account.account_name)
    # Get quarantine date: the guest_old quarantine's start date acts
    # as the expiry date; fall back to the account's expire_date.
    try:
        end_date = account.get_entity_quarantine(
            self.const.quarantine_guest_old)[0]['start_date']
    except IndexError:
        self.logger.warn('No quarantine for guest user %s',
                         account.account_name)
        end_date = account.expire_date
    # Get contact info (mobile phone, manually registered), if any.
    mobile = None
    try:
        mobile = account.get_contact_info(
            source=self.const.system_manual,
            type=self.const.contact_mobile_phone)[0]['contact_value']
    except IndexError:
        pass
    # Get account state
    status = 'active'
    if end_date < DateTime.now():
        status = 'expired'
    return {
        'username': account.account_name,
        'created': account.created_at,
        'expires': end_date,
        'name': guest_name,
        'responsible': self._get_account_name(responsible_id),
        'status': status,
        'contact': mobile
    }
def offset(self, info, buffer):
    """Calculate the time zone offset as a date-time delta"""
    tag, left, right, sublist = info
    # Collect the parsed sub-fields into a mapping; missing fields
    # default to a positive sign and zero hours/minutes.
    fields = singleMap(sublist, self, buffer)
    sign = fields.get('offset_sign', 1)
    hours = fields.get("hour", 0)
    minutes = fields.get("minute", 0)
    return DateTime.DateTimeDelta(0, sign * hours, sign * minutes)
def check_account(self, account):
    """ Check if account is OK. """
    # TODO: Check quarantined?
    # TODO: Should we be able to whitelist certain quarantines
    if account is None:
        return False
    expired = bool(account.expire_date) and account.expire_date < DateTime.now()
    return not expired
def freeze_user(self, pid, username, when=None):
    """See .Gateway.GatewayClient."""
    self.logger.info("Freezing account: %s", username)
    # Validates that the project exists (raises otherwise); the index
    # itself is not needed here.
    self._get_project_idx(pid)
    user = self._users[self._get_user_idx(username, pid)]
    if user['frozen']:
        raise Err("User already frozen")
    user['frozen'] = DateTime.now()
    return user
def create_project(self, pid):
    """See .Gateway.GatewayClient."""
    self.logger.info("Creating project: %s", pid)
    # A lookup that *succeeds* means the project already exists.
    exists = True
    try:
        self._get_project_idx(pid)
    except Err:
        exists = False
    if exists:
        raise Err("Project %s exists" % pid)
    project = {
        'frozen': None,
        # DateTime.now() + 10 — presumably ten days ahead (mx.DateTime
        # integer arithmetic); confirm against real Gateway behaviour.
        'expires': DateTime.now() + 10,
        'name': pid,
        'created': DateTime.now(),
    }
    self._projects.append(project)
    return project
def truncate_log(to_date, logfilename, person_id=None):
    """Truncate printer-quota history up to ``to_date``.

    :param to_date: cut-off date as a 'YYYY-MM-DD' string.
    :param logfilename: file that receives an audit trail of every
        removed row (appended).
    :param person_id: restrict truncation to one person; when None, all
        persons with page-count or payment history in range are
        processed, plus a final None pass that resets the balance.
    """
    pq_util = PPQUtil.PPQUtil(db)
    ppq = PaidPrinterQuotas.PaidPrinterQuotas(db)
    # '-0-0-0' pads the date string so Date() gets h/m/s components too.
    to_date = DateTime.Date(
        *([int(x) for x in (to_date + '-0-0-0').split('-')]))
    from_date = DateTime.Date(1980, 1, 1, 1, 1, 1)
    persons = {}
    if person_id:
        persons[person_id] = True
    else:
        # find potential victims
        for row in ppq.get_pagecount_stats(from_date, to_date,
                                           group_by=('person_id', )):
            if row['person_id'] is not None:
                persons[long(row['person_id'])] = True
        for row in ppq.get_payment_stats(from_date, to_date,
                                         group_by=('person_id', )):
            if row['person_id'] is not None:
                persons[long(row['person_id'])] = True
    out = open(logfilename, 'a')
    out.write("Truncate job started at %s\n" % time.asctime())
    # The extra None entry triggers the global reset_balance pass.
    for person_id in persons.keys() + [None]:
        removed, new_status = pq_util.truncate_log(
            person_id, to_date, 'quota_tools',
            reset_balance=(person_id is None))
        if not removed:
            continue
        logger.debug(
            "removed %i entries for %s" % (
                len(removed),
                db.pythonify_data(removed[0]['person_id'])))
        out.write("new balance: %s\n" % repr(new_status))
        for row in removed:
            row = dict([(k, db.pythonify_data(v))
                        for k, v in row.items()])
            # NOTE(review): '%H:%M.%S' puts a '.' between minutes and
            # seconds — looks unintentional, but only affects the audit
            # log format; confirm before changing.
            row['tstamp'] = row['tstamp'].strftime('%Y-%m-%d %H:%M.%S')
            out.write("removed: %s\n" % repr(row))
        # Commit per person so one failure doesn't roll back everything.
        try:
            db.commit()
        except:
            out.write("WARNING: Commit threw exception for this person\n")
            raise
    out.close()
def run_apple_et_specs (stn,accend,output): et_dict = {} #date range start_date_dt = accend + DateTime.RelativeDate(days=-7) + DateTime.RelativeDate(hour=0,minute=0,second=0.0) end_date_dt = accend + DateTime.RelativeDate(days=+6) + DateTime.RelativeDate(hour=23,minute=0,second=0.0) fcst_stn = copy.deepcopy(stn) if stn[0:3] == '42.' or stn[0:3] == '43.': station_type = 'ucc' elif stn[0:1] >= '1' and stn[0:1] <= '9' and stn[1:2] >= '0' and stn[1:2] <= '9': station_type = 'njwx' elif len(stn) == 4 and stn[0:1].upper() == 'K': station_type = 'icao' elif len(stn) == 4: station_type = 'oardc' elif stn[0:3] == 'cu_' or stn[0:3] == 'um_': station_type = 'cu_log' elif stn[0:3] == "ew_": stn = stn[3:] station_type = 'miwx' elif stn[0:5] == "nysm_": stn = stn[5:] station_type = 'nysm' elif len(stn) == 7 and stn[2:3] == "_": station_type = 'nwon' elif len(stn) == 3 or len(stn) == 6: station_type = 'newa' else: raise StationProblem('Cannot determine station type for %s'%stn) #need to get greentip date in DateTime format for leaf area adjustment biofix_dd = phen_events_dict['macph_greentip_43']['dd'][2] #green tip degree day accumulation hourly_data = {} jan1_dt = DateTime.DateTime(end_date_dt.year,1,1,0,0,0) fcst_data = get_fcst_data (fcst_stn, 'temp', jan1_dt, end_date_dt) hourly_data = get_hourly_data (stn, 'temp', jan1_dt, end_date_dt, hourly_data, fcst_data, station_type) biofix_dt, ddmiss = BaseTools().find_biofix (hourly_data, jan1_dt, end_date_dt, 'dd43be', biofix_dd) if biofix_dt and ddmiss <= 7: et_dict['greentip'] = '%d/%d/%d' % (biofix_dt.month,biofix_dt.day,biofix_dt.year) ucanid,station_name = get_metadata (stn, station_type) et_dict['station_name'] = station_name return newaTools_io.apple_et_specs(et_dict)
def apple_thin_json(thin_dict, biofix_dt, bloom_dt):
    """Serialize apple fruit-thinning model output to a JSON document.

    Builds one row per day (date, max/min temp in F, solar radiation,
    carbon balance, demand, thinning index, 4-day running average of the
    index, and a recommendation inside the 35-day post-bloom window).
    Any exception is swallowed via print_exception() and an empty/partial
    result is returned.
    """
    results_list = []
    notes_list = []
    try:
        import json
        results_list = []
        notes_list = []
        tkeys = thin_dict['data'].keys()
        tkeys.sort()
        # Recommendations only apply up to 35 days after bloom.
        recommendEnd = bloom_dt + DateTime.RelativeDate(days=+35)
        # Seed the 4-day sliding window with days 0-2 (shifted each
        # iteration: pop oldest, append day key+3).
        if len(tkeys) >= 3:
            list4day = [miss, thin_dict['data'][0]['thinIndex'],
                        thin_dict['data'][1]['thinIndex'],
                        thin_dict['data'][2]['thinIndex']]
        else:
            list4day = []
        for key in tkeys:
            # Day keys are offsets (days) from the green-tip date.
            t_dt = thin_dict['greentipDate'] + DateTime.RelativeDate(days=+key, hour=0, minute=0, second=0.0)
            fdate = "%d-%02d-%02d" % (t_dt.year,t_dt.month,t_dt.day)
            # A day with missing inputs invalidates its derived values.
            if thin_dict['data'][key]['maxt'] == miss or thin_dict['data'][key]['mint'] == miss or thin_dict['data'][key]['srad'] == miss:
                thin_dict['data'][key]['dlyCarbonBal'] = miss
                thin_dict['data'][key]['totalDemand'] = miss
                thin_dict['data'][key]['thinIndex'] = miss
            # Slide the window forward by one day.
            if key+3 < len(tkeys) and thin_dict['data'][key+3]['maxt'] != miss and thin_dict['data'][key+3]['mint'] != miss and thin_dict['data'][key+3]['srad'] != miss:
                list4day.append(thin_dict['data'][key+3]['thinIndex'])
            else:
                list4day.append(miss)
            list4day.pop(0)
            if len(list4day) == 4 and not miss in list4day:
                avg4day = round((sum(list4day)/4.0), 2)
            else:
                avg4day = "-"
            if t_dt >= bloom_dt and t_dt <= recommendEnd:
                recommend = get_recommend(avg4day)
            else:
                recommend = "-"
            results_list.append([fdate, ctof(thin_dict['data'][key]['maxt']), ctof(thin_dict['data'][key]['mint']),\
                mround(thin_dict['data'][key]['srad'],1), mround(thin_dict['data'][key]['dlyCarbonBal'],2),\
                mround(thin_dict['data'][key]['totalDemand'],2), mround(thin_dict['data'][key]['thinIndex'],2),\
                avg4day, recommend])
        if (bloom_dt - biofix_dt).days < 21:
            notes_list.append('Difference between Green tip and Bloom is less than 21 days. Results may be unreliable.')
    except:
        print_exception()
    # NOTE(review): json is imported inside the try block; if that
    # import ever failed, this line would raise NameError.
    json_dict = json.dumps({"data":results_list, "notes":notes_list})
    return json_dict
def find_biofix (self, hourly_data, jan1_dt, end_date_dt, smry_type, biofix_dd):
    """Find the date on which accumulated degree days reach biofix_dd.

    Walks hourly temperatures in date order, tracking each day's max and
    min; at hour 23 the day is closed out, its degree days computed and
    accumulated.  Returns (biofix_date, ddmiss) where biofix_date is the
    first end-of-day reaching the threshold (None if never reached) and
    ddmiss counts days skipped for missing data.
    """
    biofix_date = None
    ddmiss = None
    try:
        ddaccum = 0.
        ddmiss = 0
        # Sentinels for the running daily extremes.
        dly_max = -999
        dly_min = 999
        dly_miss = 0
        ks = hourly_data.keys()
        ks.sort()
        for key_date in ks:
            theDate = DateTime.DateTime(*key_date)
            hourly_temp = hourly_data[key_date]['temp'][0]
            if hourly_temp != miss:
                if hourly_temp > dly_max:
                    dly_max = copy.deepcopy(hourly_temp)
                if hourly_temp < dly_min:
                    dly_min = copy.deepcopy(hourly_temp)
            else:
                dly_miss = dly_miss + 1
            # end of day update
            if theDate.hour == 23:
                # Only compute degree days for complete days.
                if dly_miss == 0:
                    dly_dd = BaseTools().calc_degday(dly_max, dly_min, smry_type)
                else:
                    dly_dd = miss
                # check to see if biofix gdd accum has been reached
                if dly_dd != miss:
                    ddaccum = ddaccum + dly_dd
                else:
                    ddmiss = ddmiss + 1
                if round(ddaccum,0) >= biofix_dd:
                    # +0 hours: effectively a copy of theDate.
                    biofix_date = theDate + DateTime.RelativeDate(hours=0)
                    break
                # Reset daily trackers for the next day.
                dly_max = -999
                dly_min = 999
                dly_miss = 0
    except:
        print_exception()
    return biofix_date, ddmiss
def user_request_guest(self, operator, nr, date, groupname, comment): """ Request a number of guest users for a certain time. """ # date checking start_date, end_date = self._parse_date_from_to(date) today = DateTime.today() if start_date < today: raise CerebrumError("Start date shouldn't be in the past") # end_date in allowed interval? if end_date < start_date: raise CerebrumError("End date can't be earlier than start_date") max_date = start_date + DateTime.RelativeDateTime( days=cereconf.GUESTS_MAX_PERIOD) if end_date > max_date: raise CerebrumError("End date can't be later than %s" % max_date.date) if not nr.isdigit(): raise CerebrumError( "'Number of accounts' requested must be a number;" " %r isn't." % nr) try: self.ba.can_request_guests(operator.get_entity_id(), groupname) except Errors.NotFoundError: raise CerebrumError("Group %r not found" % groupname) owner = self.util.get_target(groupname, default_lookup="group") try: user_list = self.bgu.request_guest_users( int(nr), end_date, comment, owner.entity_id, operator.get_entity_id()) for uname, comment, e_id, passwd in user_list: operator.store_state("new_account_passwd", {'account_id': e_id, 'password': passwd}) ret = "OK, reserved guest users:\n%s\n" % \ self._pretty_print([x[0] for x in user_list]) ret += "Please use misc list_passwords to view the passwords\n" ret += "or use misc print_passwords to print the passwords." return ret except GuestAccountException as e: raise CerebrumError(e)
def _get_records(self, cr, uid, data, context={}):
    """Collect customer invoice ids for the 'Revenue per customer'
    report, limited to the period selected in the wizard form
    ('m' = one month, 's' = explicit date span, otherwise whole year).
    Returns the matching ids plus a human-readable title.
    """
    form = data['form']
    inv_obj = pooler.get_pool(cr.dbname).get('account.invoice')
    if form['draft'] == True:
        states = ['draft', 'open', 'paid']
    else:
        states = ['open', 'paid']
    title = _("Revenue per customer - ")
    if form['period'] == 'm':
        month = int(form['month'])
        year = int(form['year'])
        # Zero-pad the month in the title, e.g. "03/2012".
        if month < 10:
            title += "0"
        title += form['month'] + "/" + form['year']
        day_min = datetime.date(year, month, 1)
        # Last day of month = first day of following month minus one.
        if month == 12:
            day_max = datetime.date(year + 1, 1, 1) - datetime.timedelta(days=1)
        else:
            day_max = datetime.date(year, month + 1, 1) - datetime.timedelta(days=1)
    elif form['period'] == 's':
        day_min = DateTime.strptime(form['date_from'], '%Y-%m-%d')
        day_max = DateTime.strptime(form['date_to'], '%Y-%m-%d')
        title += _("From ") + day_min.strftime('%d/%m/%Y') + _(
            " to ") + day_max.strftime('%d/%m/%Y')
    else:
        # Whole calendar year.
        day_min = datetime.date(int(form['year']), 1, 1)
        day_max = datetime.date(int(form['year']), 12, 31)
        title += form['year']
    inv_ids = inv_obj.search(cr, uid, [
        ('date_invoice', '>=', day_min.strftime('%Y-%m-%d')),
        ('date_invoice', '<=', day_max.strftime('%Y-%m-%d')),
        ('type', 'in', ['out_invoice', 'out_refund']),
        ('state', 'in', states),
    ])
    return {
        'ids': inv_ids,
        'title': title,
        'period': form['period'],
        'year': form['year']
    }
def get_relative_date(years=None, months=None, days=None):
    """ Calculates relative past date from current time based on input params.

    :param years: Number of years from current time
    :type: str
    :param months: Number of months from current time
    :type: str
    :param days: Number of days from current time
    :type: str
    :return: mx.DateTime.DateTime object
    """
    result = DateTime.now()
    # Subtract in a fixed order (years, months, days) — month/day
    # arithmetic is not commutative, so order must match the contract.
    for unit, amount in (('years', years), ('months', months), ('days', days)):
        if amount is not None:
            result = result - DateTime.RelativeDate(**{unit: int(amount)})
    return result
def parseLog(self, log=''):
    """Split a raw log line into (time, type, message, trace).

    The line's fields are delimited by square brackets; the first three
    splits yield time, type and message, the remainder is the trace
    (its trailing character — presumably the closing ']' — is dropped).
    Raises Exception when the type field is not listed in self.LogTypes.
    """
    # Raw string for the regex; splits on either '[' or ']'.
    rexp = re.compile(r'[\[\]]')
    (logTime, logType, logMsg, logTrace) = rexp.split(log, 3)
    if logType not in self.LogTypes:
        # Call-style raise (valid in Python 2 and 3) instead of the
        # removed 'raise E, msg' statement form.
        raise exceptions.Exception("unknown log type %s" % logType)
    # lT = DateTime.ISO.ParseDateTime(logTime.strip())
    return (DateTime.DateTimeFrom(logTime.strip()), logType.strip(),
            logMsg.strip(), logTrace.strip()[:-1])
def makeDatesFromFile(options):
    """Build a human-readable date line from the input file's first line.

    The first line holds one or two tab-separated dates, each as
    'year,month,day'.  One date yields e.g. "August 14, 2000"; two dates
    are delegated to makeDateline(); anything else aborts the program.
    """
    inf = open(options["inputfile"], "r")
    line1 = inf.readline()
    inf.close()
    idates = line1.strip().split("\t")
    if len(idates) == 1:
        # Single date: "MonthName day, year".
        cols = idates[0].split(",")
        dp = DateTime.DateTime(int(cols[0]), int(cols[1]), int(cols[2]))
        dateline = "%s %s, %s" % (dp.strftime("%B"), dp.day, dp.year)
    elif len(idates) == 2:
        # Date range: delegate formatting to makeDateline().
        cols = idates[0].split(",")
        sdp = DateTime.DateTime(int(cols[0]), int(cols[1]), int(cols[2]))
        cols = idates[1].split(",")
        edp = DateTime.DateTime(int(cols[0]), int(cols[1]), int(cols[2]))
        dateline = makeDateline(sdp, edp, options)
    else:
        # Malformed input is fatal for this script.
        print "Unexpected date line in input file", line1
        sys.exit()
    return dateline
def register_from(setfn, integer_datetimes):
    """Register PostgreSQL -> mx.DateTime converters for binary wire data.

    :param setfn: callback registering a converter for a pg type oid.
    :param integer_datetimes: True when the server sends date/time
        values as 64-bit integers (microseconds) rather than floats —
        selects the matching unpackers from pgtype.
    """
    if integer_datetimes:
        unpack_time = pgtype.unpack_int_time
        unpack_timestamp = pgtype.unpack_int_timestamp
        unpack_date = pgtype.unpack_int_date
        unpack_interval = pgtype.unpack_int_interval
    else:
        unpack_time = pgtype.unpack_flt_time
        unpack_timestamp = pgtype.unpack_flt_timestamp
        unpack_date = pgtype.unpack_flt_date
        unpack_interval = pgtype.unpack_flt_interval
    # PostgreSQL's epoch for timestamps/dates is 2000-01-01.
    timestamp_epoch = DateTime.DateTime(2000,1,1)
    date_epoch = DateTime.Date(2000,1,1)
    def from_timestamp(buf):
        # Seconds since the 2000 epoch, rounded to centiseconds.
        seconds = round(unpack_timestamp(buf) / pgtype.usec_mul, 2)
        delta = DateTime.DateTimeDeltaFromSeconds(seconds)
        return timestamp_epoch + delta
    setfn(pgoid.timestamp, from_timestamp)
    def from_time(buf):
        seconds = round(unpack_time(buf) / pgtype.usec_mul, 2)
        return DateTime.Time(seconds=seconds)
    setfn(pgoid.time, from_time)
    def from_date(buf):
        # Dates arrive as a day count relative to the 2000 epoch.
        delta = DateTime.DateTimeDeltaFromDays(unpack_date(buf))
        return date_epoch + delta
    setfn(pgoid.date, from_date)
    def from_interval(buf):
        microseconds, days, months = unpack_interval(buf)
        seconds = round(microseconds / pgtype.usec_mul, 2)
        # Unfortunately, we can't use divmod here...
        hours = int(seconds / 3600.0)
        seconds = math.fmod(seconds, 3600.0)
        minutes = int(seconds / 60.0)
        seconds = math.fmod(seconds, 60.0)
        years = int(months / 12.0)
        months = int(math.fmod(months, 12))
        return DateTime.RelativeDateTime(years, months, days,
                                         hours, minutes, seconds)
    setfn(pgoid.interval, from_interval)
def tp_for_grf2(daily_data, start_date_dt, start_fcst_dt, useqpf=True):
    """Split daily weather rows into observed and forecast graph series.

    :param daily_data: iterable of per-day tuples (date triple, hourly
        average temp, max, min, precip, leaf wetness, rel. humidity,
        wind, solar radiation, qpf, st4x, st4n, flags).
    :param start_date_dt: first day to include.
    :param start_fcst_dt: first day treated as forecast (normalized to
        midnight below).
    :param useqpf: when False, observed precip replaces the qpf value.
    :returns: dict of parallel lists — maxt/mint/prcp/obs_days for
        observations and fmaxt/fmint/fprcp/frobs_days for forecasts;
        empty/partial on error (exception printed, not raised).
    """
    obs_dict = {}
    forecast_data = None
    # Normalize the forecast boundary to midnight for day comparison.
    start_fcst_dt = start_fcst_dt + DateTime.RelativeDate(
        hour=0, minute=0, second=0)
    try:
        mint = []
        maxt = []
        prcpl = []
        obs_days = []
        fmint = []
        fmaxt = []
        fprcp = []
        fobs_days = []
        for dly_dt, tave_hr, tmax, tmin, prcp, lwet, rhum, wspd, srad, qpf, st4x, st4n, dflags in daily_data:
            this_day_dt = DateTime.DateTime(dly_dt[0], dly_dt[1], dly_dt[2])
            if this_day_dt < start_date_dt:
                continue
            if not useqpf:
                qpf = prcp
            # Days missing either extreme are skipped entirely.
            if tmax != miss and tmin != miss:
                if this_day_dt < start_fcst_dt:
                    # Observed portion of the series.
                    mint.append(int(round(tmin, 0)))
                    maxt.append(int(round(tmax, 0)))
                    prcpl.append(qpf)
                    obs_days.append("%d-%d-%d"
                                    % (dly_dt[0], dly_dt[1], dly_dt[2]))
                else:
                    # Forecast portion of the series.
                    fmint.append(int(round(tmin, 0)))
                    fmaxt.append(int(round(tmax, 0)))
                    fprcp.append(qpf)
                    fobs_days.append("%d-%d-%d"
                                     % (dly_dt[0], dly_dt[1], dly_dt[2]))
        obs_dict['maxt'] = maxt
        obs_dict['mint'] = mint
        obs_dict['prcp'] = prcpl
        obs_dict['obs_days'] = obs_days
        obs_dict['fmaxt'] = fmaxt
        obs_dict['fmint'] = fmint
        obs_dict['fprcp'] = fprcp
        # NOTE(review): key is 'frobs_days' while the local list is
        # fobs_days — consumers apparently expect this spelling; verify.
        obs_dict['frobs_days'] = fobs_days
    except:
        print_exception()
    return obs_dict
def _timeConvertFromDB(t): if t==None: return None for format in ('%H:%M:%S', '%H:%M'): try: return DateTime.strptime(t, format) except: pass raise DateTime.Error, "could not parse time: %s" % t
def _get_age(self, cr, uid, ids, field_name, arg, context={}):
    """Functional-field getter: compute a display age from each
    record's date of birth (dob, 'YYYY-MM-DD').

    Ages of 2+ years are rendered as 'N YO'; younger children are
    rendered in months.  Records without dob get an empty string.

    :returns: {record_id: {'age': <string>}, ...}
    """
    # debug: print 'passing through _GET_AGE, context is:', context
    _logger.info('in _get_age ...')
    res = {}
    records = self.browse(cr, uid, ids, context)
    date = DateTime.today()
    for record in records:
        age = ''
        res[record.id] = {'age': '', }
        birthdate = False
        if record.dob:
            birthdate = DateTime.strptime(record.dob, '%Y-%m-%d')
            year, month, day = birthdate.year, birthdate.month, birthdate.day
        if birthdate:
            day = int(day)
            month = int(month)
            year = int(year)
            # Has this year's birthday (month/day) already passed?
            if (date.month > month) or (date.month == month
                                        and date.day >= day):
                if (date.year - year) >= 2:
                    age = str(date.year - year) + _(' YO')
                else:
                    # Under 2 years: express the age in months.
                    if date.year == year:
                        age = str(date.month - month) + _(' month')
                    else:
                        age = str(12 + date.month - month) + _(' month')
            else:
                # Birthday not yet reached this year.
                if (date.year - year - 1) >= 2:
                    age = str(date.year - year - 1) + _(' YO')
                else:
                    months = date.month - month
                    # NOTE(review): same month but day not yet reached
                    # is forced to -1 month — confirm this is the
                    # intended rounding.
                    if date.month == month:
                        months = -1
                    if date.year == year:
                        age = str(months) + _(' month')
                    elif date.year == year + 1:
                        age = str(12 + months) + _(' month')
                    elif date.year == year + 2:
                        age = str(24 + months) + _(' month')
        res[record.id]['age'] += age
    return res
def getOffsetMonth(self, dt, offset):
    """Return a DateTime object corresponding to some time in the month
    indicated by the integer offset from the current month."""
    if offset == 0:
        return dt
    # Anchor on mid-month so that days near the start or end of the
    # month cannot make the offset skip or repeat a month.
    mid_month = DateTime.DateTime(dt.year, dt.month, 15)
    return mid_month + offset * 30.5 * DateTime.oneDay
def loop_through_time(self):
    """Fetch weather variables from self.sTime to self.eTime in
    one-month chunks, then release all acquired station variables.

    Each chunk is passed to the get_* fetchers as (year, month, day,
    hour) tuples.  A weatherError or any other exception aborts the
    loop early (other exceptions are printed first); the release calls
    always run afterwards.
    """
    obsTime = DateTime.DateTime(self.sTime.year, self.sTime.month,
                                self.sTime.day, self.sTime.hour)
    nexTime = obsTime + DateTime.RelativeDate(months=+1)
    while obsTime < self.eTime:
        # Clamp the final chunk to the requested end time.
        if nexTime > self.eTime:
            end = (self.eTime.year, self.eTime.month,
                   self.eTime.day, self.eTime.hour)
        else:
            end = (nexTime.year, nexTime.month,
                   nexTime.day, nexTime.hour)
        start = (obsTime.year, obsTime.month, obsTime.day, obsTime.hour)
        try:
            self.get_temperature(start, end)
            self.get_precipitation(start, end)
            self.get_rh(start, end)
            self.get_lwet(start, end)
        except weatherError:
            break
        except:
            print_exception()
            break
        # Advance to the next month window.
        obsTime = DateTime.DateTime(nexTime.year, nexTime.month,
                                    nexTime.day, nexTime.hour)
        nexTime = obsTime + DateTime.RelativeDate(months=+1)
    # Release every variable handle we may have acquired.
    self.stn.release_tmp()
    if self.tmpVar:
        self.tmpVar.release_tmp()
    self.stn.release_rh()
    if self.rhVar:
        self.rhVar.release_rh()
    self.stn.release_prcp()
    if self.prcpVar:
        self.prcpVar.release_prcp()
    self.stn.release_lwet()
    if self.lwetVar:
        self.lwetVar.release_lwet()
def _get_prod_stock_after(self, cr, uid, ids, name, arg, context={}):
    """Functional-field getter: available product quantity at location
    11 from the start of the line's year up to one second after the
    line's timestamp (line.name, 'YYYY-MM-DD:HH:MM:SS').

    :returns: {line_id: qty_available (0.0 when no product), ...}

    NOTE(review): loc_ids is hard-coded to 11 — presumably a specific
    stock location id in this database; should come from configuration.
    """
    res = {}
    prod_obj = self.pool.get('product.product')
    loc_ids = 11
    for line in self.browse(cr, uid, ids, context=context):
        # Parse the line's name as a timestamp.
        startf = datetime.datetime.fromtimestamp(
            time.mktime(time.strptime(line.name, "%Y-%m-%d:%H:%M:%S")))
        # Window: Jan 1 of that year .. timestamp + 1 second.
        start = DateTime(int(startf.year), 1, 1)
        end = startf + datetime.timedelta(seconds=1)
        d1 = start.strftime('%Y-%m-%d %H:%M:%S')
        d2 = end.strftime('%Y-%m-%d %H:%M:%S')
        # qty_available is context-sensitive: location + date window.
        c = context.copy()
        c.update({'location': loc_ids, 'from_date': d1, 'to_date': d2})
        res.setdefault(line.id, 0.0)
        if line.product_id and line.product_id.id:
            prd = prod_obj.browse(cr, uid, line.product_id.id, context=c)
            res[line.id] = prd.qty_available
    return res
def _get_qty(self, item):
    """Sum sold quantity and value for the item's product within its
    price-version validity window, using the sale.report view.

    :param item: pricelist item browse record with product_id and
        price_version_id.date_start/date_end ('YYYY-MM-DD').
    :returns: {'quantity': total qty, 'value': total value} — zeros
        when no sale.report rows match.
    """
    product = item.product_id.id
    start = DateTime.strptime(item.price_version_id.date_start, '%Y-%m-%d')
    end = DateTime.strptime(item.price_version_id.date_end, '%Y-%m-%d')
    qty = self.pool.get('sale.report').search(
        self.cr, self.uid,
        [('product_id', '=', product), ('date', '>=', start.date),
         ('date', '<=', end.date)])
    if qty == []:
        return {'quantity': 0, 'value': 0}
    else:
        quantity = 0
        value = 0
        datas = self.pool.get('sale.report').read(
            self.cr, self.uid, qty, ['product_uom_qty', 'price_total'])
        for data in datas:
            quantity += data['product_uom_qty']
            # NOTE(review): multiplying price_total by the quantity
            # looks like double counting if price_total is already the
            # line total — confirm against sale.report's semantics.
            value += data['price_total'] * data['product_uom_qty']
        return {'quantity': quantity, 'value': value}
def _get_records(self, cr, uid, data, context={}):
    """Collect posted voucher line ids for the 'Journal de Caisse'
    report, for the period selected in the wizard form ('m' = month,
    's' = date span, otherwise whole year).

    :returns: dict with line ids, report title, period/year and the
        'detail' flag (inverse of hideproducts).

    NOTE(review): the search only bounds the date from above
    (date <= day_max); day_min is computed but never used as a lower
    bound — unlike the sibling invoice reports.  Confirm intentional
    (e.g. a running cash journal) before "fixing".
    NOTE(review): records are browsed from account.voucher but iterated
    via x.invoice_line — vouchers normally have no invoice_line field;
    verify this model is customized.
    """
    inv_obj = pooler.get_pool(cr.dbname).get('account.voucher')
    states = ['posted']
    title = _("Journal de Caisse - ")
    if data['form']['period'] == 'm':
        # Zero-pad the month in the title, e.g. "03/2012".
        if int(data['form']['month']) < 10:
            title += "0"
        title += data['form']['month'] + "/" + data['form']['year']
        day_min = datetime.date(int(data['form']['year']),
                                int(data['form']['month']), 1)
        # Last day of month = first day of next month minus one day.
        nextmonth = int(data['form']['month']) + 1
        year = int(data['form']['year'])
        if nextmonth == 13:
            nextmonth = 1
            year += 1
        day_max = datetime.date(year, nextmonth, 1) - datetime.timedelta(days=1)
    elif data['form']['period'] == 's':
        day_min = DateTime.strptime(data['form']['date_from'], '%Y-%m-%d')
        day_max = DateTime.strptime(data['form']['date_to'], '%Y-%m-%d')
        title += _("De ") + day_min.strftime('%d/%m/%Y') + _(" A ") + day_max.strftime('%d/%m/%Y')
    else:
        # Whole calendar year.
        day_min = datetime.date(int(data['form']['year']), 1, 1)
        day_max = datetime.date(int(data['form']['year']), 12, 31)
        title += data['form']['year']
    inv_ids = inv_obj.search(cr, uid, [
        ('date', '<=', day_max.strftime('%Y-%m-%d')),
        ('state', 'in', states),
    ])
    ids = []
    if inv_ids:
        for voucher in inv_obj.browse(cr, uid, inv_ids):
            for inv_line in voucher.invoice_line:
                # put other tests here if you want to filter lines
                ids.append(inv_line.id)
    # Preserved from the original output (trailing space in the title).
    title += _(" ")
    return {'ids': ids, 'title': title,
            'period': data['form']['period'],
            'detail': not data['form']['hideproducts'],
            'year': data['form']['year']}
def fetch_all_relevant_accounts(qua_type, since, ignore_affs, system_accounts):
    """Fetch all accounts that matches the criterias for deactivation.

    :param QuarantineCode qua_type:
        The quarantine that the accounts must have to be targeted.

    :param int since:
        The number of days a quarantine must have been active for the
        account to be targeted.

    :type ignore_affs: set, list or tuple
    :param ignore_affs:
        A given list of `PersonAffiliationCode`. If given, we will ignore
        them, and process the persons' accounts as if they didn't have an
        affiliation, and could therefore be targeted for deactivation.

    :param bool system_accounts:
        If True, accounts owned by groups are also included in the
        resulting target list.

    :rtype: set
    :returns:
        The `entity_id` for all the accounts that match the criterias.
    """
    cutoff = dt.now() - since
    logger.debug("Search quarantines older than %s days, i.e. before %s",
                 since, cutoff.strftime('%Y-%m-%d'))
    targets = set(
        row['entity_id']
        for row in account.list_entity_quarantines(
            entity_types=constants.entity_account,
            quarantine_types=qua_type,
            only_active=True)
        if row['start_date'] <= cutoff)
    logger.debug("Found %d quarantine targets", len(targets))
    if not targets:
        return targets
    # Persons still holding a non-ignored affiliation keep their
    # accounts out of the target list.
    affiliated = set(
        row['person_id']
        for row in person.list_affiliations(include_deleted=False)
        if row['affiliation'] not in ignore_affs)
    logger.debug2("Found %d persons with affiliations", len(affiliated))
    protected = set(
        int(row['account_id'])
        for row in account.search(owner_type=constants.entity_person)
        if row['owner_id'] in affiliated)
    targets -= protected
    logger.debug2("Removed targets with person-affs (%d). Result: %d",
                  len(protected), len(targets))
    # Optionally drop accounts owned by groups (system accounts).
    if not system_accounts:
        targets -= set(
            row['account_id']
            for row in account.search(owner_type=constants.entity_group))
        logger.debug2("Removed system accounts. Result: %d", len(targets))
    return targets