def update_from_db(self, cr):
    """Refresh every package of the graph with its state in the database.

    For each package name present in the graph, read id/state/demo/version
    from ``ir_module_module`` and copy those values onto the package object.
    Packages unknown to the database keep "uninstalled" defaults.

    :param cr: database cursor (must provide execute/dictfetchall)
    """
    if not self:
        return
    # Start from not-in-database defaults for every package of the graph.
    db_data = {}
    for name in pycompat.keys(self):
        db_data[name] = {
            'id': 0,
            'state': 'uninstalled',
            'dbdemo': False,
            'installed_version': None,
        }
    # Override the defaults with whatever the database actually knows.
    cr.execute(
        'SELECT name, id, state, demo AS dbdemo, latest_version AS installed_version'
        ' FROM ir_module_module'
        ' WHERE name IN %s', (tuple(db_data),))
    for row in cr.dictfetchall():
        db_data[row['name']] = row
    # Push the merged values onto the package objects themselves.
    for package in pycompat.values(self):
        for field, value in pycompat.items(db_data[package.name]):
            setattr(package, field, value)
def signParams(parms):
    """Return the base64-encoded HMAC-SHA256 signature of *parms*.

    The signed payload is every key of the mapping followed by every
    value, each passed through escapeVal() and joined with ':'.
    """
    fields = chain(pycompat.keys(parms), pycompat.values(parms))
    payload = ':'.join(escapeVal(field) for field in fields)
    digest = hmac.new(hmac_key, payload, hashlib.sha256).digest()
    return base64.b64encode(digest)
def __iter__(self):
    """Yield modules level by level (``module.depth``), sorted by name within a level."""
    remaining = set(pycompat.keys(self))
    depth = 0
    while remaining:
        # Every module sitting at the current depth, ordered by (name, module).
        current = [(name, module) for name, module in pycompat.items(self) if module.depth==depth]
        for name, module in sorted(current):
            remaining.remove(name)
            yield module
        depth += 1
def __iter__(self):
    """Iterate over the graph's modules, shallowest first.

    Modules are yielded level by level (``module.depth``), sorted by
    name inside each level.

    NOTE(review): if some remaining module's depth is never reached
    (gaps in the depth values), this loop never terminates — presumably
    depths are dense; confirm against the graph builder.
    """
    level = 0
    done = set(pycompat.keys(self))
    while done:
        # All modules of the current level, sorted by (name, module).
        level_modules = sorted((name, module) for name, module in pycompat.items(self) if module.depth == level)
        for name, module in level_modules:
            done.remove(name)
            yield module
        level += 1
def sleep(self):
    """Block for up to ``self.beat`` seconds, waking early on worker pings.

    Waits with select() on every worker's watchdog pipe plus the server's
    own wake-up pipe; any readable watchdog pipe refreshes that worker's
    ``watchdog_time`` and is drained.
    """
    try:
        # map of fd -> worker
        fds = {w.watchdog_pipe[0]: w for k, w in pycompat.items(self.workers)}
        fd_in = list(pycompat.keys(fds)) + [self.pipe[0]]
        # check for ping or internal wakeups
        ready = select.select(fd_in, [], [], self.beat)
        # update worker watchdogs
        for fd in ready[0]:
            if fd in fds:
                fds[fd].watchdog_time = time.time()
            # Drain the pipe regardless of whose it is (including self.pipe).
            # NOTE(review): os.read() raising EAGAIN implies the pipes are
            # non-blocking — presumably set at creation; confirm.
            try:
                # empty pipe
                while os.read(fd, 1):
                    pass
            except OSError as e:
                if e.errno not in [errno.EAGAIN]:
                    raise
    except select.error as e:
        # EINTR (signal during select) is expected and harmless.
        if e.args[0] not in [errno.EINTR]:
            raise
def process(self, starting_node):
    """Process the graph to find ranks and order of the nodes.

    Resets the layout state (partial_order, links, tree_list), makes the
    graph acyclic from the first start node, then for each start node
    runs the ranking step followed by the in-rank ordering step.

    @param starting_node: list of node(s) from where to start the graph
        search (may be falsy, in which case start nodes are discovered).
    """
    self.start_nodes = starting_node or []
    self.partial_order = {}
    self.links = []
    self.tree_list = {}
    if self.nodes:
        if self.start_nodes:
            # Add dummy edges from the first start node to nodes that have
            # no incoming edges, so they are reachable from the start.
            # NOTE(review): 'tree' is assigned but never read — make_acyclic
            # is presumably called for its side effects on partial_order.
            tree = self.make_acyclic(None, self.start_nodes[0], 0, [])
            for node in self.no_ancester:
                for sec_node in self.transitions.get(node, []):
                    if sec_node in pycompat.keys(self.partial_order):
                        self.transitions[self.start_nodes[0]].append(node)
                        break
            # Redo the acyclic pass now that the dummy edges are in place.
            self.partial_order = {}
            tree = self.make_acyclic(None, self.start_nodes[0], 0, [])
        # If the graph is disconnected, or no start node was given, find a
        # starting node for each component of the graph.
        if len(self.nodes) > len(self.partial_order):
            self.find_starts()
        self.max_order = 0
        # For each component of the graph, find ranks and node order.
        for s in self.start_nodes:
            self.start = s
            self.rank()  # first step: network simplex algorithm
            self.order_in_rank()  # second step: ordering nodes within ranks
def update_events(self, lastSync=False):
    """Synchronize events with Google Calendar: fetching, creating,
    updating, deleting, ...

    :param lastSync: datetime of the previous synchronization, or False
        for a full synchronization.
    :return: True on completion.
    """
    CalendarEvent = self.env['calendar.event']
    CalendarAttendee = self.env['calendar.attendee']
    my_partner_id = self.env.user.partner_id.id
    context_novirtual = self.get_context_no_virtual()
    if lastSync:
        # Incremental sync: only fetch what changed since lastSync.
        try:
            all_event_from_google = self.get_event_synchro_dict(
                lastSync=lastSync)
        except urllib2.HTTPError as e:
            if e.code == 410:  # GONE, Google is lost.
                # we need to force the rollback from this cursor, because it locks my res_users but I need to write in this tuple before to raise.
                self.env.cr.rollback()
                self.env.user.write(
                    {'google_calendar_last_sync_date': False})
                self.env.cr.commit()
            error_key = json.loads(str(e))
            error_key = error_key.get('error', {}).get('message', 'nc')
            error_msg = _(
                "Google is lost... the next synchro will be a full synchro. \n\n %s"
            ) % error_key
            raise self.env['res.config.settings'].get_config_warning(
                error_msg)
        # Attendees already linked to one of the Google events we fetched.
        my_google_attendees = CalendarAttendee.with_context(
            context_novirtual).search([
                ('partner_id', '=', my_partner_id),
                ('google_internal_event_id', 'in',
                 pycompat.keys(all_event_from_google))
            ])
        my_google_att_ids = my_google_attendees.ids
        # Attendees whose Odoo event changed since lastSync.
        # NOTE(review): the fallback branch
        # self.get_minTime().fields.Datetime.to_string() looks wrong —
        # it dereferences 'fields' on a datetime; presumably it should be
        # fields.Datetime.to_string(self.get_minTime()). Confirm upstream.
        my_odoo_attendees = CalendarAttendee.with_context(
            context_novirtual).search([
                ('partner_id', '=', my_partner_id),
                ('event_id.oe_update_date', '>',
                 lastSync and fields.Datetime.to_string(lastSync)
                 or self.get_minTime().fields.Datetime.to_string()),
                ('google_internal_event_id', '!=', False),
            ])
        my_odoo_googleinternal_records = my_odoo_attendees.read(
            ['google_internal_event_id', 'event_id'])
        if self.get_print_log():
            _logger.info(
                "Calendar Synchro - \n\nUPDATE IN GOOGLE\n%s\n\nRETRIEVE FROM OE\n%s\n\nUPDATE IN OE\n%s\n\nRETRIEVE FROM GG\n%s\n\n",
                all_event_from_google, my_google_att_ids,
                my_odoo_attendees.ids, my_odoo_googleinternal_records)
        # Odoo-side records Google didn't report: ask Google individually
        # so deletions can be detected.
        for gi_record in my_odoo_googleinternal_records:
            active = True  # if not sure, we request google
            if gi_record.get('event_id'):
                active = CalendarEvent.with_context(
                    context_novirtual).browse(
                        int(gi_record.get('event_id')[0])).active
            if gi_record.get(
                    'google_internal_event_id'
            ) and not all_event_from_google.get(
                    gi_record.get('google_internal_event_id')) and active:
                one_event = self.get_one_event_synchro(
                    gi_record.get('google_internal_event_id'))
                if one_event:
                    all_event_from_google[one_event['id']] = one_event
        my_attendees = (my_google_attendees | my_odoo_attendees)
    else:
        # Full sync: take every already-synchronized attendee in range.
        domain = [
            ('partner_id', '=', my_partner_id),
            ('google_internal_event_id', '!=', False),
            '|',
            ('event_id.stop', '>',
             fields.Datetime.to_string(self.get_minTime())),
            ('event_id.final_date', '>',
             fields.Datetime.to_string(self.get_minTime())),
        ]
        # Select all events from Odoo which have been already synchronized in gmail
        my_attendees = CalendarAttendee.with_context(
            context_novirtual).search(domain)
        all_event_from_google = self.get_event_synchro_dict(lastSync=False)
    # Build {base_event_id: {google_event_id: SyncEvent}} from the Odoo side.
    event_to_synchronize = {}
    for att in my_attendees:
        event = att.event_id
        base_event_id = att.google_internal_event_id.rsplit('_', 1)[0]
        if base_event_id not in event_to_synchronize:
            event_to_synchronize[base_event_id] = {}
        if att.google_internal_event_id not in event_to_synchronize[
                base_event_id]:
            event_to_synchronize[base_event_id][
                att.google_internal_event_id] = SyncEvent()
        ev_to_sync = event_to_synchronize[base_event_id][
            att.google_internal_event_id]
        ev_to_sync.OE.attendee_id = att.id
        ev_to_sync.OE.event = event
        ev_to_sync.OE.found = True
        ev_to_sync.OE.event_id = event.id
        ev_to_sync.OE.isRecurrence = event.recurrency
        ev_to_sync.OE.isInstance = bool(event.recurrent_id
                                        and event.recurrent_id > 0)
        ev_to_sync.OE.update = event.oe_update_date
        ev_to_sync.OE.status = event.active
        ev_to_sync.OE.synchro = att.oe_synchro_date
    # Merge the Google side into the same structure.
    for event in pycompat.values(all_event_from_google):
        event_id = event.get('id')
        base_event_id = event_id.rsplit('_', 1)[0]
        if base_event_id not in event_to_synchronize:
            event_to_synchronize[base_event_id] = {}
        if event_id not in event_to_synchronize[base_event_id]:
            event_to_synchronize[base_event_id][event_id] = SyncEvent()
        ev_to_sync = event_to_synchronize[base_event_id][event_id]
        ev_to_sync.GG.event = event
        ev_to_sync.GG.found = True
        ev_to_sync.GG.isRecurrence = bool(event.get('recurrence', ''))
        ev_to_sync.GG.isInstance = bool(event.get('recurringEventId', 0))
        ev_to_sync.GG.update = event.get(
            'updated', None)  # if deleted, no date without browse event
        if ev_to_sync.GG.update:
            ev_to_sync.GG.update = ev_to_sync.GG.update.replace(
                'T', ' ').replace('Z', '')
        ev_to_sync.GG.status = (event.get('status') != 'cancelled')
    ######################
    #   PRE-PROCESSING   #
    ######################
    # Decide, for every pair, which operation (OP) to perform.
    for base_event in event_to_synchronize:
        for current_event in event_to_synchronize[base_event]:
            event_to_synchronize[base_event][current_event].compute_OP(
                modeFull=not lastSync)
            if self.get_print_log():
                if not isinstance(
                        event_to_synchronize[base_event][current_event].OP,
                        NothingToDo):
                    _logger.info(event_to_synchronize[base_event])
    ######################
    #      DO ACTION     #
    ######################
    for base_event in event_to_synchronize:
        # Replace the inner dict with a list of (event_id, SyncEvent)
        # sorted by id, so the base event (index 0) comes first.
        event_to_synchronize[base_event] = sorted(
            pycompat.items(event_to_synchronize[base_event]),
            key=operator.itemgetter(0))
        for current_event in event_to_synchronize[base_event]:
            # Commit before every action so partial progress survives errors.
            self.env.cr.commit()
            event = current_event[1]  # event is an Sync Event !
            actToDo = event.OP
            actSrc = event.OP.src
            # To avoid redefining 'self', all method below should use 'recs' instead of 'self'
            recs = self.with_context(curr_attendee=event.OE.attendee_id)
            if isinstance(actToDo, NothingToDo):
                continue
            elif isinstance(actToDo, Create):
                if actSrc == 'GG':
                    self.create_from_google(event, my_partner_id)
                elif actSrc == 'OE':
                    raise AssertionError(
                        "Should be never here, creation for OE is done before update !"
                    )
                #TODO Add to batch
            elif isinstance(actToDo, Update):
                if actSrc == 'GG':
                    recs.update_from_google(event.OE.event, event.GG.event,
                                            'write')
                elif actSrc == 'OE':
                    recs.update_to_google(event.OE.event, event.GG.event)
            elif isinstance(actToDo, Exclude):
                if actSrc == 'OE':
                    recs.delete_an_event(current_event[0])
                elif actSrc == 'GG':
                    # Derive the recurrence-instance suffix of the id.
                    new_google_event_id = event.GG.event['id'].rsplit(
                        '_', 1)[1]
                    if 'T' in new_google_event_id:
                        new_google_event_id = new_google_event_id.replace(
                            'T', '')[:-1]
                    else:
                        new_google_event_id = new_google_event_id + "000000"
                    if event.GG.status:
                        parent_event = {}
                        # Resolve the parent Odoo event of the recurrence.
                        if not event_to_synchronize[base_event][0][
                                1].OE.event_id:
                            main_ev = CalendarAttendee.with_context(
                                context_novirtual).search(
                                    [('google_internal_event_id', '=',
                                      event.GG.event['id'].rsplit(
                                          '_', 1)[0])],
                                    limit=1)
                            event_to_synchronize[base_event][0][
                                1].OE.event_id = main_ev.event_id.id
                        if event_to_synchronize[base_event][0][
                                1].OE.event_id:
                            parent_event['id'] = "%s-%s" % (
                                event_to_synchronize[base_event][0]
                                [1].OE.event_id, new_google_event_id)
                            res = recs.update_from_google(
                                parent_event, event.GG.event, "copy")
                        else:
                            recs.create_from_google(event, my_partner_id)
                    else:
                        # Instance cancelled on Google: drop the Odoo copy.
                        parent_oe_id = event_to_synchronize[base_event][0][
                            1].OE.event_id
                        if parent_oe_id:
                            CalendarEvent.browse(
                                "%s-%s" % (parent_oe_id, new_google_event_id)
                            ).with_context(
                                curr_attendee=event.OE.attendee_id).unlink(
                                    can_be_deleted=True)
            elif isinstance(actToDo, Delete):
                if actSrc == 'GG':
                    try:
                        # if already deleted from gmail or never created
                        recs.delete_an_event(current_event[0])
                    except Exception as e:
                        # NOTE(review): relies on e.code existing — only
                        # HTTP-error-like exceptions have it; others will
                        # raise AttributeError here. Confirm intent.
                        if e.code in (
                                401,
                                410,
                        ):
                            pass
                        else:
                            raise e
                elif actSrc == 'OE':
                    CalendarEvent.browse(
                        event.OE.event_id).unlink(can_be_deleted=False)
    return True
def keys(self):
    """Return the wrapped dict's keys as a plain list."""
    return [key for key in pycompat.keys(self.d)]
def wrapper(___dbname, *args, **kwargs):
    """ Wraps around OSV functions and normalises a few exceptions
    """
    dbname = ___dbname      # NOTE: this forbid to use "___dbname" as arguments in http routes

    def tr(src, ttype):
        # Translate *src* (a string, or a callable producing the message)
        # for the language found in the call's context, if any.
        # We try to do the same as the _(), but without the frame
        # inspection, since we aready are wrapping an osv function
        # trans_obj = self.get('ir.translation') cannot work yet :(
        ctx = {}
        if not kwargs:
            if args and isinstance(args[-1], dict):
                ctx = args[-1]
        elif isinstance(kwargs, dict):
            if 'context' in kwargs:
                ctx = kwargs['context']
            elif 'kwargs' in kwargs:
                # http entry points such as call_kw()
                ctx = kwargs['kwargs'].get('context')

        uid = 1
        if args and isinstance(args[0], pycompat.integer_types):
            uid = args[0]

        lang = ctx and ctx.get('lang')
        if not (lang or hasattr(src, '__call__')):
            return src

        # We open a *new* cursor here, one reason is that failed SQL
        # queries (as in IntegrityError) will invalidate the current one.
        cr = False

        if hasattr(src, '__call__'):
            # callable. We need to find the right parameters to call
            # the  orm._sql_message(self, cr, uid, ids, context) function,
            # or we skip..
            # our signature is f(registry, dbname [,uid, obj, method, args])
            try:
                if args and len(args) > 1:
                    # TODO self doesn't exist, but was already wrong before (it was not a registry but just the object_service.
                    obj = self.get(args[1])
                    if len(args) > 3 and isinstance(args[3], (pycompat.integer_types, list)):
                        ids = args[3]
                    else:
                        ids = []
                cr = odoo.sql_db.db_connect(dbname).cursor()
                return src(obj, cr, uid, ids, context=(ctx or {}))
            except Exception:
                pass
            finally:
                if cr:
                    cr.close()
            return False  # so that the original SQL error will
                          # be returned, it is the best we have.

        try:
            cr = odoo.sql_db.db_connect(dbname).cursor()
            res = translate(cr, name=False, source_type=ttype, lang=lang, source=src)
            if res:
                return res
            else:
                return src
        finally:
            if cr:
                cr.close()

    def _(src):
        return tr(src, 'code')

    tries = 0
    while True:
        try:
            if odoo.registry(dbname)._init and not odoo.tools.config['test_enable']:
                raise odoo.exceptions.Warning('Currently, this database is not fully loaded and can not be used.')
            return f(dbname, *args, **kwargs)
        except (OperationalError, QWebException) as e:
            # Unwrap a QWebException caused by an OperationalError so it
            # can be retried like a plain concurrency failure.
            if isinstance(e, QWebException):
                cause = e.qweb.get('cause')
                if isinstance(cause, OperationalError):
                    e = cause
                else:
                    raise
            # Automatically retry the typical transaction serialization errors
            if e.pgcode not in PG_CONCURRENCY_ERRORS_TO_RETRY:
                raise
            if tries >= MAX_TRIES_ON_CONCURRENCY_FAILURE:
                _logger.info("%s, maximum number of tries reached" % errorcodes.lookup(e.pgcode))
                raise
            # Randomized exponential backoff before retrying.
            wait_time = random.uniform(0.0, 2 ** tries)
            tries += 1
            _logger.info("%s, retry %d/%d in %.04f sec..." % (errorcodes.lookup(e.pgcode), tries, MAX_TRIES_ON_CONCURRENCY_FAILURE, wait_time))
            time.sleep(wait_time)
        except IntegrityError as inst:
            # Map known SQL-constraint failures to translated ValidationErrors.
            # NOTE(review): inst[0] is Python-2-style exception indexing and
            # raises TypeError on Python 3 — presumably inst.args[0] is
            # meant; confirm the supported interpreter.
            registry = odoo.registry(dbname)
            for key in pycompat.keys(registry._sql_error):
                if key in inst[0]:
                    raise ValidationError(tr(registry._sql_error[key], 'sql_constraint') or inst[0])
            if inst.pgcode in (errorcodes.NOT_NULL_VIOLATION, errorcodes.FOREIGN_KEY_VIOLATION, errorcodes.RESTRICT_VIOLATION):
                msg = _('The operation cannot be completed, probably due to the following:\n- deletion: you may be trying to delete a record while other records still reference it\n- creation/update: a mandatory field is not correctly set')
                _logger.debug("IntegrityError", exc_info=True)
                try:
                    # Best effort: extract the offending table name from the
                    # Postgres error text to name the model in the message.
                    errortxt = inst.pgerror.replace('«','"').replace('»','"')
                    if '"public".' in errortxt:
                        context = errortxt.split('"public".')[1]
                        model_name = table = context.split('"')[1]
                    else:
                        last_quote_end = errortxt.rfind('"')
                        last_quote_begin = errortxt.rfind('"', 0, last_quote_end)
                        model_name = table = errortxt[last_quote_begin+1:last_quote_end].strip()
                    model = table.replace("_",".")
                    if model in registry:
                        model_class = registry[model]
                        model_name = model_class._description or model_class._name
                    msg += _('\n\n[object with reference: %s - %s]') % (model_name, model)
                except Exception:
                    pass
                raise ValidationError(msg)
            else:
                raise ValidationError(inst[0])
def update_events(self, lastSync=False):
    """Synchronize events with Google Calendar: fetching, creating,
    updating, deleting, ...

    :param lastSync: datetime of the previous synchronization, or False
        for a full synchronization.
    :return: True on completion.
    """
    CalendarEvent = self.env['calendar.event']
    CalendarAttendee = self.env['calendar.attendee']
    my_partner_id = self.env.user.partner_id.id
    context_novirtual = self.get_context_no_virtual()
    if lastSync:
        # Incremental sync: only fetch what changed since lastSync.
        try:
            all_event_from_google = self.get_event_synchro_dict(lastSync=lastSync)
        except requests.HTTPError as e:
            # NOTE(review): requests responses expose 'status_code', not
            # 'code' — e.response.code presumably raises AttributeError;
            # confirm against the requests version in use.
            if e.response.code == 410:  # GONE, Google is lost.
                # we need to force the rollback from this cursor, because it locks my res_users but I need to write in this tuple before to raise.
                self.env.cr.rollback()
                self.env.user.write({'google_calendar_last_sync_date': False})
                self.env.cr.commit()
            error_key = e.response.json()
            error_key = error_key.get('error', {}).get('message', 'nc')
            error_msg = _("Google is lost... the next synchro will be a full synchro. \n\n %s") % error_key
            raise self.env['res.config.settings'].get_config_warning(error_msg)
        # Attendees already linked to one of the Google events we fetched.
        my_google_attendees = CalendarAttendee.with_context(context_novirtual).search([
            ('partner_id', '=', my_partner_id),
            ('google_internal_event_id', 'in', pycompat.keys(all_event_from_google))
        ])
        my_google_att_ids = my_google_attendees.ids
        # Attendees whose Odoo event changed since lastSync.
        # NOTE(review): the fallback branch
        # self.get_minTime().fields.Datetime.to_string() looks wrong — it
        # dereferences 'fields' on a datetime; presumably it should be
        # fields.Datetime.to_string(self.get_minTime()). Confirm upstream.
        my_odoo_attendees = CalendarAttendee.with_context(context_novirtual).search([
            ('partner_id', '=', my_partner_id),
            ('event_id.oe_update_date', '>', lastSync and fields.Datetime.to_string(lastSync) or self.get_minTime().fields.Datetime.to_string()),
            ('google_internal_event_id', '!=', False),
        ])
        my_odoo_googleinternal_records = my_odoo_attendees.read(['google_internal_event_id', 'event_id'])
        if self.get_print_log():
            _logger.info("Calendar Synchro - \n\nUPDATE IN GOOGLE\n%s\n\nRETRIEVE FROM OE\n%s\n\nUPDATE IN OE\n%s\n\nRETRIEVE FROM GG\n%s\n\n", all_event_from_google, my_google_att_ids, my_odoo_attendees.ids, my_odoo_googleinternal_records)
        # Odoo-side records Google didn't report: ask Google individually
        # so deletions can be detected.
        for gi_record in my_odoo_googleinternal_records:
            active = True  # if not sure, we request google
            if gi_record.get('event_id'):
                active = CalendarEvent.with_context(context_novirtual).browse(int(gi_record.get('event_id')[0])).active
            if gi_record.get('google_internal_event_id') and not all_event_from_google.get(gi_record.get('google_internal_event_id')) and active:
                one_event = self.get_one_event_synchro(gi_record.get('google_internal_event_id'))
                if one_event:
                    all_event_from_google[one_event['id']] = one_event
        my_attendees = (my_google_attendees | my_odoo_attendees)
    else:
        # Full sync: take every already-synchronized attendee in range.
        domain = [
            ('partner_id', '=', my_partner_id),
            ('google_internal_event_id', '!=', False),
            '|',
            ('event_id.stop', '>', fields.Datetime.to_string(self.get_minTime())),
            ('event_id.final_date', '>', fields.Datetime.to_string(self.get_minTime())),
        ]
        # Select all events from Odoo which have been already synchronized in gmail
        my_attendees = CalendarAttendee.with_context(context_novirtual).search(domain)
        all_event_from_google = self.get_event_synchro_dict(lastSync=False)
    # Build {base_event_id: {google_event_id: SyncEvent}} from the Odoo side.
    event_to_synchronize = {}
    for att in my_attendees:
        event = att.event_id
        base_event_id = att.google_internal_event_id.rsplit('_', 1)[0]
        if base_event_id not in event_to_synchronize:
            event_to_synchronize[base_event_id] = {}
        if att.google_internal_event_id not in event_to_synchronize[base_event_id]:
            event_to_synchronize[base_event_id][att.google_internal_event_id] = SyncEvent()
        ev_to_sync = event_to_synchronize[base_event_id][att.google_internal_event_id]
        ev_to_sync.OE.attendee_id = att.id
        ev_to_sync.OE.event = event
        ev_to_sync.OE.found = True
        ev_to_sync.OE.event_id = event.id
        ev_to_sync.OE.isRecurrence = event.recurrency
        ev_to_sync.OE.isInstance = bool(event.recurrent_id and event.recurrent_id > 0)
        ev_to_sync.OE.update = event.oe_update_date
        ev_to_sync.OE.status = event.active
        ev_to_sync.OE.synchro = att.oe_synchro_date
    # Merge the Google side into the same structure.
    for event in pycompat.values(all_event_from_google):
        event_id = event.get('id')
        base_event_id = event_id.rsplit('_', 1)[0]
        if base_event_id not in event_to_synchronize:
            event_to_synchronize[base_event_id] = {}
        if event_id not in event_to_synchronize[base_event_id]:
            event_to_synchronize[base_event_id][event_id] = SyncEvent()
        ev_to_sync = event_to_synchronize[base_event_id][event_id]
        ev_to_sync.GG.event = event
        ev_to_sync.GG.found = True
        ev_to_sync.GG.isRecurrence = bool(event.get('recurrence', ''))
        ev_to_sync.GG.isInstance = bool(event.get('recurringEventId', 0))
        ev_to_sync.GG.update = event.get('updated', None)  # if deleted, no date without browse event
        if ev_to_sync.GG.update:
            ev_to_sync.GG.update = ev_to_sync.GG.update.replace('T', ' ').replace('Z', '')
        ev_to_sync.GG.status = (event.get('status') != 'cancelled')
    ######################
    #   PRE-PROCESSING   #
    ######################
    # Decide, for every pair, which operation (OP) to perform.
    for base_event in event_to_synchronize:
        for current_event in event_to_synchronize[base_event]:
            event_to_synchronize[base_event][current_event].compute_OP(modeFull=not lastSync)
            if self.get_print_log():
                if not isinstance(event_to_synchronize[base_event][current_event].OP, NothingToDo):
                    _logger.info(event_to_synchronize[base_event])
    ######################
    #      DO ACTION     #
    ######################
    for base_event in event_to_synchronize:
        # Replace the inner dict with a list of (event_id, SyncEvent)
        # sorted by id, so the base event (index 0) comes first.
        event_to_synchronize[base_event] = sorted(pycompat.items(event_to_synchronize[base_event]), key=operator.itemgetter(0))
        for current_event in event_to_synchronize[base_event]:
            # Commit before every action so partial progress survives errors.
            self.env.cr.commit()
            event = current_event[1]  # event is an Sync Event !
            actToDo = event.OP
            actSrc = event.OP.src
            # To avoid redefining 'self', all method below should use 'recs' instead of 'self'
            recs = self.with_context(curr_attendee=event.OE.attendee_id)
            if isinstance(actToDo, NothingToDo):
                continue
            elif isinstance(actToDo, Create):
                if actSrc == 'GG':
                    self.create_from_google(event, my_partner_id)
                elif actSrc == 'OE':
                    raise AssertionError("Should be never here, creation for OE is done before update !")
                #TODO Add to batch
            elif isinstance(actToDo, Update):
                if actSrc == 'GG':
                    recs.update_from_google(event.OE.event, event.GG.event, 'write')
                elif actSrc == 'OE':
                    recs.update_to_google(event.OE.event, event.GG.event)
            elif isinstance(actToDo, Exclude):
                if actSrc == 'OE':
                    recs.delete_an_event(current_event[0])
                elif actSrc == 'GG':
                    # Derive the recurrence-instance suffix of the id.
                    new_google_event_id = event.GG.event['id'].rsplit('_', 1)[1]
                    if 'T' in new_google_event_id:
                        new_google_event_id = new_google_event_id.replace('T', '')[:-1]
                    else:
                        new_google_event_id = new_google_event_id + "000000"
                    if event.GG.status:
                        parent_event = {}
                        # Resolve the parent Odoo event of the recurrence.
                        if not event_to_synchronize[base_event][0][1].OE.event_id:
                            main_ev = CalendarAttendee.with_context(context_novirtual).search([('google_internal_event_id', '=', event.GG.event['id'].rsplit('_', 1)[0])], limit=1)
                            event_to_synchronize[base_event][0][1].OE.event_id = main_ev.event_id.id
                        if event_to_synchronize[base_event][0][1].OE.event_id:
                            parent_event['id'] = "%s-%s" % (event_to_synchronize[base_event][0][1].OE.event_id, new_google_event_id)
                            res = recs.update_from_google(parent_event, event.GG.event, "copy")
                        else:
                            recs.create_from_google(event, my_partner_id)
                    else:
                        # Instance cancelled on Google: drop the Odoo copy.
                        parent_oe_id = event_to_synchronize[base_event][0][1].OE.event_id
                        if parent_oe_id:
                            CalendarEvent.browse("%s-%s" % (parent_oe_id, new_google_event_id)).with_context(curr_attendee=event.OE.attendee_id).unlink(can_be_deleted=True)
            elif isinstance(actToDo, Delete):
                if actSrc == 'GG':
                    try:
                        # if already deleted from gmail or never created
                        recs.delete_an_event(current_event[0])
                    except Exception as e:
                        # NOTE(review): relies on e.code existing — only
                        # HTTP-error-like exceptions have it; confirm intent.
                        if e.code in (401, 410,):
                            pass
                        else:
                            raise e
                elif actSrc == 'OE':
                    CalendarEvent.browse(event.OE.event_id).unlink(can_be_deleted=False)
    return True
def delete_all(cls):
    """Drop every known registry, one database at a time."""
    with cls._lock:
        # Snapshot the key list first: cls.delete() mutates cls.registries
        # while we are looping over it.
        names = list(pycompat.keys(cls.registries))
        for dbname in names:
            cls.delete(dbname)
def signParams(parms):
    """Compute the base64-encoded HMAC-SHA256 signature of *parms*.

    The signed payload is every key of the mapping followed by every
    value, each passed through escapeVal() and joined with ':'.

    NOTE(review): on Python 3 hmac.new() requires bytes for both key and
    message — presumably hmac_key and the joined string are bytes-safe
    here; confirm against escapeVal() and the key's origin.
    """
    signing_string = ':'.join(
        escapeVal(v) for v in chain(pycompat.keys(parms), pycompat.values(parms)))
    hm = hmac.new(hmac_key, signing_string, hashlib.sha256)
    return base64.b64encode(hm.digest())
def update_from_db(self, cr):
    """Refresh every package of the graph with its state in the database.

    For each package name in the graph, read id/state/demo/version from
    ``ir_module_module`` and copy the values onto the package object;
    packages unknown to the database keep "uninstalled" defaults.

    :param cr: database cursor (must provide execute/dictfetchall)
    """
    if not len(self):
        return
    # update the graph with values from the database (if exist)
    ## First, we set the default values for each package in graph
    additional_data = {key: {'id': 0, 'state': 'uninstalled', 'dbdemo': False, 'installed_version': None} for key in pycompat.keys(self)}
    ## Then we get the values from the database
    cr.execute('SELECT name, id, state, demo AS dbdemo, latest_version AS installed_version'
               ' FROM ir_module_module'
               ' WHERE name IN %s', (tuple(additional_data),))
    ## and we update the default values with values from the database
    additional_data.update((x['name'], x) for x in cr.dictfetchall())
    # Finally, push the merged values onto the package objects themselves.
    for package in pycompat.values(self):
        for k, v in pycompat.items(additional_data[package.name]):
            setattr(package, k, v)