Esempio n. 1
0
 def __init__(self, user=None, passwd=None, dbg=0, use_ssl=False, useragent=False, timeout=None):
     """Set up a WebDAV client endpoint from the server configuration.

     Host and port come from the 'httpsd'/'httpd' misc sections, with
     the legacy xmlrpc(s)_* options as fallback. Raises if WebDAV is
     disabled in the configuration.
     """
     if use_ssl:
         host = config.get_misc('httpsd', 'interface', False)
         port = config.get_misc('httpsd', 'port', 8071)
         if not host:
             host = config.get('xmlrpcs_interface')
             port = config.get('xmlrpcs_port')
     else:
         host = config.get_misc('httpd', 'interface')
         port = config.get_misc('httpd', 'port', 8069)
         if not host:
             host = config.get('xmlrpc_interface')
             port = config.get('xmlrpc_port') or port
     # A wildcard or missing interface means "connect locally".
     if not host or host == '0.0.0.0':
         host = '127.0.0.1'
     self.host = host
     self.port = int(port)
     if not config.get_misc('webdav', 'enable', True):
         raise Exception("WebDAV is disabled, cannot continue")
     self.davpath = '/' + config.get_misc('webdav', 'vdir', 'webdav')
     self.user = user
     self.passwd = passwd
     self.dbg = dbg
     # Tests need the server to answer quickly; default to 5 seconds.
     self.timeout = timeout or 5.0
     self.hdrs = {}
     if useragent:
         self.set_useragent(useragent)
Esempio n. 2
0
def start_server():
    """Start the FTP server in a daemon thread, unless disabled.

    Host and port come from the ftp_server_* config keys; setting
    ftp_server_host to the literal string 'none' disables the server.
    """
    HOST = config.get('ftp_server_host', '127.0.0.1')
    PORT = int(config.get('ftp_server_port', '8021'))
    PASSIVE_PORTS = None
    # Optional passive-mode port range, configured as "low:high".
    pps = config.get('ftp_server_passive_ports', '').split(':')
    if len(pps) == 2:
        PASSIVE_PORTS = int(pps[0]), int(pps[1])

    class ftp_server(threading.Thread):
        def run(self):
            autho = authorizer.authorizer()
            ftpserver.FTPHandler.authorizer = autho
            # NOTE(review): these two are set on the ftpserver *module*;
            # they look intended for FTPServer/FTPHandler attributes --
            # confirm against the ftpserver API that they take effect.
            ftpserver.max_cons = 300
            ftpserver.max_cons_per_ip = 50
            ftpserver.FTPHandler.abstracted_fs = abstracted_fs.abstracted_fs
            if PASSIVE_PORTS:
                ftpserver.FTPHandler.passive_ports = PASSIVE_PORTS

            # Route the FTP library's logging into our logger; per-line
            # command logging is deliberately silenced.
            ftpserver.log = lambda msg: _logger.info(msg)
            ftpserver.logline = lambda msg: None
            ftpserver.logerror = lambda msg: _logger.error(msg)

            ftpd = ftpserver.FTPServer((HOST, PORT), ftpserver.FTPHandler)
            ftpd.serve_forever()

    if HOST.lower() == 'none':
        _logger.info("\n Server FTP Not Started\n")
    else:
        _logger.info("\n Serving FTP on %s:%s\n" % (HOST, PORT))
        ds = ftp_server()
        ds.daemon = True
        ds.start()
Esempio n. 3
0
    def run(self):
        """Accept TCP connections forever and serve SFTP via paramiko.

        Listens on ftp_server_address:ftp_server_port and starts an SSH
        transport with an SFTP subsystem for every incoming client.
        """
        #paramiko.util.log_to_file('paramiko.log')
        # get host private key
        HOST = config.get('ftp_server_address', detect_ip_addr())
        PORT = int(config.get('ftp_server_port', '8022'))
        host_key = paramiko.RSAKey(filename=privateKey)
        # bind the socket
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Allow quick restarts without waiting out TIME_WAIT.
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((HOST, PORT))
        # listen for a connection (backlog of 50 pending connections)
        sock.listen(50)
        # accept connections
        while True:
            client, addr = sock.accept()
            try:
                # set up server
                t = paramiko.Transport(client)
                t.load_server_moduli()
                t.add_server_key(host_key)

                # set up sftp handler
                t.set_subsystem_handler('sftp', SFTPServer.SFTPServer,
                                        SFTPServerInterface.SFTPServer)
                server = Server()
                event = threading.Event()
                # start ssh server session; the transport keeps serving
                # this client on its own thread
                t.start_server(event, server)
            except Exception, e:
                # Best-effort close of the half-open transport, then
                # re-raise: a failure here aborts the whole accept loop.
                # NOTE(review): if paramiko.Transport(client) itself raised,
                # 't' may still refer to a previous iteration's transport.
                try:
                    t.close()
                except:
                    pass
                raise
Esempio n. 4
0
    def run(self):
        """Accept TCP connections forever and serve SFTP via paramiko."""
        #paramiko.util.log_to_file('paramiko.log')
        # get host private key
        HOST = config.get('ftp_server_address', detect_ip_addr())
        PORT = int(config.get('ftp_server_port', '8022'))
        host_key = paramiko.RSAKey(filename=privateKey)
        # bind the socket
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Allow quick restarts without waiting out TIME_WAIT.
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((HOST, PORT))
        # listen for a connection (backlog of 50 pending connections)
        sock.listen(50)
        # accept connections
        while True:
            client, addr = sock.accept()
            try:
                # set up server
                t = paramiko.Transport(client)
                t.load_server_moduli()
                t.add_server_key(host_key)

                # set up sftp handler
                t.set_subsystem_handler('sftp', SFTPServer.SFTPServer, SFTPServerInterface.SFTPServer)
                server = Server()
                event = threading.Event()
                # start ssh server session
                t.start_server(event, server)
            except Exception, e:
                # Best-effort close, then re-raise (aborts the accept loop).
                try:
                    t.close()
                except:
                    pass
                raise
Esempio n. 5
0
 def get_connection(self):
     """Create the ERP client from configuration (with defaults) and cache it on self."""
     host = config.get('odoo_host', 'todoo.somenergia.lan')
     dbname = config.get('odoo_dbname', 'odoo')
     user = config.get('odoo_user', 'admin')
     pwd = config.get('odoo_pwd', 'admin')
     port = config.get('odoo_port', 8080)
     self.client = Client(host=host, dbname=dbname, user=user,
                          pwd=pwd, port=port)
Esempio n. 6
0
    def setup(self, cursor, uid, username=None, password=None, company_id=None,
              cert_file=None, version=None, context=None):
        """Configure and create the Empowering API service client.

        Each empowering_* config-file key takes precedence over the
        corresponding argument, which serves only as a fallback. Extra
        service attributes can be injected via context['empowering_args'].
        """
        context = context or {}
        amon_setup_logging()
        # Config file wins; the explicit arguments are only fallbacks.
        self.company_id = config.get('empowering_company', company_id)
        self.cert_file = config.get('empowering_cert', cert_file)
        self.version = config.get('empowering_version', version)
        self.username = config.get('empowering_username', username)
        self.password = config.get('empowering_password', password)

        emp_conf = {}
        for key, value in (('company_id', self.company_id),
                           ('version', self.version)):
            if value:
                emp_conf[key] = value
        if self.cert_file:
            # The same PEM file acts as both certificate and private key.
            emp_conf['cert_file'] = self.cert_file
            emp_conf['key_file'] = self.cert_file
        if self.username and self.password:
            emp_conf['username'] = self.username
            emp_conf['password'] = self.password

        self.service = setup_empowering_api(**emp_conf)
        log("Setting Up Empowering Service (%s). Company-Id: %s Cert file: %s"
            % (self.service.apiroot, self.service.company_id,
               self.service.cert_file))
        for key, value in context.get('empowering_args', {}).items():
            log("%s => %s" % (key, value))
            setattr(self.service, key, value)
Esempio n. 7
0
def start_server():
    """Start the FTP server in a daemon thread, unless ftp_server_host
    is the literal string 'none'."""
    HOST = config.get('ftp_server_host', '127.0.0.1')
    PORT = int(config.get('ftp_server_port', '8021'))
    PASSIVE_PORTS = None
    # Optional passive-mode port range, configured as "low:high".
    pps = config.get('ftp_server_passive_ports', '').split(':')
    if len(pps) == 2:
        PASSIVE_PORTS = int(pps[0]), int(pps[1])

    class ftp_server(threading.Thread):

        def run(self):
            autho = authorizer.authorizer()
            ftpserver.FTPHandler.authorizer = autho
            # NOTE(review): set on the ftpserver module -- confirm these
            # are honoured, vs. FTPServer/FTPHandler class attributes.
            ftpserver.max_cons = 300
            ftpserver.max_cons_per_ip = 50
            ftpserver.FTPHandler.abstracted_fs = abstracted_fs.abstracted_fs
            if PASSIVE_PORTS:
                ftpserver.FTPHandler.passive_ports = PASSIVE_PORTS

            # Route library logging into our logger; silence per-line logs.
            ftpserver.log = lambda msg: _logger.info(msg)
            ftpserver.logline = lambda msg: None
            ftpserver.logerror = lambda msg: _logger.error(msg)

            ftpd = ftpserver.FTPServer((HOST, PORT), ftpserver.FTPHandler)
            ftpd.serve_forever()

    if HOST.lower() == 'none':
        _logger.info("\n Server FTP Not Started\n")
    else:
        _logger.info("\n Serving FTP on %s:%s\n" % (HOST, PORT))
        ds = ftp_server()
        ds.daemon = True
        ds.start()
Esempio n. 8
0
class document_ftp_configuration(osv.osv_memory):

    _name = 'document.ftp.configuration'
    _description = 'Auto Directory Configuration'
    _inherit = 'res.config'
    _rec_name = 'host'
    _columns = {
        'host':
        fields.char(
            'Address',
            size=64,
            help=
            "Server address or IP and port to which users should connect to for DMS access",
            required=True),
    }

    _defaults = {
        'host':
        config.get('ftp_server_host', 'localhost') + ':' +
        config.get('ftp_server_port', '8021'),
    }

    def execute(self, cr, uid, ids, context=None):
        conf = self.browse(cr, uid, ids[0], context=context)
        data_pool = self.pool.get('ir.model.data')
        # Update the action for FTP browse.
        aid = data_pool._get_id(cr, uid, 'document_ftp',
                                'action_document_browse')
        aid = data_pool.browse(cr, uid, aid, context=context).res_id
        self.pool.get('ir.actions.url').write(
            cr, uid, [aid], {
                'url':
                'ftp://' +
                (conf.host or 'localhost:8021') + '/' + cr.dbname + '/'
            })
Esempio n. 9
0
 def __init__(self,
              user=None,
              passwd=None,
              dbg=0,
              use_ssl=False,
              useragent=False,
              timeout=None):
     """Initialise the WebDAV client endpoint from server configuration.

     Host and port are taken from the httpsd/httpd misc sections, with
     the legacy xmlrpc(s)_* options as fallback. Raises if WebDAV is
     disabled in the configuration.
     """
     if use_ssl:
         self.host = config.get_misc('httpsd', 'interface', False)
         self.port = config.get_misc('httpsd', 'port', 8071)
         if not self.host:
             self.host = config.get('xmlrpcs_interface')
             self.port = config.get('xmlrpcs_port')
     else:
         self.host = config.get_misc('httpd', 'interface')
         self.port = config.get_misc('httpd', 'port', 8069)
         if not self.host:
             self.host = config.get('xmlrpc_interface')
             self.port = config.get('xmlrpc_port') or self.port
     # A wildcard or missing interface means: connect locally.
     if self.host == '0.0.0.0' or not self.host:
         self.host = '127.0.0.1'
     self.port = int(self.port)
     if not config.get_misc('webdav', 'enable', True):
         raise Exception("WebDAV is disabled, cannot continue")
     self.davpath = '/' + config.get_misc('webdav', 'vdir', 'webdav')
     self.user = user
     self.passwd = passwd
     self.dbg = dbg
     self.timeout = timeout or 5.0  # seconds, tests need to respond pretty fast!
     self.hdrs = {}
     if useragent:
         self.set_useragent(useragent)
Esempio n. 10
0
    def default_get(self, cr, uid, fields, context=None):
        """Build default values for the CalDAV configuration wizard.

        Computes the CalDAV URL from the server's RPC configuration and
        the user's device preference, and attaches the documentation PDF
        (base64-encoded) to the result.

        Fixes over the previous version: guard against context=None,
        initialise ``url`` so the final assignment cannot raise
        UnboundLocalError when no preference record exists, and close
        the documentation file handle.
        """
        if context is None:
            context = {}
        pref_obj = self.pool.get('user.preference')
        pref_ids = pref_obj.browse(cr, uid, context.get('rec_id', False),
                                   context=context)
        res = {}
        host = context.get('host')
        url = ''
        port = ''
        prefix = 'http://'
        # Pick the first enabled RPC interface: xmlrpc, else netrpc,
        # else fall back to the secure xmlrpcs port.
        if not config.get('xmlrpc'):
            if not config.get('netrpc'):
                prefix = 'https://'
                port = config.get('xmlrpcs_port', 8071)
            else:
                port = config.get('netrpc_port', 8070)
        else:
            port = config.get('xmlrpc_port', 8069)
        if not config.get_misc('webdav', 'enable', True):
            raise Exception("WebDAV is disabled, cannot continue")
        user_pool = self.pool.get('res.users')
        current_user = user_pool.browse(cr, uid, uid, context=context)
        res['description'] = self.__doc['other']
        if pref_ids:
            pref = pref_ids[0]
            base = (host + ':' + str(port) + '/' + pref.service + '/' +
                    cr.dbname + '/' + 'calendars/')
            if pref.device == 'iphone':
                # iPhone clients discover the calendar themselves.
                url = base
            else:
                url = (base + 'users/' + current_user.login + '/' +
                       pref.collection.name + '/' + pref.calendar.name)
            res['description'] = self.__doc.get(pref.device,
                                                self.__doc['other'])
        # Attach the CalDAV documentation PDF, base64-encoded.
        doc_path = addons.get_module_resource('caldav', 'doc',
                                              'caldav_doc.pdf')
        with open(doc_path, 'rb') as doc_file:
            res['caldav_doc_file'] = base64.encodestring(doc_file.read())

        #res['doc_link'] = 'http://doc.openerp.com/'
        res['url'] = prefix + url
        return res
def _check_security_key(security_key):
    """Validate the shared secret PIN sent by the web client.

    Returns True when *security_key* matches the configured
    'smile_sso.shared_secret_pin'; logs and returns False otherwise.
    Raises osv.except_osv when mandatory configuration is missing.
    """
    # Database parameter is absolutely required to prevent cold start errors.
    # For details, see ticket #2: https://github.com/Smile-SA/smile_openerp_addons_6.0/issues/2
    if not config.get('db_name'):
        raise osv.except_osv('smile_sso error !', "'db_name' is required in server configuration file")
    # Fail with a clear message instead of int(None) raising TypeError
    # when the shared secret is not configured.
    shared_secret_pin = config.get('smile_sso.shared_secret_pin')
    if not shared_secret_pin:
        raise osv.except_osv('smile_sso error !', "'smile_sso.shared_secret_pin' is required in server configuration file")
    # Validate shared secret, comparing as integers to ignore formatting.
    # TODO: improve it and provide an SSL certification check
    secret_match = int(security_key) == int(shared_secret_pin)
    if not secret_match:
        logger = netsvc.Logger()
        logger.notifyChannel('smile_sso', netsvc.LOG_ERROR, "Server and web client don't share the same secret PIN number")
    return secret_match
Esempio n. 12
0
def wamp_start(*a):
    """Start the WAMP client session if wamp_url/wamp_realm are configured.

    Extra positional arguments are accepted (and ignored) so this can be
    used directly as a callback.

    Fix: the raw config values are checked *before* conversion --
    ``unicode(False)`` is the truthy string u'False', so the previous
    sanity check could never detect a missing configuration.
    """
    # Sanity check, ensure we have something to connect to
    wamp_url = config.get('wamp_url', False)
    wamp_realm = config.get('wamp_realm', False)
    if not (wamp_url and wamp_realm):
        _logger.warn(u"Not starting WAMP services as no configuration found.")
        return
    client = ZERPSession(
        url=unicode(wamp_url),
        realm=unicode(wamp_realm),
        username=unicode(config.get('wamp_login', '')),
        password=unicode(config.get('wamp_password', '')),
        timeout=int(config.get('wamp_timeout', 10)))
    client.start()
def get_ssh_connection():
    """Open an SSH connection to the Beedata host.

    Connection parameters come from the beedata_ssh_* config keys;
    unknown host keys are added automatically.
    """
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(AutoAddPolicy)
    ssh.connect(
        hostname=config.get("beedata_ssh_host", ""),
        port=int(config.get("beedata_ssh_port", "22")),
        username=config.get("beedata_ssh_user", ""),
        password=config.get("beedata_ssh_password", ""),
    )
    return ssh
Esempio n. 14
0
    def _process_end(self, cr, uid, modules):
        """Purge ir_model_data records of *modules* not seen in this load.

        Records with noupdate=False whose (module, name) pair is absent
        from self.loads are considered obsolete: workflow activities are
        detached from their transitions first, then (unless running a
        partial import) the referenced records are deleted.
        """
        if not modules:
            return True
        cr.execute('select id,name,model,res_id,module from ir_model_data where module in %s and noupdate=%s', (tuple(modules), False))
        wkf_todo = []
        for (id, name, model, res_id,module) in cr.fetchall():
            if (module,name) not in self.loads:
                # Remember the entry so both the record and its
                # ir_model_data row can be removed below.
                self.unlink_mark[(model,res_id)] = id
                if model=='workflow.activity':
                    # Collect affected workflow instances, then reroute or
                    # drop the transitions pointing at the obsolete activity.
                    cr.execute('select res_type,res_id from wkf_instance where id in (select inst_id from wkf_workitem where act_id=%s)', (res_id,))
                    wkf_todo.extend(cr.fetchall())
                    cr.execute("update wkf_transition set condition='True', role_id=NULL, signal=NULL,act_to=act_from,act_from=%s where act_to=%s", (res_id,res_id))
                    cr.execute("delete from wkf_transition where act_to=%s", (res_id,))

        # Re-trigger the workflows whose instances were touched above.
        for model,id in wkf_todo:
            wf_service = netsvc.LocalService("workflow")
            wf_service.trg_write(uid, model, id, cr)

        cr.commit()
        if not config.get('import_partial', False):
            logger = netsvc.Logger()
            for (model, res_id) in self.unlink_mark.keys():
                if self.pool.get(model):
                    logger.notifyChannel('init', netsvc.LOG_INFO, 'Deleting %s@%s' % (res_id, model))
                    try:
                        self.pool.get(model).unlink(cr, uid, [res_id])
                        id = self.unlink_mark[(model, res_id)]
                        if id:
                            self.unlink(cr, uid, [id])
                            cr.execute('DELETE FROM ir_values WHERE value=%s', ('%s,%s' % (model, res_id),))
                        # Commit per record so one failure does not roll
                        # back previously deleted ones.
                        cr.commit()
                    except Exception, e:
                        cr.rollback()
                        logger.notifyChannel('init', netsvc.LOG_ERROR, e)
                        logger.notifyChannel('init', netsvc.LOG_ERROR, 'Could not delete id: %d of model %s\nThere should be some relation that points to this resource\nYou should manually fix this and restart --update=module' % (res_id, model))
Esempio n. 15
0
    def _process_end(self, cr, uid, modules):
        """Purge ir_model_data records of *modules* not seen in this load.

        Same cleanup as the classic loader: records with noupdate=False
        whose (module, name) was not reloaded are considered obsolete,
        workflow activities are detached first, then (unless doing a
        partial import) the referenced records are deleted.
        """
        if not modules:
            return True
        modules = list(modules)
        cr.execute('SELECT id, name, model, res_id, module '
                    'FROM ir_model_data '
                    'WHERE module = ANY(%s) AND noupdate=%s',
                    (modules, False), debug=self._debug)
        wkf_todo = []
        for (id, name, model, res_id,module) in cr.fetchall():
            if (module,name) not in self.loads:
                # Remember the entry for deletion below.
                self.unlink_mark[(model,res_id)] = id
                if model=='workflow.activity':
                    # Collect affected workflow instances, then reroute or
                    # drop transitions pointing at the obsolete activity.
                    cr.execute('SELECT res_type, res_id FROM wkf_instance '
                                'WHERE id IN (SELECT inst_id FROM wkf_workitem WHERE act_id=%s)',
                                (res_id,), debug=self._debug)
                    wkf_todo.extend(cr.fetchall())
                    cr.execute("UPDATE wkf_transition "
                            "SET condition='True', group_id=NULL, signal=NULL, "
                            "    act_to=act_from, act_from=%s "
                            "WHERE act_to=%s", (res_id,res_id), debug=self._debug)
                    cr.execute("DELETE FROM wkf_transition WHERE act_to=%s", (res_id,), debug=self._debug)

        # Re-trigger the workflows whose instances were touched above.
        for model,id in wkf_todo:
            wf_service = netsvc.LocalService("workflow")
            wf_service.trg_write(uid, model, id, cr)

        cr.commit()
        if not config.get('import_partial'):
            for (model, res_id) in self.unlink_mark.keys():
                if self.pool.get(model):
                    self.__logger.info('Deleting %s@%s', res_id, model)
                    try:
                        self.pool.get(model).unlink(cr, uid, [res_id])
                        if id:
                            # Remove the matching ir_model_data rows; warn
                            # if several modules referenced the same record.
                            ids = self.search(cr, uid, [('res_id','=',res_id),
                                                        ('model','=',model)])
                            self.__logger.debug('=> Deleting %s: %s',
                                                self._name, ids)
                            if len(ids) > 1 and \
                               self.__logger.isEnabledFor(logging.WARNING):
                                self.__logger.warn(
                                    'Got %d %s for (%s, %d): %s',
                                    len(ids), self._name, model, res_id,
                                    map(itemgetter('module','name'),
                                        self.read(cr, uid, ids,
                                                  ['name', 'module'])))
                            self.unlink(cr, uid, ids)
                            cr.execute(
                                'DELETE FROM ir_values WHERE value=%s',
                                ('%s,%s'%(model, res_id),))
                        # Commit per record so one failure does not roll
                        # back previously deleted ones.
                        cr.commit()
                    except Exception:
                        cr.rollback()
                        self.__logger.exception(
                            'Could not delete id: %d of model %s\nThere '
                            'should be some relation that points to this '
                            'resource\nYou should manually fix this and '
                            'restart with --update=module', res_id, model)
        return True
Esempio n. 16
0
    def get_csv(self, cursor, uid, ids, context=None):
        """Download 'Enviaments.csv' over SCP and load it into the lot.

        Any failure is recorded as an info line on the record instead of
        being propagated (best-effort behaviour).
        """
        if isinstance(ids, (tuple, list)):
            ids = ids[0]

        lot = self.pool.get('som.infoenergia.lot.enviament').browse(
            cursor, uid, ids)

        try:
            ssh = get_ssh_connection()
            output_dir = config.get("infoenergia_report_download_dir",
                                    "/tmp/test_shera/reports")

            remote_path = context.get('path_csv', '')
            local_path = os.path.join(output_dir, 'Enviaments.csv')

            # Copy the remote CSV to the local reports directory.
            SCPClient(ssh.get_transport()).get(remote_path, local_path)

            lot.create_enviaments_from_csv_file(local_path, context)
            self._attach_csv(cursor, uid, ids, local_path)

            self.add_info_line(cursor, uid, ids,
                               'CSV descarregat correctament')
        except Exception as e:
            self.add_info_line(cursor, uid, ids, 'ERROR ' + str(e))
Esempio n. 17
0
def set_cookie_and_redirect(req, redirect_url):
    """Return a 303 redirect that also (re)sets the session cookie."""
    response = werkzeug.utils.redirect(redirect_url, 303)
    # Keep the Location header exactly as given; do not rewrite it.
    response.autocorrect_location_header = False
    session_payload = urllib2.quote(simplejson.dumps(req.session_id))
    response.set_cookie('instance0|session_id', session_payload,
                        domain=config.get('domain', None))
    return response
Esempio n. 18
0
 def wamp_connect(self, cr, uid, context=None):
     """Return a started WAMP client for (pid, uid), creating it on demand.

     Connections are cached per process/user in wamp_client_connections.
     For real users the credentials come from the user record; for uid 0
     they come from the server configuration.

     Fix: the raw config values are checked *before* conversion --
     ``unicode(None)`` is the truthy string u'None', so the previous
     sanity check could never detect a missing configuration.
     """
     global wamp_client_connections
     url = config.get("wamp_url")
     realm = config.get("wamp_realm")
     if not (url and realm):
         raise Exception(
             "Error",
             "Error creating wamp client connection: No wamp_url or wamp_realm configured."
         )
     wamp = wamp_client_connections.setdefault((os.getpid(), uid),
                                               WAMPClientTicket())
     if not wamp.is_connected():
         if uid != 0:
             username = unicode(self.wamp_login(cr, uid))
             password = unicode(self.wamp_api_key(cr, uid))
         else:
             username = unicode(config.get("wamp_login"))
             password = unicode(config.get("wamp_password"))
         wamp.configure(username=username,
                        password=password,
                        url=unicode(url),
                        uri_base=unicode(
                            config.get("wamp_uri_base", "com.izaber.wamp")),
                        realm=unicode(config.get("wamp_realm", "izaber")),
                        # NOTE(review): key reads "wamp_timout" -- probable
                        # typo for "wamp_timeout"; left unchanged in case
                        # deployed config files use the misspelling.
                        timeout=config.get("wamp_timout", 10))
         wamp.start()
     return wamp
Esempio n. 19
0
 def get_email_body(self, cr, uid, rule_id, case, context=None):
     """
     Override CRM.Case get_email_body
     Gets the body from the template on the rule and renders it with the
     case values, updating the context language with the case language
     :param cr:      OpenERP Cursor
     :param uid:     OpenERP User ID
     :param rule_id: OpenERP action (Crm.Case.Rule) ID
     :param case:    OpenERP case (Crm.Case) browse record
     :param context: OpenERP Context
     :return:        The rendered body for the template referenced on the rule
     """
     if not context:
         context = {}
     if isinstance(rule_id, list):
         rule_id = rule_id[0]
     if isinstance(case, list):
         case = case[0]
     # Default body from the parent implementation; used when the rule
     # has no poweremail template or the template has no body.
     action_body = super(CrmCaseRule, self).get_email_body(
         cr, uid, rule_id, case, context)
     action_template = self.read(
         cr, uid, rule_id, ['pm_template_id'])['pm_template_id']
     if not action_template:
         return action_body
     else:
         # read() returns (id, name) for many2one fields; keep the id.
         action_template = action_template[0]
     pm_template_obj = self.pool.get('poweremail.templates')
     pm_template = pm_template_obj.browse(cr, uid, action_template)
     pm_send_wizard_obj = self.pool.get('poweremail.send.wizard')
     ctx = context.copy()
     # Get lang from template
     lang = pm_send_wizard_obj.get_value(
         cr, uid, pm_template, pm_template.lang, context, id=case.id)
     if not lang:
         # Get lang from case.partner_id (source)
         if case.partner_id and case.partner_id.lang:
             lang = case.partner_id.lang
         # Get lang from case.user_id (responsible)
         elif case.user_id and case.user_id.context_lang:
             lang = case.user_id.context_lang
         # Get lang from Context (Server-based)
         elif ctx.get('lang', False):
             lang = ctx.get('lang')
         # Get lang from config file
         else:
             lang = config.get('language', False) or False
     if lang:
         ctx['lang'] = lang
     # Read the template body in the resolved language.
     template_body = pm_template_obj.read(
         cr, uid, action_template, ['def_body_text'], ctx)['def_body_text']
     body = template_body or action_body
     # Render with Mako, exposing the case and the current timestamp.
     body_mako_tpl = Template(body, input_encoding='utf-8')
     rendered_body = body_mako_tpl.render(
         object=case,
         date_now=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
     )
     return rendered_body
Esempio n. 20
0
    def _process_end(self, cr, uid, modules):
        """ Clear records removed from updated module data.

        This method is called at the end of the module loading process.
        It is meant to removed records that are no longer present in the
        updated data. Such records are recognised as the one with an xml id
        and a module in ir_model_data and noupdate set to false, but not
        present in self.loads.

        """
        if not modules:
            return True
        modules = list(modules)
        # Build a "%s,%s,..." placeholder list for the IN clause.
        module_in = ",".join(["%s"] * len(modules))
        cr.execute(
            "select id,name,model,res_id,module from ir_model_data where module IN (" + module_in + ") and noupdate=%s",
            modules + [False],
        )
        wkf_todo = []
        to_unlink = []
        for (id, name, model, res_id, module) in cr.fetchall():
            if (module, name) not in self.loads:
                to_unlink.append((model, res_id))
                if model == "workflow.activity":
                    # Collect affected workflow instances, then reroute or
                    # drop transitions pointing at the obsolete activity.
                    cr.execute(
                        "select res_type,res_id from wkf_instance where id IN (select inst_id from wkf_workitem where act_id=%s)",
                        (res_id,),
                    )
                    wkf_todo.extend(cr.fetchall())
                    cr.execute(
                        "update wkf_transition set condition='True', group_id=NULL, signal=NULL,act_to=act_from,act_from=%s where act_to=%s",
                        (res_id, res_id),
                    )
                    cr.execute("delete from wkf_transition where act_to=%s", (res_id,))

        # Re-trigger the workflows whose instances were touched above.
        for model, id in wkf_todo:
            wf_service = netsvc.LocalService("workflow")
            wf_service.trg_write(uid, model, id, cr)

        cr.commit()
        if not config.get("import_partial"):
            for (model, res_id) in to_unlink:
                if self.pool.get(model):
                    _logger.info("Deleting %s@%s", res_id, model)
                    try:
                        self.pool.get(model).unlink(cr, uid, [res_id])
                        # Commit per record so one failure does not roll
                        # back previously deleted ones.
                        cr.commit()
                    except Exception:
                        cr.rollback()
                        _logger.warning(
                            "Could not delete obsolete record with id: %d of model %s\n"
                            "There should be some relation that points to this resource\n"
                            "You should manually fix this and restart with --update=module",
                            res_id,
                            model,
                        )
        return True
Esempio n. 21
0
def set_cookie_and_redirect(req, redirect_url):
    """Return a 303 redirect response that also sets the session cookie."""
    redirect = werkzeug.utils.redirect(redirect_url, 303)
    # Keep the Location header exactly as given; do not rewrite it.
    redirect.autocorrect_location_header = False
    cookie_val = urllib2.quote(simplejson.dumps(req.session_id))

    redirect.set_cookie('instance0|session_id',
                        cookie_val,
                        domain=config.get('domain', None))
    return redirect
Esempio n. 22
0
 def __init__(self, pool, cursor):
     """Hook Sentry error reporting into the server's logging.

     Requires 'sentry_dsn' in the configuration file; passwords are
     stripped from reports by the sanitize processors.
     """
     if not config.get('sentry_dsn', False):
         raise osv.except_osv(
             _(u'Error'), _(u'No sentry DSN configured in config file.'))
     processors = ('raven.processors.SanitizePasswordsProcessor',
                   'raven_sanitize_openerp.OpenerpPasswordsProcessor')
     self.client = Client(dsn=config['sentry_dsn'], processors=processors)
     handler = SentryHandler(self.client)
     # Route all logging records through the Sentry handler.
     setup_logging(handler)
     super(SentrySetup, self).__init__(pool, cursor)
Esempio n. 23
0
 def get_uri(child_port):
     """Return the base URI for a child server on *child_port*.

     The scheme follows the 'secure' config flag; a missing or empty
     'interface' falls back to localhost.
     """
     interface = config.get('interface', 'localhost') or 'localhost'
     scheme = 'https' if config['secure'] else 'http'
     return '{0}://{1}:{2}'.format(scheme, interface, child_port)
Esempio n. 24
0
def async_report_report(db, uid, passwd, object, ids, datas=None, context=None):
    """Enqueue report generation on the 'report' Redis queue.

    Registers a slot in the report service's _reports registry and
    returns its id. If the job does not finish within 'report_timeout'
    seconds, the stored result is an HTML page that points the browser
    at the job info page instead.
    """
    from oorq.oorq import setup_redis_connection
    from oorq.tasks import report
    from rq import Queue
    from jinja2 import Template
    if not datas:
        datas = {}
    if not context:
        context={}
    security.check(db, uid, passwd)
    self = netsvc.SERVICES['report']

    # Allocate a unique report id under the service lock.
    self.id_protect.acquire()
    self.id += 1
    id = self.id
    self.id_protect.release()

    self._reports[id] = {
        'uid': uid,
        'result': False,
        'state': False,
        'exception': None
    }
    redis_conn = setup_redis_connection()
    q = Queue('report', default_timeout=86400,
              connection=redis_conn)
    # Pass OpenERP server config to the worker
    conf_attrs = dict(
        [(attr, value) for attr, value in config.options.items()]
    )
    job = q.enqueue(report, conf_attrs, db, uid, object, ids, datas, context)
    # Keep the job result around for a day.
    job.result_ttl = 86400
    job.save()
    # Check the configured timeout for the report. If the timeout is reached
    # then return a html report which redirects to the job info page.
    timeout = int(config.get('report_timeout', 5))
    protocol = 'http'
    if config['secure']:
        protocol = 'https'
    result = job.result
    # Poll the job every 100ms until it finishes or the timeout elapses.
    while not result:
        time.sleep(0.1)
        result = job.result
        timeout -= 0.1
        if timeout <= 0:
            tmpl = Template(get_template('async_reports.html'))
            result = (tmpl.render(protocol=protocol,
                                  host=config['interface'],
                                  port=config['port'],
                                  job=job.id),
                      'html')
    self._reports[id]['result'] = result[0]
    self._reports[id]['format'] = result[1]
    self._reports[id]['state'] = True
    return id
Esempio n. 25
0
def FindCustomFonts():
    """Fill the __foundFonts list with those filenames, whose fonts
       can be found in the reportlab ttf font path.

       This process needs only be done once per loading of this module,
       it is cached. But, if the system admin adds some font in the
       meanwhile, the server must be restarted eventually.
    """
    global __foundFonts
    __foundFonts = {}
    log = logging.getLogger("report.fonts")

    # Build the list of directories to scan: configured paths first,
    # then platform defaults, then reportlab's own search path.
    searchpath = []
    if config.get("fonts_search_path"):
        searchpath.extend(
            p.strip() for p in config.get("fonts_search_path").split(","))
    searchpath.extend(TTFSearchPathMap.get(platform.system(), []))
    searchpath += rl_config.TTFSearchPath

    # Resolve the globs to existing directories ourselves, as reportlab's
    # TTFOpenFile is not very good at it.
    dirpath = []
    for dirglob in searchpath:
        for dirname in glob.iglob(os.path.expanduser(dirglob)):
            abp = os.path.abspath(dirname)
            if os.path.isdir(abp):
                dirpath.append(abp)

    # Remember the first absolute path found for each declared font file.
    for name, font, filename, mode in CustomTTFonts:
        if filename in __foundFonts:
            continue
        for d in dirpath:
            abs_filename = os.path.join(d, filename)
            if os.path.exists(abs_filename):
                log.debug("Found font %s at %s", filename, abs_filename)
                __foundFonts[filename] = abs_filename
                break
Esempio n. 26
0
def FindCustomFonts():
    """Populate the module-level __foundFonts mapping with the absolute
       paths of the CustomTTFonts files found on the reportlab ttf font
       search path.

       This process needs only be done once per loading of this module,
       it is cached. But, if the system admin adds some font in the
       meanwhile, the server must be restarted eventually.
    """
    global __foundFonts
    __foundFonts = {}
    log = logging.getLogger('report.fonts')

    # Assemble the search path: configured dirs, then platform dirs,
    # then reportlab's own defaults last.
    searchpath = []
    if config.get('fonts_search_path'):
        searchpath.extend(map(str.strip, config.get('fonts_search_path').split(',')))

    sysname = platform.system()
    if sysname in TTFSearchPathMap:
        searchpath.extend(TTFSearchPathMap[sysname])

    searchpath.extend(rl_config.TTFSearchPath)

    # Expand globs / user dirs and keep only real directories; reportlab's
    # TTFOpenFile is not very good at this search, so we do it ourselves.
    dirpath = []
    for pattern in searchpath:
        for candidate in glob.iglob(os.path.expanduser(pattern)):
            candidate = os.path.abspath(candidate)
            if os.path.isdir(candidate):
                dirpath.append(candidate)

    for k, (name, font, filename, mode) in enumerate(CustomTTFonts):
        if filename in __foundFonts:
            continue
        for directory in dirpath:
            found = os.path.join(directory, filename)
            if os.path.exists(found):
                log.debug("Found font %s at %s", filename, found)
                __foundFonts[filename] = found
                break
Esempio n. 27
0
    def default_get(self, cr, uid, fields, context=None):
        """Build the default values for the CalDAV preference wizard.

        Computes the CalDAV URL from the server's enabled RPC interface
        and the user's stored device/calendar preference, and attaches
        the bundled CalDAV documentation PDF (base64-encoded).

        :raises Exception: when WebDAV support is disabled in the config.
        """
        if context is None:
            context = {}
        res = {}
        # Initialised so the final URL assembly cannot raise NameError
        # when no preference record is found (original left it unbound).
        url = ''
        # Pick protocol/host/port depending on which RPC interface is on.
        if not config.get('xmlrpc'):
            if not config.get('netrpc'):
                prefix = 'https://'
                host = config.get('xmlrpcs_interface', None)
                port = config.get('xmlrpcs_port', 8071)
            else:
                prefix = 'http://'
                host = config.get('netrpc_interface', None)
                port = config.get('netrpc_port', 8070)
        else:
            prefix = 'http://'
            host = config.get('xmlrpc_interface', None)
            port = config.get('xmlrpc_port', 8069)
        # BUGFIX: the original test ``host == '' or None`` never detected
        # a None host (``'' or None`` is not a membership test); fall back
        # to localhost for any falsy host value.
        if not host:
            host = 'localhost'
            port = 8069
        if not config.get_misc('webdav', 'enable', True):
            raise Exception("WebDAV is disabled, cannot continue")
        user_pool = self.pool.get('res.users')
        current_user = user_pool.browse(cr, uid, uid, context=context)
        pref_obj = self.pool.get('user.preference')
        pref_ids = pref_obj.browse(cr,
                                   uid,
                                   context.get('rec_id', False),
                                   context=context)
        #TODO write documentation
        res['description'] = self.__doc['other']
        if pref_ids:
            pref = pref_ids[0]
            # iPhone only needs the calendars/ root; other devices get the
            # full per-user collection/calendar path.
            url = '%s:%s/%s/%s/calendars/' % (host, port, pref.service,
                                              cr.dbname)
            if pref.device != 'iphone':
                url += 'users/%s/%s/%s' % (current_user.login,
                                           pref.collection.name,
                                           pref.calendar.name)
            res['description'] = self.__doc.get(pref.device,
                                                self.__doc['other'])
        # Close the handle after reading (the original leaked it and
        # shadowed the ``file`` builtin).
        doc_file = open(
            addons.get_module_resource('caldav', 'doc', 'caldav_doc.pdf'),
            'rb')
        try:
            res['caldav_doc_file'] = base64.encodestring(doc_file.read())
        finally:
            doc_file.close()

        #res['doc_link'] = 'http://doc.openerp.com/'
        res['url'] = prefix + url
        return res
Esempio n. 28
0
    def _process_end(self, cr, uid, modules):
        """ Clear records removed from updated module data.

        This method is called at the end of the module loading process.
        It is meant to removed records that are no longer present in the
        updated data. Such records are recognised as the one with an xml id
        and a module in ir_model_data and noupdate set to false, but not
        present in self.loads.

        """
        if not modules:
            return True
        modules = list(modules)
        module_in = ",".join(["%s"] * len(modules))
        # All updatable (noupdate=False) xml-id records of these modules.
        cr.execute(
            'select id,name,model,res_id,module from ir_model_data where module IN ('
            + module_in + ') and noupdate=%s', modules + [False])
        wkf_todo = []
        to_unlink = []
        for (id, name, model, res_id, module) in cr.fetchall():
            if (module, name) not in self.loads:
                to_unlink.append((model, res_id))
                if model == 'workflow.activity':
                    # Remember the workflow instances touching this activity
                    # and reroute/delete its transitions before removal.
                    cr.execute(
                        'select res_type,res_id from wkf_instance where id IN (select inst_id from wkf_workitem where act_id=%s)',
                        (res_id, ))
                    wkf_todo.extend(cr.fetchall())
                    cr.execute(
                        "update wkf_transition set condition='True', group_id=NULL, signal=NULL,act_to=act_from,act_from=%s where act_to=%s",
                        (res_id, res_id))
                    cr.execute("delete from wkf_transition where act_to=%s",
                               (res_id, ))

        # The service lookup is loop-invariant: fetch it once instead of
        # once per workflow instance (hoisted from inside the loop).
        wf_service = netsvc.LocalService("workflow")
        for model, id in wkf_todo:
            wf_service.trg_write(uid, model, id, cr)

        cr.commit()
        if not config.get('import_partial'):
            # Best-effort deletion: a failure on one record must not abort
            # the whole cleanup, hence the rollback-and-continue.
            for (model, res_id) in to_unlink:
                if self.pool.get(model):
                    _logger.info('Deleting %s@%s', res_id, model)
                    try:
                        self.pool.get(model).unlink(cr, uid, [res_id])
                        cr.commit()
                    except Exception:
                        cr.rollback()
                        _logger.warning(
                            'Could not delete obsolete record with id: %d of model %s\n'
                            'There should be some relation that points to this resource\n'
                            'You should manually fix this and restart with --update=module',
                            res_id, model)
        return True
Esempio n. 29
0
    def _process_end(self, cr, uid, modules):
        """Remove records that disappeared from the updated modules' data.

        Records are recognised through ir_model_data rows (noupdate=False)
        of the given modules that were not re-loaded during this update
        (i.e. not present in ``self.loads``); they are marked in
        ``self.unlink_mark`` and then deleted best-effort.
        """
        if not modules:
            return True
        modules = list(modules)
        module_in = ",".join(["%s"] * len(modules))
        cr.execute('select id,name,model,res_id,module from ir_model_data where module IN (' + module_in + ') and noupdate=%s', modules + [False])
        wkf_todo = []
        for (data_id, name, model, res_id, module) in cr.fetchall():
            if (module, name) not in self.loads:
                self.unlink_mark[(model, res_id)] = data_id
                if model == 'workflow.activity':
                    # Detach the activity from its workflow before deletion:
                    # remember the affected instances and reroute/delete the
                    # transitions pointing at it.
                    cr.execute('select res_type,res_id from wkf_instance where id IN (select inst_id from wkf_workitem where act_id=%s)', (res_id,))
                    wkf_todo.extend(cr.fetchall())
                    cr.execute("update wkf_transition set condition='True', group_id=NULL, signal=NULL,act_to=act_from,act_from=%s where act_to=%s", (res_id, res_id))
                    cr.execute("delete from wkf_transition where act_to=%s", (res_id,))

        # Loop-invariant service lookup hoisted out of the loop.
        wf_service = netsvc.LocalService("workflow")
        for model, inst_id in wkf_todo:
            wf_service.trg_write(uid, model, inst_id, cr)

        cr.commit()
        if not config.get('import_partial'):
            for (model, res_id) in self.unlink_mark.keys():
                if self.pool.get(model):
                    self.__logger.info('Deleting %s@%s', res_id, model)
                    try:
                        self.pool.get(model).unlink(cr, uid, [res_id])
                        # BUGFIX: the original tested the stale loop variable
                        # ``id`` (left over from an earlier loop) here; use
                        # the ir_model_data id recorded in unlink_mark.
                        data_id = self.unlink_mark[(model, res_id)]
                        if data_id:
                            ids = self.search(cr, uid, [('res_id', '=', res_id),
                                                        ('model', '=', model)])
                            self.__logger.debug('=> Deleting %s: %s',
                                                self._name, ids)
                            if len(ids) > 1 and \
                               self.__logger.isEnabledFor(logging.WARNING):
                                self.__logger.warn(
                                    'Got %d %s for (%s, %d): %s',
                                    len(ids), self._name, model, res_id,
                                    map(itemgetter('module', 'name'),
                                        self.read(cr, uid, ids,
                                                  ['name', 'module'])))
                            self.unlink(cr, uid, ids)
                            cr.execute(
                                'DELETE FROM ir_values WHERE value=%s',
                                ('%s,%s' % (model, res_id),))
                        cr.commit()
                    except Exception:
                        cr.rollback()
                        self.__logger.warn(
                            'Could not delete id: %d of model %s\nThere '
                            'should be some relation that points to this '
                            'resource\nYou should manually fix this and '
                            'restart with --update=module', res_id, model)
        return True
Esempio n. 30
0
    def dispatch(self, environ, start_response):
        """
        Performs the actual WSGI dispatching for the application, may be
        wrapped during the initialization of the object.

        Call the object directly.
        """
        req = werkzeug.wrappers.Request(environ)
        req.parameter_storage_class = werkzeug.datastructures.ImmutableDict
        req.app = self

        # Root: permanent redirect to the configured entry page.
        if req.path == '/':
            qs = urllib.urlencode(req.args)
            redirect = werkzeug.utils.redirect(self.root + '?' + qs, 301)
            return redirect(environ, start_response)

        # Mobile shortcut.
        if req.path == '/mobile':
            redirect = werkzeug.utils.redirect(
                '/web_mobile/static/src/web_mobile.html', 301)
            return redirect(environ, start_response)

        # Optional admin redirect for configured domains.
        admin_domain = main_config.get('main_domain_for_admin')
        if req.path == self.root and admin_domain and admin_domain != '0':
            domains = map(str.strip, main_config.get('domain_list', '').split(','))
            if req.host.split(':')[0] in domains:
                target = "/%s" % main_config.options.get('admin_url', 'admin')
                return werkzeug.utils.redirect(target, 301)(environ, start_response)

        handler = self.find_handler(*req.path.split('/')[1:])

        if not handler:
            response = werkzeug.exceptions.NotFound()
        else:
            with session_context(req, self.session_storage, self.session_cookie) as session:
                result = handler(req, self.config)

                if isinstance(result, basestring):
                    hdrs = [('Content-Type', 'text/html; charset=utf-8'),
                            ('Content-Length', len(result))]
                    response = werkzeug.wrappers.Response(result, headers=hdrs)
                else:
                    response = result

                if hasattr(response, 'set_cookie'):
                    response.set_cookie(self.session_cookie, session.sid)

        return response(environ, start_response)
Esempio n. 31
0
    def handle_join(self, details):
        """ Executed when the script attaches to the server
        """
        _logger.info(u"Joined WAMP router. Attempting registration of calls: {}".format(details))

        # Parse the 'wamp_register' config option: comma-separated entries,
        # each either "service=db" or a bare name used for both sides.
        for entry in config.get('wamp_register', '').split(','):
            if '=' in entry:
                service_name, db_name = entry.split('=', 1)
                service_name = service_name.strip()
                db_name = db_name.strip()
            else:
                service_name = db_name = entry
            if service_name and db_name:
                DATABASE_MAPPINGS[service_name] = db_name

        databases = openerp.service.web_services.db().exp_list()
        # No explicit mapping configured: expose every database as itself.
        if not DATABASE_MAPPINGS:
            DATABASE_MAPPINGS.update((database, database) for database in databases)

        for service_name, db_name in DATABASE_MAPPINGS.items():
            if db_name not in databases:
                _logger.warn(u"Database '{}' does not exist for registering on WAMP!".format(db_name))
                continue

            # Version 2 Support
            # For '*.*' services (such as object.execute)
            base = config.get('wamp_registration_prefix', 'com.izaber.nexus.zerp')
            service_uri = unicode(base + ':{}:'.format(service_name))
            _logger.info(u"Registering '{}' on WAMP server".format(service_uri))
            self.register(
                service_uri,
                self.dispatch_model,
                details={"match": u"prefix"},
            )
Esempio n. 32
0
def start_server():
    """Start the FTP service in a daemon thread, unless the configured
    host is the literal string 'none'."""
    HOST = config.get('ftp_server_host', '0.0.0.0')
    PORT = int(config.get('ftp_server_port', '8021'))
    PASSIVE_PORTS = None
    parts = config.get('ftp_server_passive_ports', '').split(':')
    if len(parts) == 2:
        PASSIVE_PORTS = (int(parts[0]), int(parts[1]))

    class ftp_server(threading.Thread):
        def log(self, level, message):
            netsvc.Logger().notifyChannel('FTP', level, message)

        def run(self):
            # Configure the shared FTPHandler class, then serve forever.
            ftpserver.FTPHandler.authorizer = authorizer.authorizer()
            ftpserver.max_cons = 300
            ftpserver.max_cons_per_ip = 50
            ftpserver.FTPHandler.abstracted_fs = abstracted_fs.abstracted_fs
            if PASSIVE_PORTS:
                ftpserver.FTPHandler.passive_ports = PASSIVE_PORTS

            # Route the FTP library's logging through our channel logger;
            # per-line logging is silenced.
            ftpserver.log = lambda msg: self.log(netsvc.LOG_INFO, msg)
            ftpserver.logline = lambda msg: None
            ftpserver.logerror = lambda msg: self.log(netsvc.LOG_ERROR, msg)

            ftpserver.FTPServer((HOST, PORT), ftpserver.FTPHandler).serve_forever()

    logger = netsvc.Logger()
    if HOST.lower() == 'none':
        logger.notifyChannel("FTP", netsvc.LOG_INFO,
                             "\n Server FTP Not Started\n")
        return
    logger.notifyChannel("FTP", netsvc.LOG_INFO,
                         "\n Serving FTP on %s:%s\n" % (HOST, PORT))
    server = ftp_server()
    server.daemon = True
    server.start()
Esempio n. 33
0
 def __init__(self, pool, cursor):
     """Wire the Sentry client into the logging system.

     Fails fast with a user-visible error when no 'sentry_dsn' is
     present in the server configuration file.
     """
     if not config.get('sentry_dsn', False):
         raise osv.except_osv(
             _(u'Error'),
             _(u'No sentry DSN configured in config file.')
         )
     # Sanitizers strip passwords from the reported payloads.
     self.client = Client(
         dsn=config['sentry_dsn'],
         processors=(
             'raven.processors.SanitizePasswordsProcessor',
             'raven_sanitize_openerp.OpenerpPasswordsProcessor',
         ),
     )
     setup_logging(SentryHandler(self.client))
     super(SentrySetup, self).__init__(pool, cursor)
Esempio n. 34
0
    def setup(self,
              cursor,
              uid,
              username=None,
              password=None,
              company_id=None,
              cert_file=None,
              version=None,
              context=None):
        """Configure the Empowering API client.

        Each setting is read from the server config first, falling back
        to the corresponding argument; extra per-call overrides may be
        passed through context['empowering_args'].
        """
        if not context:
            context = {}
        amon_setup_logging()
        # Config values win over the explicit arguments.
        self.company_id = config.get('empowering_company', company_id)
        self.cert_file = config.get('empowering_cert', cert_file)
        self.version = config.get('empowering_version', version)
        self.username = config.get('empowering_username', username)
        self.password = config.get('empowering_password', password)

        # Only forward the settings that are actually present.
        emp_conf = {}
        if self.company_id:
            emp_conf['company_id'] = self.company_id
        if self.cert_file:
            # The same file serves as both certificate and key.
            emp_conf['cert_file'] = emp_conf['key_file'] = self.cert_file
        if self.version:
            emp_conf['version'] = self.version
        if self.username and self.password:
            emp_conf['username'] = self.username
            emp_conf['password'] = self.password

        self.service = setup_empowering_api(**emp_conf)
        log("Setting Up Empowering Service (%s). Company-Id: %s Cert file: %s"
            % (self.service.apiroot, self.service.company_id,
               self.service.cert_file))
        for key, value in context.get('empowering_args', {}).items():
            log("%s => %s" % (key, value))
            setattr(self.service, key, value)
Esempio n. 35
0
class PoweremailMailbox(osv.osv):
    """Send poweremail mailbox entries through background jobs instead of
    inline, one job per mail id."""
    _name = "poweremail.mailbox"
    _inherit = 'poweremail.mailbox'

    @job(queue=config.get('poweremail_sender_queue', 'poweremail'))
    def send_in_background(self, cursor, uid, ids, context):
        # Inside the worker, delegate to the original sending logic.
        parent = super(PoweremailMailbox, self)
        return parent.send_this_mail(cursor, uid, ids, context)

    def send_this_mail(self, cursor, uid, ids=None, context=None):
        """Enqueue one job per mail id rather than sending synchronously."""
        if not isinstance(ids, (tuple, list)):
            ids = [ids]
        for mail_id in ids:
            self.send_in_background(cursor, uid, [mail_id], context)
        return True
Esempio n. 36
0
def start_server():
    """Spin up the FTP service as a daemon thread; a host of 'none'
    disables it."""
    HOST = config.get('ftp_server_host', '0.0.0.0')
    PORT = int(config.get('ftp_server_port', '8021'))
    PASSIVE_PORTS = None
    port_range = config.get('ftp_server_passive_ports', '').split(':')
    if len(port_range) == 2:
        PASSIVE_PORTS = (int(port_range[0]), int(port_range[1]))

    class ftp_server(threading.Thread):
        def log(self, level, message):
            netsvc.Logger().notifyChannel('FTP', level, message)

        def run(self):
            # Set up the handler class shared by all FTP connections.
            ftpserver.FTPHandler.authorizer = authorizer.authorizer()
            ftpserver.max_cons = 300
            ftpserver.max_cons_per_ip = 50
            ftpserver.FTPHandler.abstracted_fs = abstracted_fs.abstracted_fs
            if PASSIVE_PORTS:
                ftpserver.FTPHandler.passive_ports = PASSIVE_PORTS

            # Route library logging through the OpenERP logger; drop the
            # noisy per-line log.
            ftpserver.log = lambda msg: self.log(netsvc.LOG_INFO, msg)
            ftpserver.logline = lambda msg: None
            ftpserver.logerror = lambda msg: self.log(netsvc.LOG_ERROR, msg)

            ftpserver.FTPServer((HOST, PORT), ftpserver.FTPHandler).serve_forever()

    channel_logger = netsvc.Logger()
    if HOST.lower() == 'none':
        channel_logger.notifyChannel("FTP", netsvc.LOG_INFO, "\n Server FTP Not Started\n")
        return
    channel_logger.notifyChannel("FTP", netsvc.LOG_INFO, "\n Serving FTP on %s:%s\n" % (HOST, PORT))
    thread = ftp_server()
    thread.daemon = True
    thread.start()
Esempio n. 37
0
    def get_resource_path(self, cr, uid, dir_id, res_model, res_id):
        """Return the FTP URL of a document directory resource.

        Walks up the directory tree collecting ancestor names (stopping
        at the first resource-linked ancestor), optionally appends the
        name of the resource record, and builds an ftp:// URL for the
        current user. Returns False when the directory does not exist.

        NOTE(review): the URL embeds the user's login and password in
        clear text and hard-codes 'localhost' — confirm this is
        acceptable for the deployments using it.
        """
        # This method will be used in the process module; needs testing
        # and improvement when the directory has a parent link resource.
        path = []

        def _collect(node_id):
            # Recursively prepend ancestor directory names.
            node = self.browse(cr, uid, node_id)
            if node.parent_id and not node.ressource_parent_type_id:
                _collect(node.parent_id.id)
            path.append(node.name)

        directory = self.browse(cr, uid, dir_id)
        if not directory:
            return False
        # The original also ran an ir.model search on res_model whose
        # result was never used; that dead query has been removed.
        _collect(dir_id)
        if res_id:
            resource = self.pool.get(directory.ressource_type_id.model).browse(cr, uid, res_id)
            path.append(resource.name)
        user = self.pool.get('res.users').browse(cr, uid, uid)
        return "ftp://%s:%s@localhost:%s/%s/%s" % (
            user.login, user.password,
            config.get('ftp_server_port', 8021),
            cr.dbname, '/'.join(path))
    def download_pdf(self, cursor, uid, ids, context):
        """Fetch the report PDF of an enviament over SCP, attach it and
        advance the record state; on any failure mark it as 'error'."""
        if isinstance(ids, (tuple, list)):
            ids = ids[0]
        env_obj = self.pool.get('som.infoenergia.enviament')
        env = env_obj.browse(cursor, uid, ids)

        # States from which a download is allowed.
        downloadable = ['esborrany']
        if context.get('force_download_pdf', False):
            downloadable.append('obert')

        if not env.polissa_id:
            env.write({'estat': 'error'})
            message = u'ERROR: No es pot descarregar el PDF perque l\'enviament no té cap pòlissa associada'
            self.add_info_line(cursor, uid, ids, message, context)
            return
        if env.estat not in downloadable:
            return
        try:
            ssh = get_ssh_connection()
            output_dir = config.get("infoenergia_report_download_dir",
                                    "/tmp/test_shera/reports")
            local_path = os.path.join(output_dir,
                                      env.pdf_filename.split("/")[-1])

            SCPClient(ssh.get_transport()).get(env.pdf_filename, local_path)

            self.render_header_data(cursor, uid, ids, local_path, output_dir)
            self._attach_pdf(cursor, uid, ids, local_path)

            env.write({
                'estat': 'obert',
                'data_informe':
                    datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
            })
            self.add_info_line(cursor, uid, ids,
                               'PDF descarregat correctament', context)
        except Exception as e:
            env.write({'estat': 'error'})
            self.add_info_line(cursor, uid, ids, 'ERROR ' + str(e), context)
Esempio n. 39
0
def monkey_dispatch(self, environ, start_response):
    """
    Performs the actual WSGI dispatching for the application, may be
    wrapped during the initialization of the object.

    Call the object directly.
    """
    req = werkzeug.wrappers.Request(environ)
    req.parameter_storage_class = werkzeug.datastructures.ImmutableDict
    req.app = self

    handler = self.find_handler(*req.path.split('/')[1:])

    if not handler:
        return werkzeug.exceptions.NotFound()(environ, start_response)

    # Session id comes from the cookie, with the query string as fallback.
    sid = req.cookies.get('sid') or req.args.get('sid')

    session_gc(self.session_store)

    with session_context(req, self.session_store, self.session_lock, sid) as session:
        result = handler(req)

        if isinstance(result, basestring):
            response = werkzeug.wrappers.Response(
                result,
                headers=[('Content-Type', 'text/html; charset=utf-8'),
                         ('Content-Length', len(result))])
        else:
            response = result

        if hasattr(response, 'set_cookie'):
            from tools import config

            response.set_cookie('sid', session.sid,
                                domain=config.get('domain', None))

    return response(environ, start_response)
Esempio n. 40
0
    def __init__(self, details):
        """Parse a WAMP procedure URI into database/model/service parts.

        Raises when the URI does not have 3 or 4 colon-separated parts.
        """
        self.service_base = unicode(
            config.get('wamp_registration_prefix', u'com.izaber.nexus.zerp'))

        parts = details["procedure"].split(':')

        # 3 parts — <prefix>:<database>:<service> (e.g. for reports).
        if len(parts) == 3:
            prefix, self.database, self.service_name = parts
            self.database = DATABASE_MAPPINGS.get(self.database)
            self.model = None
            self.version = 2
        # 4 parts — <prefix>:<database>:<model>:<service>.
        elif len(parts) == 4:
            prefix, self.database, self.model, self.service_name = parts
            # Apply service-name rewrites where configured.
            if self.service_name in SERVICE_REWRITES:
                self.service_name = SERVICE_REWRITES[self.service_name]
            self.database = DATABASE_MAPPINGS.get(self.database)
            self.version = 2
        else:
            raise Exception(u'URI should be in format "<prefix>:<database>:<model>:<service>:<method>"')
class PoweremailSendWizard(osv.osv_memory):
    """Render and queue poweremail messages through background jobs.

    ``save_to_mailbox`` fans the source records out into jobs of (mostly)
    two records each; ``save_to_mailbox_in_background`` is the worker that
    actually renders into the mailbox.
    """
    _name = 'poweremail.send.wizard'
    _inherit = 'poweremail.send.wizard'

    def save_to_mailbox(self, cursor, uid, ids, context=None):
        # When already running inside a worker job, fall through to the
        # original synchronous implementation to avoid recursive queuing.
        if get_current_job():
            return super(PoweremailSendWizard,
                         self).save_to_mailbox(cursor,
                                               uid,
                                               ids,
                                               context=context)

        # Snapshot the wizard's screen values, dropping any read() keys
        # that are not actual fields of this model.
        fields = self.fields_get(cursor, uid, context=context).keys()
        wiz = self.read(cursor, uid, ids, [], context)[0]
        for k in wiz.keys():
            if k not in fields:
                del wiz[k]
        res = []
        ctx = context.copy()
        j_pool = JobsPool()
        # Copy the original list
        src_rec_ids = context.get('src_rec_ids', [])[:]
        len_src_rec_ids = len(src_rec_ids)
        new_rec_ids = []

        # Due the original method only parse the templates if the len of
        # src_rec_ids is greater than 1 but we want to minimize the mails to
        # generate we make groups of two. [(1,2), (3,4), (5,6)]
        # We check if the len is mod of 2 if not we make a fisrt group of 3
        if len(src_rec_ids) % 2:
            new_rec_ids.append(tuple(src_rec_ids[:3]))
            src_rec_ids = src_rec_ids[3:]
        # Make the group of 2 for the rest
        new_rec_ids.extend(zip(*(iter(src_rec_ids), ) * 2))
        # Sanity check: the grouping must not lose or duplicate any id.
        len_new_rec_ids = len(list(chain.from_iterable(new_rec_ids)))
        if len_src_rec_ids != len_new_rec_ids:
            raise Exception("Original list is different %s != %s" %
                            (len_src_rec_ids, len_new_rec_ids))
        # One background job per group; screen_vals is removed from ctx
        # again after each enqueue so the local ctx stays clean.
        for rec_id in new_rec_ids:
            ctx['screen_vals'] = wiz
            ctx['src_rec_ids'] = rec_id
            job = self.save_to_mailbox_in_background(cursor, uid, ctx)
            j_pool.add_job(job)
            if 'screen_vals' in ctx:
                del ctx['screen_vals']
        # Wait for all jobs and collect the created mail ids.
        j_pool.join()
        for res_job in j_pool.results.values():
            res += res_job
        # Put the rendered mails on outbox
        mailbox_obj = self.pool.get('poweremail.mailbox')
        mailbox_obj.write(cursor, uid, res, {'folder': 'outbox'}, ctx)
        return res

    @job(queue=config.get('poweremail_render_queue', 'poweremail'))
    def save_to_mailbox_in_background(self, cursor, uid, context):
        """Worker: recreate the wizard from context['screen_vals'], render
        its mails into the mailbox and park them in 'drafts'.

        NOTE(review): ``del ctx['screen_vals']`` runs before the explicit
        "No screen_vals" check, so a context missing that key raises
        KeyError instead of the intended Exception — confirm callers
        always supply it.
        """
        mailbox_obj = self.pool.get('poweremail.mailbox')
        if not context:
            context = {}
        screen_vals = context.get('screen_vals', {})
        ctx = context.copy()
        del ctx['screen_vals']
        if not screen_vals:
            raise Exception("No screen_vals found in the context!")
        # Normalize attachment ids into the (6, 0, ids) write command form.
        attach_ids = screen_vals.get('attachment_ids', [])
        if attach_ids:
            screen_vals['attachment_ids'] = [(6, 0, attach_ids)]

        wiz_id = self.create(cursor, uid, screen_vals, ctx)
        mail_ids = super(PoweremailSendWizard,
                         self).save_to_mailbox(cursor, uid, [wiz_id], ctx)
        mailbox_obj.write(cursor, uid, mail_ids, {'folder': 'drafts'}, ctx)
        return mail_ids
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.     
#
##############################################################################

import threading
import ftpserver
import authorizer
import abstracted_fs
import netsvc
from tools import config

# FTP bind address and port, taken from the server configuration.
HOST = config.get('ftp_server_host', '127.0.0.1')
PORT = int(config.get('ftp_server_port', '8021'))
# Optional passive-mode port range, configured as "low:high".
PASSIVE_PORTS = None
pps = config.get('ftp_server_passive_ports', '').split(':')
if len(pps) == 2:
    PASSIVE_PORTS = int(pps[0]), int(pps[1])

class ftp_server(threading.Thread):
    def log(self, level, message):
        logger = netsvc.Logger()
        logger.notifyChannel('FTP', level, message)

    def run(self):
        autho = authorizer.authorizer()
        ftpserver.FTPHandler.authorizer = autho
        ftpserver.max_cons = 300
Esempio n. 43
0
#.apidoc add-classes: Cursor Connection ConnectionPool

__all__ = ['db_connect', 'close_db']

import uuid
from functools import wraps
import logging
import psycopg2.extensions
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_REPEATABLE_READ
from psycopg2.pool import PoolError
from psycopg2.psycopg1 import cursor as psycopg1cursor
from threading import currentThread
import time
from tools import config

CURSOR_TIMEOUT = int(config.get('cursor_timeout', "0")) # Disabled by default
CONNECTION_LIFETIME = int(config.get('connection_lifetime', "300")) # 5 minutes by default

psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)

_logger = logging.getLogger(__name__)

# Field type name -> PostgreSQL type OIDs (1082 = date, 1083 = time,
# 1114 = timestamp without time zone).
types_mapping = {
    'date': (1082,),
    'time': (1083,),
    'datetime': (1114,),
}

def unbuffer(symb, cr):
    """Convert a database value to ``str``, passing ``None`` through.

    The cursor argument is part of the typecast callback signature and
    is unused here.
    """
    if symb is None:
        return None
    return str(symb)
Esempio n. 44
0
# coding:utf-8
from tools import socket
from tools import config
"""
配置文件变量:
global:
socket_port		: server端口
socket_buffer	: socket通信buffer大小
queue			: server与taskManager之间队列大小

"""
# Socket buffer size and server port from the global configuration.
# NOTE(review): config.get() is called with no arguments and indexed
# like a dict — confirm the tools.config API this relies on.
BUFF_SIZE = config.get()["socket_buffer"]
SOCKET_PORT = config.get()["socket_port"]


def createSocket():
    """Create and return the server-side listening socket bound to
    localhost on SOCKET_PORT (backlog of 5)."""
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.bind(("localhost", SOCKET_PORT))
    listener.listen(5)
    return listener


def connectServer():
    """Open and return a client connection to the local server.

    NOTE(review): connects to hard-coded port 8001 rather than the
    configured SOCKET_PORT — confirm this is intentional.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(('localhost', 8001))
    return s


def recv(sock):
    szBuf = ''
    while (True):
"""
Store database-specific configuration parameters
"""

from osv import osv,fields
import uuid
import datetime
from tools import misc, config

"""
A dictionary holding some configuration parameters to be initialized when the database is created.
"""
_default_parameters = {
    # Unique identifier generated once at database creation.
    "database.uuid": lambda: str(uuid.uuid1()),
    # Creation timestamp in the server's default datetime format.
    "database.create_date": lambda: datetime.datetime.now().strftime(misc.DEFAULT_SERVER_DATETIME_FORMAT),
    # Base URL derived from the configured XML-RPC port.
    "web.base.url": lambda: "http://localhost:%s" % config.get('xmlrpc_port'),
}

class ir_config_parameter(osv.osv):
    """Per-database storage of configuration key-value pairs."""

    _name = 'ir.config_parameter'

    _columns = {
        # select=1 makes the key column indexed/searchable.
        'key': fields.char('Key', size=256, required=True, select=1),
        'value': fields.text('Value', required=True),
    }

    # Enforced at the database level, complementing the index above.
    _sql_constraints = [
        ('key_uniq', 'unique (key)', 'Key must be unique.')
    ]
Esempio n. 46
0
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import threading
import ftpserver
import authorizer
import abstracted_fs
import netsvc
from tools import config

# FTP bind address and port, taken from the server configuration.
HOST = config.get('ftp_server_host', '127.0.0.1')
PORT = int(config.get('ftp_server_port', '8021'))
# Optional passive-mode port range, configured as "low:high".
PASSIVE_PORTS = None
pps = config.get('ftp_server_passive_ports', '').split(':')
if len(pps) == 2:
    PASSIVE_PORTS = int(pps[0]), int(pps[1])


class ftp_server(threading.Thread):
    def log(self, level, message):
        """Forward an FTP server message to the OpenERP 'FTP' log channel."""
        netsvc.Logger().notifyChannel('FTP', level, message)

    def run(self):
        autho = authorizer.authorizer()
        ftpserver.FTPHandler.authorizer = autho
    def get_tariff_prices(self, cursor, uid, tariff_id, municipi_id,
                          fiscal_position_id=None, with_taxes=False,
                          date=False, context=None):
        """
            Returns a dictionary with the prices of the given tariff.

            :param tariff_id: id (or [id]) of a giscedata.polissa.tarifa
            :param municipi_id: res.municipi id used to filter the tariff's
                compatible pricelists
            :param fiscal_position_id: optional account.fiscal.position id;
                when missing, the company-wide default stored as the
                'property_account_position' ir.property (res_id False) is
                used if defined
            :param with_taxes: include taxes in the returned prices
            :param date: optional date; when given, only pricelists with a
                version in force on that date are considered
            :raises osv.except_osv: when no compatible pricelist is found

            Example of return value:
            {
                'bo_social': {
                    {'value': 0.123, 'uom': '€/dia'}
                },
                'comptador': {
                    {'value': 0.123, 'uom': '€/mes'}
                },
                'te': {
                    'P1': {'value': 0.123, 'uom': '€/KW dia'}
                },
                'tp': {
                    'P1': {'value': 0.123, 'uom': '€/KW dia'},
                    'P2': {'value': 0.234, 'uom': '€/KW dia'}
                },
                ...
            }

        """
        if context is None:
            context = {}

        # Accept a single id or a list/tuple of ids.
        if isinstance(tariff_id, (list, tuple)):
            tariff_id = tariff_id[0]

        tariff_obj = self.pool.get('giscedata.polissa.tarifa')
        municipi_obj = self.pool.get('res.municipi')
        fp_obj = self.pool.get('account.fiscal.position')
        uom_obj = self.pool.get('product.uom')
        prod_obj = self.pool.get('product.product')
        prop_obj = self.pool.get('ir.property')

        # get default pricelist for this tariff, restricted to those
        # compatible with the given municipality
        tariff = tariff_obj.browse(cursor, uid, tariff_id)
        pricelist_list = tariff.llistes_preus_comptatibles
        pricelist_municipi = municipi_obj.filter_compatible_pricelists(
            cursor, uid, municipi_id=municipi_id,
            pricelist_list=pricelist_list, context=context)

        if not date:
            pricelist = pricelist_municipi
        else:
            # keep only the pricelists that have a version in force on `date`
            pricelist = []
            for item in pricelist_municipi:
                versions = item.version_id
                for version in versions:
                    date_start = version.date_start
                    date_end = version.date_end
                    if (not date_start or date_start <= date) and \
                        (not date_end or date_end >= date):
                        pricelist.append(item)

        fiscal_position = None
        if not fiscal_position_id:
            # Fall back to the company-wide default fiscal position stored
            # as an unbound ir.property. Guard against an empty search
            # result: the original code indexed [0] unconditionally and
            # raised IndexError when the property did not exist.
            prop_ids = prop_obj.search(
                cursor, uid, [('name', '=', 'property_account_position'),
                              ('res_id', '=', False)])
            if prop_ids:
                prop = prop_obj.browse(cursor, uid, prop_ids[0])
                if prop.value:
                    # ir.property values are stored as 'model,id' strings
                    fiscal_position_id = int(prop.value.split(',')[1])
        if fiscal_position_id:
            fiscal_position = fp_obj.browse(cursor, uid, fiscal_position_id)

        if not pricelist:
            raise osv.except_osv(
                'Warning !',
                'Tariff pricelist not found'
            )

        pricelist = pricelist[0]

        periods = self.__get_all_periods(cursor, uid, tariff, context)

        preus = {}  # dictionary to be returned
        for period in periods:

            if period['tipus'] not in preus:
                preus[period['tipus']] = {}

            product_id = period['product_id']

            # taxes for gkwh are calculated later and taxes for autoconsum
            # are not calculated
            apply_taxes = with_taxes and period['tipus'] not in ['gkwh']

            value, discount, uom_id = pricelist.get_atr_price(
                tipus=period['tipus'], product_id=product_id,
                fiscal_position=fiscal_position, context=context,
                with_taxes=apply_taxes, direccio_pagament=None,
                titular=None
            )

            # apply taxes of the energy term to gkwh price
            if with_taxes and period['tipus'] == 'gkwh':
                value = prod_obj.add_taxes(
                    cursor, uid, period['taxes_product_id'], value,
                    fiscal_position, direccio_pagament=None,
                    titular=None
                )

            # units of measure
            uom = uom_obj.browse(cursor, uid, uom_id)
            preus[period['tipus']][period['name']] = {
                'value': round(value, config.get('price_accuracy', 6)),
                'uom': '€/{}'.format(uom.name if uom.name != 'PCE' else 'kWh')
            }

        value, uom = self.get_bo_social_price(
            cursor, uid, pricelist, fiscal_position=fiscal_position, with_taxes=with_taxes, context=context
        )
        preus['bo_social'] = {
            'value': round(value, config.get('price_accuracy', 6)),
            'uom': '€/{}'.format(uom.name)
        }

        value, uom = self.get_comptador_price(
            cursor, uid, pricelist, fiscal_position=fiscal_position, with_taxes=with_taxes, context=context
        )
        preus['comptador'] = {
            'value': round(value, config.get('price_accuracy', 6)),
            'uom': '€/{}'.format(uom.name.split('/')[1])
        }

        # Indentation fix: the original line used a literal TAB here, which
        # Python 3 rejects outright and Python 2 only tolerated because a
        # tab happens to expand to column 8.
        return preus
Esempio n. 48
0
            'name': log_name,
            'datas_fname': log_name,
            'parent_id': imp_data.log_dir_id.id,
            'datas': log_enc,
        }
        if not self.create(cr, uid, log_args):
            self._logger.error('module document_csv: impossible to create the log file!')

        ir_mail_server = self.pool.get('ir.mail_server')

        if email_to or imp_data.err_mail:
            res_email_to = email_to and [email_to] or False

            email_from = imp_data.mail_from
            if not email_from:
                email_from = config.get('email_from')

            log_attachment = [(log_name, log_content)]
            legend = {}
            if (not isinstance(res, bool) and res[0] >= 0) and integ:
                legend['count'] = res[0]
                subject = imp_data.mail_subject and (imp_data.mail_subject % legend) or 'No subject'
                body = imp_data.mail_body and (imp_data.mail_body % legend) or 'No body'
                mail_cc = [imp_data.mail_cc]
            else:
                subject = imp_data.mail_subject_err and (imp_data.mail_subject_err % legend) or 'No subject'
                body = imp_data.mail_body_err and (imp_data.mail_body_err % {'error': error}) or 'No body'
                mail_cc = [imp_data.mail_cc_err]

            if mail_cc and not res_email_to:
                res_email_to = mail_cc
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import threading
import ftpserver
import authorizer
import abstracted_fs
import netsvc
from tools import config

# FTP server bind address and port, overridable from the server config.
# NOTE(review): the fallback host here is a hard-coded LAN address
# ('192.168.1.116'); the sibling copy of this module in this file defaults
# to '127.0.0.1'. This looks like a leftover local-testing value — confirm
# before shipping.
HOST = config.get('ftp_server_host', '192.168.1.116')
PORT = int(config.get('ftp_server_port', '8021'))
# Optional passive-mode port range, configured as "first:last".
PASSIVE_PORTS = None
pps = config.get('ftp_server_passive_ports', '').split(':')
if len(pps) == 2:
    PASSIVE_PORTS = int(pps[0]), int(pps[1])


class ftp_server(threading.Thread):
    def log(self, level, message):
        """Forward an FTP server message to the OpenERP 'FTP' log channel."""
        netsvc.Logger().notifyChannel('FTP', level, message)

    def run(self):
        autho = authorizer.authorizer()
        ftpserver.FTPHandler.authorizer = autho
Esempio n. 50
0
        <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"/>
        <meta http-equiv="content-type" content="text/html; charset=utf-8" />
        <meta name="renderer" content="webkit"/>
        <title>UpdisERP</title>
        <link rel="shortcut icon" href="/up_web/static/src/img/favicon.ico" type="image/x-icon"/>
        <link rel="stylesheet" href="/web/static/src/css/full.css" />
        %(css)s
        %(js)s
        <script type="text/javascript">
            $(function() {
                var s = new openerp.init(%(modules)s);
                %(init)s
            });
        </script>
    </head>
    <body>
        <!--[if lte IE 8]>
        <script src="//ajax.googleapis.com/ajax/libs/chrome-frame/1/CFInstall.min.js"></script>
        <script>CFInstall.check({mode: "overlay"});</script>
        <![endif]-->
        <div id="openerp-domain-value" style='display: none;'>""" + config.get('domain', "") + """</div>
        <div id='openerp-logout-url' style='display: none;'>""" + config.get('logout_redirect_url', "") + """</div>
    </body>
</html>
"""
main.html_template = html_template
main.set_cookie_and_redirect = set_cookie_and_redirect
#import sys
#sys.modules['|openerp.addons.web.controllers.main'].set_cookie_and_redirect = set_cookie_and_redirect

Esempio n. 51
0
from osv import osv, fields
import uuid
import datetime
from tools import misc, config
from openerp import SUPERUSER_ID
"""
A dictionary holding some configuration parameters to be initialized when the database is created.
"""
# Maps parameter key -> zero-argument factory; each factory is called once,
# when the database is created, to produce the initial value.
_default_parameters = {
    # Unique identifier for this database instance.
    "database.uuid":
    lambda: str(uuid.uuid1()),
    # Creation timestamp in the server's canonical datetime format.
    "database.create_date":
    lambda: datetime.datetime.now().strftime(misc.
                                             DEFAULT_SERVER_DATETIME_FORMAT),
    # Base URL clients use to reach this server, built from the XML-RPC port.
    "web.base.url":
    lambda: "http://localhost:%s" % config.get('xmlrpc_port'),
}


class ir_config_parameter(osv.osv):
    """Per-database key/value store of configuration parameters."""

    _name = 'ir.config_parameter'

    _columns = {
        # Parameter name; indexed (select=1) for fast lookups.
        'key': fields.char('Key', size=256, required=True, select=1),
        # Parameter value, stored as free-form text.
        'value': fields.text('Value', required=True),
    }

    # A key may appear at most once per database.
    _sql_constraints = [
        ('key_uniq', 'unique (key)', 'Key must be unique.'),
    ]
Esempio n. 52
0
        <meta http-equiv="content-type" content="text/html; charset=utf-8" />
        <meta name="renderer" content="webkit"/>
        <title>UpdisERP</title>
        <link rel="shortcut icon" href="/up_web/static/src/img/favicon.ico" type="image/x-icon"/>
        <link rel="stylesheet" href="/web/static/src/css/full.css" />
        %(css)s
        %(js)s
        <script type="text/javascript">
            $(function() {
                var s = new openerp.init(%(modules)s);
                %(init)s
            });
        </script>
    </head>
    <body>
        <!--[if lte IE 8]>
        <script src="//ajax.googleapis.com/ajax/libs/chrome-frame/1/CFInstall.min.js"></script>
        <script>CFInstall.check({mode: "overlay"});</script>
        <![endif]-->
        <div id="openerp-domain-value" style='display: none;'>""" + config.get(
    'domain', "") + """</div>
        <div id='openerp-logout-url' style='display: none;'>""" + config.get(
        'logout_redirect_url', "") + """</div>
    </body>
</html>
"""
main.html_template = html_template
main.set_cookie_and_redirect = set_cookie_and_redirect
#import sys
#sys.modules['|openerp.addons.web.controllers.main'].set_cookie_and_redirect = set_cookie_and_redirect
Esempio n. 53
0
def get_plain_ftp(timeout=10.0):
    """Return an ftplib.FTP connection to the configured FTP server.

    Host and port come from the server configuration ('ftp_server_host' /
    'ftp_server_port'), defaulting to 127.0.0.1:8021.

    :param timeout: connection timeout in seconds
    :return: a connected (not yet logged-in) FTP instance
    """
    ftp = FTP()
    host = config.get('ftp_server_host', '127.0.0.1')
    # config values are strings; FTP.connect requires an integer port
    # (the sibling fragments of this file already do int(...) here).
    port = int(config.get('ftp_server_port', '8021'))
    ftp.connect(host, port, timeout)
    return ftp
Esempio n. 54
0
#.apidoc add-classes: Cursor Connection ConnectionPool

__all__ = ['db_connect', 'close_db']

import uuid
from functools import wraps
import logging
import psycopg2.extensions
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_REPEATABLE_READ
from psycopg2.pool import PoolError
from psycopg2.psycopg1 import cursor as psycopg1cursor
from threading import currentThread
import time
from tools import config

# Cursor/connection lifetime limits read from the server configuration;
# enforcement happens elsewhere in this module. No defaults are supplied,
# so these may be None when unset.
CURSOR_TIMEOUT = config.get('cursor_timeout')
CONNECTION_LIFETIME = config.get('connection_lifetime')

# Make psycopg2 return unicode objects for text columns.
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)

_logger = logging.getLogger(__name__)

# PostgreSQL type OIDs for the temporal types handled specially below
# (1082=date, 1083=time, 1114=timestamp without time zone).
types_mapping = {
    'date': (1082,),
    'time': (1083,),
    'datetime': (1114,),
}

def unbuffer(symb, cr):
    """Cast a raw database value to str, passing None through unchanged."""
    return None if symb is None else str(symb)