def __init__(self, pool, cr):
        """ Dynamically add columns
        """

        super(report_prompt_class, self).__init__(pool, cr)

        for counter in range(0, MAX_PARAMS):
            field_name = PARAM_XXX_STRING_VALUE % counter
            self._columns[field_name] = fields.char('String Value', size=64)

            field_name = PARAM_XXX_BOOLEAN_VALUE % counter
            self._columns[field_name] = fields.boolean('Boolean Value')

            field_name = PARAM_XXX_INTEGER_VALUE % counter
            self._columns[field_name] = fields.integer('Integer Value')

            field_name = PARAM_XXX_NUMBER_VALUE % counter
            self._columns[field_name] = fields.float('Number Value')

            field_name = PARAM_XXX_DATE_VALUE % counter
            self._columns[field_name] = fields.date('Date Value')

            field_name = PARAM_XXX_TIME_VALUE % counter
            self._columns[field_name] = fields.datetime('Time Value')

        self.paramfile = False
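
# A minimal sketch of the naming scheme the loop above relies on; the real
# MAX_PARAMS and PARAM_XXX_* templates are defined elsewhere in the module,
# so the values below are purely illustrative assumptions.
MAX_PARAMS = 32
PARAM_XXX_STRING_VALUE = 'param_%03d_string_value'
PARAM_XXX_BOOLEAN_VALUE = 'param_%03d_boolean_value'
PARAM_XXX_INTEGER_VALUE = 'param_%03d_integer_value'
PARAM_XXX_NUMBER_VALUE = 'param_%03d_number_value'
PARAM_XXX_DATE_VALUE = 'param_%03d_date_value'
PARAM_XXX_TIME_VALUE = 'param_%03d_time_value'

# With such templates, iteration 7 of the loop would register columns named
# 'param_007_string_value', 'param_007_boolean_value', and so on.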
Example #2
    def _group(self, items, prefix):
        """Return an XML chunk which represents a group of fields."""
        names = []
        for k, v in items:
            key = "%s\\%s" % (prefix, k)
            # Mask passwords
            if "passw" in k and not self.show_passwords:
                v = "**********"
            # for the GTK display, we need to replace '_' with '__'.
            # XXX: remove this hack when we switch to the web client.
            k = k.replace("_", "__")
            self._columns[key] = fields.char(k, size=1024)
            self._defaults[key] = v
            names.append(key)

        return (
            '<group col="2" colspan="4">'
            + "".join(['<field name="%s" readonly="1"/>' % _escape(name) for name in names])
            + "</group>"
        )
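
# Standalone sketch (outside OpenERP) of the XML produced by _group above,
# with a stand-in for the _escape helper assumed by the original code.
from xml.sax.saxutils import quoteattr

def _escape_demo(value):
    # quoteattr() returns the value wrapped in quotes; strip them to mimic a
    # plain attribute-escaping helper.
    return quoteattr(value)[1:-1]

def group_demo(items, prefix):
    names = ["%s\\%s" % (prefix, k) for k, _v in items]
    return ('<group col="2" colspan="4">'
            + "".join('<field name="%s" readonly="1"/>' % _escape_demo(n)
                      for n in names)
            + '</group>')

print group_demo([('host', 'localhost'), ('passwd', 'secret')], 'smtp')
# -> <group col="2" colspan="4"><field name="smtp\host" readonly="1"/><field name="smtp\passwd" readonly="1"/></group>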
Example #3
            #
            statement_facade.write(cr, uid, [statement_id], {
                                    'date': st_data['fecha_fin'],
                                    'balance_start': st_data['saldo_ini'],
                                    'balance_end_real': st_data['saldo_fin'],
                                }, context=context)

            # Attach the C43 file to the current statement
            data = base64.encodestring( c43_wizard.file )
            res = statement_facade._attach_file_to_statement(cr, uid, data, statement_id, _('Bank Statement'), _('bank-statement.txt') )

        return {}


    _name = 'l10n.es.bank.statement.import.c43.wizard'

    _columns = {
        'file': fields.binary('Bank Statements File', required=True, filename='file_name'),
        'file_name': fields.char('Bank Statements File', size=64, readonly=True),
        'reco_reference_and_amount': fields.boolean('Reconcile by reference and amount'),
        'reco_vat_and_amount': fields.boolean('Reconcile by VAT number and amount'),
        'reco_amount': fields.boolean('Reconcile by amount'),
        'reco_rules': fields.boolean('Statement Line Rules'),
        'reco_payment_order': fields.boolean('Reconcile payment orders by total amount'),
        'reco_max_days': fields.integer('Max. days from statement date', help='Maximum difference, in days, between the maturity date of the entry to reconcile and the bank statement entry'),
    }

l10n_es_bank_statement_import_c43_wizard()
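
# Hedged helper (not part of the wizard) showing how the binary 'file' field
# above is typically populated from the client side: OpenERP binary fields
# carry base64-encoded text, mirroring the encodestring() call in the code.
import base64

def encode_c43_file(path):
    """Read a C43 statement file from disk and return its base64 payload."""
    handle = open(path, 'rb')
    try:
        return base64.encodestring(handle.read())
    finally:
        handle.close()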

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
class project_task(osv.osv):
    _inherit = "project.task"
    _name = "project.task"
    
    def _get_parent_category(self, cr, uid, ids, fields, args, context=None):
        context = context or {}
        res = {}
        for task in self.browse(cr, uid, ids):
            res[task.id] = task.gap_category_id and task.gap_category_id.parent_id.id or False
        return res
    
    def _task_to_update_after_category_change(self, cr, uid, ids, fields=None, arg=None, context=None):
        if not isinstance(ids, list):
            ids = [ids]
        return self.pool.get('project.task').search(cr, uid, [('gap_category_id', 'in', ids)]) or []
    
    def _get_child_tasks(self, cr, uid, ids, context=None):
        if not isinstance(ids, list):
            ids = [ids]
        cr.execute("SELECT DISTINCT parent_id FROM project_task_parent_rel WHERE task_id in %s", (tuple(ids),))
        task_ids = [x[0] for x in cr.fetchall() if x[0]]
        return task_ids
    
    def _get_child_hours(self, cr, uid, ids, field_names, args, context=None):
        result = {}
        for task in self.browse(cr, uid, ids, context=context):
            res = {}
            child_org_planned_hours = 0.0
            child_planned_hours     = 0.0
            child_remaining_hours   = 0.0
            
            for child in task.child_ids:
                child_org_planned_hours += child.org_planned_hours
                child_planned_hours     += child.planned_hours
                child_remaining_hours   += child.remaining_hours
            
            res['child_org_planned_hours'] = child_org_planned_hours
            res['child_planned_hours']     = child_planned_hours
            res['child_remaining_hours']   = child_remaining_hours
            result[task.id] = res
        return result
    
#    def onchange_planned(self, cr, uid, ids, planned = 0.0, effective = 0.0):
#        return {'value':{'remaining_hours': planned - effective, 'org_planned_hours':planned}}
    
    _columns = {
        'child_org_planned_hours': fields.function(_get_child_hours, string='Child Original Planned Hours', multi='child_hours', help="Computed as the sum of the child tasks' original planned hours.",
            store = {
                'project.task': (_get_child_tasks, ['org_planned_hours','planned_hours'], 10),
            }),
        'child_planned_hours': fields.function(_get_child_hours, string='Child Planned Hours', multi='child_hours', help="Computed as the sum of the child tasks' planned hours.",
            store = {
                'project.task': (_get_child_tasks, ['planned_hours','remaining_hours'], 10),
            }),
        'child_remaining_hours': fields.function(_get_child_hours, string='Child Remaining Hours', multi='child_hours', help="Computed as the sum of the child tasks' remaining hours.",
            store = {
                'project.task': (_get_child_tasks, ['planned_hours','remaining_hours'], 10),
            }),
        
        'module_id': fields.many2one('openerp_module', 'Module', select=True),
        'gap_category_id': fields.many2one('gap_analysis.functionality.category','Category', select=True),
        'parent_category': fields.function(_get_parent_category, method=True, type='many2one', obj='gap_analysis.functionality.category', string='Parent Category', store={'project.task': (lambda self, cr, uid, ids, context: ids, ['gap_category_id'], 10), 'gap_analysis.functionality.category': (_task_to_update_after_category_change, ['parent_id'], 10),}),
        'gap_line_id': fields.many2one('gap_analysis.line', 'Gap Analysis Line', select=True),
        'code_gap': fields.char('Code in Gap', size=6),
        'to_report': fields.boolean('Report to customer'),
        'org_planned_hours': fields.float('Original Planned Hours', help='Original estimated time to do the task, usually set by the project manager when the task is in draft state.'),
    }
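
# Standalone sketch of the aggregation done by _get_child_hours above, using
# plain dictionaries instead of browse records (field names as in the model).
def sum_child_hours(children):
    totals = {'child_org_planned_hours': 0.0,
              'child_planned_hours': 0.0,
              'child_remaining_hours': 0.0}
    for child in children:
        totals['child_org_planned_hours'] += child.get('org_planned_hours', 0.0)
        totals['child_planned_hours'] += child.get('planned_hours', 0.0)
        totals['child_remaining_hours'] += child.get('remaining_hours', 0.0)
    return totals

# sum_child_hours([{'org_planned_hours': 8.0, 'planned_hours': 10.0,
#                   'remaining_hours': 4.0}])
# -> {'child_org_planned_hours': 8.0, 'child_planned_hours': 10.0,
#     'child_remaining_hours': 4.0}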
Example #5
        try:
            im = urllib2.urlopen(_url.encode("UTF-8"))
            if im.headers.maintype != "image":
                raise TypeError(im.headers.maintype)
        except Exception, e:
            path = os.path.join("report_aeroo", "config_pixmaps", "module_banner.png")
            image_file = tools.file_open(path, "rb")
            try:
                file_data = image_file.read()
                self._logo_image = base64.encodestring(file_data)
                return self._logo_image
            finally:
                image_file.close()
        else:
            self._logo_image = base64.encodestring(im.read())
            return self._logo_image

    def _get_image_fn(self, cr, uid, ids, name, args, context=None):
        image = self._get_image(cr, uid, context)
        return dict.fromkeys(ids, image)  # ok to use .fromkeys() as the image is same for all

    _columns = {
        "link": fields.char("Original developer", size=128, readonly=True),
        "config_logo": fields.function(_get_image_fn, string="Image", type="binary", method=True),
    }

    _defaults = {"config_logo": _get_image, "link": "http://www.alistek.com"}


report_aeroo_installer()
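
# Standalone sketch (outside the ORM) of the fetch-or-fallback pattern used
# by the installer wizard above; the URL and fallback path are illustrative.
import base64
import urllib2

def fetch_logo(url, fallback_path):
    try:
        remote = urllib2.urlopen(url)
        if remote.headers.maintype != "image":
            raise TypeError(remote.headers.maintype)
        return base64.encodestring(remote.read())
    except Exception:
        handle = open(fallback_path, "rb")
        try:
            return base64.encodestring(handle.read())
        finally:
            handle.close()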
Example #6
            try:
                file_data = image_file.read()
                self._logo_image = base64.encodestring(file_data)
                return self._logo_image
            finally:
                image_file.close()
        else:
            self._logo_image = base64.encodestring(im.read())
            return self._logo_image

    def _get_image_fn(self, cr, uid, ids, name, args, context=None):
        image = self._get_image(cr, uid, context)
        return dict.fromkeys(ids, image) # ok to use .fromkeys() as the image is same for all 

    _columns = {
        'host': fields.char('Host', size=64, required=True),
        'port': fields.integer('Port', required=True),
        'ooo_restart_cmd': fields.char('OOO restart command', size=256,
            help='Enter the shell command that will be executed to restart the LibreOffice/OpenOffice background process. '
                'The command will be executed as the user of the OpenERP server process, '
                'so you may need to prefix it with sudo and configure your sudoers file to have this command executed without password.'),
        'state': fields.selection([
            ('init', 'Init'),
            ('error', 'Error'),
            ('done', 'Done'),
        ], 'State', select=True, readonly=True),
        'msg': fields.text('Message', readonly=True),
        'error_details': fields.text('Error Details', readonly=True),
        'link': fields.char('Installation Manual', size=128, help='Installation (Dependencies and Base system setup)', readonly=True),
        'config_logo': fields.function(_get_image_fn, string='Image', type='binary', method=True),
            for package in packages:
                register_vals = {
                    'tracking_no': package.TrackingNumber.pyval,
                    'label_image': package.LabelImage.GraphicImage.pyval,
                    'state': 'accepted'
                }
                packages_obj.write(
                    cursor, user, package_record_ids[packages.index(package)],
                    register_vals, context)
            # Change the state of the shipping register record to 'accepted'.
            self.write(cursor, user, shipping_register_record.id, 
                {'state': 'accepted'}, context)
        return True

    _columns = {
        'name': fields.char(string='Name', select="1", size=150, 
            readonly=True),
        'service_type': fields.many2one('ups.codes', 'Service Type',
            domain=[('type', '=', 'service')], select="1"),
        'package_det': fields.one2many('ups.shippingregister.package',
            'shipping_register_rel', string='Packages',),
        'to_address': fields.many2one('res.partner.address',
            'Shipping Address', required=True),
        'from_address': fields.many2one('res.partner.address',
            'From Address', required=True),
        'shipper_address': fields.many2one('res.partner.address',
            'Shipper Address', required=True),
        'saturday_delivery': fields.boolean('Saturday Delivery?'),
        'description': fields.text('Description'),
        'state': fields.selection(STATE_SELECTION, 'Status', readonly=True),

        # The following fields are filled in by UPS
    def _save_file(self, path, b64_file):
        """Save a file encoded in base 64"""
        self._check_filestore(path)
        with open(path, "w") as ofile:
            ofile.write(base64.b64decode(b64_file))
        return True

    def _set_image(self, cr, uid, id, name, value, arg, context=None):
        image = self.browse(cr, uid, id, context=context)
        full_path = self._image_path(cr, uid, image, context=context)
        if full_path:
            return self._save_file(full_path, value)
        return self.write(cr, uid, id, {"file_db_store": value}, context=context)

    _columns = {
        "name": fields.char("Image Title", translate=True, size=100, required=True),
        "extention": fields.char("file extention", size=6),
        "link": fields.boolean(
            "Link?", help="Images can be linked from files on your file system or remote (Preferred)"
        ),
        "file_db_store": fields.binary("Image stored in database"),
        "file": fields.function(
            _get_image, fnct_inv=_set_image, type="binary", method=True, filters="*.png,*.jpg,*.gif"
        ),
        "url": fields.char("File Location", size=128),
        "comments": fields.text("Comments", translate=True),
        "product_id": fields.many2one("product.product", "Product"),
    }

    _defaults = {"link": lambda *a: False}
Example #9
            bnkdel = urllib2.urlopen(context.get('location_bnkdel'))
        except urllib2.HTTPError, err:
            raise osv.except_osv(_('Bad URL for bnkdel.txt!'), _('Fix the URL and try again.'))
        csv = csv_reader(bnkdel, csvEncoding, delimiter=csvDelimiter)
        for row in csv:
            # Each bnkdel.txt row is read positionally: the BIC from
            # column 7 and the deletion date from column 2.
            bic = row[6].strip()
            deleted = row[1].strip()

            # Deactivate banks that have not been updated since the
            # published deletion date.
            ids = bank.search(cr, uid, [('bic', '=', bic), ('last_updated', '<=', deleted)], context=context)
            if ids:
                values = {
                    'active': False,
                }
                bank.write(cr, uid, ids, values, context=context)
        return {
            'view_type': 'form',
            'view_mode': 'tree',
            'res_model': 'res.bank',
            'type': 'ir.actions.act_window',
        }

    _name = 'wizard.update.banks'
    _columns = {
        'location_bnkseek': fields.char('Location of bnkseek.txt', size=500),
        'location_bnkdel': fields.char('Location of bnkdel.txt', size=500),
    }
    _defaults = {
        'location_bnkseek': lambda *a: csvBnkseekPath,
        'location_bnkdel': lambda *a: csvBnkdelPath,
    }
wizard_update_banks()
Example #10
class crm_segmentation_line(osv.osv):
    _name = "crm.segmentation.line"
    _description = "Segmentation line"
    _columns = {
        'name': fields.char('Rule Name', size=64, required=True),
        'segmentation_id': fields.many2one('crm.segmentation', 'Segmentation'),
        'expr_name': fields.selection([('sale','Sale Amount'),('som','State of Mind'),('purchase','Purchase Amount')], 'Control Variable', size=64, required=True),
        'expr_operator': fields.selection([('<','<'),('=','='),('>','>')], 'Operator', required=True),
        'expr_value': fields.float('Value', required=True),
        'operator': fields.selection([('and','Mandatory Expression'),('or','Optional Expression')],'Mandatory / Optional', required=True),
    }
    _defaults = {
        'expr_name': lambda *a: 'sale',
        'expr_operator': lambda *a: '>',
        'operator': lambda *a: 'and'
    }
    def test(self, cr, uid, ids, partner_id):
        expression = {'<': lambda x,y: x<y, '=':lambda x,y:x==y, '>':lambda x,y:x>y}
        ok = False
        lst = self.read(cr, uid, ids)
        for l in lst:
            cr.execute('select * from ir_module_module where name=%s and state=%s', ('account','installed'))
            if cr.fetchone():
                if l['expr_name']=='som':
                    datas = self.pool.get('crm.segmentation').read(cr, uid, [l['segmentation_id'][0]],
                            ['som','som_interval','som_interval_max','som_interval_default', 'som_interval_decrease'])
                    value = crm_operators.som(cr, uid, partner_id, datas[0])
                elif l['expr_name']=='sale':
                    cr.execute('SELECT SUM(l.price_unit * l.quantity) ' \
                            'FROM account_invoice_line l, account_invoice i ' \
                            'WHERE (l.invoice_id = i.id) ' \
                                'AND i.partner_id = %s '\
                                'AND i.type = \'out_invoice\'',
                            (partner_id,))
                    value = cr.fetchone()[0] or 0.0
                    cr.execute('SELECT SUM(l.price_unit * l.quantity) ' \
                            'FROM account_invoice_line l, account_invoice i ' \
                            'WHERE (l.invoice_id = i.id) ' \
                                'AND i.partner_id = %s '\
                                'AND i.type = \'out_refund\'',
                            (partner_id,))
                    value -= cr.fetchone()[0] or 0.0
                elif l['expr_name']=='purchase':
                    cr.execute('SELECT SUM(l.price_unit * l.quantity) ' \
                            'FROM account_invoice_line l, account_invoice i ' \
                            'WHERE (l.invoice_id = i.id) ' \
                                'AND i.partner_id = %s '\
                                'AND i.type = \'in_invoice\'',
                            (partner_id,))
                    value = cr.fetchone()[0] or 0.0
                    cr.execute('SELECT SUM(l.price_unit * l.quantity) ' \
                            'FROM account_invoice_line l, account_invoice i ' \
                            'WHERE (l.invoice_id = i.id) ' \
                                'AND i.partner_id = %s '\
                                'AND i.type = \'in_refund\'',
                            (partner_id,))
                    value -= cr.fetchone()[0] or 0.0
                res = expression[l['expr_operator']](value, l['expr_value'])
                if (not res) and (l['operator']=='and'):
                    return False
                if res:
                    return True
        return True
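
    # Worked illustration (figures invented): for a line with expr_name='sale',
    # expr_operator='>' and expr_value=1000.0, a partner whose summed
    # out_invoice lines minus out_refund lines come to 1250.0 evaluates
    #   expression['>'](1250.0, 1000.0)  ->  True
    # so the line is satisfied; an unsatisfied 'and' (mandatory) line would
    # make test() return False for the whole set of lines instead.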
Example #11
class rental_agreement(osv.osv):
    _name = 'rental.agreement'

    def _get_mount_point(self, cr, uid, ids, name, arg, context={}):
        res = {}
        user = self.pool.get('res.users').browse(cr, uid, uid)
        company = user.company_id
        for top in self.browse(cr, uid, ids, context):
            if user.document_mount:
                mount = user.default_mount_agreement
            else:
                mount = company.default_mount_agreement
            if user.document_client:
                client = user.document_client
            else:
                client = company.default_document_client
            model_obj = self.pool.get('ir.model')
            model_id = model_obj.search(
                cr, uid, [('model', '=', 'rental.agreement')])[0]
            dir_obj = self.pool.get('document.directory')
            dir_id = dir_obj.search(cr, uid,
                                    [('ressource_type_id', '=', model_id),
                                     ('domain', '=', '[]')])[0]
            diry = dir_obj.browse(cr, uid, dir_id, context)
            path = ''
            if client == 'unix':
                path = mount + diry.name + '/' + top.name + '/'
            elif client == 'win':
                path = mount + diry.name + '\\' + top.name + '\\'
            elif client == 'web':
                data_pool = self.pool.get('ir.model.data')
                aid = data_pool._get_id(cr, uid, 'document_ftp',
                                        'action_document_browse')
                aid = data_pool.browse(cr, uid, aid, context=context).res_id
                ftp_url = self.pool.get('ir.actions.url').browse(
                    cr, uid, aid, context=context)
                url = ftp_url.url and ftp_url.url.split('ftp://') or []
                if url:
                    url = url[1]
                    if url[-1] == '/':
                        url = url[:-1]
                else:
                    url = '%s:%s' % (ftpserver.HOST, ftpserver.PORT)
                path = 'ftp://%s@%s' % (
                    user.login, url) + '/' + diry.name + '/' + top.name + '/'
            res[top.id] = path
        return res

    _columns = {
        'name':
        fields.char('Reference', size=64, select=True, readonly=True),
        'partner_id':
        fields.many2one('res.partner',
                        'Tenant',
                        select=True,
                        required=True,
                        domain=[('real_state_type', '=', 'tenant')]),
        'signing_date':
        fields.date('Signing date'),
        'start_date':
        fields.date('Start date', required=True),
        'end_date':
        fields.date('End date', required=True),
        'rent_price':
        fields.float('Rent Price'),
        'notes':
        fields.text('Notes'),
        'top_id':
        fields.many2one('real.state.top',
                        'Top',
                        required=True,
                        ondelete='cascade',
                        select=True),
        'owner_id':
        fields.many2one('res.partner',
                        'Owner',
                        select=True,
                        domain=[('real_state_type', '=', 'owner')]),
        'rent_attachments_url':
        fields.function(_get_mount_point,
                        method=True,
                        store=False,
                        type='char',
                        size=1024,
                        string='Attachments URL'),
    }
    _order = 'start_date'

    def create(self, cr, uid, vals, context=None):
        if context is None:
            context = {}
        vals['name'] = self.pool.get('ir.sequence').get(
            cr, uid, 'rental.agreement')
        res = super(rental_agreement, self).create(cr, uid, vals, context)
        return res

    def onchange_top_id(self, cr, uid, ids, top_id):
        if not top_id:
            return {}
        top = self.pool.get('real.state.top').browse(cr, uid, top_id)
        value = {'owner_id': top.partner_id.id}

        return {'value': value}
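
# Illustration only (mount point and names invented): with client == 'unix',
# a mount of '/mnt/openerp/', a directory named 'Rental Agreements' and an
# agreement 'AGR/2013/0042', _get_mount_point would yield
#   '/mnt/openerp/Rental Agreements/AGR/2013/0042/'
# whereas client == 'web' builds an ftp:// URL from the document_ftp
# 'action_document_browse' action instead.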
class psd_amc_sale_customer_search_wizard(osv.osv_memory):
    _name = "psd.amc.sale.customer.search.wizard"
    _description = "AMC Sale Order Customer Search"

    _columns = {
        'name':
        fields.char('Customer/Company Name', size=100),
        'contact_no':
        fields.char('Contact No', size=12),
        'order_no':
        fields.char('Order No', size=100),
        'invoice_no':
        fields.char('Invoice No', size=32),
        # Fields used by the search view
        'flat_no':
        fields.char('Flat No', size=100),
        'building_name':
        fields.char('Building Name', size=100),
        'sub_area':
        fields.char('Sub Area', size=100),
        'street':
        fields.char('Street', size=100),
        'landmark':
        fields.char('Landmark', size=100),
        'pincode':
        fields.char('Pincode', size=100),
        'psd_amc_sale_customer_search_line':
        fields.one2many('psd.amc.sale.customer.request.line',
                        'product_req_customer_search_id', 'Customers'),
    }

    def default_get(self, cr, uid, fields, context=None):
        customer_line_ids = []
        if context is None: context = {}
        res = super(psd_amc_sale_customer_search_wizard,
                    self).default_get(cr, uid, fields, context=context)
        active_ids = context.get('active_ids')
        if active_ids:
            active_id = active_ids[0]
            product_req_obj = self.pool.get('amc.sale.order')
            partner_obj = self.pool.get('res.partner')
            psd_customer_request_line = self.pool.get(
                'psd.amc.sale.customer.request.line')
            customer = product_req_obj.browse(cr, uid, active_id).customer_name
            customer_ids = partner_obj.search(cr,
                                              uid,
                                              [('name', 'ilike', customer)],
                                              context=context)
            res_create_id = self.create(cr, uid, {'name': customer})
            for customer_id in customer_ids:
                addrs_items = []
                address = ''
                partner = partner_obj.browse(cr, uid, customer_id)
                if partner.apartment not in [' ', False, None]:
                    addrs_items.append(partner.apartment)
                if partner.building not in [' ', False, None]:
                    addrs_items.append(partner.building)
                if partner.sub_area not in [' ', False, None]:
                    addrs_items.append(partner.sub_area)
                if partner.landmark not in [' ', False, None]:
                    addrs_items.append(partner.landmark)
                if partner.street not in [' ', False, None]:
                    addrs_items.append(partner.street)
                if partner.city_id:
                    addrs_items.append(partner.city_id.name1)
                if partner.district:
                    addrs_items.append(partner.district.name)
                if partner.tehsil:
                    addrs_items.append(partner.tehsil.name)
                if partner.state_id:
                    addrs_items.append(partner.state_id.name)
                if partner.zip not in [' ', False, None]:
                    addrs_items.append(partner.zip)
                if len(addrs_items) > 0:
                    last_item = addrs_items[-1]
                    for item in addrs_items:
                        if item != last_item:
                            address = address + item + ',' + ' '
                        if item == last_item:
                            address = address + item
                customer_line_id = ({
                    'customer_name':
                    partner.name,
                    'complete_address':
                    address,
                    'contact_person':
                    partner.contact_name,
                    'partner_id':
                    partner.id,
                    'contact_no':
                    partner.phone_many2one.number,
                })
                customer_line_ids.append(customer_line_id)
        picking_ids = context.get('active_ids', [])
        if not picking_ids or (not context.get('active_model') == 'amc.sale.order') \
            or len(picking_ids) != 1:
            # The wizard may only be run for a single amc.sale.order at a time
            return res
        picking_id, = picking_ids
        if 'name' in fields:
            picking = self.pool.get('amc.sale.order').browse(cr,
                                                             uid,
                                                             picking_id,
                                                             context=context)
            res.update(name=picking.customer_name)
        if 'psd_amc_sale_customer_search_line' in fields:
            picking = self.pool.get(
                'psd.amc.sale.customer.request.line').browse(cr,
                                                             uid,
                                                             picking_id,
                                                             context=context)
            moves = [
                self._partial_move_for(cr, uid, m) for m in customer_line_ids
            ]
            res.update(psd_amc_sale_customer_search_line=moves)
        return res

    def _partial_move_for(self, cr, uid, move):
        customer_name = move.get('customer_name')
        complete_address = move.get('complete_address')
        contact_person = move.get('contact_person')
        partner_id = move.get('partner_id')
        contact_no = move.get('contact_no')
        partial_move = {
            'name': customer_name,
            'cust_address': complete_address,
            'contact_name': contact_person,
            'partner_id': partner_id,
            'contact_no': contact_no,
        }
        return partial_move

    def product_request_search_customer(self, cr, uid, ids, context=None):
        partner_obj = self.pool.get('res.partner')
        display_ids = []
        customer_search_data = self.browse(cr, uid, ids[0])
        loc_line_ids = []
        locations_line_ids = self.browse(
            cr, uid, ids[0]).psd_amc_sale_customer_search_line
        if locations_line_ids:
            for locations_line_id in locations_line_ids:
                loc_line_ids.append(locations_line_id.id)
        self.pool.get('psd.amc.sale.customer.request.line').unlink(
            cr, uid, loc_line_ids, context=context)
        partner_obj = self.pool.get('res.partner')
        psd_customer_request_line = self.pool.get(
            'psd.amc.sale.customer.request.line')
        customer = self.browse(cr, uid, ids[0]).name
        customer_ids = partner_obj.search(cr,
                                          uid, [('name', 'ilike', customer)],
                                          context=context)
        for customer_id in customer_ids:
            addrs_items = []
            address = ''
            partner = partner_obj.browse(cr, uid, customer_id)
            if partner.apartment:
                addrs_items.append(partner.apartment)
            if partner.building:
                addrs_items.append(partner.building)
            if partner.sub_area:
                addrs_items.append(partner.sub_area)
            if partner.landmark:
                addrs_items.append(partner.landmark)
            if partner.street:
                addrs_items.append(partner.street)
            if partner.city_id:
                addrs_items.append(partner.city_id.name1)
            if partner.district:
                addrs_items.append(partner.district.name)
            if partner.tehsil:
                addrs_items.append(partner.tehsil.name)
            if partner.state_id:
                addrs_items.append(partner.state_id.name)
            if partner.zip:
                addrs_items.append(partner.zip)
            if addrs_items:
                last_item = addrs_items[-1]
                for item in addrs_items:
                    if item != last_item:
                        address = address + item + ',' + ' '
                    if item == last_item:
                        address = address + item

            self.pool.get('psd.amc.sale.customer.request.line').create(
                cr, uid, {
                    'name': partner.name,
                    'cust_address': address,
                    'contact_name': partner.contact_name,
                    'partner_id': partner.id,
                    'contact_no': partner.phone_many2one.number,
                    'product_req_customer_search_id': ids[0]
                })
        return True

    def select_searched_customer(self, cr, uid, ids, context=None):
        active_id = context.get('active_id', False)
        product_req_obj = self.pool.get('amc.sale.order')
        partner_obj = self.pool.get('res.partner')
        line_obj = self.pool.get('psd.amc.sale.customer.request.line')
        res = line_obj.search(cr, uid,
                              [('product_req_customer_search_id', '=', ids[0]),
                               ('select_cust', '=', True)])
        if len(res) == 0:
            raise osv.except_osv(_('Warning!'),
                                 _("Please select one customer!"))
        if len(res) > 1:
            raise osv.except_osv(_('Warning!'),
                                 _("Multiple selection not allowed!"))
        customer_id = line_obj.browse(cr, uid, res[0],
                                      context=context).partner_id.id
        customer_search = partner_obj.search(cr,
                                             uid, [('id', '=', customer_id)],
                                             context=context)
        customer = partner_obj.browse(cr,
                                      uid,
                                      customer_search[0],
                                      context=context)

        if customer:
            product_req_obj.write(
                cr, uid, active_id, {
                    'customer_name': customer.name,
                    'partner_id': customer.id,
                    'customer_id': customer.ou_id,
                    'customer_type': 'existing',
                    'title': customer.title,
                    'contact_person':
                    customer.first_name + ' ' + customer.last_name,
                    'last_name': customer.last_name,
                    'middle_name': customer.middle_name,
                    'designation': customer.designation,
                    'premise_type': customer.premise_type,
                    'building': customer.building,
                    'location_name': customer.location_name,
                    'apartment': customer.apartment,
                    'sub_area': customer.sub_area,
                    'street': customer.street,
                    'tehsil': customer.tehsil.id,
                    'landmark': customer.landmark,
                    'state_id': customer.state_id.id,
                    'city_id': customer.city_id.id,
                    'district': customer.district.id,
                    'fax': customer.fax,
                    'ref_by': customer.ref_by.id,
                    'phone_many2one': customer.phone_many2one.id,
                    'zip': customer.zip,
                    'email': customer.email,
                })
        return {'type': 'ir.actions.act_window_close'}

    def clear_customer(self, cr, uid, ids, context=None):
        self.write(cr, uid, ids, {
            'name': None,
            'contact_no': None,
            'invoice_no': None,
            'address': None
        })
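
# Standalone sketch of the address assembly repeated in the wizard above: the
# manual last-item bookkeeping amounts to joining the non-empty parts.
def join_address(parts):
    return ', '.join(part for part in parts if part)

# join_address(['Flat 4', 'Rose Building', '', 'MG Road'])
# -> 'Flat 4, Rose Building, MG Road'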
Example #13
class crm_segmentation(osv.osv):
    '''
        A segmentation is a tool to automatically assign categories to partners.
        These assignments are based on criteria.
    '''
    _name = "crm.segmentation"
    _description = "Partner Segmentation"
    _columns = {
        'name': fields.char('Name', size=64, required=True, help='The name of the segmentation.'),
        'description': fields.text('Description'),
        'categ_id': fields.many2one('res.partner.category', 'Partner Category', required=True, help='The partner category that will be added to partners that match the segmentation criteria after computation.'),
        'exclusif': fields.boolean('Exclusive', help='Check if the category is limited to partners that match the segmentation criteria. If checked, the category is removed from partners that don\'t match the segmentation criteria.'),
        'state': fields.selection([('not running','Not Running'),('running','Running')], 'Execution Status', readonly=True),
        'partner_id': fields.integer('Max Partner ID processed'),
        'segmentation_line': fields.one2many('crm.segmentation.line', 'segmentation_id', 'Criteria', required=True),
        'som_interval': fields.integer('Days per Period', help="A period is the average number of days between two cycles of sale or purchase for this segmentation. It is mainly used to detect whether a partner has not purchased or bought for too long, in which case we assume that the state of mind has decreased because the partner probably bought goods from another supplier. Use this functionality for recurring businesses."),
        'som_interval_max': fields.integer('Max Interval', help="The computation is made on all events that occurred during this interval, the past X periods."),
        'som_interval_decrease': fields.float('Decrease (0>1)', help="If the partner has not purchased (or bought) during a period, decrease the state of mind by this factor. It is a multiplication."),
        'som_interval_default': fields.float('Default (0=None)', help="Default state of mind for the periods preceding the 'Max Interval' computation. This is the default starting state of mind if the partner has no event."),
        'sales_purchase_active': fields.boolean('Use The Sales Purchase Rules', help='Check if you want to use this tab as part of the segmentation rule. If not checked, the criteria beneath will be ignored.')
    }
    _defaults = {
        'partner_id': lambda *a: 0,
        'state': lambda *a: 'not running',
        'som_interval_max': lambda *a: 3,
        'som_interval_decrease': lambda *a: 0.8,
        'som_interval_default': lambda *a: 0.5
    }

    def process_continue(self, cr, uid, ids, start=False):
        categs = self.read(cr,uid,ids,['categ_id','exclusif','partner_id', 'sales_purchase_active', 'profiling_active'])
        for categ in categs:
            if start:
                if categ['exclusif']:
                    cr.execute('delete from res_partner_category_rel where category_id=%s', (categ['categ_id'][0],))

            id = categ['id']

            cr.execute('select id from res_partner order by id ')
            partners = [x[0] for x in cr.fetchall()]

            if categ['sales_purchase_active']:
                to_remove_list=[]
                cr.execute('select id from crm_segmentation_line where segmentation_id=%s', (id,))
                line_ids = [x[0] for x in cr.fetchall()]

                for pid in partners:
                    if (not self.pool.get('crm.segmentation.line').test(cr, uid, line_ids, pid)):
                        to_remove_list.append(pid)
                for pid in to_remove_list:
                    partners.remove(pid)

            for partner_id in partners:
                cr.execute('insert into res_partner_category_rel (category_id,partner_id) values (%s,%s)', (categ['categ_id'][0],partner_id))
            cr.commit()

            self.write(cr, uid, [id], {'state':'not running', 'partner_id':0})
            cr.commit()
        return True

    def process_stop(self, cr, uid, ids, *args):
        return self.write(cr, uid, ids, {'state':'not running', 'partner_id':0})

    def process_start(self, cr, uid, ids, *args):
        self.write(cr, uid, ids, {'state':'running', 'partner_id':0})
        return self.process_continue(cr, uid, ids, start=True)
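
    # Usage note (hedged): the intended flow is process_start(), which
    # switches the segmentation to 'running' and immediately calls
    # process_continue(start=True); that in turn links every partner passing
    # the crm.segmentation.line test() to categ_id and resets the state, e.g.
    #   self.pool.get('crm.segmentation').process_start(cr, uid, [segmentation_id])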
Example #14
class building_accident(osv.osv):
    def create(self, cr, user, vals, context=None):
        """
        Create new entry sequence for every building accident Record
        @param cr: cursor to database
        @param user: id of current user
        @param vals: list of record to be process
        @param context: context arguments, like lang, time zone
        @return: return a result 
      	"""
        if ('name' not in vals) or (vals.get('name') == '/'):
            vals['name'] = self.pool.get('ir.sequence').get(
                cr, user, 'building.accident')
        return super(building_accident, self).create(cr, user, vals, context)
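
    # Illustration (sequence prefix assumed, not defined in this snippet):
    # when a record is created with name == '/', the 'building.accident'
    # ir.sequence supplies the reference, e.g. something like 'BA/2013/0001',
    # so users never enter it by hand.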

    def copy(self, cr, uid, id, default=None, context=None):
        """ Override copy function to edit sequence """
        if default is None:
            default = {}
        if context is None:
            context = {}
        default.update({
            'name':
            self.pool.get('ir.sequence').get(cr, uid, 'building.accident'),
        })
        return super(building_accident, self).copy(cr, uid, id, default,
                                                   context)

    CATEGORY_SELECTION = [
        ('car', 'Cars'),
        ('building', 'Building'),
        ('station', 'Station'),
        ('other', 'Other'),
    ]

    STATE_SELECTION = [
        ('draft', 'Draft'),
        ('section', 'Waiting for service section manager to confirm'),
        ('approve', 'Waiting for Insurance section manager to confirm'),
        ('done', 'Done'),
        ('cancel', 'Cancel'),
    ]

    _name = "building.accident"
    _columns = {
        'name':
        fields.char(
            'Reference',
            size=64,
            required=True,
            select=True,
            readonly=True,
            help=
            "Unique number of the building accident, computed automatically when the record is created"
        ),
        'date':
        fields.date('Date', readonly=True),
        'accident_date':
        fields.date('Accident Date',
                    required=True,
                    states={'done': [('readonly', True)]}),
        'building_id':
        fields.many2one('building.manager',
                        'Building',
                        states={'done': [('readonly', True)]}),
        'station_id':
        fields.many2one('building.manager',
                        'Station',
                        states={'done': [('readonly', True)]}),
        'car_id':
        fields.many2one('fleet.vehicles',
                        'Car',
                        states={'done': [('readonly', True)]}),
        'station_company_id':
        fields.related('station_id',
                       'company_id',
                       type='many2one',
                       relation='res.company',
                       store=True,
                       string='Station Company',
                       readonly=True),
        'building_company_id':
        fields.related('building_id',
                       'company_id',
                       type='many2one',
                       relation='res.company',
                       store=True,
                       string='Building Company',
                       readonly=True),
        'car_department_id':
        fields.related('car_id',
                       'department_id',
                       type='many2one',
                       relation='hr.department',
                       store=True,
                       string='Department',
                       readonly=True),
        'accident_type_id':
        fields.many2one('accident.type',
                        'Accident Type',
                        required=True,
                        states={'done': [('readonly', True)]}),
        'user_id':
        fields.many2one(
            'res.users',
            'Responsible',
            readonly=True,
        ),
        'accident_desc':
        fields.text('Accident Description',
                    size=256,
                    states={'done': [('readonly', True)]}),
        'maintenance_desc':
        fields.text('Maintenance Description',
                    size=256,
                    states={'done': [('readonly', True)]}),
        'company_id':
        fields.many2one('res.company', 'Company', required=True,
                        readonly=True),
        'accident_category':
        fields.selection(CATEGORY_SELECTION,
                         'Category',
                         select=True,
                         states={'done': [('readonly', True)]}),
        'accident_location':
        fields.char('Accident Location',
                    size=128,
                    states={'done': [('readonly', True)]}),
        'estimated_cost':
        fields.float('Estimated Cost',
                     size=64,
                     states={'done': [('readonly', True)]}),
        'notify_insurance_date':
        fields.date('Insurance Date',
                    help="This is the date you notify the insurance company",
                    states={'done': [('readonly', True)]}),
        'notify_workshop_date':
        fields.date('Workshop Date',
                    help="This is the date you notify the maintenance workshop",
                    states={'done': [('readonly', True)]}),
        'coverage_date':
        fields.date('Coverage Date', states={'done': [('readonly', True)]}),
        'repayment_cost':
        fields.float('Repayment Cost',
                     size=64,
                     states={'done': [('readonly', True)]}),
        'partner_id':
        fields.many2one('res.partner',
                        'Partner',
                        states={'done': [('readonly', True)]}),
        'state':
        fields.selection(STATE_SELECTION, 'State', readonly=True, select=True),
    }
    _sql_constraints = [
        ('accident_name_uniq', 'unique(name)',
         'Building Accident Reference must be unique !'),
    ]
    _defaults = {
        'name':
        lambda self, cr, uid, context: '/',
        'user_id':
        lambda self, cr, uid, context: uid,
        'date':
        lambda *a: time.strftime('%Y-%m-%d'),
        'state':
        'draft',
        'accident_category':
        'car',
        'company_id':
        lambda self, cr, uid, c: self.pool.get('res.users').browse(
            cr, uid, uid, context=c).company_id.id,
    }

    def section(self, cr, uid, ids, context=None):
        self.write(cr, uid, ids, {'state': 'section'})
        return True

    def approve(self, cr, uid, ids, context=None):
        self.write(cr, uid, ids, {'state': 'approve'})
        return True

    def done(self, cr, uid, ids, context=None):
        self.write(cr, uid, ids, {'state': 'done'}, context=context)
        return True

    def cancel(self, cr, uid, ids, context=None):
        # Cancel Building Accident
        #if not notes:
        #        notes = ""
        #        u = self.pool.get('res.users').browse(cr, uid,uid).name
        #        notes = notes +'\n'+'Building Accident Cancelled at : '+time.strftime('%Y-%m-%d') + ' by '+ u
        self.write(cr, uid, ids, {'state': 'cancel'})
        return True

    def ir_action_cancel_draft(self, cr, uid, ids, context=None):
        # Reset the Building Accident
        if not len(ids):
            return False
        wf_service = netsvc.LocalService("workflow")
        for id in ids:
            self.write(cr, uid, id, {'state': 'draft'})
            wf_service.trg_delete(uid, 'building.accident', id, cr)
            wf_service.trg_create(uid, 'building.accident', id, cr)
        return True

    def unlink(self, cr, uid, ids, context=None):
        """delete the Building Accident record,
        and create log message to the deleted record
        @return: res,
        """
        buliding_accedint = self.read(cr, uid, ids, ['state'], context=context)
        unlink_ids = []
        for t in buliding_accedint:
            if t['state'] in ['draft', 'cancel']:
                unlink_ids.append(t['id'])
            else:
                raise osv.except_osv(
                    _('Invalid action !'),
                    _('In order to delete a building accident record, you must first cancel it, or in draft state .'
                      ))
        for id in unlink_ids:
            buliding_accedint_name = self.browse(cr, uid, id,
                                                 context=context).name
            message = _("Buliding Accident '%s' has been deleted."
                        ) % buliding_accedint_name
            self.log(cr, uid, id, message)
        return super(building_accident, self).unlink(cr,
                                                     uid,
                                                     unlink_ids,
                                                     context=context)
Example #15
class magento_web(osv.osv):
    _name = 'magento.web'
    _description = 'Magento Web'
    _columns = {
        'magento_flag':
        fields.boolean(
            'Magento web flag',
            help="The Magento active web must have this box checked."),
        'magento_name':
        fields.char('Magento web name', size=64),
        'magento_url':
        fields.char('Magento Url',
                    size=64,
                    help="URL to Magento shop ending with /"),
        'api_user':
        fields.char('Magento Api User', size=64),
        'api_pwd':
        fields.char('Magento Api Password', size=64),
        'auto_update':
        fields.boolean(
            'Auto update products and categories',
            help=
            "If auto update is checked, products and categories that you create, modify or delete in OpenERP are automatically created, modified or deleted in Magento. Also, if an existing product or category in OpenERP is marked as exportable, it is created in Magento; when it is unmarked as exportable, it is deleted in Magento."
        ),
    }

    def _constraint_unique(self, cr, uid, ids):
        web = self.pool.get('magento.web').search(cr, uid, [])
        if len(web) > 1:
            return False
        else:
            return True

    _constraints = [
        (_constraint_unique,
         _('Error: The module has been designed for only one Magento Web.'),
         [])
    ]

    # for lack of a better place to put this
    def createOrders(self, cr, uid, sale_order_array):
        import netsvc
        import magento_utils
        logger = netsvc.Logger()
        logger.notifyChannel(_("Magento Import"), netsvc.LOG_INFO,
                             "createOrders")

        utils = magento_utils.magento_utils()
        results = utils.createOrders(cr, uid, sale_order_array)

        return results

    #Magento Connection
    def connect(self, cr, uid, ids, datas={}, context={}):
        import xmlrpclib
        import netsvc
        connect_logger = netsvc.Logger()

        try:
            magento_id = self.pool.get('magento.web').search(
                cr, uid, [('magento_flag', '=', True)])
            if len(magento_id) > 1:
                raise osv.except_osv(
                    _('UserError'),
                    _('You must have only one shop with Magento flag turned on'
                      ))
            else:
                magento_web = self.pool.get('magento.web').browse(
                    cr, uid, magento_id[0])
                server = xmlrpclib.ServerProxy("%sindex.php/api/xmlrpc" %
                                               magento_web.magento_url)

        except Exception, error:
            connect_logger.notifyChannel(_("Magento Connect"),
                                         netsvc.LOG_ERROR,
                                         _("Error : %s") % error)
            raise osv.except_osv(
                _("UserError"),
                _("You must have a declared website with a valid URL, a Magento username and password"
                  ))

        try:
            session = server.login(magento_web.api_user, magento_web.api_pwd)

        except xmlrpclib.Fault, error:
            raise osv.except_osv(_("MagentoError"),
                                 _("Magento returned %s") % error)
Example #16
class document_storage(osv.osv):
    """ The primary object for data storage.
    Each instance of this object is a storage media, in which our application
    can store contents. The object here controls the behaviour of the storage
    media.
    The referring document.directory-ies will control the placement of data
    into the storage.
    
    It is a bad idea to have multiple document.storage objects pointing to
    the same tree of filesystem storage.
    """
    _name = 'document.storage'
    _description = 'Storage Media'
    _doclog = logging.getLogger('document')

    _columns = {
        'name':
        fields.char('Name', size=64, required=True, select=1),
        'write_date':
        fields.datetime('Date Modified', readonly=True),
        'write_uid':
        fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date':
        fields.datetime('Date Created', readonly=True),
        'create_uid':
        fields.many2one('res.users', 'Creator', readonly=True),
        'user_id':
        fields.many2one('res.users', 'Owner'),
        'group_ids':
        fields.many2many('res.groups', 'document_storage_group_rel', 'item_id',
                         'group_id', 'Groups'),
        'dir_ids':
        fields.one2many('document.directory', 'parent_id', 'Directories'),
        'type':
        fields.selection([
            ('db', 'Database'),
            ('filestore', 'Internal File storage'),
            ('realstore', 'External file storage'),
        ],
                         'Type',
                         required=True),
        'path':
        fields.char('Path',
                    size=250,
                    select=1,
                    help="For file storage, the root path of the storage"),
        'online':
        fields.boolean(
            'Online',
            help=
            "If not checked, media is currently offline and its contents not available",
            required=True),
        'readonly':
        fields.boolean('Read Only', help="If set, the medium is read-only"),
    }

    def _get_rootpath(self, cr, uid, context=None):
        return os.path.join(DMS_ROOT_PATH, cr.dbname)

    _defaults = {
        'user_id': lambda self, cr, uid, ctx: uid,
        'online': lambda *args: True,
        'readonly': lambda *args: False,
        # Note: the defaults below should only be used ONCE for the default
        # storage media. All other times, we should create different paths at least.
        'type': lambda *args: 'filestore',
        'path': _get_rootpath,
    }
    _sql_constraints = [
        # SQL note: a path = NULL doesn't have to be unique.
        ('path_uniq', 'UNIQUE(type,path)', "The storage path must be unique!")
    ]

    def __get_random_fname(self, path):
        flag = None
        # This can be improved
        if os.path.isdir(path):
            for dirs in os.listdir(path):
                if os.path.isdir(os.path.join(path, dirs)) and len(
                        os.listdir(os.path.join(path, dirs))) < 4000:
                    flag = dirs
                    break
        flag = flag or create_directory(path)
        filename = random_name()
        return os.path.join(flag, filename)

    def __prepare_realpath(self,
                           cr,
                           file_node,
                           ira,
                           store_path,
                           do_create=True):
        """ Cleanup path for realstore, create dirs if needed
        
            @param file_node  the node
            @param ira    ir.attachment browse of the file_node
            @param store_path the path of the parent storage object, list
            @param do_create  create the directories, if needed
            
            @return tuple(path "/var/filestore/real/dir/", npath ['dir','fname.ext'] )
        """
        file_node.fix_ppath(cr, ira)
        npath = file_node.full_path() or []
        # npath may contain empty elements, for root directory etc.
        npath = filter(lambda x: x is not None, npath)

        # if self._debug:
        #     self._doclog.debug('Npath: %s', npath)
        for n in npath:
            if n == '..':
                raise ValueError("Invalid '..' element in path")
            for ch in (
                    '*',
                    '|',
                    "\\",
                    '/',
                    ':',
                    '"',
                    '<',
                    '>',
                    '?',
            ):
                if ch in n:
                    raise ValueError("Invalid char %s in path %s" % (ch, n))
        dpath = [
            store_path,
        ]
        dpath += npath[:-1]
        path = os.path.join(*dpath)
        if not os.path.isdir(path):
            if not do_create:
                raise IOError(errno.ENOENT, "Directory does not exist: %s" % path)
            self._doclog.debug("Create dirs: %s", path)
            os.makedirs(path)
        return path, npath

    def get_data(self, cr, uid, id, file_node, context=None, fil_obj=None):
        """ retrieve the contents of some file_node having storage_id = id
            optionally, fil_obj could point to the browse object of the file
            (ir.attachment)
        """
        boo = self.browse(cr, uid, id, context=context)
        if not boo.online:
            raise IOError(errno.EREMOTE, 'medium offline')

        if fil_obj:
            ira = fil_obj
        else:
            ira = self.pool.get('ir.attachment').browse(cr,
                                                        uid,
                                                        file_node.file_id,
                                                        context=context)
        return self.__get_data_3(cr, uid, boo, ira, context)

    def get_file(self, cr, uid, id, file_node, mode, context=None):
        """ Return a file-like object for the contents of some node
        """
        if context is None:
            context = {}
        boo = self.browse(cr, uid, id, context=context)
        if not boo.online:
            raise IOError(errno.EREMOTE, 'medium offline')

        if boo.readonly and mode not in ('r', 'rb'):
            raise IOError(errno.EPERM, "Readonly medium")

        ira = self.pool.get('ir.attachment').browse(cr,
                                                    uid,
                                                    file_node.file_id,
                                                    context=context)
        if boo.type == 'filestore':
            if not ira.store_fname:
                # On a migrated db, some files may have the wrong storage type
                # try to fix their directory.
                if mode in ('r', 'r+'):
                    if ira.file_size:
                        self._doclog.warning(
                            "ir.attachment #%d does not have a filename, but is at filestore, fix it!"
                            % ira.id)
                    raise IOError(errno.ENOENT, 'No file can be located')
                else:
                    store_fname = self.__get_random_fname(boo.path)
                    cr.execute(
                        'UPDATE ir_attachment SET store_fname = %s WHERE id = %s',
                        (store_fname, ira.id))
                    fpath = os.path.join(boo.path, store_fname)
            else:
                fpath = os.path.join(boo.path, ira.store_fname)
            return nodefd_file(file_node, path=fpath, mode=mode)

        elif boo.type == 'db':
            # TODO: we need a better api for large files
            return nodefd_db(file_node, ira_browse=ira, mode=mode)

        elif boo.type == 'db64':
            return nodefd_db64(file_node, ira_browse=ira, mode=mode)

        elif boo.type == 'realstore':
            path, npath = self.__prepare_realpath(cr,
                                                  file_node,
                                                  ira,
                                                  boo.path,
                                                  do_create=(mode[0]
                                                             in ('w', 'a')))
            fpath = os.path.join(path, npath[-1])
            if (not os.path.exists(fpath)) and mode[0] == 'r':
                raise IOError("File not found: %s" % fpath)
            elif mode[0] in ('w', 'a') and not ira.store_fname:
                store_fname = os.path.join(*npath)
                cr.execute(
                    'UPDATE ir_attachment SET store_fname = %s WHERE id = %s',
                    (store_fname, ira.id))
            return nodefd_file(file_node, path=fpath, mode=mode)

        elif boo.type == 'virtual':
            raise ValueError('Virtual storage does not support static files')

        else:
            raise TypeError("No %s storage" % boo.type)

    def __get_data_3(self, cr, uid, boo, ira, context):
        if boo.type == 'filestore':
            if not ira.store_fname:
                # On a migrated db, some files may have the wrong storage type
                # try to fix their directory.
                if ira.file_size:
                    # carlo try to use database
                    cr.execute(
                        'SELECT db_datas FROM ir_attachment WHERE id = %s',
                        (ira.id, ))
                    res = cr.fetchone()
                    if res:
                        return res[0]
                self._doclog.warning(
                    "ir.attachment #%d does not have a filename, but is at filestore, fix it!"
                    % ira.id)
                return None
            fpath = os.path.join(boo.path, ira.store_fname)
            return file(fpath, 'rb').read()
        elif boo.type == 'db64':
            # TODO: we need a better api for large files
            if ira.db_datas:
                out = base64.decodestring(ira.db_datas)
            else:
                out = ''
            return out
        elif boo.type == 'db':
            # We do an explicit query, to avoid type transformations.
            cr.execute('SELECT db_datas FROM ir_attachment WHERE id = %s',
                       (ira.id, ))
            res = cr.fetchone()
            if res:
                return res[0]
            else:
                return ''
        elif boo.type == 'realstore':
            if not ira.store_fname:
                # On a migrated db, some files may have the wrong storage type
                # try to fix their directory.
                if ira.file_size:
                    self._doclog.warning(
                        "ir.attachment #%d does not have a filename, trying the name."
                        % ira.id)
                # sfname = ira.name
            fpath = os.path.join(boo.path, ira.store_fname or ira.name)
            if os.path.exists(fpath):
                return file(fpath, 'rb').read()
            elif not ira.store_fname:
                return None
            else:
                raise IOError(errno.ENOENT, "File not found: %s" % fpath)

        elif boo.type == 'virtual':
            raise ValueError('Virtual storage does not support static files')

        else:
            raise TypeError("No %s storage" % boo.type)

    def set_data(self,
                 cr,
                 uid,
                 id,
                 file_node,
                 data,
                 context=None,
                 fil_obj=None):
        """ store the data.
            This function MUST be used from an ir.attachment. It wouldn't make sense
            to store things persistently for other types (dynamic).
        """
        boo = self.browse(cr, uid, id, context=context)
        if fil_obj:
            ira = fil_obj
        else:
            ira = self.pool.get('ir.attachment').browse(cr,
                                                        uid,
                                                        file_node.file_id,
                                                        context=context)

        if not boo.online:
            raise IOError(errno.EREMOTE, 'medium offline')

        if boo.readonly:
            raise IOError(errno.EPERM, "Readonly medium")

        self._doclog.debug("Store data for ir.attachment #%d" % ira.id)
        store_fname = None
        fname = None
        if boo.type == 'filestore':
            path = boo.path
            try:
                store_fname = self.__get_random_fname(path)
                fname = os.path.join(path, store_fname)
                fp = open(fname, 'wb')
                try:
                    fp.write(data)
                finally:
                    fp.close()
                self._doclog.debug("Saved data to %s" % fname)
                filesize = len(data)  # os.stat(fname).st_size

                # TODO Here, an old file would be left hanging.

            except Exception, e:
                self._doclog.warning("Couldn't save data to %s",
                                     path,
                                     exc_info=True)
                raise except_orm(_('Error!'), str(e))
        elif boo.type == 'db':
            filesize = len(data)
            # will that work for huge data?
            out = psycopg2.Binary(data)
            cr.execute('UPDATE ir_attachment SET db_datas = %s WHERE id = %s',
                       (out, file_node.file_id))
                #logger.notifyChannel('bank_reference',netsvc.LOG_DEBUG,'Using finnish domestic reference as a root for RF number')

            #if myCompany.country_id.code in ('FI', 'RF_fi', 'fi'):
            #    cs = 98 - int(prefix) % 97
            #    if cs < 10:
            #        res = "RF0%s%s" % (cs,prefix)
            #    else:
            #        res = "RF%s%s" % (cs,prefix)
            
            #cs = 98 - int(prefix) % 97
            #if cs < 10:
            #    res = "RF0%s%s" % (cs,prefix)
            #else:
            #    res = "RF%s%s" % (cs,prefix)
            #self.write(cursor, user, ids, {'finref':res})
            reslist[inv.id] = res
        
        
        #logger.notifyChannel('bank_reference',netsvc.LOG_DEBUG,'reslist: %s' % reslist)
        return reslist
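
    # Illustrative sketch of the disabled RF scheme in the commented-out code
    # above (kept only as an assumption; it is not active behaviour):
    #
    #   prefix = '12345'
    #   cs = 98 - int(prefix) % 97
    #   rf = 'RF0%s%s' % (cs, prefix) if cs < 10 else 'RF%s%s' % (cs, prefix)
    #   # -> 'RF7212345' for this prefix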

    _columns = {
        'bank_reference': fields.function(_reference, method=True, type='char',store=True, string='Bank reference'),
        'finref': fields.char('Finnish Reference', required=False)
    }
    
    #_defaults = {
    #    'finref': lambda self,ids: get_ref_number(self, ids),
    #}
account_invoice()
Exemplo n.º 18
0
class acount_invoice(osv.osv):
    _inherit = 'account.invoice'
    _columns = {
        'faktur_pajak_no':
        fields.char('No Faktur Pajak', size=20, required=True),
        'state':
        fields.selection(
            [
                ('draft', 'Draft'),
                ('submited',
                 'Submitted'),  #NEW FLOW TO SUBMIT TO VALIDATE THE INVOICE
                ('proforma', 'Pro-forma'),
                ('proforma2', 'Pro-forma'),
                ('open', 'Open'),
                ('paid', 'Paid'),
                ('cancel', 'Cancelled'),
            ],
            'Status',
            select=True,
            readonly=True,
            track_visibility='onchange',
            help=
            ' * The \'Draft\' status is used when a user is encoding a new and unconfirmed invoice. \
			\n* The \'Pro-forma\' status is used while the invoice is pro-forma and does not yet have an invoice number. \
			\n* The \'Open\' status is used once the user validates the invoice and an invoice number is generated; it remains open until the invoice is paid. \
			\n* The \'Paid\' status is set automatically when the invoice is paid. Its related journal entries may or may not be reconciled. \
			\n* The \'Cancelled\' status is used when the user cancels the invoice.'),
    }

    def draft_submited(self, cr, uid, ids, context={}):
        for s in self.browse(cr, uid, ids, context=context):
            if s.state != 'submited':
                raise osv.except_osv(
                    _('Error'),
                    _('Cannot change the status to draft because the document has already been validated!'
                      ))

        self.write(cr, uid, ids, {'state': 'draft'})

        return True

    def submit_to_validate(self, cr, uid, ids, context={}):
        # res = {}
        res = False
        for d in self.browse(cr, uid, ids, context=context):
            if d.faktur_pajak_no != '000.000-00.00000000' and d.faktur_pajak_no != '0000000000000000':
                num = d.faktur_pajak_no.split('.')
                fp = num[2]
                # search same number
                sameFP = self.search(cr, uid, [('faktur_pajak_no', 'like', fp),
                                               ('state', '!=', 'cancel'),
                                               ('id', '!=', d.id),
                                               ('type', '=', 'out_invoice')])
                # print "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",sameFP
                if len(sameFP) > 0:
                    # if exist
                    res = False
                    browseAllSame = self.browse(cr,
                                                uid,
                                                sameFP,
                                                context=context)
                    errSame = [str(bs.id) for bs in browseAllSame]
                    # print "==============================",errSame
                    raise osv.except_osv(
                        _('Error'),
                        _('This Faktur Pajak number is already in use; a tax invoice number cannot be used more than once. If the number was replaced, please cancel the old invoice first.\r\n'
                          + ',\r\n'.join(errSame)))
                    res = False
                else:
                    res = True

            else:
                if (d.faktur_pajak_no != '000.000-00.00000000'):
                    raise osv.except_osv(_('Error'),
                                         _('Please check the Faktur Pajak number!'))
                    res = False
                else:
                    res = True

            # check submit only from draft
            if d.state != 'draft':
                res = False
                raise osv.except_osv(
                    _('Error'),
                    _("Only documents whose initial status is Draft can be submitted"
                      ))

        if res:
            self.write(cr, uid, d.id, {'state': 'submited'}, context=context)
        return True

    def efak_invoices_export(self, cr, uid, ids, context={}):
        print ids, "Context Invoice E-Faktur Export----------------------", context
        if context is None:
            context = {}

        if 'active_model' in context and context[
                'active_model'] == 'sale.advance.payment.inv':
            ids = ids
        else:
            try:
                ids = context['active_ids']
                # print 'active_ids===============',ids
            except:
                ids = ids

            # print 'IDSSSSSSSSSSSSSSSS=========================+++++++',ids

        searchConf = self.pool.get('ir.config_parameter').search(
            cr, uid, [('key', '=', 'base.print')], context=context)
        browseConf = self.pool.get('ir.config_parameter').browse(
            cr, uid, searchConf, context=context)[0]
        urlTo = str(browseConf.value) + "service/get-invoices-csv&ids=" + str(
            ','.join(map(str, ids))) + "&uid=" + str(uid)
        for browse in self.browse(cr, uid, ids):
            if browse.partner_id.npwp == '11111111111111111111':
                raise osv.except_osv(
                    _('Error!'),
                    _('NPWP ' + browse.partner_id.name + ' = ' +
                      browse.partner_id.npwp +
                      '\r\n\r\nPlease update the NPWP before exporting the data. If this customer has no NPWP or is an individual customer, set the NPWP to 00.000.000.0-000.000'
                      ))
            elif browse.partner_id.npwp == False:
                raise osv.except_osv(
                    _('Error'),
                    _('NPWP ' + browse.partner_id.name +
                      ' is empty.\r\n\r\nIt must be filled in!'))

        return {
            'type': 'ir.actions.client',
            'target': 'new',
            'tag': 'print.out',
            'params': {
                'redir': urlTo
            },
        }

    def efak_invoice_data(self, cr, uid, ids, context={}):

        faktur_data = []
        res = False
        outp = StringIO()
        # sw = csv.writer(outp,delimiter=',',quotechar='"')
        tax_obj = self.pool.get('account.tax')
        cur_obj = self.pool.get('res.currency')
        for inv in self.browse(cr, uid, ids, context):
            cur = inv.currency_id

            KD_JENIS_TRANSAKSI = '01'
            FG_PENGGANTI = '0'
            NOMOR_FAKTUR = inv.kwitansi
            date_invoice = datetime.datetime.strptime(inv.date_invoice,
                                                      '%Y-%m-%d')
            MASA_PAJAK = date_invoice.month
            TAHUN_PAJAK = date_invoice.year
            TANGGAL_FAKTUR = datetime.datetime.strftime(
                date_invoice, '%d/%m/%Y')
            NPWP = inv.partner_id.parent_id.npwp or inv.partner_id.npwp
            NAMA = inv.partner_id.parent_id.name or inv.partner_id.name
            ALAMAT_LENGKAP = inv.partner_id.street
            JUMLAH_DPP = inv.amount_total
            JUMLAH_PPN = inv.amount_tax
            JUMLAH_PPNBM = 0
            ID_KETERANGAN_TAMBAHAN = ""
            FG_UANG_MUKA = "0"
            UANG_MUKA_DPP = "0"
            UANG_MUKA_PPN = "0"
            UANG_MUKA_PPNBM = "0"
            REFERENSI = inv.comment

            faktur_data.append([
                'FK', KD_JENIS_TRANSAKSI, FG_PENGGANTI, NOMOR_FAKTUR,
                MASA_PAJAK, TAHUN_PAJAK, TANGGAL_FAKTUR, NPWP, NAMA,
                ALAMAT_LENGKAP, JUMLAH_DPP, JUMLAH_PPN, JUMLAH_PPNBM,
                ID_KETERANGAN_TAMBAHAN, FG_UANG_MUKA, UANG_MUKA_DPP,
                UANG_MUKA_PPN, UANG_MUKA_PPNBM, REFERENSI
            ])
            # CUSTOMER / SUPLIER DATA
            partner = inv.partner_id.parent_id or inv.partner_id

            faktur_data.append([
                "FAPR", "PT. SUPRABAKRI MANDIRI",
                "Jl. Danau Sunter Utara Blok A No. 9 Tanjung Priok - Jakarta Utara",
                "", "", "", "", ""
            ])

            # LOOP EACH INVOICE ITEM

            for item in inv.invoice_line:

                ppn_total = 0
                tax_compute = tax_obj.compute_all(
                    cr, uid, item.invoice_line_tax_id,
                    (item.price_unit * (1 - (item.discount or 0.0) / 100.0)),
                    item.quantity, item.product_id, inv.partner_id)['taxes'][0]
                # print tax_compute
                faktur_data.append([
                    "OF", item.product_id.default_code, item.name,
                    item.price_unit, item.quantity, item.price_subtotal,
                    inv.total_discount, item.price_subtotal,
                    tax_compute['amount'], "0", "0.0"
                ])
            # sw.writerows(faktur_data)
        # outp.seek(0)
        # data = outp.read()
        # outp.close()
        return faktur_data
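
    # Hedged sketch of how the returned rows could be serialised, following the
    # commented-out csv.writer lines above (`rows` is assumed to be the list
    # returned by efak_invoice_data; this is not part of the original method):
    #
    #   import csv, StringIO
    #   outp = StringIO.StringIO()
    #   csv.writer(outp, delimiter=',', quotechar='"').writerows(rows)
    #   csv_text = outp.getvalue()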
Exemplo n.º 19
0
                args = eval(act.args)
                args.insert(0, self.pool.get(act.model.model).search(cr, uid, []))
                act.args=str(args)
            self._callback(cr, uid, act.model.model, act.name, act.args)
        ###################################
        warning = self.make_warning_message()
        res_text = '\n'
        for i in sorted(self.imported_records):
            res_text+=i+': '+str(len(self.imported_records[i]))+'\n'
        self.imported_records.clear()
        self.warning_text = []
        self.write(cr, uid, self_id, {'log':warning+res_text,'state':'done'})
        return

    _columns = {
        'name':fields.char('Name', size=64),
        'date': fields.date('Date', required=True),
        'import_model_ids':fields.many2many('migration.import_models', 'schedule_models_rel', 'schedule_id', 'import_model_id', 'Import Models'),
        'actions_ids': fields.many2many('migration.model_actions', 'schedule_actions_rel', 'schedule_id', 'action_id', 'Actions'),
        'state':fields.selection([('ready','Ready'),('running','Running'),('error','Error'),('done','Done'),('stop','Stopped')], 'State'),
        'log': fields.text('Log'),
        'print_log':fields.boolean('Print Log to Console'),
        'cron_id':fields.many2one('ir.cron', 'Scheduler', readonly=True),
                
    }
    _defaults = {
        'date': lambda *a: time.strftime('%Y-%m-%d'),
        'state': lambda *a: 'ready',
    }

    def set_start(self, cr, uid, ids, context={}):
Exemplo n.º 20
0
class fiscal_summary(osv.osv):

    _name = 'fiscal.summary'

    _description = ''

    ##-------------------------------------------------------------------------

    def default_get(self, cr, uid, fields, context=None):
        data = super(fiscal_summary, self).default_get(cr, uid, fields,
                                                       context)
        if data.get('date'):
            # ~ obj_per = self.pool.get('account.period')
            dt = datetime.strptime(data['date'], '%Y-%m-%d')
            date_from = (dt - relativedelta(weeks=1))
            date_start = date_from.strftime('%Y-%m-%d')
            date_end = (date_from + relativedelta(days=6)).strftime('%Y-%m-%d')

            data.update({'date_start': date_start, 'date_end': date_end})
            data.update(
                self._get_fiscal_book_ids(cr, uid, date_start, date_end))
        return data
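
    # Worked example of the window computed above (input date is an assumed
    # value): with data['date'] = '2014-01-15', date_from is one week earlier,
    # so date_start = '2014-01-08' and date_end = date_start + 6 days
    # = '2014-01-14'.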

    def name_get(self, cr, uid, ids, context):
        ids = isinstance(ids, (int, long)) and [ids] or ids
        res = []
        for item in self.browse(cr, uid, ids, context={}):
            res.append((item.id, '%s - %s' % (item.date_start, item.date_end)))
        return res

    ##------------------------------------------------------- _internal methods

    def _get_fiscal_book_ids(self, cr, uid, date_start, date_end):
        res = {}
        if not date_start or not date_end:
            return res
        obj_fb = self.pool.get('fiscal.book')
        # ~ obj_per = self.pool.get('account.period')
        # ~ per_brw = obj_per.browse(cr, uid, period_id, context=None)
        dt = datetime.strptime(date_start, '%Y-%m-%d')
        prior_date = (dt - timedelta(days=1)).strftime('%Y-%m-%d')
        fb_ids = False
        # ~ prior_period_id = obj_per.find(cr, uid, prior_date)[0]
        if prior_date:
            prior_summary_id = self.search(cr, uid,
                                           [('date_end', '=', prior_date)])
            if prior_summary_id:
                res.update({'prior_summary_id': prior_summary_id[0]})
                prior_summary = self.browse(cr,
                                            uid,
                                            prior_summary_id[0],
                                            context=None)
                fb_ids = obj_fb.search(
                    cr, uid, [('date_start', '=', prior_summary.date_start),
                              ('date_end', '=', prior_summary.date_end)])
            if fb_ids and len(fb_ids) == 2:
                for fb in obj_fb.browse(cr, uid, fb_ids, context=None):
                    if fb.type == 'purchase':
                        res.update({'fb_purchase_id': fb.id})
                    elif fb.type == 'sale':
                        res.update({'fb_sale_id': fb.id})
        return res

    def _clear_lines(self, cr, uid, ids, context):
        ids = isinstance(ids, (int, long)) and [ids] or ids
        unlink_ids = []
        for item in self.browse(cr, uid, ids, context={}):
            for l in item.line_ids:
                unlink_ids.append((2, l.id))
            self.write(cr, uid, ids, {'line_ids': unlink_ids}, context=context)
        return True

    def _get_summary_key_value(self, cr, uid, summary_lines, line_key,
                               context):
        #~ summary_lines is a dict with lines values
        #~ like: {key :{'base': x, 'tax': x}}
        return summary_lines.get(line_key, {'amount_base': 0, 'amount_tax': 0})

    def _get_summary_lines(self, cr, uid, sum_brw, context):
        res = {}
        if sum_brw:
            for line in sum_brw.line_ids:
                res.update({
                    line.sequence: {
                        'id': line.id,
                        'amount_base': line.amount_base,
                        'amount_tax': line.amount_tax
                    }
                })
        return res

    def _get_sale_book_key(self, sale_totals, line_key):
        res = {'amount_base': 0, 'amount_tax': 0}
        if line_key == 10100:
            res['amount_base'] = sale_totals['exe']['base']
            res['amount_tax'] = sale_totals['exe']['tax']
        elif line_key == 10200:
            res['amount_base'] = sale_totals['exp']['base']
            res['amount_tax'] = sale_totals['exp']['tax']
        elif line_key == 10300:
            res['amount_base'] = sale_totals['int']['gen']['base']
            res['amount_tax'] = sale_totals['int']['gen']['tax']
        elif line_key == 10400:
            res['amount_base'] = sale_totals['int']['adi']['base']
            res['amount_tax'] = sale_totals['int']['adi']['tax']
        elif line_key == 10500:
            res['amount_base'] = sale_totals['int']['red']['base']
            res['amount_tax'] = sale_totals['int']['red']['tax']
        #~ Moved by seniat to 30850
        elif line_key == 10700:
            res['amount_base'] = sale_totals['aju']['base']
            res['amount_tax'] = sale_totals['aju']['tax']
        elif line_key == 30800:
            res['amount_base'] = sale_totals['ret']['base']
            res['amount_tax'] = sale_totals['ret']['tax']
        elif line_key == 30850:
            res['amount_base'] = sale_totals['aj2']['base']
            res['amount_tax'] = sale_totals['aj2']['tax']
        return res

    def _get_purchase_book_key(self, purchase_totals, line_key):
        res = {'amount_base': 0, 'amount_tax': 0}
        if line_key == 20100:
            res['amount_base'] = purchase_totals['exe']['base']
            res['amount_tax'] = purchase_totals['exe']['tax']
        elif line_key == 20200:
            res['amount_base'] = purchase_totals['im']['gen']['base']
            res['amount_tax'] = purchase_totals['im']['gen']['tax']
        elif line_key == 20300:
            res['amount_base'] = purchase_totals['im']['adi']['base']
            res['amount_tax'] = purchase_totals['im']['adi']['tax']
        elif line_key == 20400:
            res['amount_base'] = purchase_totals['im']['red']['base']
            res['amount_tax'] = purchase_totals['im']['red']['tax']
        elif line_key == 20500:
            res['amount_base'] = purchase_totals['do']['gen']['base']
            res['amount_tax'] = purchase_totals['do']['gen']['tax']
        elif line_key == 20600:
            res['amount_base'] = purchase_totals['do']['adi']['base']
            res['amount_tax'] = purchase_totals['do']['adi']['tax']
        elif line_key == 20700:
            res['amount_base'] = purchase_totals['do']['red']['base']
            res['amount_tax'] = purchase_totals['do']['red']['tax']
        elif line_key == 21500:
            res['amount_base'] = purchase_totals['aju']['base']
            res['amount_tax'] = purchase_totals['aju']['tax']
        return res

    def _compute_summary_totals(self, cr, uid, summary_lines, context):
        def sum_lines(summary_lines, keys, amounts=('base', 'tax')):
            data = {'amount_base': 0, 'amount_tax': 0}
            for key in keys:
                if 'base' in amounts:
                    data['amount_base'] += summary_lines[key]['amount_base']
                if 'tax' in amounts:
                    data['amount_tax'] += summary_lines[key]['amount_tax']
            return data

        for key in __summary_total_lines__:
            summary_lines[key].update({'amount_base': 0, 'amount_tax': 0})

        summary_lines[10600].update(
            sum_lines(summary_lines, [10100, 10200, 10300, 10400, 10500]))
        summary_lines[10900].update(
            sum_lines(summary_lines, [10600, 10700, 10800], amounts=('tax',)))
        summary_lines[20800].update(
            sum_lines(summary_lines,
                      [20100, 20200, 20300, 20400, 20500, 20600, 20700]))
        summary_lines[20900].update(
            sum_lines(summary_lines, [20800], amounts=('tax',)))
        summary_lines[21100].update(
            sum_lines(summary_lines, [20900, 21000], amounts=('tax',)))
        summary_lines[21700].update(
            sum_lines(summary_lines,
                      [21100, 21200, 21300, 21400, 21500, 21600],
                      amounts=('tax',)))
        if summary_lines[10900]['amount_tax'] > \
                summary_lines[21700]['amount_tax']:
            summary_lines[30100]['amount_tax'] = (
                summary_lines[10900]['amount_tax'] -
                summary_lines[21700]['amount_tax'])
        else:
            summary_lines[30200]['amount_tax'] = (
                summary_lines[21700]['amount_tax'] -
                summary_lines[10900]['amount_tax'])
        summary_lines[30600]['amount_tax'] = (
            summary_lines[30100]['amount_tax'] -
            summary_lines[30300]['amount_base'] -
            summary_lines[30400]['amount_base'] +
            summary_lines[30450]['amount_base'] -
            summary_lines[30500]['amount_base'])
        summary_lines[31100].update(
            sum_lines(summary_lines, [30700, 30800, 30850, 30900, 31000]))
        if summary_lines[30600]['amount_tax']:
            summary_lines[31200]['amount_tax'] = \
                summary_lines[30600]['amount_tax'] if \
                summary_lines[30600]['amount_tax'] <= \
                summary_lines[31100]['amount_base'] else \
                summary_lines[31100]['amount_base']
        summary_lines[31300]['amount_base'] = (
            summary_lines[31100]['amount_base'] -
            summary_lines[31200]['amount_tax'])
        summary_lines[31400]['amount_tax'] = (
            summary_lines[30600]['amount_tax'] -
            summary_lines[31200]['amount_tax'])
        summary_lines[31900].update(
            sum_lines(summary_lines, [31500, 31600, 31700, 31800]))
        if summary_lines[31400]['amount_tax']:
            summary_lines[32000]['amount_tax'] = \
                summary_lines[31400]['amount_tax'] if \
                summary_lines[31400]['amount_tax'] <= \
                summary_lines[31900]['amount_base'] else \
                summary_lines[31900]['amount_base']
        summary_lines[32100]['amount_base'] = (
            summary_lines[31900]['amount_base'] -
            summary_lines[32000]['amount_tax'])
        summary_lines[32200]['amount_tax'] = (
            summary_lines[31400]['amount_tax'] -
            summary_lines[32000]['amount_tax'])
        return summary_lines
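
    # For instance (illustrative values only): with
    #   summary_lines[10100] = {'amount_base': 100.0, 'amount_tax': 12.0}
    # and lines 10200..10500 all zero,
    # sum_lines(summary_lines, [10100, 10200, 10300, 10400, 10500]) returns
    # {'amount_base': 100.0, 'amount_tax': 12.0}, which is stored on line 10600.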

    ##--------------------------------------------------------- function fields

    _rec_name = 'date_start, date_end'

    _order = 'date_end desc'

    _columns = {
        'company_id':
        fields.many2one('res.company',
                        'Company',
                        required=True,
                        readonly=True,
                        ondelete='restrict'),
        'prior_summary_id':
        fields.many2one('fiscal.summary',
                        'Prior summary book',
                        readonly=False,
                        required=False,
                        help='Show prior fiscal summary book',
                        ondelete='restrict'),
        'date':
        fields.date('Date dec.',
                    required=True,
                    readonly=True,
                    states={'draft': [('readonly', False)]},
                    select=True,
                    help="Date when document was declared to SENIAT"),
        'period_id':
        fields.many2one('account.period',
                        'Period',
                        required=False,
                        readonly=True,
                        states={'draft': [('readonly', False)]},
                        ondelete='restrict'),
        'date_start':
        fields.date(  # Set required in view
            'Date from',
            readonly=True,
            states={'draft': [('readonly', False)]}),
        'date_end':
        fields.date(  # Set required in view
            'Date to',
            readonly=True,
            states={'draft': [('readonly', False)]}),
        'fb_purchase_id':
        fields.many2one('fiscal.book',
                        'Fiscal Purchase Book',
                        readonly=True,
                        required=False,
                        help='Show Fiscal Purchase Book',
                        ondelete='restrict'),
        'fb_sale_id':
        fields.many2one('fiscal.book',
                        'Fiscal Sale Book',
                        readonly=True,
                        required=False,
                        help='Show Fiscal Sale Book',
                        ondelete='restrict'),
        'line_ids':
        fields.one2many(
            'fiscal.summary.lines',
            'line_id',
            'Fiscal summary lines',
            readonly=True,
            states={'draft': [('readonly', False)]},
        ),
        'move_id':
        fields.many2one('account.move',
                        'Accounting entries',
                        ondelete='restrict',
                        help="The move of this entry line.",
                        select=True,
                        readonly=False),
        'state':
        fields.selection([('draft', 'Draft'), ('done', 'Done'),
                          ('cancel', 'Cancelled')],
                         string='State',
                         required=True,
                         readonly=True),
        'narration':
        fields.text('Notes', readonly=False),
        'number':
        fields.char('Number',
                    size=64,
                    required=False,
                    readonly=True,
                    states={'draft': [('readonly', False)]},
                    help="Declaration number"),
        'certificate':
        fields.char('Certificate',
                    size=64,
                    required=False,
                    readonly=True,
                    states={'draft': [('readonly', False)]},
                    help="Certificate number"),
    }

    _defaults = {
        'date':
        lambda *a: time.strftime('%Y-%m-%d'),
        'state':
        lambda *a: 'draft',
        'company_id':
        lambda self, cr, uid, c: self.pool.get('res.company').
        _company_default_get(cr, uid, self._name, context=c),
    }

    _sql_constraints = [
        ('period_uniq', 'UNIQUE(date_end)', 'The period must be unique!'),
    ]

    ##-------------------------------------------------------------------------

    ##---------------------------------------------------------- public methods

    def get_line_values(self, cr, uid, ids, params, context=None):
        ids = isinstance(ids, (int, long)) and [ids] or ids
        context = context or {}
        line_key = params.get('line_key')
        sum_brw = params.get('sum_brw')
        #~ Get values in sale book
        if line_key in __sale_book_keys__:
            return self._get_sale_book_key(params['sale_totals'], line_key)
        #~ Get values in purchase book
        elif line_key in __purchase_book_keys__:
            return self._get_purchase_book_key(params['purchase_totals'],
                                               line_key)
        #~ Load values from prior summary
        elif line_key in __prior_sumary_key_relations__ and \
                sum_brw.prior_summary_id:
            old_key = __prior_sumary_key_relations__[line_key]
            return self._get_summary_key_value(cr, uid,
                                               params['old_summary_lines'],
                                               old_key, context)
        else:
            return {'amount_base': 0, 'amount_tax': 0}
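
    # Hedged example of the dispatch above (assuming 10100 is listed in
    # __sale_book_keys__, as _get_sale_book_key suggests): a call with
    # params = {'line_key': 10100, 'sale_totals': totals, ...} is routed to
    # _get_sale_book_key(totals, 10100); keys that only appear in
    # __prior_sumary_key_relations__ are read from the prior summary instead,
    # and anything else yields zero amounts.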

    ##-------------------------------------------------------- buttons (object)

    def button_load(self, cr, uid, ids, context=None):
        obj_fb = self.pool.get('fiscal.book')
        for item in self.browse(cr, uid, ids, context={}):
            self._clear_lines(cr, uid, [item.id], context)
            lines = []
            sale_totals = obj_fb._compute_sale_book_totals(
                cr, uid, item.fb_sale_id, context)
            purchase_totals = obj_fb._compute_purchase_book_totals(
                cr, uid, item.fb_purchase_id, context)
            params = {
                'sum_brw':
                item,
                'sale_totals':
                sale_totals[0],
                'purchase_totals':
                purchase_totals[0],
                'old_summary_lines':
                self._get_summary_lines(cr, uid, item.prior_summary_id,
                                        context)
            }
            for key in __summary_book_1__:
                data = {
                    'name': __summary_book_lines__.get(key, ''),
                    'sequence': key,
                }
                params['line_key'] = key
                data.update(self.get_line_values(cr, uid, ids, params,
                                                 context))
                lines.append((0, 0, data))
            self.write(cr, uid, [item.id], {'line_ids': lines}, context)
            self.button_compute(cr, uid, ids, context)
        return True

    def button_compute(self, cr, uid, ids, context=None):
        #~ Compute summary totals. Any computed value must be
        #~ added to __summary_total_lines__ tuple
        ids = isinstance(ids, (int, long)) and [ids] or ids
        obj_lin = self.pool.get('fiscal.summary.lines')
        for item in self.browse(cr, uid, ids, context={}):
            summary_lines = self._get_summary_lines(cr, uid, item, context)
            summary_lines = self._compute_summary_totals(
                cr, uid, summary_lines, context)
            for key in __summary_total_lines__:
                data = summary_lines.get(key)
                id = data.pop('id')
                obj_lin.write(cr, uid, [id], data, context=context)
        return True

    ##------------------------------------------------------------ on_change...

    def on_change_period_id(self, cr, uid, ids, date_start, date_end):
        res = {'fb_purchase_id': 0, 'fb_sale_id': 0}
        if date_start and date_end:
            res.update(self._get_fiscal_book_ids(cr, uid, date_start,
                                                 date_end))
        return {'value': res}

    ##----------------------------------------------------- create write unlink

    def create(self, cr, uid, vals, context=None):
        # ~ if vals.get('period_id'):
        # ~ vals.update(self._get_fiscal_book_ids(cr, uid, vals['period_id']))
        res = super(fiscal_summary, self).create(cr, uid, vals, context)
        return res

    def write(self, cr, uid, ids, vals, context=None):
        # ~ if vals.get('period_id'):
        # ~ vals.update(
        # ~ self._get_fiscal_book_ids(cr, uid, vals['period_id']) or
        # ~ {'fb_purchase_id': 0, 'fb_sale_id': 0})
        res = super(fiscal_summary, self).write(cr, uid, ids, vals, context)
        return res

    ##---------------------------------------------------------------- Workflow

    def button_draft(self, cr, uid, ids, context=None):
        vals = {'state': 'draft'}
        return self.write(cr, uid, ids, vals, context)

    def button_done(self, cr, uid, ids, context=None):
        vals = {'state': 'done'}
        return self.write(cr, uid, ids, vals, context)

    def button_cancel(self, cr, uid, ids, context=None):
        vals = {'state': 'cancel'}
        return self.write(cr, uid, ids, vals, context)

    def test_draft(self, cr, uid, ids, *args):
        return True

    def test_done(self, cr, uid, ids, *args):
        for item in self.browse(cr, uid, ids, context={}):
            if not item.line_ids:
                raise osv.except_osv(_('Error!'),
                                     _('You must load some lines'))
            elif len(item.line_ids) != len(__summary_book_lines__):
                raise osv.except_osv(
                    _('Error!'),
                    _('Invalid lines, please reload summary lines'))
            if item.fb_purchase_id and item.fb_purchase_id.state != 'done':
                raise osv.except_osv(_('Error!'),
                                     _('You must set "Done" purchases book'))
            if item.fb_sale_id and item.fb_sale_id.state != 'done':
                raise osv.except_osv(_('Error!'),
                                     _('You must set "Done" sales book'))
        return True

    def test_cancel(self, cr, uid, ids, *args):
        return True
Exemplo n.º 21
0
                                input_model[ir_model_field.name]

                record_dict['fields'] = field_dict
                data_json.append(record_dict)

        out = base64.encodestring(json.dumps(data_json, indent=4))

        return self.write(cr, uid, ids, {
            'state': 'done', 'dm_export_data_wizard_data': out,
            'name': json_exported_file_name
        }, context=context)

    EXPORT_TYPE = (('a', 'based on model'), ('b', 'based on module'))

    _columns = {
        'name': fields.char('Filename', size=128, readonly=True),
        'dm_export_data_wizard_type': fields.selection(EXPORT_TYPE,
            'DM Export Data Wizard Type'),
        'ir_model_id': fields.many2one('ir.model', 'IR Model'),
        'ir_module_module_id': fields.many2one('ir.module.module',
                                               'IR Module Module'),
        'dm_export_data_wizard_data': fields.binary('Export Data Wizard Data',
                                                 readonly=True),
        'state': fields.selection([('init', 'init'), ('done', 'done')],
                                  'state', readonly=True),
    }

    _defaults = {'state': 'init', 'dm_export_data_wizard_type': 'a'}

DmExportDataWizard()
Exemplo n.º 22
0
class document_file(osv.osv):
    _inherit = 'ir.attachment'
    _rec_name = 'datas_fname'

    def _attach_parent_id(self, cr, uid, ids=None, context=None):
        """Migrate ir.attachments to the document module.

        When the 'document' module is loaded on a db that has had plain attachments,
        they will need to be attached to some parent folder, and be converted from
        base64-in-bytea to raw-in-bytea format.
        This function performs the internal migration, once and forever, for these
        attachments. It cannot be done through the nominal ORM maintenance code,
        because the root folder is only created after the document_data.xml file
        is loaded.
        It also establishes the parent_id NOT NULL constraint that ir.attachment
        should have had (but would have failed if plain attachments contained null
        values).
        It also updates the file size for the previously created attachments.
        """

        parent_id = self.pool.get('document.directory')._get_root_directory(
            cr, uid)
        if not parent_id:
            logging.getLogger('document').warning(
                "at _attach_parent_id(), still not able to set the parent!")
            return False

        if ids is not None:
            raise NotImplementedError(
                "Ids is just there by convention! Don't use it yet, please.")

        cr.execute("UPDATE ir_attachment " \
                    "SET parent_id = %s, db_datas = decode(encode(db_datas,'escape'), 'base64') " \
                    "WHERE parent_id IS NULL", (parent_id,))

        cr.execute(
            "UPDATE ir_attachment SET file_size=length(db_datas) WHERE file_size = 0 and type = 'binary'"
        )

        cr.execute("ALTER TABLE ir_attachment ALTER parent_id SET NOT NULL")

        return True
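
    # The UPDATE above converts base64-in-bytea payloads to raw bytes in SQL.
    # A rough Python equivalent for a single value (illustrative sketch,
    # assuming `stored` holds the textual content of db_datas) would be:
    #
    #   import base64
    #   raw = base64.decodestring(stored)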

    def _get_filestore(self, cr):
        return os.path.join(DMS_ROOT_PATH, cr.dbname)

    def _data_get(self, cr, uid, ids, name, arg, context=None):
        if context is None:
            context = {}
        fbrl = self.browse(cr, uid, ids, context=context)
        nctx = nodes.get_node_context(cr, uid, context={})
        # nctx will /not/ inherit the caller's context. Most of
        # it would be useless, anyway (like active_id, active_model,
        # bin_size etc.)
        result = {}
        bin_size = context.get('bin_size', False)
        for fbro in fbrl:
            fnode = nodes.node_file(None, None, nctx, fbro)
            if not bin_size:
                data = fnode.get_data(cr, fbro)
                result[fbro.id] = base64.encodestring(data or '')
            else:
                result[fbro.id] = fnode.get_data_len(cr, fbro)

        return result

    #
    # This code can be improved
    #
    def _data_set(self, cr, uid, id, name, value, arg, context=None):
        if not value:
            return True
        fbro = self.browse(cr, uid, id, context=context)
        nctx = nodes.get_node_context(cr, uid, context={})
        fnode = nodes.node_file(None, None, nctx, fbro)
        res = fnode.set_data(cr, base64.decodestring(value), fbro)
        return res
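
    # Hedged usage sketch (identifiers are assumptions): because 'datas' is a
    # function field backed by _data_get/_data_set, callers read and write it
    # as base64-encoded content, e.g.
    #
    #   import base64
    #   attachment_obj.write(cr, uid, [att_id],
    #                        {'datas': base64.encodestring('hello world')})
    #
    # Reading 'datas' returns base64 as well, or only the content length when
    # the context contains {'bin_size': True}, as implemented in _data_get.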

    _columns = {
        # Columns from ir.attachment:
        'create_date':
        fields.datetime('Date Created', readonly=True),
        'create_uid':
        fields.many2one('res.users', 'Creator', readonly=True),
        'write_date':
        fields.datetime('Date Modified', readonly=True),
        'write_uid':
        fields.many2one('res.users', 'Last Modification User', readonly=True),
        'res_model':
        fields.char('Attached Model',
                    size=64,
                    readonly=True,
                    change_default=True),
        'res_id':
        fields.integer('Attached ID', readonly=True),

        # If ir.attachment contained any data before document is installed, preserve
        # the data, don't drop the column!
        'db_datas':
        fields.binary('Data', oldname='datas'),
        'datas':
        fields.function(_data_get,
                        fnct_inv=_data_set,
                        string='File Content',
                        type="binary",
                        nodrop=True),

        # Fields of document:
        'user_id':
        fields.many2one('res.users', 'Owner', select=1),
        # 'group_ids': fields.many2many('res.groups', 'document_group_rel', 'item_id', 'group_id', 'Groups'),
        # the directory id now is mandatory. It can still be computed automatically.
        'parent_id':
        fields.many2one('document.directory',
                        'Directory',
                        select=1,
                        required=True,
                        change_default=True),
        'index_content':
        fields.text('Indexed Content'),
        'partner_id':
        fields.many2one('res.partner', 'Partner', select=1),
        'file_size':
        fields.integer('File Size', required=True),
        'file_type':
        fields.char('Content Type', size=128),

        # fields used for file storage
        'store_fname':
        fields.char('Stored Filename', size=200),
    }
    _order = "id desc"

    def __get_def_directory(self, cr, uid, context=None):
        dirobj = self.pool.get('document.directory')
        return dirobj._get_root_directory(cr, uid, context)

    _defaults = {
        'user_id': lambda self, cr, uid, ctx: uid,
        'file_size': lambda self, cr, uid, ctx: 0,
        'parent_id': __get_def_directory
    }
    _sql_constraints = [
        # filename_uniq is not possible in pure SQL
    ]

    def _check_duplication(self, cr, uid, vals, ids=[], op='create'):
        name = vals.get('name', False)
        parent_id = vals.get('parent_id', False)
        res_model = vals.get('res_model', False)
        res_id = vals.get('res_id', 0)
        if op == 'write':
            for file in self.browse(cr, uid, ids):  # FIXME fields_only
                if not name:
                    name = file.name
                if not parent_id:
                    parent_id = file.parent_id and file.parent_id.id or False
                if not res_model:
                    res_model = file.res_model and file.res_model or False
                if not res_id:
                    res_id = file.res_id and file.res_id or 0
                res = self.search(cr, uid, [('id', '<>', file.id),
                                            ('name', '=', name),
                                            ('parent_id', '=', parent_id),
                                            ('res_model', '=', res_model),
                                            ('res_id', '=', res_id)])
                if len(res):
                    return False
        if op == 'create':
            res = self.search(cr, uid, [('name', '=', name),
                                        ('parent_id', '=', parent_id),
                                        ('res_id', '=', res_id),
                                        ('res_model', '=', res_model)])
            if len(res):
                return False
        return True

    def check(self, cr, uid, ids, mode, context=None, values=None):
        """Check access wrt. res_model, relax the rule of ir.attachment parent

        With 'document' installed, everybody will have access to attachments of
        any resources they can *read*.
        """
        return super(document_file, self).check(cr,
                                                uid,
                                                ids,
                                                mode='read',
                                                context=context,
                                                values=values)

    def search(self,
               cr,
               uid,
               args,
               offset=0,
               limit=None,
               order=None,
               context=None,
               count=False):
        # Grab ids, bypassing 'count'
        ids = super(document_file, self).search(cr,
                                                uid,
                                                args,
                                                offset=offset,
                                                limit=limit,
                                                order=order,
                                                context=context,
                                                count=False)
        if not ids:
            return 0 if count else []

        # Work with a set, as list.remove() is prohibitive for large lists of documents
        # (takes 20+ seconds on a db with 100k docs during search_count()!)
        orig_ids = ids
        ids = set(ids)

        # Filter out documents that are in directories that the user is not allowed to read.
        # Must use pure SQL to avoid access rules exceptions (we want to remove the records,
        # not fail), and the records have been filtered in parent's search() anyway.
        cr.execute(
            'SELECT id, parent_id from "%s" WHERE id in %%s' % self._table,
            (tuple(ids), ))
        doc_pairs = cr.fetchall()
        parent_ids = set(zip(*doc_pairs)[1])
        visible_parent_ids = self.pool.get('document.directory').search(
            cr, uid, [('id', 'in', list(parent_ids))])
        disallowed_parents = parent_ids.difference(visible_parent_ids)
        for doc_id, parent_id in doc_pairs:
            if parent_id in disallowed_parents:
                ids.remove(doc_id)

        # sort result according to the original sort ordering
        result = [id for id in orig_ids if id in ids]
        return len(result) if count else result

    def copy(self, cr, uid, id, default=None, context=None):
        if not default:
            default = {}
        if 'name' not in default:
            name = self.read(cr, uid, [id], ['name'])[0]['name']
            default.update({'name': name + " " + _("(copy)")})
        return super(document_file, self).copy(cr,
                                               uid,
                                               id,
                                               default,
                                               context=context)

    def write(self, cr, uid, ids, vals, context=None):
        result = False
        if not isinstance(ids, list):
            ids = [ids]
        res = self.search(cr, uid, [('id', 'in', ids)])
        if not len(res):
            return False
        if not self._check_duplication(cr, uid, vals, ids, 'write'):
            raise osv.except_osv(_('ValidateError'),
                                 _('File name must be unique!'))

        # if nodes call this write(), they must skip the code below
        from_node = context and context.get('__from_node', False)
        if (('parent_id' in vals) or ('name' in vals)) and not from_node:
            # perhaps this file is renaming or changing directory
            nctx = nodes.get_node_context(cr, uid, context={})
            dirobj = self.pool.get('document.directory')
            if 'parent_id' in vals:
                dbro = dirobj.browse(cr,
                                     uid,
                                     vals['parent_id'],
                                     context=context)
                dnode = nctx.get_dir_node(cr, dbro)
            else:
                dbro = None
                dnode = None
            ids2 = []
            for fbro in self.browse(cr, uid, ids, context=context):
                if ('parent_id' not in vals or fbro.parent_id.id == vals['parent_id']) \
                    and ('name' not in vals or fbro.name == vals['name']):
                    ids2.append(fbro.id)
                    continue
                fnode = nctx.get_file_node(cr, fbro)
                res = fnode.move_to(cr, dnode or fnode.parent,
                                    vals.get('name', fbro.name), fbro, dbro,
                                    True)
                if isinstance(res, dict):
                    vals2 = vals.copy()
                    vals2.update(res)
                    wid = res.get('id', fbro.id)
                    result = super(document_file, self).write(cr,
                                                              uid,
                                                              wid,
                                                              vals2,
                                                              context=context)
                    # TODO: how to handle/merge several results?
                elif res == True:
                    ids2.append(fbro.id)
                elif res == False:
                    pass
            ids = ids2
        if 'file_size' in vals:  # only write that field using direct SQL calls
            del vals['file_size']
        if ids and vals:
            result = super(document_file, self).write(cr,
                                                      uid,
                                                      ids,
                                                      vals,
                                                      context=context)
        return result

    def create(self, cr, uid, vals, context=None):
        if context is None:
            context = {}
        vals['parent_id'] = context.get('parent_id', False) or vals.get(
            'parent_id', False)
        if not vals['parent_id']:
            vals['parent_id'] = self.pool.get(
                'document.directory')._get_root_directory(cr, uid, context)
        if not vals.get('res_id', False) and context.get(
                'default_res_id', False):
            vals['res_id'] = context.get('default_res_id', False)
        if not vals.get('res_model', False) and context.get(
                'default_res_model', False):
            vals['res_model'] = context.get('default_res_model', False)
        if vals.get('res_id', False) and vals.get('res_model', False) \
                and not vals.get('partner_id', False):
            vals['partner_id'] = self.__get_partner_id(cr, uid, \
                vals['res_model'], vals['res_id'], context)

        datas = None
        if vals.get('link', False):
            import urllib
            datas = base64.encodestring(urllib.urlopen(vals['link']).read())
        else:
            datas = vals.get('datas', False)

        if datas:
            vals['file_size'] = len(datas)
        else:
            if vals.get('file_size'):
                del vals['file_size']
        result = self._check_duplication(cr, uid, vals)
        if not result:
            domain = [
                ('res_id', '=', vals['res_id']),
                ('res_model', '=', vals['res_model']),
                ('datas_fname', '=', vals['datas_fname']),
            ]
            attach_ids = self.search(cr, uid, domain, context=context)
            super(document_file, self).write(cr,
                                             uid,
                                             attach_ids,
                                             {'datas': vals['datas']},
                                             context=context)
            result = attach_ids[0]
        else:
            #raise osv.except_osv(_('ValidateError'), _('File name must be unique!'))
            result = super(document_file, self).create(cr, uid, vals, context)
        return result

    def __get_partner_id(self, cr, uid, res_model, res_id, context=None):
        """ A helper to retrieve the associated partner from any res_model+id
            It is a hack that will try to discover if the mentioned record is
            clearly associated with a partner record.
        """
        obj_model = self.pool.get(res_model)
        if obj_model._name == 'res.partner':
            return res_id
        elif 'partner_id' in obj_model._columns and obj_model._columns[
                'partner_id']._obj == 'res.partner':
            bro = obj_model.browse(cr, uid, res_id, context=context)
            return bro.partner_id.id
        elif 'address_id' in obj_model._columns and obj_model._columns[
                'address_id']._obj == 'res.partner.address':
            bro = obj_model.browse(cr, uid, res_id, context=context)
            return bro.address_id.partner_id.id
        return False
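
    # Illustrative behaviour of the heuristic above: for res_model
    # 'res.partner' the res_id itself is returned; for a model whose
    # 'partner_id' column is a many2one to res.partner (e.g. an invoice-like
    # model), the record's partner_id.id is used; models exposing only an
    # 'address_id' fall back to address_id.partner_id.id; otherwise False.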

    def unlink(self, cr, uid, ids, context=None):
        stor = self.pool.get('document.storage')
        unres = []
        # We have to do the unlink in 2 stages: prepare a list of actual
        # files to be unlinked, update the db (safer to do first, can be
        # rolled back) and then unlink the files. The list wouldn't exist
        # after we discard the objects
        ids = self.search(cr, uid, [('id', 'in', ids)])
        for f in self.browse(cr, uid, ids, context=context):
            # TODO: update the node cache
            par = f.parent_id
            storage_id = None
            while par:
                if par.storage_id:
                    storage_id = par.storage_id
                    break
                par = par.parent_id
            #assert storage_id, "Strange, found file #%s w/o storage!" % f.id #TOCHECK: after run yml, it's fail
            if storage_id:
                r = stor.prepare_unlink(cr, uid, storage_id, f)
                if r:
                    unres.append(r)
            else:
                logging.getLogger('document').warning(
                    "Unlinking attachment #%s %s that has no storage", f.id,
                    f.name)
        res = super(document_file, self).unlink(cr, uid, ids, context)
        stor.do_unlink(cr, uid, unres)
        return res
Exemplo n.º 23
0
        ofile = open(full_path, 'w')
        try:
            ofile.write(base64.decodestring(b64_file))
        finally:
            ofile.close()
        return True

    def _set_image(self, cr, uid, id, name, value, arg, context=None):
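        # Store the image on disk in the company's local media repository when
        # one is configured, otherwise keep it in the file_db_store column.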
        local_media_repository = self.pool.get('res.company').get_local_media_repository(cr, uid, context=context)
        if local_media_repository:
            image = self.browse(cr, uid, id, context=context)
            return self._save_file(os.path.join(local_media_repository, image.product_id.default_code), '%s%s'%(image.name, image.extention), value)
        return self.write(cr, uid, id, {'file_db_store' : value}, context=context)

    _columns = {
        'name':fields.char('Image Title', size=100, required=True),
        'extention': fields.char('File extension', size=6),
        'link':fields.boolean('Link?', help="Images can be linked from files on your file system or remote (Preferred)"),
        'file_db_store':fields.binary('Image stored in database'),
        'file':fields.function(_get_image, fnct_inv=_set_image, type="binary", method=True, filters='*.png,*.jpg,*.gif'),
        'url':fields.char('File Location', size=250),
        'comments':fields.text('Comments'),
        'product_id':fields.many2one('product.product', 'Product')
    }

    _defaults = {
        'link': lambda *a: False,
    }

    _sql_constraints = [('uniq_name_product_id', 'UNIQUE(product_id, name)',
                _('A product can have only one image with the same name'))]
Exemplo n.º 24
0
        regexStr = '^((https|http|ftp|rtsp|mms)?://)+'
        regex = re.compile(regexStr)
        return regex.match(url)
        
    def _get_image(self, cursor, user, ids, name, arg, context=None):
        image = {}
        res = self.read(cursor, user, ids, ['image_link'])
        image_link = res[0]['image_link']
        if image_link:
            if not self.is_url(image_link):
                raise osv.except_osv('URL Error','URL should start with https|http|ftp|rtsp|mms.')
            req = Request(image_link)
            try:
                response = urlopen(req)
            except IOError, e:
                if hasattr(e, 'reason'):
                    raise osv.except_osv('URL Error', 'We failed to reach a server. Reason: %s' % e.reason)
                elif hasattr(e, 'code'):
                    raise osv.except_osv('URL Error', 'The server couldn\'t fulfill the request.\nError code: %s' % e.code)
            pic = base64.encodestring(response.read())
            for id in ids:
                image[id] = pic
        return image

    _columns = {
        'image_link' : fields.char('Image Link', size=180),
        'image' : fields.function(_get_image, method=True, string='Product Image', type='binary', store=False), 
    }

product_template_img()
Exemplo n.º 25
0
        return [(r["code"], r["name"]) for r in res]

    def _get_xml_id(self, cr, uid, ids, *args, **kwargs):
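        # Map each record id to its fully qualified XML id ("module.name")
        # from ir.model.data, or False when no entry exists.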
        model_data_obj = self.pool.get("ir.model.data")
        data_ids = model_data_obj.search(cr, uid, [("model", "=", self._name), ("res_id", "in", ids)])
        data_results = model_data_obj.read(cr, uid, data_ids, ["module", "name", "res_id"])
        result = {}
        for id in ids:
            result[id] = False
        for record in data_results:
            result[record["res_id"]] = "%(module)s.%(name)s" % record
        return result

    _columns = {
        "charset": fields.selection(_get_encodings, string="Charset", required=True),
        "content_fname": fields.char("Override Extension", size=64, help="Here you can override output file extension"),
        "styles_mode": fields.selection(
            [("default", "Not used"), ("global", "Global"), ("specified", "Specified")], string="Stylesheet"
        ),
        #'report_styles' : fields.binary('Template Styles', help='OpenOffice stylesheet (.odt)'),
        "stylesheet_id": fields.many2one("report.stylesheets", "Template Stylesheet"),
        "preload_mode": fields.selection([("static", _("Static")), ("preload", _("Preload"))], "Preload Mode"),
        "tml_source": fields.selection(
            [("database", "Database"), ("file", "File"), ("parser", "Parser")], "Template source", select=True
        ),
        "parser_def": fields.text("Parser Definition"),
        "parser_loc": fields.char(
            "Parser location",
            size=128,
            help="Path to the parser location. Beginning of the path must be start with the module name!\nLike this: {module name}/{path to the parser.py file}",
        ),
Exemplo n.º 26
0
class report_timesheet_task_user(osv.osv):
    _name = "report.timesheet.task.user"
    _auto = False
    _order = "name"

    def _get_task_hours(self, cr, uid, ids, name, args, context):
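        # Total project.task.work hours logged by the record's user from the
        # record date (first of the month) to the end of that month.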
        result = {}
        for record in self.browse(cr, uid, ids, context):
            last_date = datetime.strptime(
                record.name,
                '%Y-%m-%d') + relativedelta(months=1) - relativedelta(days=1)
            task_obj = self.pool.get('project.task.work')
            task_ids = task_obj.search(
                cr, uid, [('user_id', '=', record.user_id.id),
                          ('date', '>=', record.name),
                          ('date', '<=', last_date.strftime('%Y-%m-%d'))])
            tsk_hrs = task_obj.read(cr, uid, task_ids,
                                    ['hours', 'date', 'user_id'])
            total = 0.0
            for hrs in tsk_hrs:
                total += hrs['hours']
            result[record.id] = total
        return result

    def get_hrs_timesheet(self, cr, uid, ids, name, args, context):
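        # Total attendance hours from hr_timesheet_sheet.sheet.day for the
        # record's user over the same one-month window.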
        result = {}
        for record in self.browse(cr, uid, ids, context):
            last_date = datetime.strptime(
                record.name,
                '%Y-%m-%d') + relativedelta(months=1) - relativedelta(days=1)
            obj = self.pool.get('hr_timesheet_sheet.sheet.day')
            sheet_ids = obj.search(
                cr, uid, [('sheet_id.user_id', '=', record.user_id.id),
                          ('name', '>=', record.name),
                          ('name', '<=', last_date.strftime('%Y-%m-%d'))])
            data_days = obj.read(
                cr, uid, sheet_ids,
                ['name', 'sheet_id.user_id', 'total_attendance'])
            total = 0.0
            for day_attendance in data_days:
                total += day_attendance['total_attendance']
            result[record.id] = total
        return result

    _columns = {
        'name':
        fields.char('Date', size=64),
        'year':
        fields.char('Year', size=64, required=False, readonly=True),
        'month':
        fields.selection([('01', 'January'), ('02', 'February'),
                          ('03', 'March'), ('04', 'April'), ('05', 'May'),
                          ('06', 'June'), ('07', 'July'), ('08', 'August'),
                          ('09', 'September'), ('10', 'October'),
                          ('11', 'November'), ('12', 'December')],
                         'Month',
                         readonly=True),
        'user_id':
        fields.many2one('res.users', 'User', readonly=True),
        'timesheet_hrs':
        fields.function(get_hrs_timesheet,
                        method=True,
                        string="Timesheet Hours"),
        'task_hrs':
        fields.function(_get_task_hours, method=True, string="Task Hours"),
    }

    def init(self, cr):
        tools.drop_view_if_exists(cr, 'report_timesheet_task_user')
        cr.execute(""" create or replace view report_timesheet_task_user as (
        select
         ((r.id*12)+to_number(months.m_id,'99'))::integer as id,
               months.name as name,
               r.id as user_id,
               to_char(to_date(months.name, 'YYYY/MM/DD'),'YYYY') as year,
               to_char(to_date(months.name, 'YYYY/MM/DD'),'MM') as month
        from res_users r,
                (select to_char(p.date,'YYYY-MM-01') as name,
            to_char(p.date,'MM') as m_id
                from project_task_work p

            union
                select to_char(h.name,'YYYY-MM-01') as name,
                to_char(h.name,'MM') as m_id
                from hr_timesheet_sheet_sheet_day h) as months

            group by
                r.id,months.m_id,months.name,
                to_char(to_date(months.name, 'YYYY/MM/DD'),'YYYY') ,
                to_char(to_date(months.name, 'YYYY/MM/DD'),'MM')
              ) """)
Exemplo n.º 27
0
class budget_info_inv(osv.osv):
    _name = 'budget.info.inv'
    _description = 'Budget Info Supplier Invoice'

    def _amount_budget(self, cr, uid, ids, name, args, context=None):
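        # Budgeted amount: total of the ad_budget_line entries for the line's
        # analytic account whose budget period falls in the invoice's year.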
        print "AAAAAAA"
        res = {}
        for line in self.browse(cr, uid, ids, context=None):
            account_analytic_id = line.account_analytic_id.id
            #date_end = line.voucher_id.date_end[:4]
            #print "+++++++++++++++++++++++++++", line.cash_advance_id.req_date[:4]

            date_end = line.invoice_id.date_invoice and \
                line.invoice_id.date_invoice[:4] or False
            #date_end = 2014
            if not date_end:
                return res

            #date_from = str(line.period_id.date_start)
            #date_to = str(line.period_id.date_stop)
            #date_from = line.period_id.date_start
            #date_to = line.period_id.date_stop
            #print "+++++++++++++++", account_analytic_id, date_end
            #acc_ids = line.budget_item_id.
            cr.execute(
                "select sum(a.amount) as amount_budget from ad_budget_line a, account_period b "
                " where a.analytic_account_id = %s and a.period_id = b.id and to_char(b.date_start,'yyyy') = %s ",
                (
                    str(account_analytic_id),
                    str(date_end),
                ))
            #            result = cr.dictfetchone()
            #            #print "line.id",line.id
            #            if result['amount_budget'] is None:
            #                result.update({'amount_budget': 0.0})
            #            result.update({'amount_budget':abs(result['amount_budget'])})
            #            res.update({line.id:result})

            amount = cr.fetchone()
            amount = amount[0] or 0.00
            res[line['id']] = amount
        return res

    def _amount_spent(self, cr, uid, ids, name, args, context=None):
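        # Spent amount: approved purchase order lines minus the part already
        # delivered or cancelled, plus the absolute value of the analytic
        # lines booked on the account, all within the invoice's year.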
        res = {}
        for line in self.browse(cr, uid, ids, context=context):
            account_analytic_id = line.account_analytic_id.id
            #date_end = line.material_req_id.date_end[:4]
            date_end = line.invoice_id.date_invoice and \
                line.invoice_id.date_invoice[:4] or False
            if not date_end:
                return res
            #acc_ids = line.budget_item_id.
            cr.execute(
                "SELECT SUM(amount) as balance_real FROM account_analytic_line "
                "WHERE account_id=%s AND to_char(date,'yyyy') = %s ", (
                    str(account_analytic_id),
                    str(date_end),
                ))
            amount_real = cr.fetchone()
            amount_real = amount_real[0] or 0.00
            #print amount_real

            cr.execute(
                "select SUM(x.product_qty*x.price_unit) as balance_virtual FROM purchase_order_line x, purchase_order y "
                " where y.state in ('approved') and x.order_id = y.id "
                "  and x.account_analytic_id = %s and to_char(x.date_planned,'yyyy') = %s ",
                (
                    str(account_analytic_id),
                    str(date_end),
                ))
            amount_virtual1 = cr.fetchone()
            amount_virtual1 = amount_virtual1[0] or 0.00

            cr.execute(
                "SELECT SUM(a.product_qty*a.price_unit) as balance_virtual FROM purchase_order_line x, purchase_order y, stock_move a "
                " WHERE x.order_id = y.id and a.purchase_line_id = x.id and a.state in ('cancel','done') and "
                " x.order_id in (select a.id from purchase_order a, account_invoice b, purchase_invoice_rel c "
                "  where a.id=c.purchase_id and b.id= c.invoice_id and (a.state in ('approved') and b.state in ('open','paid','cancel')) and a.id=y.id) and "
                " x.account_analytic_id = %s and to_char(x.date_planned,'yyyy') = %s ",
                (
                    str(account_analytic_id),
                    str(date_end),
                ))
            amount_virtual2 = cr.fetchone()
            amount_virtual2 = amount_virtual2[0] or 0.00
            res[line['id']] = (amount_virtual1 -
                               amount_virtual2) + abs(amount_real)
        return res

    def _amount_current(self, cr, uid, ids, name, args, context=None):
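        # Current amount: total of this draft invoice's own lines (quantity *
        # unit price less discount) charged to the same analytic account.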
        print "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"
        res = {}
        for line in self.browse(cr, uid, ids, context=None):
            account_analytic_id = line.account_analytic_id.id
            invoice_id = line.invoice_id.id
            date_end = line.invoice_id.date_invoice and \
                line.invoice_id.date_invoice[:4] or False
            #date_end = 2014
            if not date_end:
                return res
            #acc_ids = line.budget_item_id.

            cr.execute(
                "select sum((a.quantity * a.price_unit) - (a.quantity * a.price_unit) * a.discount / 100) from account_invoice c, account_invoice_line a, budget_info_inv b "
                " where c.id=a.invoice_id and a.invoice_id = %s and a.account_analytic_id=b.account_analytic_id and b.account_analytic_id = %s and c.id = b.invoice_id and to_char(c.date_invoice,'yyyy') = %s and c.state = 'draft' ",
                (
                    invoice_id,
                    str(account_analytic_id),
                    str(date_end),
                ))

            amount1 = cr.fetchone()
            amount1 = amount1[0] or 0.00

            #            cr.execute(" select sum(e.subtotal) from purchase_order a, purchase_requisition b, stock_picking c, material_requisition d, material_requisition_line e, budget_info f "
            #                       " where a.requisition_id = b.id and b.int_move_id = c.id and c.material_req_id = d.id and a.state in ('done','approved') "
            #                       " and d.id = f.material_req_id  and e.account_analytic_id = f.account_analytic_id and d.id = e.requisition_id "
            #                       " and e.requisition_id = %s and f.account_analytic_id = %s and to_char(d.date_end,'yyyy') = %s ",(material_req_id,str(account_analytic_id),str(date_end),))
            #            amount2 = cr.fetchone()
            #            amount2 = amount2[0] or 0.00
            amount2 = 0.00
            #print "xxxxxxxxxxxx",amount,material_req_id,str(account_analytic_id),str(date_end)
            res[line['id']] = amount1 - amount2
        return res

    def _amount_utilized(self, cr, uid, ids, name, args, context=None):
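        # Utilized amount: committed purchases plus the current draft invoice
        # amount plus the analytic lines, i.e. spent + current.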
        res = {}
        for line in self.browse(cr, uid, ids, context=context):
            account_analytic_id = line.account_analytic_id.id
            invoice_id = line.invoice_id.id
            date_end = line.invoice_id.date_invoice and \
                line.invoice_id.date_invoice[:4] or False
            if not date_end:
                return res
            #acc_ids = line.budget_item_id.

            cr.execute(
                "SELECT SUM(amount) as balance_real FROM account_analytic_line "
                "WHERE account_id=%s AND to_char(date,'yyyy') = %s ", (
                    str(account_analytic_id),
                    str(date_end),
                ))
            amount_real = cr.fetchone()
            amount_real = amount_real[0] or 0.00

            #===================================================================
            # cr.execute("SELECT SUM(x.product_qty*x.price_unit) as balance_virtual FROM purchase_order_line x, purchase_order y "
            #        " WHERE x.state in ('approved','confirmed','done') and x.order_id = y.id and "
            #        " x.order_id in (select a.id from purchase_order a, account_invoice b, purchase_invoice_rel c "
            #            " where a.id=c.purchase_id and b.id= c.invoice_id and (a.state in ('confirmed','approved','done') and b.state not in ('open','paid','cancel')) and a.id=y.id) and "
            #           " x.account_analytic_id = %s and to_char(x.date_planned,'yyyy') = %s ",(str(account_analytic_id),str(date_end),))
            # amount_spent = cr.fetchone()
            # amount_spent = amount_spent[0] or 0.00
            #===================================================================
            cr.execute(
                "select SUM(x.product_qty*x.price_unit) as balance_virtual FROM purchase_order_line x, purchase_order y "
                " where y.state in ('approved') and x.order_id = y.id "
                "  and x.account_analytic_id = %s and to_char(x.date_planned,'yyyy') = %s ",
                (
                    str(account_analytic_id),
                    str(date_end),
                ))
            amount_virtual1 = cr.fetchone()
            amount_virtual1 = amount_virtual1[0] or 0.00

            cr.execute(
                "SELECT SUM(a.product_qty*a.price_unit) as balance_virtual FROM purchase_order_line x, purchase_order y, stock_move a "
                " WHERE x.order_id = y.id and a.purchase_line_id = x.id and a.state in ('cancel','done') and "
                " x.order_id in (select a.id from purchase_order a, account_invoice b, purchase_invoice_rel c "
                "  where a.id=c.purchase_id and b.id= c.invoice_id and (a.state in ('approved') and b.state in ('open','paid','cancel')) and a.id=y.id) and "
                " x.account_analytic_id = %s and to_char(x.date_planned,'yyyy') = %s ",
                (
                    str(account_analytic_id),
                    str(date_end),
                ))
            amount_virtual2 = cr.fetchone()
            amount_virtual2 = amount_virtual2[0] or 0.00
            amount_spent = amount_virtual1 - amount_virtual2
            #res[line['id']] = amount_spent
            #===================================================================
            cr.execute(
                "select sum((a.quantity * a.price_unit) - (a.quantity * a.price_unit) * a.discount / 100) from account_invoice c, account_invoice_line a, budget_info_inv b "
                " where c.id=a.invoice_id and a.invoice_id = %s and a.account_analytic_id=b.account_analytic_id and b.account_analytic_id = %s and c.id = b.invoice_id and to_char(c.date_invoice,'yyyy') = %s and c.state = 'draft' ",
                (
                    invoice_id,
                    str(account_analytic_id),
                    str(date_end),
                ))

            amount1 = cr.fetchone()
            amount1 = amount1[0] or 0.00

            #            cr.execute(" select sum(e.subtotal) from purchase_order a, purchase_requisition b, stock_picking c, material_requisition d, material_requisition_line e, budget_info f "
            #                       " where a.requisition_id = b.id and b.int_move_id = c.id and c.material_req_id = d.id and a.state in ('done','approved') "
            #                       " and d.id = f.material_req_id  and e.account_analytic_id = f.account_analytic_id and d.id = e.requisition_id "
            #                       " and e.requisition_id = %s and f.account_analytic_id = %s and to_char(d.date_end,'yyyy') = %s ",(material_req_id,str(account_analytic_id),str(date_end),))
            #            amount2 = cr.fetchone()
            #            amount2 = amount2[0] or 0.00
            amount2 = 0.00
            amount_current = amount1 - amount2
            #===================================================================

            res[line['id']] = amount_spent + amount_current + abs(amount_real)
        return res

    def _amount_remain(self, cr, uid, ids, name, args, context=None):
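        # Remaining amount: budgeted amount minus the utilized amount
        # (committed purchases + current draft invoice + analytic lines).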
        res = {}
        for line in self.browse(cr, uid, ids, context=context):
            account_analytic_id = line.account_analytic_id.id
            invoice_id = line.invoice_id.id
            date_end = line.invoice_id.date_invoice and \
                line.invoice_id.date_invoice[:4] or False
            if not date_end:
                return res
            #acc_ids = line.budget_item_id.

            cr.execute(
                "SELECT SUM(amount) as balance_real FROM account_analytic_line "
                "WHERE account_id=%s AND to_char(date,'yyyy') = %s ", (
                    str(account_analytic_id),
                    str(date_end),
                ))
            amount_real = cr.fetchone()
            amount_real = amount_real[0] or 0.00

            cr.execute(
                "select sum(a.amount) as amount_budget from ad_budget_line a, account_period b "
                " where a.analytic_account_id = %s and a.period_id = b.id and to_char(b.date_start,'yyyy') = %s ",
                (
                    str(account_analytic_id),
                    str(date_end),
                ))
            amount_budget = cr.fetchone()
            amount_budget = amount_budget[0] or 0.00

            #===================================================================
            # cr.execute("SELECT SUM(x.product_qty*x.price_unit) as balance_virtual FROM purchase_order_line x, purchase_order y "
            #        " WHERE x.state in ('approved','confirmed','done') and x.order_id = y.id and "
            #        " x.order_id in (select a.id from purchase_order a, account_invoice b, purchase_invoice_rel c "
            #            " where a.id=c.purchase_id and b.id= c.invoice_id and (a.state in ('confirmed','approved','done') and b.state not in ('open','paid','cancel')) and a.id=y.id) and "
            #           " x.account_analytic_id = %s and to_char(x.date_planned,'yyyy') = %s ",(str(account_analytic_id),str(date_end),))
            # amount_spent = cr.fetchone()
            # amount_spent = amount_spent[0] or 0.00
            #===================================================================
            cr.execute(
                "select SUM(x.product_qty*x.price_unit) as balance_virtual FROM purchase_order_line x, purchase_order y "
                " where y.state in ('approved') and x.order_id = y.id "
                "  and x.account_analytic_id = %s and to_char(x.date_planned,'yyyy') = %s ",
                (
                    str(account_analytic_id),
                    str(date_end),
                ))
            amount_virtual1 = cr.fetchone()
            amount_virtual1 = amount_virtual1[0] or 0.00

            cr.execute(
                "SELECT SUM(a.product_qty*a.price_unit) as balance_virtual FROM purchase_order_line x, purchase_order y, stock_move a "
                " WHERE x.order_id = y.id and a.purchase_line_id = x.id and a.state in ('cancel','done') and "
                " x.order_id in (select a.id from purchase_order a, account_invoice b, purchase_invoice_rel c "
                "  where a.id=c.purchase_id and b.id= c.invoice_id and (a.state in ('approved') and b.state in ('open','paid','cancel')) and a.id=y.id) and "
                " x.account_analytic_id = %s and to_char(x.date_planned,'yyyy') = %s ",
                (
                    str(account_analytic_id),
                    str(date_end),
                ))
            amount_virtual2 = cr.fetchone()
            amount_virtual2 = amount_virtual2[0] or 0.00
            amount_spent = amount_virtual1 - amount_virtual2
            #res[line['id']] = amount_spent
            #===================================================================
            cr.execute(
                "select sum((a.quantity * a.price_unit) - (a.quantity * a.price_unit) * a.discount / 100) from account_invoice c, account_invoice_line a, budget_info_inv b "
                " where c.id=a.invoice_id and a.invoice_id = %s and a.account_analytic_id=b.account_analytic_id and b.account_analytic_id = %s and c.id = b.invoice_id and to_char(c.date_invoice,'yyyy') = %s and c.state = 'draft'",
                (
                    invoice_id,
                    str(account_analytic_id),
                    str(date_end),
                ))

            amount1 = cr.fetchone()
            amount1 = amount1[0] or 0.00

            #            cr.execute(" select sum(e.subtotal) from purchase_order a, purchase_requisition b, stock_picking c, material_requisition d, material_requisition_line e, budget_info f "
            #                       " where a.requisition_id = b.id and b.int_move_id = c.id and c.material_req_id = d.id and a.state in ('done','approved') "
            #                       " and d.id = f.material_req_id  and e.account_analytic_id = f.account_analytic_id and d.id = e.requisition_id "
            #                       " and e.requisition_id = %s and f.account_analytic_id = %s and to_char(d.date_end,'yyyy') = %s ",(material_req_id,str(account_analytic_id),str(date_end),))
            #            amount2 = cr.fetchone()
            #            amount2 = amount2[0] or 0.00
            amount2 = 0.00
            amount_current = amount1 - amount2
            #print amount1,amount2,amount_current,amount_budget - (amount_spent + amount_current + abs(amount_real))
            #===================================================================
            res[line['id']] = amount_budget - (amount_spent + amount_current +
                                               abs(amount_real))
        return res

    _columns = {
        'name':
        fields.char('Name', 64),
        'account_analytic_id':
        fields.many2one(
            'account.analytic.account',
            'Analytic Account',
        ),
        #'material_req_id': fields.many2one('material.requisition', 'Material Request'),
        'invoice_id':
        fields.many2one('account.invoice', 'Invoice'),
        #'budget_line_id': fields.many2one('ad_budget.line', 'Budget Lines'),
        'amount_budget':
        fields.function(_amount_budget,
                        digits=(20, 0),
                        method=True,
                        string='Budget Amount',
                        type='float'),
        'amount_spent':
        fields.function(_amount_spent,
                        digits=(20, 0),
                        method=True,
                        string='Budget Spent',
                        type='float'),
        'amount_current':
        fields.function(_amount_current,
                        digits=(20, 0),
                        method=True,
                        string='Budget Current',
                        type='float'),
        'amount_utilized':
        fields.function(_amount_utilized,
                        digits=(20, 0),
                        method=True,
                        string='Budget Utilized',
                        type='float'),
        'amount_remain':
        fields.function(_amount_remain,
                        digits=(20, 0),
                        method=True,
                        string='Budget Remain',
                        type='float'),
    }
Exemplo n.º 28
0
class medical_patient (osv.osv):
	_name = "medical.patient"
	_inherit = "medical.patient"
	_columns = {
		'excercise' : fields.boolean ('Exercise'),
		'excercise_minutes_day' : fields.integer ('Minutes / day',help="How many minutes a day the patient exercises"),
		'sleep_hours' : fields.integer ('Hours of sleep',help="Average hours of sleep per day"),
		'sleep_during_daytime' : fields.boolean ('Sleeps at daytime',help="Check if the patient sleep hours are during daylight rather than at night"),		
		'number_of_meals' : fields.integer ('Meals per day'),
		'eats_alone' : fields.boolean ('Eats alone',help="Check this box if the patient eats by him / herself."),
		'salt' : fields.boolean ('Salt',help="Check if patient consumes salt with the food"),
		'coffee' : fields.boolean ('Coffee'),
		'coffee_cups' : fields.integer ('Cups per day',help="Number of cup of coffee a day"),
		'soft_drinks' : fields.boolean ('Soft drinks (sugar)',help="Check if the patient consumes soft drinks with sugar"),
		'diet' : fields.boolean ('Currently on a diet',help="Check if the patient is currently on a diet"),
		'diet_info' : fields.char ('Diet info',size=256,help="Short description on the diet"),
		'smoking' : fields.boolean ('Smokes'),
		'smoking_number' : fields.integer ('Cigarettes a day'),
		'ex_smoker' : fields.boolean ('Ex-smoker'),
		'second_hand_smoker' : fields.boolean ('Passive smoker', help="Check if the patient is a passive / second-hand smoker"),
		'age_start_smoking' : fields.integer ('Age started to smoke'),
		'age_quit_smoking' : fields.integer ('Age of quitting',help="Age of quitting smoking"),
		'alcohol' : fields.boolean ('Drinks Alcohol'),
		'age_start_drinking' : fields.integer ('Age started to drink ',help="Age the patient started drinking"),
		'age_quit_drinking' : fields.integer ('Age quit drinking ',help="Age the patient quit drinking"),
		'ex_alcoholic' : fields.boolean ('Ex alcoholic'),
		'alcohol_beer_number' : fields.integer ('Beer / day'),
		'alcohol_wine_number' : fields.integer ('Wine / day'),
		'alcohol_liquor_number' : fields.integer ('Liquor / day'),
		'drug_usage' : fields.boolean ('Drug Habits'),
		'ex_drug_addict' : fields.boolean ('Ex drug addict'),
		'drug_iv' : fields.boolean ('IV drug user',help="Check this option if the patient injects drugs"),
		'age_start_drugs' : fields.integer ('Age started drugs ',help="Age the patient started using drugs"),
		'age_quit_drugs' : fields.integer ('Age quit drugs ',help="Age the patient quit using drugs"),
		'drugs' : fields.many2many ('medical.drugs_recreational','patient_drugs_recreational_rel','patient_id','drugs_recreational_id','Drugs', help="Name of drugs that the patient consumes"), 

		'traffic_laws' : fields.boolean ('Obeys Traffic Laws', help="Check if the patient is a safe driver"),
		'car_revision' : fields.boolean ('Car Revision', help="Maintain the vehicle. Do periodical checks - tires, engine, breaks ..."),
		'car_seat_belt' : fields.boolean ('Seat belt', help="Safety measures when driving : safety belt"),
		'car_child_safety' : fields.boolean ('Car Child Safety', help="Safety measures when driving : child seats, proper seat belting, not seating on the front seat, ...."),
		'home_safety' : fields.boolean ('Home safety', help="Keep safety measures for kids in the kitchen, correct storage of chemicals, ..."),
		'motorcycle_rider' : fields.boolean ('Motorcycle Rider', help="The patient rides motorcycles"),		
		'helmet' : fields.boolean ('Uses helmet', help="The patient uses the proper motorcycle helmet"),		
				
		'lifestyle_info' :fields.text ('Extra Information'),


		'sexual_preferences' : fields.selection([
                                ('h','Heterosexual'),
                                ('g','Homosexual'),
				('b','Bisexual'),
				('t','Transexual'),
                                ], 'Sexual Orientation'),

		'sexual_practices' : fields.selection([
                                ('s','Safe / Protected sex'),
                                ('r','Risky / Unprotected sex'),
				], 'Sexual Practices'),

		'sexual_partners': fields.selection([
				('m','Monogamous'),
				('t','Polygamous'),
                                ], 'Sexual Partners'),

		'sexual_partners_number': fields.integer ('Number of sexual partners'),

		'first_sexual_encounter': fields.integer ('Age first sexual encounter'),

		'anticonceptive': fields.selection ([
			('0','None'),
			('1','Pill / Minipill'),
			('2','Male condom'),
			('3','Vasectomy'),
			('4','Female sterilisation'),
			('5','Intra-uterine device'),
			('6','Withdrawal method'),
			('7','Fertility cycle awareness'),
			('8','Contraceptive injection'),
			('9','Skin Patch'),
			('10','Female condom'),
			], 'Anticonceptive Method'),

		'sex_oral': fields.selection ([
			('0','None'),
			('1','Active'),
			('2','Passive'),
			('3','Both'),			
			], 'Oral Sex'),

		'sex_anal': fields.selection ([
			('0','None'),
			('1','Active'),
			('2','Passive'),
			('3','Both'),			
			], 'Anal Sex'),
			
		'prostitute' : fields.boolean ('Prostitute', help="Check if the patient (he or she) is a prostitute"),
		'sex_with_prostitutes' : fields.boolean ('Sex with prostitutes', help="Check if the patient (he or she) has sex with prostitutes"),

		'sexuality_info' :fields.text ('Extra Information'),

	}
Exemplo n.º 29
0
            except KeyError, e:
                _logger.warning(
                      'Data not found for items of %s', module_rec.name)
            except AttributeError, e:
                _logger.warning(
                      'Data not found for items of %s %s', module_rec.name, str(e))
            except Exception, e:
                _logger.warning('Unknown error while fetching data of %s',
                      module_rec.name, exc_info=True)
        for key, value in res.iteritems():
            for k, v in res[key].iteritems():
                res[key][k] = "\n".join(sorted(v))
        return res

    _columns = {
        'name': fields.char("Name", size=128, readonly=True, required=True, select=True),
        'category_id': fields.many2one('ir.module.category', 'Category', readonly=True, select=True),
        'shortdesc': fields.char('Short Description', size=256, readonly=True, translate=True),
        'description': fields.text("Description", readonly=True, translate=True),
        'author': fields.char("Author", size=128, readonly=True),
        'maintainer': fields.char('Maintainer', size=128, readonly=True),
        'contributors': fields.text('Contributors', readonly=True),
        'website': fields.char("Website", size=256, readonly=True),

        # attention: Incorrect field names !!
        #   installed_version refer the latest version (the one on disk)
        #   latest_version refer the installed version (the one in database)
        #   published_version refer the version available on the repository
        'installed_version': fields.function(_get_latest_version, 
            string='Latest version', type='char'),
        'latest_version': fields.char('Installed version', size=64, readonly=True),
Exemplo n.º 30
0
class webmail_mailbox(osv.osv):
    _name = "webmail.mailbox"
    _description = "User Mailbox"
    _columns = {
        'user_id':
        fields.many2one('res.users', 'User'),
        'name':
        fields.char('Name', size=64, required=True),
        'parent_id':
        fields.many2one('webmail.mailbox', 'Parent Folder', select=True),
        'child_id':
        fields.one2many('webmail.mailbox', 'parent_id', string='Child Folder'),
        'account_id':
        fields.many2one('webmail.server', 'Server'),
    }
    _defaults = {
        'user_id': lambda obj, cr, uid, context: uid,
    }

    def select(self, cr, uid, ids, context, mail_acc):
        server_obj = pooler.get_pool(cr.dbname).get('webmail.server')
        obj = server_obj.login(cr, uid, ids, context, mail_acc.iserver_name,
                               mail_acc.iconn_port, mail_acc.iconn_type,
                               mail_acc.iserver_type, mail_acc.user_name,
                               mail_acc.password)
        return obj.list()[1]

    def new(self, cr, uid, ids, context, name):
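        # Create the folder on the IMAP server, then mirror it as a new
        # webmail.mailbox record.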
        mailbox_obj = self.pool.get('webmail.mailbox')
        server_obj = self.pool.get('webmail.server')

        mailbox = mailbox_obj.browse(cr, uid, ids[0])
        server = server_obj.browse(cr, uid, mailbox.account_id.id)
        if server.iserver_type == 'imap':
            obj = server.login(cr, uid, ids, context, server.iserver_name,
                               server.iconn_port, server.iconn_type,
                               server.iserver_type, server.user_name,
                               server.password)
            if obj:
                obj.create(name)
                mailbox_obj.create(cr, uid, {
                    'name': name,
                    'parent_id': mailbox.parent_id and mailbox.parent_id.id or False
                })

    def rename(self, cr, uid, ids, context, old, new):
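        # Rename the folder on the IMAP server and keep the local record in sync.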
        mailbox_obj = self.pool.get('webmail.mailbox')
        server_obj = self.pool.get('webmail.server')

        mailbox = mailbox_obj.browse(cr, uid, ids[0])
        server = server_obj.browse(cr, uid, mailbox.account_id.id)
        if server.iserver_type == 'imap':
            obj = server.login(cr, uid, ids, context, server.iserver_name,
                               server.iconn_port, server.iconn_type,
                               server.iserver_type, server.user_name,
                               server.password)
            if obj:
                obj.rename(old, new)
                mailbox_obj.write(cr, uid, ids, {'name': new})

    def delete(self, cr, uid, ids, context):
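        # Remove the folder from the IMAP server, then delete the local record.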
        mailbox_obj = self.pool.get('webmail.mailbox')
        server_obj = self.pool.get('webmail.server')

        mailbox = mailbox_obj.browse(cr, uid, ids[0])
        server = server_obj.browse(cr, uid, mailbox.account_id.id)
        if server.iserver_type == 'imap':
            obj = server.login(cr, uid, ids, context, server.iserver_name,
                               server.iconn_port, server.iconn_type,
                               server.iserver_type, server.user_name,
                               server.password)
            if obj:
                obj.delete(mailbox.name)
                mailbox_obj.unlink(cr, uid, ids)

    def fetch_mail(self, cr, uid, ids, context):
        pass
Exemplo n.º 31
0
        except Exception, e:
            path = os.path.join('report_aeroo','config_pixmaps','module_banner.png')
            image_file = tools.file_open(path, 'rb')
            try:
                file_data = image_file.read()
                self._logo_image = base64.encodestring(file_data)
                return self._logo_image
            finally:
                image_file.close()
        else:
            self._logo_image = base64.encodestring(im.read())
            return self._logo_image

    def _get_image_fn(self, cr, uid, ids, name, args, context=None):
        image = self._get_image(cr, uid, context)
        return dict.fromkeys(ids, image) # ok to use .fromkeys() as the image is same for all 

    _columns = {
        'link':fields.char('Original developer', size=128, readonly=True),
        'config_logo': fields.function(_get_image_fn, string='Image', type='binary', method=True),
        
    }

    _defaults = {
        'config_logo': _get_image,
        'link':'http://www.alistek.com',
    }

report_aeroo_installer()

Exemplo n.º 32
0
class perintah_kerja_internal(osv.osv):
    _name = "perintah.kerja.internal"
    _columns = {
        'name':
        fields.char('Work Order',
                    required=True,
                    size=64,
                    readonly=True,
                    states={'draft': [('readonly', False)]}),
        'date':
        fields.date('Order Date',
                    required=True,
                    readonly=True,
                    states={'draft': [('readonly', False)]}),
        'type':
        fields.selection([('other', 'Others'), ('pabrikasi', 'Pabrikasi'),
                          ('man', 'Man Power'), ('service', 'Service')],
                         'Type',
                         readonly=True,
                         states={'draft': [('readonly', False)]}),
        # 'sale_id': fields.many2one('sale.order', 'Sale Order', required=True, readonly=True, domain=[('state','in', ('progress','manual'))], states={'draft': [('readonly', False)]}),
        'no_pb':
        fields.char('No PB',
                    required=True,
                    readonly=True,
                    states={'draft': [('readonly', False)]}),
        'partner_id':
        fields.many2one('res.partner',
                        'Customer',
                        domain=[('customer', '=', True)],
                        readonly=True,
                        states={'draft': [('readonly', False)]}),
        'kontrak':
        fields.char('Contract No',
                    size=64,
                    readonly=True,
                    states={'draft': [('readonly', False)]}),
        'kontrakdate':
        fields.date('Contract Date',
                    required=True,
                    readonly=True,
                    states={'draft': [('readonly', False)]}),
        'workshop':
        fields.char('Working Place',
                    size=64,
                    readonly=True,
                    states={'draft': [('readonly', False)]}),
        'state':
        fields.selection([('draft', 'Draft'), ('approve', 'Approved'),
                          ('done', 'Done')],
                         'State',
                         readonly=True),
        'perintah_lines':
        fields.one2many('perintah.kerja.line.internal',
                        'perintah_id',
                        'Work Lines',
                        readonly=True,
                        states={'draft': [('readonly', False)]}),
        'delivery_date':
        fields.date('Delivery Date',
                    required=True,
                    readonly=True,
                    states={'draft': [('readonly', False)]}),
        'write_date':
        fields.datetime('Date Modified', readonly=True),
        'write_uid':
        fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date':
        fields.datetime('Date Created', readonly=True),
        'create_uid':
        fields.many2one('res.users', 'Creator', readonly=True),
        'creator':
        fields.many2one('res.users', 'Created by'),
        'checker':
        fields.many2one('res.users', 'Checked by'),
        'approver':
        fields.many2one('res.users', 'Approved by'),
        'note':
        fields.text('Notes'),
        'terms':
        fields.text('Terms & Condition'),
        'location_src_id':
        fields.many2one('stock.location',
                        'Raw Materials Location',
                        required=True,
                        readonly=True,
                        states={'draft': [('readonly', False)]}),
        'location_dest_id':
        fields.many2one('stock.location',
                        'Finished Products Location',
                        required=True,
                        readonly=True,
                        states={'draft': [('readonly', False)]}),
    }

    _defaults = {
        'name': '/',
        'note': '-',
        'type': 'pabrikasi',
        'state': 'draft',
        'location_src_id': 12,
        'location_dest_id': 12,
        'date': time.strftime('%Y-%m-%d'),
        'kontrakdate': time.strftime('%Y-%m-%d'),
    }

    _order = "name desc"

    def create(self, cr, uid, vals, context=None):
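        # Build the work order reference from the 'perintah.kerja' sequence,
        # the creator's initials and the month as a Roman numeral.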
        person = self.pool.get('res.users').browse(cr, uid, uid)
        rom = [
            0, 'I', 'II', 'III', 'IV', 'V', 'VI', 'VII', 'VIII', 'IX', 'X',
            'XI', 'XII'
        ]
        # usa = str(self.pool.get('sale.order').browse(cr, uid, vals['sale_id']).user_id.initial)
        val = self.pool.get('ir.sequence').get(cr, uid,
                                               'perintah.kerja').split('/')
        use = str(person.initial)
        vals['creator'] = person.id
        # vals['name'] = val[-1]+'A/SBM-ADM/'+usa+'-'+use+'/'+rom[int(val[2])]+'/'+val[1]
        vals['name'] = val[-1] + 'A/SBM-ADM/' + '-' + use + '/' + rom[int(
            val[2])] + '/' + val[1]
        return super(perintah_kerja_internal, self).create(cr,
                                                           uid,
                                                           vals,
                                                           context=context)

    def sale_change(self, cr, uid, ids, sale):
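        # Onchange helper: copy the selected sale order's lines and header
        # data (contract, customer, dates) into the work order values.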
        if sale:
            res = {}
            line = []
            obj_sale = self.pool.get('sale.order').browse(cr, uid, sale)
            for x in obj_sale.order_line:
                line.append({
                    'product_id': x.product_id.id,
                    'product_qty': x.product_uom_qty,
                    'product_uom': x.product_uom.id,
                    'name': x.name
                    # 'name': '['+str(x.product_id.code)+']' + ' ' + str(x.product_id.name)
                })

            res['perintah_lines'] = line
            res['kontrak'] = obj_sale.client_order_ref
            res['partner_id'] = obj_sale.partner_id.id
            res['kontrakdate'] = obj_sale.date_order
            res['delivery_date'] = obj_sale.delivery_date

            return {'value': res}
        return True

    def work_cancel(self, cr, uid, ids, context=None):
        self.write(cr, uid, ids, {'state': 'draft'})
        return True

    def work_confirm(self, cr, uid, ids, context=None):
        val = self.browse(cr, uid, ids)[0]
        if not val.perintah_lines:
            raise osv.except_osv(('Warning !'),
                                 ('The work line table must be filled in !'))
        self.write(
            cr, uid, ids, {
                'state': 'approve',
                'checker': self.pool.get('res.users').browse(cr, uid, uid).id
            })
        return True

    def work_validate(self, cr, uid, ids, context=None):
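        # For 'pabrikasi' orders, create internal pickings for the material
        # issue and the finished goods, add a stock move per order line (with
        # a '-WS' manufacture lot when the product is tracked), confirm and
        # force-assign the pickings, then mark the work order as done.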
        val = self.browse(cr, uid, ids, context=context)[0]
        if val.type == 'pabrikasi':
            seq_out_mnfct = self.pool.get('ir.sequence').get(
                cr, uid, 'stock.picking.out.manufacture')
            seq_from_mnfct = self.pool.get('ir.sequence').get(
                cr, uid, 'stock.picking.from.manufacture')

            if not seq_out_mnfct:
                raise osv.except_osv(
                    _('Error'),
                    _('stock.picking.out.manufacture Sequence not exist.\nPlease contact system administrator'
                      ))

            if not seq_from_mnfct:
                raise osv.except_osv(
                    _('Error'),
                    _('stock.picking.from.manufacture Sequence not exist.\nPlease contact system administrator.'
                      ))

            material_id = self.pool.get('stock.picking').create(
                cr, uid, {
                    'name': seq_out_mnfct,
                    'origin': val.name,
                    'type': 'internal',
                    'move_type': 'one',
                    'state': 'draft',
                    'date': val.date,
                    'auto_picking': True,
                    'company_id': 1,
                })

            goods_id = self.pool.get('stock.picking').create(
                cr, uid, {
                    'name': seq_from_mnfct,
                    'origin': val.name,
                    'type': 'internal',
                    'move_type': 'one',
                    'state': 'draft',
                    'date': val.date,
                    'auto_picking': True,
                    'company_id': 1,
                })

            prodlot = self.pool.get('stock.production.lot')
            for x in val.perintah_lines:
                prodlot_obj_id = False
                if x.product_id.track_production:
                    # check if manufacture lot exists
                    lot_name_ws = x.product_id.default_code + '-WS'
                    get_lot = prodlot.search(
                        cr, uid, [('product_id', '=', x.product_id.id),
                                  ('name', '=', lot_name_ws)])
                    if not get_lot:
                        # set new serial
                        prodlot_obj_id = prodlot.create(
                            cr,
                            uid, {
                                'name': lot_name_ws,
                                'product_id': x.product_id.id,
                                'desc': 'Manufacture Lot',
                            },
                            context=context)
                    else:
                        prodlot_obj_id = get_lot[0]

                    # set serial number for manufacture lot

                self.pool.get('stock.move').create(
                    cr, uid, {
                        'name':
                        x.product_id.default_code + x.product_id.name_template,
                        'picking_id': goods_id,
                        'product_id': x.product_id.id,
                        'product_qty': x.product_qty,
                        'product_uom': x.product_uom.id,
                        'date': val.date,
                        'location_id': 7,
                        'location_dest_id': val.location_dest_id.id,
                        'state': 'waiting',
                        'company_id': 1,
                        'prodlot_id': prodlot_obj_id or False
                    })

            wf_service = netsvc.LocalService("workflow")
            wf_service.trg_validate(uid, 'stock.picking', goods_id,
                                    'button_confirm', cr)
            wf_service.trg_validate(uid, 'stock.picking', material_id,
                                    'button_confirm', cr)

            self.pool.get('stock.picking').force_assign(
                cr, uid, [goods_id, material_id], context)

        self.write(
            cr, uid, ids, {
                'state': 'done',
                'approver': self.pool.get('res.users').browse(cr, uid, uid).id
            })
        return True

    def unlink(self, cr, uid, ids, context=None):
        val = self.browse(cr, uid, ids, context=context)[0]
        if val.state != 'draft':
            raise osv.except_osv(
                ('Invalid action !'),
                ('Cannot delete a work order which is in state \'%s\'!') %
                (val.state, ))
        return super(perintah_kerja_internal, self).unlink(cr,
                                                           uid,
                                                           ids,
                                                           context=context)

    def print_perintah(self, cr, uid, ids, context=None):
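        # Flatten the order lines into printable quantity and product strings
        # and launch the 'perintah.A4' report action.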
        data = {}
        val = self.browse(cr, uid, ids)[0]
        data['form'] = {}
        data['ids'] = context.get('active_ids', [])
        data['form']['data'] = self.read(cr, uid, ids)[0]

        qty = ''
        product = ''
        for x in val.perintah_lines:
            qty = qty + str(x.product_qty) + ' ' + x.product_uom.name + '\n\n'
            product = product + x.name + '\n\n'

        product = product + '\n\n' + val.note

        data['form']['data']['qty'] = qty
        data['form']['data']['product'] = product
        data['form']['data']['creator'] = val.creator.name
        data['form']['data']['checker'] = val.checker.name
        data['form']['data']['approver'] = val.approver.name

        return {
            'type': 'ir.actions.report.xml',
            'report_name': 'perintah.A4',
            'datas': data,
            'nodestroy': True
        }
Exemplo n.º 33
0
            self.write(cr, uid, id, {"state": "na"}, context)
        return result

    def historise(self, cr, uid, ids, message="", context=None):
        for id in ids:
            history = self.read(cr, uid, id, ["history"], context).get("history", "")
            self.write(
                cr,
                uid,
                id,
                {"history": (history or "") + "\n" + time.strftime("%Y-%m-%d %H:%M:%S") + ": " + tools.ustr(message)},
                context,
            )

    _columns = {
        "email_from": fields.char("From", size=64),
        "email_to": fields.char("Recipient (To)", size=250),
        "email_cc": fields.char("CC", size=250),
        "email_bcc": fields.char("BCC", size=250),
        "reply_to": fields.char("Reply-To", size=250),
        "message_id": fields.char("Message-ID", size=250),
        "subject": fields.char("Subject", size=200),
        "body_text": fields.text("Standard Body (Text)"),
        "body_html": fields.text("Body (Rich Text Clients Only)"),
        "attachments_ids": fields.many2many(
            "ir.attachment", "mail_attachments_rel", "mail_id", "att_id", "Attachments"
        ),
        "account_id": fields.many2one("email_template.account", "User account", required=True),
        "user": fields.related("account_id", "user", type="many2one", relation="res.users", string="User"),
        "server_ref": fields.integer("Server Reference of mail", help="Applicable for inward items only"),
        "mail_type": fields.selection(
Exemplo n.º 34
0
class Session(osv.Model):
	_name = 'openacademy.session'

	def _calculate_percentage_filled(self, cr, uid, ids, name, arg, context = None):
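		# Percentage of seats taken: attendees / seats * 100 (0 when seats is not set).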
		res = {}
		for session in self.browse(cr, uid, ids, context):
			res[session.id] = 100 * float(len(session.attendee_ids)) / float(session.seats) if session.seats else 0
		return res

	def _default_start_date(self, cr, uid, context):
		return time.strftime('%Y-%m-%d')

	def _calculate_stop_date(self, cr, uid, ids, name, args, context=None):
		res = {}
		for sess in self.browse(cr, uid, ids, context):
			# Default to False so sessions without a start date or a positive
			# duration do not hit an unbound 'dt' variable.
			res[sess.id] = False
			if sess.startdate and sess.duration > 0:
				dt = datetime.strptime(sess.startdate, '%Y-%m-%d %H:%M:%S')
				dt = dt + timedelta(days=sess.duration, minutes=-1)
				res[sess.id] = dt.strftime('%Y-%m-%d %H:%M:%S')
		return res

	def action_reset(self, cr, uid, ids, context=None):
		return self.write(cr, uid, ids, {"state" : "draft"}, context=context)

	def action_confirm(self, cr, uid, ids, context=None):
		return self.write(cr, uid, ids, {"state" : "confirmed"}, context=context)

	def action_done(self, cr, uid, ids, context=None):
		return self.write(cr, uid, ids, {"state" : "done"}, context=context)

	_columns = {
		'name': fields.char('Name', size = 128, required = True),
		"startdate" : fields.datetime("StartDate"),
		"stopdate" : fields.function(_calculate_stop_date, type="datetime", string="Stop Date"),
		"duration" : fields.float("Duration", digits = (5,1), help = "The duration of the session in days"),
		"seats" : fields.integer("Seats"),
		"percentage_filled" : fields.function(_calculate_percentage_filled, type="integer", string="Percentage Filled"),

		"state" : fields.selection(
			[
				("draft", "Draft"),
				("confirmed", "Confirmed"),
				("done", "Done")
			], 'Stage', required=True, readonly=True
		),

		"instructor_id" : fields.many2one("res.partner", string="Instructor", 
			domain = ['|', ("is_instructor", '=', True), ("category_id", "child_of", "Teacher")]
		),
		"course_id" : fields.many2one("openacademy.course", required = True, string = "Related Course"),
		"attendee_ids" : fields.one2many("openacademy.attendee", "session_id", string = "Attendees"),
		"partner_id" : fields.many2one("res.partner")
	}

	_defaults = {
		"startdate" : _default_start_date,
		"state" : "draft"
	}
Exemplo n.º 35
0
                                        'delay' : product_obj.browse(cr,uid,prod_shipping_id).product_tmpl_id.sale_delay,
                                        'invoiced' : False,
                                        'state' : 'confirmed',
                                        'product_id' : prod_shipping_id,
                                    }
                                    shiplineid = sale_order_line_obj.create(cr,uid,shiporderlinevals)
                        company_id = self.pool.get('res.users').browse(cr,uid,uid).company_id.id
                        defaults = {'company_id':company_id}
                        paid = True
                        amazon_oe_status = self.amazon_oe_status(cr, uid, saleorderid, paid, context, defaults)
            else:
                print"No data is available"
        return True
    
    _columns = {
        'name' : fields.char('Name',size=64, required=True),
        'aws_access_key_id' : fields.char('Access Key',size=64,required=True),
        'aws_secret_access_key' : fields.char('Secret Key',size=64,required=True),
        'aws_market_place_id' : fields.char('Market Place ID',size=64,required=True),
        'aws_merchant_id' : fields.char('Merchant ID',size=64,required=True),
        }
amazon_instance()

class amazon_browse_node(osv.osv):
    _name = 'amazon.browse.node'
    _columns = {
        'browse_node_name' : fields.char('Name',size=64, required=True),
        'browse_node_country' : fields.many2many('res.country','browse_node_country_rel','browse_node_id','country_id','Browse Node Country')
        }
amazon_browse_node()
class request_financial_ratification(osv.osv_memory):
    _name = "request.financial.ratification"
    _description = "Request Financial Ratification Report"
    USERS_SELECTION = [
        ('admin', 'Supply Department'),
        ('tech', 'Technical Services Department'),
        ('arms', 'Arms Department'),
    ]
    report_type = [('with_items', 'With Items'),
                   ('without_items', 'Without Items'),
                   ('suppliers_only', 'Suppliers Only')]

    _columns = {
        'request_financial_ids':
        fields.many2many('ireq.m',
                         'request_financial_wizard_rel',
                         'counter',
                         'request_id',
                         string='Requests'),
        'company_id':
        fields.many2one('res.company', 'Company', readonly=True),
        'executing_agency':
        fields.selection(
            USERS_SELECTION,
            'Executing Agency',
            readonly=True,
            select=True,
            help='Department which will execute this request'),
        'report_type':
        fields.selection(report_type,
                         'Report Type ',
                         select=True,
                         required=True),
        'purchase_purposes':
        fields.char(
            'Purchase purposes',
            size=256,
            required=True,
        ),
    }

    _defaults = {
        'company_id':
        lambda self, cr, uid, c: self.pool.get('res.company').
        _company_default_get(
            cr, uid, 'request.financial.ratification', context=c),
        'report_type':
        'without_items',
        'executing_agency':
        lambda self, cr, uid, c: self.pool.get('res.users').browse(
            cr, uid, uid, context=c).belong_to,
    }

    def print_report(self, cr, uid, ids, context=None):
        data = self.read(cr, uid, ids, [], context=context)[0]

        datas = {
            'ids': [],
            'model': 'ireq.m',
            'form': data,
        }
        if not datas['form']['request_financial_ids']:
            raise osv.except_osv(
                _('No Selected Data !'),
                _('Please make sure you selected at least one request..'))
        return {
            'type': 'ir.actions.report.xml',
            'report_name': 'request_financial_ratification',
            'datas': datas,
        }
Exemplo n.º 37
0
        no_update = dict(context, update=False)
        try:
            if not wait:
                cr.execute("SAVEPOINT update_entity_last_activity")
                cr.execute('select id from sync_server_entity_activity where entity_id=%s for update nowait', (entity.id,), log_exceptions=False)
            cr.execute('update sync_server_entity_activity set datetime=%s, activity=%s where entity_id=%s', (now, activity, entity.id))
        except psycopg2.OperationalError, e:
            if not wait and e.pgcode == '55P03':
                # can't acquire lock: ok the show must go on
                cr.execute("ROLLBACK TO update_entity_last_activity")
                logging.getLogger('sync.server').info("Can't acquire lock to set last_activity")
                return
            raise

    _columns = {
        'name':fields.char('Instance Name', size=64, required=True, select=True),
        'identifier':fields.char('Identifier', size=64, readonly=True, select=True),
        'hardware_id' : fields.char('Hardware Identifier', size=128, select=True),
        'parent_id':fields.many2one('sync.server.entity', 'Parent Instance', ondelete='cascade'),
        'group_ids':fields.many2many('sync.server.entity_group', 'sync_entity_group_rel', 'entity_id', 'group_id', string="Groups"),
        'state' : fields.selection([('pending', 'Pending'), ('validated', 'Validated'), ('invalidated', 'Invalidated'), ('updated', 'Updated')], 'State'),
        'email':fields.char('Contact Email', size=512),
        'user_id': fields.many2one('res.users', 'User', ondelete='restrict', required=True),
        
        #just in case, since the many2one exist it has no cost in database
        'children_ids' : fields.one2many('sync.server.entity', 'parent_id', 'Children Instances'),
        'update_token' : fields.char('Update security token', size=256),

        'activity' : fields.function(_get_activity, type='char', string="Activity", method=True, multi="_get_act"),
        'last_dateactivity': fields.function(_get_activity, type='datetime', string="Date of last activity", method=True, multi="_get_act"),
        #'last_activity' : fields.datetime("Date of last activity", readonly=True),
Exemplo n.º 38
0
class account_invoice_report(osv.osv):
    _name = "account.invoice.report"
    _description = "Invoices Statistics"
    _auto = False
    _rec_name = 'date'
    _columns = {
        'date':
        fields.date('Date', readonly=True),
        'year':
        fields.char('Year', size=4, readonly=True),
        'day':
        fields.char('Day', size=128, readonly=True),
        'month':
        fields.selection([('01', 'January'), ('02', 'February'),
                          ('03', 'March'), ('04', 'April'), ('05', 'May'),
                          ('06', 'June'), ('07', 'July'), ('08', 'August'),
                          ('09', 'September'), ('10', 'October'),
                          ('11', 'November'), ('12', 'December')],
                         'Month',
                         readonly=True),
        'product_id':
        fields.many2one('product.product', 'Product', readonly=True),
        'product_qty':
        fields.float('Qty', readonly=True),
        'uom_name':
        fields.char('Reference UoM', size=128, readonly=True),
        'payment_term':
        fields.many2one('account.payment.term', 'Payment Term', readonly=True),
        'period_id':
        fields.many2one('account.period',
                        'Force Period',
                        domain=[('state', '<>', 'done')],
                        readonly=True),
        'fiscal_position':
        fields.many2one('account.fiscal.position',
                        'Fiscal Position',
                        readonly=True),
        'currency_id':
        fields.many2one('res.currency', 'Currency', readonly=True),
        'categ_id':
        fields.many2one('product.category',
                        'Category of Product',
                        readonly=True),
        'journal_id':
        fields.many2one('account.journal', 'Journal', readonly=True),
        'partner_id':
        fields.many2one('res.partner', 'Partner', readonly=True),
        'company_id':
        fields.many2one('res.company', 'Company', readonly=True),
        'user_id':
        fields.many2one('res.users', 'Salesman', readonly=True),
        'price_total':
        fields.float('Total Without Tax', readonly=True),
        'price_average':
        fields.float('Average Price', readonly=True, group_operator="avg"),
        'currency_rate':
        fields.float('Currency Rate', readonly=True),
        'nbr':
        fields.integer('# of Lines', readonly=True),
        'type':
        fields.selection([
            ('out_invoice', 'Customer Invoice'),
            ('in_invoice', 'Supplier Invoice'),
            ('out_refund', 'Customer Refund'),
            ('in_refund', 'Supplier Refund'),
        ],
                         'Type',
                         readonly=True),
        'state':
        fields.selection([('draft', 'Draft'), ('proforma', 'Pro-forma'),
                          ('proforma2', 'Pro-forma'), ('open', 'Open'),
                          ('paid', 'Done'), ('cancel', 'Cancelled')],
                         'Invoice State',
                         readonly=True),
        'date_due':
        fields.date('Due Date', readonly=True),
        'address_contact_id':
        fields.many2one('res.partner.address',
                        'Contact Address Name',
                        readonly=True),
        'address_invoice_id':
        fields.many2one('res.partner.address',
                        'Invoice Address Name',
                        readonly=True),
        'account_id':
        fields.many2one('account.account', 'Account', readonly=True),
        'account_line_id':
        fields.many2one('account.account', 'Account Line', readonly=True),
        'partner_bank_id':
        fields.many2one('res.partner.bank', 'Bank Account', readonly=True),
        'residual':
        fields.float('Total Residual', readonly=True),
        'delay_to_pay':
        fields.float('Avg. Delay To Pay', readonly=True, group_operator="avg"),
        'due_delay':
        fields.float('Avg. Due Delay', readonly=True, group_operator="avg"),
    }
    _order = 'date desc'

    def init(self, cr):
        tools.drop_view_if_exists(cr, 'account_invoice_report')
        cr.execute("""
            create or replace view account_invoice_report as (
                 select min(ail.id) as id,
                    ai.date_invoice as date,
                    to_char(ai.date_invoice, 'YYYY') as year,
                    to_char(ai.date_invoice, 'MM') as month,
                    to_char(ai.date_invoice, 'YYYY-MM-DD') as day,
                    ail.product_id,
                    ai.partner_id as partner_id,
                    ai.payment_term as payment_term,
                    ai.period_id as period_id,
                    (case when u.uom_type not in ('reference') then
                        (select name from product_uom where uom_type='reference' and active and category_id=u.category_id LIMIT 1)
                    else
                        u.name
                    end) as uom_name,
                    ai.currency_id as currency_id,
                    ai.journal_id as journal_id,
                    ai.fiscal_position as fiscal_position,
                    ai.user_id as user_id,
                    ai.company_id as company_id,
                    count(ail.*) as nbr,
                    ai.type as type,
                    ai.state,
                    pt.categ_id,
                    ai.date_due as date_due,
                    ai.address_contact_id as address_contact_id,
                    ai.address_invoice_id as address_invoice_id,
                    ai.account_id as account_id,
                    ail.account_id as account_line_id,
                    ai.partner_bank_id as partner_bank_id,
                    sum(case when ai.type in ('out_refund','in_invoice') then
                         -ail.quantity / u.factor
                        else
                         ail.quantity / u.factor
                        end) as product_qty,

                    sum(case when ai.type in ('out_refund','in_invoice') then
                         -ail.price_subtotal
                        else
                          ail.price_subtotal
                        end) / cr.rate as price_total,

                    (case when ai.type in ('out_refund','in_invoice') then
                      sum(-ail.price_subtotal)
                    else
                      sum(ail.price_subtotal)
                    end) / (CASE WHEN sum(ail.quantity/u.factor) <> 0
                       THEN
                         (case when ai.type in ('out_refund','in_invoice')
                          then sum(-ail.quantity/u.factor)
                          else sum(ail.quantity/u.factor) end)
                       ELSE 1
                       END)
                     / cr.rate as price_average,

                    cr.rate as currency_rate,
                    sum((select extract(epoch from avg(date_trunc('day',aml.date_created)-date_trunc('day',ail.create_date)))/(24*60*60)::decimal(16,2)
                        from account_move_line as aml
                        WHERE ai.move_id=aml.move_id AND ail.product_id=aml.product_id AND ai.partner_id=aml.partner_id
                        )) as delay_to_pay,
                    (select extract(epoch from avg(date_trunc('day',ai.date_due)-date_trunc('day',ai.date_invoice)))/(24*60*60)::decimal(16,2)) as due_delay,
                    (case when ai.type in ('out_refund','in_invoice') then
                      -ai.residual
                    else
                      ai.residual
                    end)/ (CASE WHEN
                        (select count(l.id) from account_invoice_line as l
                         left join account_invoice as a ON (a.id=l.invoice_id)
                         where a.id=ai.id) <> 0
                       THEN
                        (select count(l.id) from account_invoice_line as l
                         left join account_invoice as a ON (a.id=l.invoice_id)
                         where a.id=ai.id)
                       ELSE 1
                       END) / cr.rate as residual
                from account_invoice_line as ail
                left join account_invoice as ai ON (ai.id=ail.invoice_id)
                left join product_product pr on (pr.id=ail.product_id)
                left join product_template pt on (pt.id=pr.product_tmpl_id)
                left join product_uom u on (u.id=ail.uos_id),
                res_currency_rate cr
                where cr.id in (select id from res_currency_rate cr2  where (cr2.currency_id = ai.currency_id)
                and ((ai.date_invoice is not null and cr.name <= ai.date_invoice) or (ai.date_invoice is null and cr.name <= NOW())) limit 1)
                group by ail.product_id,
                    ai.date_invoice,
                    ai.id,
                    cr.rate,
                    to_char(ai.date_invoice, 'YYYY'),
                    to_char(ai.date_invoice, 'MM'),
                    to_char(ai.date_invoice, 'YYYY-MM-DD'),
                    ai.partner_id,
                    ai.payment_term,
                    ai.period_id,
                    u.name,
                    ai.currency_id,
                    ai.journal_id,
                    ai.fiscal_position,
                    ai.user_id,
                    ai.company_id,
                    ai.type,
                    ai.state,
                    pt.categ_id,
                    ai.date_due,
                    ai.address_contact_id,
                    ai.address_invoice_id,
                    ai.account_id,
                    ail.account_id,
                    ai.partner_bank_id,
                    ai.residual,
                    ai.amount_total,
                    u.uom_type,
                    u.category_id
            )
        """)
		'width': fields.integer('Width (mm)'),
		'height': fields.integer('Height (mm)'),
		'weight': fields.integer('Weight (kg)'),
		'product_id': fields.many2one('product.product', 'Pallet Product'),
		}
	_defaults = {
		'active': lambda *a: 1,
		}
	_sql_constraints = [('name_uniq', 'unique(name)', 'Pallet names must be unique!')]
pallet_types()

class pallet_stack_layout(osv.osv):

	_name = 'pallet.stack.layout'
	_columns = {
		'name': fields.char('Description', size=128, required=True),
		'active': fields.boolean('Active'),
		'program': fields.integer('Program Number', help='If this is stacked on a palletiser, this is the palletiser program number'),
		'pallet_type_id': fields.many2one('pallet.types','Pallet Type', ondelete='set null'),
		'layout_diagram': fields.binary('Layout diagram', filters='*.bmp,*.jpg,*.gif'),
		'slipsheeted': fields.boolean('Slipsheeted', help='If product is stacked onto slipsheets, this box should be ticked.'),
		'layer_qty': fields.integer('Packages per layer'),
		'layer_height': fields.integer('Height per layer (mm)'),
		'layer_ids': fields.one2many('pallet_stack_layers', 'layout_id','Layer options'),
		}
	_defaults = {
		'active': lambda *a: 1,
		}
pallet_stack_layout()

class pallet_stack_layers(osv.osv):
Exemplo n.º 40
0
class ir_filters(osv.osv):
    '''
    Filters
    '''
    _name = 'ir.filters'
    _description = 'Filters'

    def _list_all_models(self, cr, uid, context=None):
        cr.execute("SELECT model, name from ir_model")
        return cr.fetchall()

    def copy(self, cr, uid, id, default=None, context=None):
        if default is None:
            default = {}
        name = self.read(cr, uid, [id], ['name'])[0]['name']
        default.update({'name': _('%s (copy)') % name})
        return super(ir_filters, self).copy(cr, uid, id, default, context)

    def get_filters(self, cr, uid, model):
        act_ids = self.search(cr, uid, [('model_id', '=', model),
                                        ('user_id', '=', uid)])
        my_acts = self.read(cr, uid, act_ids, ['name', 'domain', 'context'])
        return my_acts

    def create_or_replace(self, cr, uid, vals, context=None):
        filter_id = None
        lower_name = vals['name'].lower()
        matching_filters = [
            x for x in self.get_filters(cr, uid, vals['model_id'])
            if x['name'].lower() == lower_name
        ]
        if matching_filters:
            self.write(cr, uid, matching_filters[0]['id'], vals, context)
            return False
        return self.create(cr, uid, vals, context)

    def _auto_init(self, cr, context=None):
        super(ir_filters, self)._auto_init(cr, context)
        # Use unique index to implement unique constraint on the lowercase name (not possible using a constraint)
        cr.execute(
            "SELECT indexname FROM pg_indexes WHERE indexname = 'ir_filters_name_model_uid_unique_index'"
        )
        if not cr.fetchone():
            cr.execute(
                'CREATE UNIQUE INDEX "ir_filters_name_model_uid_unique_index" ON ir_filters (lower(name), model_id, user_id)'
            )

    _columns = {
        'name':
        fields.char('Filter Name', size=64, translate=True, required=True),
        'user_id':
        fields.many2one(
            'res.users',
            'User',
            help=
            "The user this filter is available to. When left empty the filter is usable by the system only."
        ),
        'domain':
        fields.text('Domain Value', required=True),
        'context':
        fields.text('Context Value', required=True),
        'model_id':
        fields.selection(_list_all_models, 'Object', size=64, required=True),
    }
    _defaults = {
        'domain': '[]',
        'context': '{}',
    }
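
# Standalone sketch (not part of ir.filters): create_or_replace above matches an
# existing filter by comparing lowercased names so that saving "my invoices" updates
# the filter named "My Invoices" instead of duplicating it. A hypothetical in-memory
# version of that lookup:
def _find_matching(existing_filters, new_name):
    """Return the first filter dict whose name matches new_name case-insensitively."""
    lower_name = new_name.lower()
    for f in existing_filters:
        if f['name'].lower() == lower_name:
            return f
    return None

assert _find_matching([{'id': 1, 'name': 'My Invoices'}], 'my invoices')['id'] == 1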
Exemplo n.º 41
0
# -*- encoding: utf-8 -*-

import pooler
from osv import fields, osv

_columns = {
    'domain': fields.char('Domain', size=32, help="This field is only used if you develop your own module allowing developers to create specific taxes in a custom domain."),
    'tax_discount': fields.boolean('Tax Discounted in Price', help="Mark it for (ICMS, PIS e etc.)."),
    'tax_include': fields.boolean('Include the Tax Amount in Price', help="Mark it to include the Tax Amount in Price."),
    }


class account_tax_code_template(osv.osv):
    _inherit = 'account.tax.code.template'
    _columns = _columns

account_tax_code_template()


class account_tax_code(osv.osv):
    _inherit = 'account.tax.code'
    _columns = _columns

account_tax_code()


def change_digit_tax(cr):
    res = pooler.get_pool(cr.dbname).get('decimal.precision').precision_get(cr, 1, 'Account')
    return (16, res + 2)

Exemplo n.º 42
0
class tcv_dispatch_lots_lines(osv.osv_memory):

    _name = 'tcv.dispatch.lots.lines'

    _description = ''

    ##-------------------------------------------------------------------------

    ##------------------------------------------------------- _internal methods

    ##--------------------------------------------------------- function fields

    _columns = {
        'line_id':
        fields.many2one('tcv.dispatch.lots',
                        'Lots',
                        required=True,
                        ondelete='cascade'),
        'name':
        fields.char('Name', size=64, required=False, readonly=False),
        'origin':
        fields.char('Origin', size=64, required=False, readonly=False),
        'date_invoice':
        fields.date('Date invoice',
                    required=True,
                    readonly=True,
                    states={'draft': [('readonly', False)]},
                    select=True),
        'invoice_number':
        fields.char('Invoice number', size=64, required=False, readonly=False),
        'prod_lot_id':
        fields.many2one('stock.production.lot',
                        'Production lot',
                        required=False),
        'partner_id':
        fields.many2one('res.partner',
                        'Partner',
                        change_default=True,
                        readonly=True,
                        required=True,
                        states={'draft': [('readonly', False)]},
                        ondelete='restrict'),
        'product_id':
        fields.many2one('product.product', 'Product', ondelete='restrict'),
        'invoice_id':
        fields.many2one('account.invoice',
                        'Invoice Reference',
                        ondelete='restrict',
                        select=True),
        'order_id':
        fields.many2one('sale.order',
                        'Order Reference',
                        ondelete='restrict',
                        select=True),
        'picking_id':
        fields.many2one('stock.picking',
                        'Picking',
                        ondelete='restrict',
                        select=True),
    }

    _defaults = {}

    _sql_constraints = []
Exemplo n.º 43
0
            debug(submit_result)
            uo.close()
        except:
            if not crm_case_id:
                # TODO schedule a retry (ir.cron)
                return False
        return True

    def _valid_get(self, cr, uid, ids, field_name, arg, context=None):
        res = {}
        for contract in self.browse(cr, uid, ids, context=context):
            res[contract.id] = ("unvalid", "valid")[contract.date_stop >= time.strftime('%Y-%m-%d')]
        return res

    _columns = {
        'name' : fields.char('Contract ID', size=256, required=True, readonly=True),
        'password' : fields.char('Password', size=64, invisible=True, required=True, readonly=True),
        'date_start' : fields.date('Starting Date', readonly=True),
        'date_stop' : fields.date('Ending Date', readonly=True),
        'module_ids' : fields.many2many('maintenance.contract.module', 'maintenance_contract_module_rel', 'contract_id', 'module_id', 'Covered Modules', readonly=True),
        'state' : fields.function(_valid_get, method=True, string="State", type="selection", selection=[('valid', 'Valid'),('unvalid', 'Unvalid')], readonly=True),
        'kind' : fields.selection([('full', 'Full'),('partial', 'Partial')], 'Kind', required=True, readonly=True),
    }
    _defaults = {
        'password' : lambda obj,cr,uid,context={} : '',
    }
    _sql_constraints = [
        ('uniq_name', 'unique(name)', "Your maintenance contract is already subscribed in the system !")
    ]

maintenance_contract()
Exemplo n.º 44
0
class buildbot_test(osv.osv):
    _name = "buildbot.test"
    _order = 'test_date desc'

    def _get_test_result(self, cr, uid, ids, name, args, context=None):
        res = {}
        for test in self.browse(cr, uid, ids):
            res[test.id] = 'unknown'
            for step in test.test_step_ids:
                if step.state == 'fail':
                    res[test.id] = 'fail'
                    break
                elif step.state == 'skip':
                    res[test.id] = 'skip'
                    break
                elif step.state == 'pass' and res[test.id] == 'unknown':
                    res[test.id] = 'pass'
        return res

    def _get_test_ids(self, cr, uid, ids, context=None):
        if context is None:
            context = {}
        result = {}
        for step in self.pool.get('buildbot.test.step').browse(
                cr, uid, ids, context=context):
            result[step.test_id.id] = True
        return result.keys()

    _columns = {
        'name':
        fields.char('Test Name', size=500, help="Test Name"),
        'test_date':
        fields.datetime('Date of Test',
                        required=True,
                        help="Date on which the test was conducted"),
        'branch_id':
        fields.many2one('buildbot.lp.branch',
                        'Branch',
                        required=True,
                        help="Name of the Launchpad Branch Tested"),
        'environment':
        fields.text('Test Environment',
                    help="Environment on which test was conducted"),
        'commiter_id':
        fields.many2one('buildbot.lp.user',
                        'Branch Committer',
                        required=True,
                        help="Commiter of the revision"),
        'commit_date':
        fields.datetime('Date Of Commit', required=True,
                        help="Date of commit"),
        'commit_comment':
        fields.text('Comment On Commit', help="Comment on commit"),
        'commit_rev_id':
        fields.char('Revision Id', size=128, help="Revision ID of the commit"),
        'commit_rev_no':
        fields.integer('Revision No.', help="Revision No of the commit"),
        'new_files':
        fields.text('Files Added', help="New Files added in the Commit"),
        'update_files':
        fields.text('Files Updated', help="Files Updated in the Commit"),
        'remove_files':
        fields.text('Files Removed', help="Files Removed in the Commit"),
        'rename_files':
        fields.text('Files Renamed', help="Files Renamed in the Commit"),
        'patch_attached':
        fields.boolean('Patch Attached',
                       readonly=True,
                       help="Patch Attached in the Commit"),
        'state':
        fields.function(
            _get_test_result,
            method=True,
            type='selection',
            string="Test Result",
            selection=[('unknown', 'Unknown'), ('fail', 'Failed'),
                       ('pass', 'Passed'), ('skip', 'Skipped')],
            store={'buildbot.test.step': (_get_test_ids, ['test_id'], 10)},
            help="Final State of the Test"),
        'test_step_ids':
        fields.one2many('buildbot.test.step', 'test_id', 'Test Steps'),
        'failure_reason':
        fields.text('Failure Reason',
                    help="Reason for the failure of the test")
    }
    _defaults = {'state': 'unknown'}
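
# Standalone sketch (not part of the buildbot module): _get_test_result above reports
# the first 'fail' or 'skip' it meets and otherwise 'pass' once any step passed; the
# same rule applied to a plain list of step states:
def aggregate_states(step_states):
    result = 'unknown'
    for state in step_states:
        if state in ('fail', 'skip'):
            return state          # the first failure or skip decides the outcome
        if state == 'pass' and result == 'unknown':
            result = 'pass'
    return result

assert aggregate_states(['pass', 'pass', 'fail']) == 'fail'
assert aggregate_states(['pass', 'pass']) == 'pass'
assert aggregate_states([]) == 'unknown'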
Exemplo n.º 45
0
        if not email:
            return False

        email = email.replace(';', ',')  # normalise separators: ';' -> ','
        emails = email.split(',')
        for email in emails:
            if not get_validate_email(email):
                return False
        return True

    _columns = {
            'pem_from':fields.char(
                            'From',
                            size=64),
            'pem_to':fields.char(
                            'Recipient (To)',
                            size=250,),
            'pem_cc':fields.char(
                            'CC',
                            size=250),
            'pem_bcc':fields.char(
                            'BCC',
                            size=250),
            'pem_subject':fields.char(
                            'Subject',
                            size=200,),
            'pem_body_text':fields.text(
                            'Standard Body (Text)'),
Exemplo n.º 46
0
class buildbot_lp_branch(osv.osv):
    _name = "buildbot.lp.branch"

    def _get_name(self, cr, uid, ids, name, args, context=None):
        res = {}
        for branch in self.browse(cr, uid, ids):
            res[branch.id] = {'build_directory': '', 'dbname': ''}
            dir_name = ''
            if branch.lp_group_id and branch.lp_group_id.name:
                dir_name += branch.lp_group_id.name + '_'
            if branch.lp_project_id and branch.lp_project_id.name:
                dir_name += branch.lp_project_id.name + '_'
            if branch.name:
                dir_name += branch.name
            res[branch.id]['build_directory'] = dir_name
            res[branch.id]['dbname'] = branch.name.replace('-', '_')
        return res

    _columns = {
        'name':
        fields.char('LP Branch',
                    size=128,
                    required=True,
                    help="Launchpad Branch Name"),
        'lp_group_id':
        fields.many2one('buildbot.lp.group',
                        'LP Group',
                        help="Launchpad Group"),
        'lp_user_id':
        fields.many2one('buildbot.lp.user', 'LP User', help="Launchpad User"),
        'url':
        fields.char('Source Url', size=128, required=True, help="Source Url"),
        'latest_rev_id':
        fields.char('Revision Id', size=128, help="Latest Revision ID Tested"),
        'latest_rev_no':
        fields.integer('Revision Number', help="Latest Revision No Tested"),
        'active':
        fields.boolean('Active', help="Branch Active/Inactive"),
        "is_test_branch":
        fields.boolean("Test Branch", help="Is this branch a test branch"),
        "is_root_branch":
        fields.boolean("Root Branch", help="Is this branch a root branch"),
        'treestabletimer':
        fields.integer('Tree Stable Timer', help="Timer for the branch"),
        'build_directory':
        fields.function(
            _get_name,
            multi='name',
            method=True,
            type='char',
            string='Build Directory',
            size=128,
            help="The Directory in which this branch will be built"),
        'dbname':
        fields.function(
            _get_name,
            method=True,
            multi='name',
            type='char',
            size=128,
            string='Database Name',
            help=
            "The Name of the Database which will be created for testing this branch"
        ),
        'port':
        fields.integer('port', help="Port for the openerp-server to start"),
        'netport':
        fields.integer('net-port',
                       help="net-port for the openerp-server to start"),
        'merge_addons':
        fields.boolean(
            'Merge with Addons',
            help="Whether you want the branch to be merged with Trunk Addons"),
        'merge_server':
        fields.boolean(
            'Merge with Server',
            help="Whether you want the branch to be merged with Trunk Server"),
        'merge_extra_addons':
        fields.boolean(
            'Merge with Extra Addons',
            help=
            "Whether you want the branch to be merged with Trunk Extra-Addons"
        ),
        'merge_community_addons':
        fields.boolean(
            'Merge with Community Addons',
            help=
            "Whether you want the branch to be merged with Community Addons"),
        'addons_include':
        fields.text('Include addons',
                    help="Space-separated list of addons to always test"),
        'addons_exclude':
        fields.text(
            'Exclude addons',
            help="Never test these addons, blacklist. Space-separated"),
    }
    _defaults = {
        'active': lambda *a: 1,
    }
    _sql_constraints = [
        # ('dbname_build_dir_uniq', 'unique (dbname, build_directory)', 'The database name and build directory must be unique !')
    ]
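
# Standalone illustration (not part of the buildbot module): _get_name above joins the
# Launchpad group, project and branch names into the build directory and derives the
# database name from the branch name; the names below are made up.
group_name, project_name, branch_name = 'openobject', 'addons', 'trunk-dev'
build_directory = group_name + '_' + project_name + '_' + branch_name
dbname = branch_name.replace('-', '_')
assert build_directory == 'openobject_addons_trunk-dev'
assert dbname == 'trunk_dev'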
Exemplo n.º 47
0
			total_price = est_price * (1 - valid_discount / 100.00) + shipping_cost
		return {'value': {
						'base_discount': valid_discount,
						'price_buffer':total_price,
# 		'balance':balance,
				}
			}

	def _estado_defecto(self, cr, uid, context=None):
		estado = self.pool.get('bag.state').search(cr, uid, [('name', '=', 'Recibido')])
		return estado

	_name = 'bag.service'
	_description = 'Service Order'
	_columns = {
		'name': fields.char('Numero Orden', size=24),
		'type': fields.selection([('particular', 'Particular'), ('airline', 'Aerolinea')], 'Type', required=True),
		'date': fields.date('Fecha', required=True),
		'date_promised': fields.date('Fecha Prometida', required=True),
		'partner_id' : fields.many2one('res.partner', 'Cliente'),
		'address_str': fields.related('partner_id', 'street', type='char', size=128),
		'address_num_str': fields.related('partner_id', 'street_num', type='char'),
		'piso_dpto_str': fields.related('partner_id', 'piso_dpto', type='char', size=128),
		'phone_str': fields.related('partner_id', 'phone_str', type='char', size=128),
		'zone_str': fields.related('partner_id', 'street', type='char'),
		'type_id': fields.many2one('bag.type', 'Tipo'),
		'format_id': fields.many2one('bag.format', 'Formato'),
		'color_id': fields.many2one('bag.color', 'Color'),
		'material_id': fields.many2one('bag.material', 'Material'),
		'size_id': fields.many2one('bag.size', 'Tamano'),
		'description': fields.char('Descripcion', size=64),
Exemplo n.º 48
0
class loan_draft(osv.osv):
    _name = "loan.draft"
    _description = "Зээлийн бүтээгдэхүүн тодорхойлох"

    def _get_loan_type(self, cr, uid, ids, fields, args, context={}, query=''):
        res = {}
        for loan in self.browse(cr, uid, ids):
            res[loan.id] = ''
            if loan.for_own:
                res[loan.id] = u"Хувийн болон өрхийн хэрэглээнд"
            elif loan.for_business:
                res[loan.id] = u"Бизнестээ"
        return res

    _columns = {
        'loan_type':
        fields.function(_get_loan_type,
                        type='char',
                        string="Zeeliin zoriulalt",
                        size=64,
                        method=True),  # Loan purpose
        'partner_id':
        fields.many2one('xac.partner', 'Hariltsagch'),  # Partner
        'for_own':
        fields.boolean('Huviin bolon orhiin heregtseend'),
        'for_business':
        fields.boolean('Biznest'),
        'amount':
        fields.float('Hussen zeeliin hemjee', required=True),
        'time':
        fields.integer(' Zeeliin hugatsaa (Saraar)', required=True),
        'velocity':
        fields.integer('Banknaas avch bui hed deh zeel', required=True),
        'currency':
        fields.selection([('mnt', 'Төгрөг'), ('usd', 'Доллар')], 'Currency'),
        'usage':
        fields.selection([('ok', 'OK'), ('no', 'NO')],
                         'Loan Usage',
                         required=True),
        'telephone':
        fields.char('Holboo barih utas', size=128),
        'other_usage':
        fields.char('Busad zoriulalt', size=128),
        'manager_id':
        fields.many2one('res.users', 'Hariutsah zeeliin mergejilten'),
        'loans':
        fields.many2many('xac.loan', 'xacloan_partnerloan_rel', 'partner_id',
                         'loan_id', 'Zeeliin medeelel'),
        'family_cap':
        fields.integer('Am bul'),
        'family_income':
        fields.integer('Orlogotoi'),

        # Details of the requested loan
        'name_for_loan':
        fields.char('Sanhuugiin tureeseer avah baraa buteegdehuunii ner',
                    size=128),
        'address_for_loan':
        fields.char('Oron suuts, hashaa baishnii bairshil,hayg', size=128),
        'school_of_student':
        fields.char('Oyutnii zeeliin huvid suraltsdag surguuli', size=128),
        'company_for_loan':
        fields.char('Baraa avahaar songoson baiguullaga', size=128),
        'commend':
        fields.char('Zuuchluulsan baiguullaga', size=128),
        'prepay_of_loan':
        fields.float('Uridchilgaa tolbor'),
        'is_vitanna':
        fields.boolean('Vitanna zeeldegch eseh'),
        'is_eko':
        fields.boolean('Eko zeel eseh'),

        # Partner income information
        'personal_incomes':
        fields.one2many('loan.income.personal', 'draft_id', 'Ooriin orlogo'),
        'other_incomes':
        fields.one2many('loan.income.other', 'draft_id', 'Busad orlogo'),
        'family_incomes':
        fields.one2many('loan.income.family', 'draft_id', 'Ger buliin orlogo'),

        # Collateral information
        'moveable_sureties':
        fields.one2many('loan.surety.moveable', 'draft_id', 'Hodloh horongo'),
        'unmoveable_sureties':
        fields.one2many('loan.surety.unmoveable', 'draft_id',
                        'Ul hodloh horongo'),
        'thirdparty_sureties':
        fields.one2many('loan.surety.thirdparty', 'draft_id',
                        'Guravdagch etgeediin horongo'),
    }

    def _get_partners(self, cr, uid, context={}):
        if context.get('partner_id', False):
            return context['partner_id']
        return False

    _defaults = {
        'for_own': lambda *a: True,
        'velocity': lambda *a: 1,
        'partner_id': _get_partners,
    }
Exemplo n.º 49
0
class fuel_monthly_plan_wizard(osv.osv_memory):
    """
    To manage the fuel monthly plan """

    _name = "fuel.monthly.plan.wizard"
    _description = "Fuel Monthly Plan"

    def _get_months(self, cr, uid, context):
        """
        Return the list of plan months.

        @return: list of months
        """
        months = [(str(n), str(n)) for n in range(1, 13)]
        return months

    _rec_name = "month"
    _columns = {
        'date':
        fields.date(
            'Plan Date',
            required=True,
        ),
        'month':
        fields.selection(_get_months, 'Month', required=True),
        'year':
        fields.char('Year', size=32, required=True),
        'type_plan':
        fields.selection([('constant_fuel', 'Constant Fuel'),
                          ('mission_extra', 'Mission Extra')],
                         'Plan Type',
                         required=True),
        'type':
        fields.selection([('departments', 'Departments'),
                          ('general_dapartments', 'General Departments')],
                         'Plan Type',
                         required=True),
        'dept_cat_id':
        fields.many2one(
            'hr.department.cat',
            'Department Category',
        ),
        'company_id':
        fields.many2one('res.company', 'Company', required=True),
        'extra_fuel_lines':
        fields.one2many('extra.fuel.lines',
                        'monthly_plan_id',
                        'Extra Fuel',
                        required=True),
    }
    _defaults = {
        'year': str(time.strftime('%Y')),
        'date': time.strftime('%Y-%m-%d'),
        'type_plan': 'constant_fuel',
    }

    def get_child_dept(self, cr, uid, dept_id, context=None):
        """
        Recursively collect the child departments of the given department.
        @param dept_id: department id
        @return: list of child department ids
        """
        department_obj = self.pool.get('hr.department')
        reads = department_obj.read(cr,
                                    uid, [dept_id], ['id', 'child_ids'],
                                    context=context)
        child_ids = []
        for record in reads:
            if record['child_ids']:
                child_ids = record['child_ids']
                for child in record['child_ids']:
                    child_ids += self.get_child_dept(cr,
                                                     uid,
                                                     child,
                                                     context=context)

        return child_ids

    def compute_plan(self, cr, uid, ids, context=None):
        """
        Compute the fuel plan.

        @return: empty dictionary
        """
        fuel_plan_obj = self.pool.get('fuel.plan')
        fuel_qty_obj = self.pool.get('fuel.quantity')
        fuel_qty_line_obj = self.pool.get('fuel.qty.line')
        vehicle_obj = self.pool.get('fleet.vehicle')
        department_obj = self.pool.get('hr.department')
        department_extra_obj = self.pool.get('extra.fuel.lines')
        center_obj = self.pool.get('account.analytic.account')
        user_obj = self.pool.get('res.users')

        for record in self.browse(cr, uid, ids, context=context):
            check_plan = fuel_plan_obj.search(
                cr,
                uid, [('month', '=', record.month), ('year', '=', record.year),
                      ('company_id', '=', record.company_id.id),
                      ('type_plan', '=', 'constant_fuel')],
                context=context)
            if check_plan and record.type_plan != 'mission_extra':
                raise osv.except_osv(
                    'ERROR', 'Fuel Plan For This Month Already Computed')


            # extra fuel quantity check
            for line in record.extra_fuel_lines:
                if line.product_qty <= 0:
                    raise osv.except_osv(
                        'Fuel quantity!!',
                        'Extra fuel quantity must be bigger than zero')
                if line.budget_depart < line.product_id.standard_price * line.product_qty and record.type_plan == 'constant_fuel':
                    raise osv.except_osv(
                        'Department Budget',
                        'The budget for this department is less than the requested fuel cost')
            domain = []
            user_id = user_obj.browse(cr, uid, [uid], context=context)[0]
            fuel_plan_dict = {
                'date': record.date,
                'month': record.month,
                'year': record.year,
                'company_id': record.company_id.id,
                #'department_id':user_id.context_department_id and user_id.context_department_id.id,
                'department_id': 1,
                'type_plan': record.type_plan,
            }
            plan_id = fuel_plan_obj.create(cr,
                                           uid,
                                           fuel_plan_dict,
                                           context=context)
            department_dict = {
                'plan_id': plan_id,
                'fuel_type': 'extra_fuel',
            }
            for line in record.extra_fuel_lines:
                vehicle_dict = []
                #department_extra_ids= department_extra_obj.search(cr,uid,[('id','=',line.id)],context=context)
                for extra in department_extra_obj.browse(cr,
                                                         uid, [line.id],
                                                         context=context):
                    department_dict.update(
                        {'department_id': extra.department_id_fuel.id})
                    extra_fuel_id = fuel_qty_obj.create(cr,
                                                        uid,
                                                        department_dict,
                                                        context=context)
                    vehicle_dict = {
                        'qty_id': extra_fuel_id,
                    }
                    #for line in record.extra_fuel_lines:
                    vehicle_dict.update({
                        'product_id':
                        line.product_id.id,
                        'product_qty':
                        line.product_qty,
                        'product_uom':
                        line.product_id.uom_id.id,
                        'price_unit':
                        line.product_id.standard_price,
                        'name':
                        u' وقود اضافي' + line.product_id.name,
                        'department_id':
                        line.department_id_fuel.id,
                    })
                    fuel_qty_line_obj.create(cr,
                                             uid,
                                             vehicle_dict,
                                             context=context)
            if record.type == 'general_dapartments':

                domain += [('cat_id', '=', record.dept_cat_id.id),
                           ('company_id', '=', record.company_id.id)]
            elif record.type == 'departments':
                domain += [('company_id', '=', record.company_id.id)]
            anltic_dep2 = []
            department_ids = department_obj.search(cr,
                                                   uid,
                                                   domain,
                                                   context=context)
            for dept in department_obj.browse(cr,
                                              uid,
                                              department_ids,
                                              context=context):
                child_ids = [dept.id]
                #if record.type=='general_dapartments':
                #  child_ids +=self.get_child_dept(cr,uid,dept.id,context=context)
                vehicle_ids = vehicle_obj.search(
                    cr,
                    uid, [('department_id', 'in', tuple(child_ids)),
                          ('status', '=', 'active'),
                          ('monthly_plan', '=', True)],
                    context=context)
                if vehicle_ids:
                    department_dict.update({
                        'department_id': dept.id,
                        'fuel_type': 'fixed_fuel',
                    })
                    fuel_qty_id = fuel_qty_obj.create(cr,
                                                      uid,
                                                      department_dict,
                                                      context=context)

                    for vehicle in vehicle_obj.browse(cr,
                                                      uid,
                                                      vehicle_ids,
                                                      context=context):
                        if vehicle.fuel_lines:

                            vehicle_dict.update({
                                'vehicles_id':
                                vehicle.id,
                                'department_id':
                                vehicle.department_id.id,
                                'product_id':
                                vehicle.fuel_lines[0].product_id.id,
                                'product_qty':
                                vehicle.fuel_lines[0].product_qty,
                                'product_uom':
                                vehicle.fuel_lines[0].product_id.uom_id.id,
                                'price_unit':
                                vehicle.fuel_lines[0].product_id.
                                standard_price,
                                'qty_id':
                                fuel_qty_id,
                                'name':
                                vehicle.name,
                            })
                            fuel_qty_line_obj.create(cr,
                                                     uid,
                                                     vehicle_dict,
                                                     context=context)

        return {}
Exemplo n.º 50
0
class lang(osv.osv):
    _name = "res.lang"
    _description = "Languages"

    _disallowed_datetime_patterns = tools.DATETIME_FORMATS_MAP.keys()
    _disallowed_datetime_patterns.remove(
        '%y')  # this one is in fact allowed, just not good practice

    def install_lang(self, cr, uid, **args):
        """

        This method is called from openerp/addons/base/base_data.xml to load
        some language and set it as the default for every partners. The
        language is set via tools.config by the RPC 'create' method on the
        'db' object. This is a fragile solution and something else should be
        found.

        """
        lang = tools.config.get('lang')
        if not lang:
            return False
        lang_ids = self.search(cr, uid, [('code', '=', lang)])
        if not lang_ids:
            self.load_lang(cr, uid, lang)
        ir_values_obj = self.pool.get('ir.values')
        default_value = ir_values_obj.get(cr, uid, 'default', False,
                                          ['res.partner'])
        if not default_value:
            ir_values_obj.set(cr, uid, 'default', False, 'lang',
                              ['res.partner'], lang)
        default_value = ir_values_obj.get(cr, uid, 'default', False,
                                          ['res.users'])
        if not default_value:
            ir_values_obj.set(cr, uid, 'default', False, 'context_lang',
                              ['res.users'], lang)
        return True

    def load_lang(self, cr, uid, lang, lang_name=None):
        # create the language with locale information
        fail = True
        iso_lang = tools.get_iso_codes(lang)
        for ln in tools.get_locales(lang):
            try:
                locale.setlocale(locale.LC_ALL, str(ln))
                fail = False
                break
            except locale.Error:
                continue
        if fail:
            lc = locale.getdefaultlocale()[0]
            msg = 'Unable to get information for locale %s. Information from the default locale (%s) have been used.'
            _logger.warning(msg, lang, lc)

        if not lang_name:
            lang_name = tools.get_languages().get(lang, lang)

        def fix_xa0(s):
            """Fix badly-encoded non-breaking space Unicode character from locale.localeconv(),
               coercing to utf-8, as some platform seem to output localeconv() in their system
               encoding, e.g. Windows-1252"""
            if s == '\xa0':
                return '\xc2\xa0'
            return s

        def fix_datetime_format(format):
            """Python's strftime supports only the format directives
               that are available on the platform's libc, so in order to
               be 100% cross-platform we map to the directives required by
               the C standard (1989 version), always available on platforms
               with a C standard implementation."""
            for pattern, replacement in tools.DATETIME_FORMATS_MAP.iteritems():
                format = format.replace(pattern, replacement)
            return str(format)

        lang_info = {
            'code': lang,
            'iso_code': iso_lang,
            'name': lang_name,
            'translatable': 1,
            'date_format':
            fix_datetime_format(locale.nl_langinfo(locale.D_FMT)),
            'time_format':
            fix_datetime_format(locale.nl_langinfo(locale.T_FMT)),
            'decimal_point':
            fix_xa0(str(locale.localeconv()['decimal_point'])),
            'thousands_sep':
            fix_xa0(str(locale.localeconv()['thousands_sep'])),
        }
        lang_id = False
        try:
            lang_id = self.create(cr, uid, lang_info)
        finally:
            tools.resetlocale()
        return lang_id

    def _check_format(self, cr, uid, ids, context=None):
        for lang in self.browse(cr, uid, ids, context=context):
            for pattern in self._disallowed_datetime_patterns:
                if (lang.time_format and pattern in lang.time_format)\
                    or (lang.date_format and pattern in lang.date_format):
                    return False
        return True

    def _get_default_date_format(self, cursor, user, context=None):
        return '%m/%d/%Y'

    def _get_default_time_format(self, cursor, user, context=None):
        return '%H:%M:%S'

    _columns = {
        'name':
        fields.char('Name', size=64, required=True),
        'code':
        fields.char('Locale Code',
                    size=16,
                    required=True,
                    help='This field is used to set/get locales for user'),
        'iso_code':
        fields.char(
            'ISO code',
            size=16,
            required=False,
            help='This ISO code is the name of po files to use for translations'
        ),
        'translatable':
        fields.boolean('Translatable'),
        'active':
        fields.boolean('Active'),
        'direction':
        fields.selection([('ltr', 'Left-to-Right'), ('rtl', 'Right-to-Left')],
                         'Direction',
                         required=True),
        'date_format':
        fields.char('Date Format', size=64, required=True),
        'time_format':
        fields.char('Time Format', size=64, required=True),
        'grouping':
        fields.char(
            'Separator Format',
            size=64,
            required=True,
            help=
            "The Separator Format should be like [,n] where 0 < n :starting from Unit digit.-1 will end the separation. e.g. [3,2,-1] will represent 106500 to be 1,06,500;[1,2,-1] will represent it to be 106,50,0;[3] will represent it as 106,500. Provided ',' as the thousand separator in each case."
        ),
        'decimal_point':
        fields.char('Decimal Separator', size=64, required=True),
        'thousands_sep':
        fields.char('Thousands Separator', size=64),
    }
    _defaults = {
        'active': lambda *a: 1,
        'translatable': lambda *a: 0,
        'direction': lambda *a: 'ltr',
        'date_format': _get_default_date_format,
        'time_format': _get_default_time_format,
        'grouping': lambda *a: '[]',
        'decimal_point': lambda *a: '.',
        'thousands_sep': lambda *a: ',',
    }
    _sql_constraints = [
        ('name_uniq', 'unique (name)',
         'The name of the language must be unique !'),
        ('code_uniq', 'unique (code)',
         'The code of the language must be unique !'),
    ]

    _constraints = [(
        _check_format,
        'Invalid date/time format directive specified. Please refer to the list of allowed directives, displayed when you edit a language.',
        ['time_format', 'date_format'])]

    @tools.ormcache(skiparg=3)
    def _lang_data_get(self, cr, uid, lang_id, monetary=False):
        conv = localeconv()
        lang_obj = self.browse(cr, uid, lang_id)
        thousands_sep = lang_obj.thousands_sep or conv[
            monetary and 'mon_thousands_sep' or 'thousands_sep']
        decimal_point = lang_obj.decimal_point
        grouping = lang_obj.grouping
        return (grouping, thousands_sep, decimal_point)

    def write(self, cr, uid, ids, vals, context=None):
        for lang_id in ids:
            self._lang_data_get.clear_cache(self)
        return super(lang, self).write(cr, uid, ids, vals, context)

    def unlink(self, cr, uid, ids, context=None):
        if context is None:
            context = {}
        languages = self.read(cr,
                              uid,
                              ids, ['code', 'active'],
                              context=context)
        for language in languages:
            ctx_lang = context.get('lang')
            if language['code'] == 'en_US':
                raise osv.except_osv(
                    _('User Error'),
                    _("Base Language 'en_US' can not be deleted !"))
            if ctx_lang and (language['code'] == ctx_lang):
                raise osv.except_osv(
                    _('User Error'),
                    _("You cannot delete the language which is User's Preferred Language !"
                      ))
            if language['active']:
                raise osv.except_osv(
                    _('User Error'),
                    _("You cannot delete the language which is Active !\nPlease de-activate the language first."
                      ))
            trans_obj = self.pool.get('ir.translation')
            trans_ids = trans_obj.search(cr,
                                         uid,
                                         [('lang', '=', language['code'])],
                                         context=context)
            trans_obj.unlink(cr, uid, trans_ids, context=context)
        return super(lang, self).unlink(cr, uid, ids, context=context)

    def format(self,
               cr,
               uid,
               ids,
               percent,
               value,
               grouping=False,
               monetary=False,
               context=None):
        """ Format() will return the language-specific output for float values"""

        if percent[0] != '%':
            raise ValueError(
                "format() must be given exactly one %char format specifier")

        lang_grouping, thousands_sep, decimal_point = self._lang_data_get(
            cr, uid, ids[0], monetary)
        eval_lang_grouping = eval(lang_grouping)

        formatted = percent % value
        # floats and decimal ints need special action!
        if percent[-1] in 'eEfFgG':
            seps = 0
            parts = formatted.split('.')

            if grouping:
                parts[0], seps = intersperse(parts[0], eval_lang_grouping,
                                             thousands_sep)

            formatted = decimal_point.join(parts)
            while seps:
                sp = formatted.find(' ')
                if sp == -1: break
                formatted = formatted[:sp] + formatted[sp + 1:]
                seps -= 1
        elif percent[-1] in 'diu':
            if grouping:
                formatted = intersperse(formatted, eval_lang_grouping,
                                        thousands_sep)[0]

        return formatted
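    # Illustration added here, not part of the original module: a minimal sketch of
    # how the 'grouping' format described above drives format(), using a plain helper
    # instead of the intersperse() call used in the real code. Per the field help,
    # [3, 2, -1] groups 106500 as 1,06,500, [1, 2, -1] as 106,50,0 and [3] as 106,500.
    def _demo_group(digits, grouping, sep=','):
        groups, rest = [], digits
        for count in grouping:
            if count == -1 or not rest:
                break
            groups.insert(0, rest[-count:])  # take `count` digits from the right
            rest = rest[:-count]
        if rest:
            groups.insert(0, rest)  # remaining digits form one leading group
        return sep.join(groups)
    # _demo_group('106500', [3, 2, -1]) -> '1,06,500'
    # _demo_group('106500', [1, 2, -1]) -> '106,50,0'
    # _demo_group('106500', [3])        -> '106,500'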
        # todo: - except dont allow : __terp__file in order to allow them to add the certificate number
        #       - move this function on pressing on button to put module in 'done' state
        m.update(module.module_zip)
        m.update(module.name)
        cert_num = int(m.hexdigest(), 16)
        oldcertif = obj_module.search(cr, uid, [("certificate", "=", cert_num)])
        if oldcertif:
            raise osv.except_osv(_("Error !"), _("Certificate code Already Exists."))
        else:
            obj_module.write(cr, uid, module.id, {"certificate": cert_num})
        return True

    _name = "maintenance.maintenance.module"
    _description = "maintenance modules"
    _columns = {
        "name": fields.char("Name", size=128, required=True, readonly=False),
        "version": fields.char("Versions", size=64, readonly=False),
        "certificate": fields.char("Certificate Code", size=42, required=False, readonly=False),
        "path": fields.function(_get_module_path, method=True, string="Path", type="char", size=512, readonly=True),
        "technical_certificate": fields.selection(
            [("not_started", "Not Started"), ("failed", "Failed"), ("succeeded", "Succeeded"), ("skipped", "Skipped")],
            "Technical Certification",
        ),
        "functional_certificate": fields.selection(
            [("not_started", "Not Started"), ("failed", "Failed"), ("succeeded", "Succeeded"), ("skipped", "Skipped")],
            "Functional Certification",
        ),
        "sale_ids": fields.many2many(
            "sale.order", "maintenance_module_sale_rel", "module_id", "sale_id", "Sale orders"
        ),
        "nbr_source_line": fields.integer(
Exemplo n.º 52
0
class gencumann_wizard(osv.osv_memory):
    _name = 'hr_gp.wizard.gencumann'

    _columns = {
        'name': fields.char('wiz name', size=32),
        'year2cum': fields.integer('year2cum'),
        'state': fields.char('state', size=8),
    }
    _defaults = {
        'name': 'Click to begin',
        'year2cum': lambda *a: datetime.now().year,
        'state': 'begin',
    }

    def gen_cumann(self, cr, uid, id, context=None):
        wizard = self.browse(cr, uid, id[0], context=context)
        if wizard.state == "begin":
            curr_year = wizard.year2cum
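            # cum_epe / cum_ctt / cum_act / cum_cpy flag parameters that are cumulated per
            # employee, contract frame, activity or company (see the fldname mapping below).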
            cr.execute(
                "select id, name, title, cum_epe, cum_ctt, cum_act, cum_cpy from hr_gp_params_dict where cum_epe = True or cum_ctt = True or cum_act = True or cum_cpy = True"
            )
            param2cum = cr.fetchall()
            compteurs = {}
            cpt = []
            curr_date = date.today()
            curr_year = wizard.year2cum
            for param in param2cum:
                par = param[0]
                cumlev = []
                if param[3]: cumlev.append('epe')
                if param[4]: cumlev.append('ctt')
                if param[5]: cumlev.append('act')
                if param[6]: cumlev.append('cpy')
                sql_req = """
				SELECT psl.param_name, psl.pay_id, ps.ctt_id, pr.date_begin, pr.date_end, psl.registee, psl.register,
				ctt.employee_id, ctt.activity, act.company 
				FROM hr_gp_payslip_line psl
				LEFT JOIN hr_gp_payslip ps ON (ps.id = psl.pay_id)
				LEFT JOIN hr_gp_payrun pr ON (ps.pay_run = pr.id)
				LEFT JOIN hr_gp_contractframe ctt ON (ps.ctt_id = ctt.id)
				LEFT JOIN hr_gp_activity act ON (ctt.activity = act.id)
				WHERE psl.param_name = %s AND psl.pay_id IS NOT NULL
				""" % (par, )

                cr.execute(sql_req)
                paysliplines = cr.fetchall()
                for paylig in paysliplines:
                    for level in cumlev:
                        keylev = ""
                        if level == 'ctt': keylev = paylig[2]
                        elif level == 'epe': keylev = paylig[7]
                        elif level == 'act': keylev = paylig[8]
                        elif level == 'cpy': keylev = paylig[9]
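                        # Accumulator key: "<level>_<param id>_<entity id>"; each entry
                        # holds 12 monthly totals (January .. December).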
                        cledict = level + "_" + str(
                            paylig[0]) + "_" + str(keylev)
                        ddebpay = datetime.strptime(paylig[3], "%Y-%m-%d")
                        m = ddebpay.month
                        a = ddebpay.year
                        if a == curr_year:
                            if cledict not in compteurs:
                                compteurs[cledict] = [
                                    0.00, 0.00, 0.00, 0.00, 0.00, 0.00, 0.00,
                                    0.00, 0.00, 0.00, 0.00, 0.00
                                ]
                            cpt = compteurs[cledict]
                            if paylig[5]: reg_e = paylig[5]
                            else: reg_e = 0.00
                            if paylig[6]: reg_r = paylig[6]
                            else: reg_r = 0.00
                            cpt[m - 1] = cpt[m - 1] + reg_e + reg_r
                            compteurs[cledict] = cpt
            pool = pooler.get_pool(cr.dbname)
            obj = pool.get('hr_gp.cumann')
            fldkey = {
                'ctt': 'contractframe_key.id',
                'epe': 'employee_key.id',
                'act': 'activityframe_key.id',
                'cpy': 'company_key.id'
            }
            fldname = {
                'ctt': 'contractframe_key',
                'epe': 'employee_key',
                'act': 'activityframe_key',
                'cpy': 'company_key'
            }
            for keycpt in compteurs.keys():
                lev = keycpt.split("_")[0]
                parname = keycpt.split("_")[1]
                entitykey = keycpt.split("_")[2]
                crit = [(fldkey[lev], '=', entitykey),
                        ('param_name.id', '=', parname),
                        ('year_cum', '=', curr_year)]
                ides = obj.search(cr, uid, crit)
                rec = obj.read(cr,
                               uid,
                               ides,
                               fields=['id', fldname[lev], 'param_name'],
                               context=None)
                cum_an = ""
                for m in range(0, 12):
                    cum_an += "%1.2f;" % (compteurs[keycpt][m])
                fields = {'cum_an': cum_an}
                fields.update({fldname[lev]: entitykey})
                fields.update({'year_cum': curr_year})
                fields.update({'param_name': parname})
                fields.update({'last_update': curr_date})
                if len(rec) > 0:
                    obj.write(cr, uid, [rec[0]['id']], fields, context=None)
                else:
                    obj.create(cr, uid, fields, context=None)
            self.write(cr, uid, id, {
                'state': 'completed',
            }, context=context)

        return True
Exemplo n.º 53
0
            _logger.debug("Creating new OpenERP user \"%s\" from LDAP" % login)
            user_obj = self.pool.get('res.users')
            values = self.map_ldap_attributes(cr, uid, conf, login, ldap_entry)
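            # If a template user is configured, duplicate it (re-activated) and overwrite
            # it with the LDAP-mapped values; otherwise create the user from scratch.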
            if conf['user']:
                user_id = user_obj.copy(cr, SUPERUSER_ID, conf['user'],
                                        default={'active': True})
                user_obj.write(cr, SUPERUSER_ID, user_id, values)
            else:
                user_id = user_obj.create(cr, SUPERUSER_ID, values)
        return user_id

    _columns = {
        'sequence': fields.integer('Sequence'),
        'company': fields.many2one('res.company', 'Company', required=True,
            ondelete='cascade'),
        'ldap_server': fields.char('LDAP Server address', size=64, required=True),
        'ldap_server_port': fields.integer('LDAP Server port', required=True),
        'ldap_binddn': fields.char('LDAP binddn', size=64,
            help=("The user account on the LDAP server that is used to query "
                  "the directory. Leave empty to connect anonymously.")),
        'ldap_password': fields.char('LDAP password', size=64,
            help=("The password of the user account on the LDAP server that is "
                  "used to query the directory.")),
        'ldap_filter': fields.char('LDAP filter', size=256, required=True),
        'ldap_base': fields.char('LDAP base', size=64, required=True),
        'user': fields.many2one('res.users', 'Model User',
            help="Model used for user creation"),
        'create_user': fields.boolean('Create user',
            help="Create the user if not in database"),
        'ldap_tls': fields.boolean('Use TLS',
            help="Request secure TLS/SSL encryption when connecting to the LDAP server. "
Exemplo n.º 54
0
class priority(osv.osv):
    _name = 'fts.priority'
    _columns = {
        'name': fields.char('Name', size=1),
        'value': fields.float('Value (0-1.0)')
    }
Exemplo n.º 55
0
            raise osv.except_osv("Connection Test Failed!",
                                 "Here is what we got instead:\n %s" % tools.ustr(e))
        finally:
            try:
                if transport: transport.close()
            except Exception:
                # ignored, just a consequence of the previous exception
                pass
        raise osv.except_osv("Connection Test Succeeded!", "Everything seems properly set up!")

    _name = 'vehicle.config'
    _description = 'Configuration for Vehicle Fits ftp link'
    _table = 'vehiclefits_config'
    _columns = {

        'name': fields.char('Name', size=20),
        'vf_url': fields.char('Url', size=30, help="Url to Magento Web"),
        'sftp_user': fields.char('Ftp user', size=20, required=True),
        'sftp_password': fields.char('Ftp password', size=20, required=False),
        'sftp_pem': fields.binary('RSA Key', required=True),
        'sftp_host': fields.char('FTP IP host', size=15, required=True),
        'sftp_port': fields.integer('Ftp Port', help='Port of the connection'),
        'sftp_local_file': fields.char('Full path to local csv file'),
        'sftp_remote_file': fields.char('Name of remote file', help="Default name for import is"
                                                                    " product-fitments-import.csv"),
        'sftp_remote_dir': fields.char('Full remote path'),
        'erp_host': fields.char('Erp host', size=20, required=True),
        'erp_user': fields.char('Erp DB user', size=15, required=True),
        'erp_password': fields.char('Erp password', size=20, required=True),
        'erp_db': fields.char('Erp DB', size=20, required=True)
Exemplo n.º 56
0
class xml_export_wizard(osv.osv_memory):
    _name = "xml.export.wizard"

    _columns = {
        'profile_id': fields.many2one('xml.profile', 'Profile', required=True),
        'res_model': fields.char('Export model', size=64, required=True),
        'res_id': fields.integer('Export ID'),
        'data_xml': fields.binary('Output file', readonly=True),
        'errors': fields.text('Errors', readonly=True),
    }

    def default_get(self, cr, uid, names, context=None):
        if context is None:
            context = {}
        res = {}

        if 'profile_id' in names:
            res['profile_id'] = context.get('xml_profile')
        if 'res_model' in names:
            res['res_model'] = context.get('active_model')
        if 'res_id' in names:
            res['res_id'] = context.get('active_id')
        if 'data_xml' in names and 'xml_profile' in names:
            xml_data = self.get_data_xml(cr, uid, res, context)
            res['data_xml'] = base64.b64encode(xml_data)
        return res

    def get_data_xml(self, cr, uid, data, context=None):
        if context is None:
            context = {}

        try:
            profile = self.pool.get('xml.profile').browse(
                cr, uid, data['profile_id'])
            log.info("XML Export of %s, records : %s using %s profile" %
                     (data['res_model'], data['res_id'], profile.name))
            nsmap = {}
            if profile.export_id.namespace:
                nsmap[None] = profile.export_id.namespace
            if profile.export_id.schema:
                nsmap['xsi'] = profile.export_id.schema

            def get_next_line(line, sub_line, context=None):
                #               log.info("Finding next line for %s" % sub_line.name)
                line_obj = self.pool.get('xml.profile.line')
                lines = line_obj.search(
                    cr,
                    uid, [('profile_id', '=', line.profile_id.id),
                          ('xml_field', '=', sub_line.id)],
                    context=context)
                for tnew_line in line_obj.browse(cr,
                                                 uid,
                                                 lines,
                                                 context=context):
                    log.debug("Yielding %s" % tnew_line)
                    yield tnew_line

            def parse_line(element,
                           line,
                           parent=None,
                           model=None,
                           cur_id=None,
                           context=None):
                log.debug("%s, action %s, model %s, id %s" %
                          (element.tag, line.action, model, cur_id))
                # First we find where the original element is (parents etc)
                #               parent = parent.xpath( path ) and parent.xpath( path ) or None
                objects = None
                field = None
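                # line.action decides how the element is filled: 'compute' evaluates
                # line.code, 'field' copies the browsed field value, 'attribute' sets an
                # XML attribute on the original element, and 'repeat'/'repeat_sub'
                # duplicate the element once per related record.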

                if model and cur_id:
                    objects = self.pool.get(model).browse(cr,
                                                          uid,
                                                          cur_id,
                                                          context=context)
                    log.debug("Browsed on objects %s" % (objects))
                if objects and line.openerp_field:
                    field = getattr(objects, line.openerp_field.name)
                    log.debug("Field value of <%s> %s : %s" %
                              (element.tag, line.openerp_field.name, field))

                if line.action == 'compute':
                    try:
                        value = eval(str(line.code))
                        element.text = "%s" % (value or '')
                    except Exception, e:
                        raise osv.except_osv(
                            _("Compute Error"),
                            _("The code for field %s failed with following message:\n%s"
                              ) % (element.tag, e))

                if line.action == 'field':
                    element.text = "%s" % (field or '')

                if line.action == 'attribute':
                    value = eval(str(line.code))
                    log.debug("Attribute result: %s" % value)

                    # No duplicates ! (was duplicated by parent)
                    parent.remove(element)
                    # Find the original
                    element = parent.find(line.xml_field.name)
                    # Set attribute
                    element.set(value['key'], value['value'])
                    return  # When setting attributes we dont want to loop on child fields

                # We can skip children if the element has been disconnected
                if line.xml_field.child_ids and element.getparent():
                    # Checking what kind of loop is needed
                    if line.action in ['repeat', 'repeat_sub']:
                        if line.action == 'repeat' and not field:
                            # This way it will loop on the records
                            field = [objects]

                        # Remove our element, as we will re-create them on the fly
                        parent.remove(element)

                        log.info("%s on following ids: %s" %
                                 (line.action, field))
                        new_model = line.openerp_field.relation or line.openerp_model.model
                        for record in field:
                            record_tag = etree.SubElement(parent, element.tag)
                            log.info("Handling record %s" % record)
                            for sub_line in line.xml_field.child_ids:
                                for new_line in get_next_line(line, sub_line):
                                    new_tag = etree.SubElement(
                                        record_tag, sub_line.name)
                                    log.info("Creating tag %s" % new_tag.tag)
                                    parse_line(new_tag, new_line, record_tag,
                                               new_model, record.id, context)
                    else:
                        for sub_line in line.xml_field.child_ids:
                            for new_line in get_next_line(line, sub_line):
                                # Its in the profile, so create it if it passes include_code test
                                try:
                                    include = eval(str(new_line.include_code))
                                    if not include: continue
                                except Exception, e:
                                    raise osv.except_osv(
                                        _("Compute Error"),
                                        _("Error while running include_code for %s in child %s\n%s"
                                          ) % (line.xml_field.name,
                                               sub_line.name, e))

                                new_tag = etree.SubElement(
                                    element, sub_line.name)
                                log.info("Creating tag %s" % new_tag.tag)
                                parse_line(new_tag, new_line, element, model,
                                           cur_id, context)

            # END OF PARSE_LINE

            # Now we loop over the lines again and duplicate where needed
            for line in profile.lines:
                if line.xml_field.parent_id: continue  # Skip non-root items

                # Create our root node
                doc = etree.Element(profile.export_id.root, nsmap=nsmap)
                cts = etree.SubElement(doc, line.xml_field.name)
                parse_line(cts, line, doc, data['res_model'], data['res_id'],
                           context)

            result = "%s%s%s" % (
                profile.export_id.head_xml or '',
                etree.tounicode(doc, pretty_print=False),
                profile.export_id.feet_xml or '',
            )

            return result
Exemplo n.º 57
0
 
    def _generate_serial_number(self, cursor, user, *args):
        try:
            cursor.execute("select serial_number from mttl_serials where id=(select max(id) from mttl_serials)")
            for item in cursor.dictfetchall():
                serial_number = int(item["serial_number"]) + 1
            return serial_number
        except Exception, ex:
            return False

    _name = "mttl.serials"
    _description = "Serials"
    _rec_name = "serial"
    _order = "id desc"
    _columns = {
        'serial': fields.char('Serial', size=50, help="Add text here"),

        # Information
        'model': fields.many2one('mttl.models', 'Model', required=True, help="Metro Tow Trucks Model"),
        'year': fields.many2one('mttl.years', 'Year', required=True, help="Year of model"),
        'location': fields.many2one('mttl.locations', 'Location', required=True, help="Manufacture location"),
        'country': fields.many2one('mttl.countries', 'Country', required=True, help="Country of manufacture"),
        'chassis': fields.many2one('mttl.chassis', 'Chassis', required=True, help="Who supplied the chassis, or was it sold as a kit?"),
        'partner_id': fields.many2one('res.partner', 'Customer', change_default=True, help="Last known owner of this unit", domain="[('customer','=',True)]"),
        'dealer_id': fields.many2one('res.partner', 'Dealer', change_default=True, help="Last known owner of this unit", domain="[('dealer','=',True)]"),
        'destination_id': fields.many2one('res.partner', 'Destination', change_default=True, help="Last known location of this unit", domain="[('customer','=',True)]"),
        'serial_number': fields.char('Serial Number', size=17, required=True, help="The Computed Serial Number.  Verify that no periods or question marks are present"),

        # Vehicle Information
        'chassis_weight': fields.char("Chassis Weight", size=10, help="Weight of Chassis without Wrecker"),
        'wrecker_weight': fields.char("Wrecker Weight", size=10, help="Weight of Wrecker without Chassis"),
Exemplo n.º 58
0
            fname = os.path.join(path, flag, filename)
            fp = file(fname,'wb')
            v = base64.decodestring(value)
            fp.write(v)
            fp.close()
            filesize = os.stat(fname).st_size
            cr.execute('update ir_attachment set store_fname=%s,store_method=%s,file_size=%s where id=%s', (os.path.join(flag,filename),'fs',len(v),id))
            return True
        except Exception,e :
            raise except_orm(_('Error!'), str(e))

    _columns = {
        'user_id': fields.many2one('res.users', 'Owner', select=1),
        'group_ids': fields.many2many('res.groups', 'document_directory_group_rel', 'item_id', 'group_id', 'Groups'),
        'parent_id': fields.many2one('document.directory', 'Directory', select=1),
        'file_size': fields.integer('File Size', required=True),
        'file_type': fields.char('Content Type', size=32),
        'index_content': fields.text('Indexed Content'),
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid':  fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid':  fields.many2one('res.users', 'Creator', readonly=True),
        'store_method': fields.selection([('db','Database'),('fs','Filesystem'),('link','Link')], "Storing Method"),
        'datas': fields.function(_data_get,method=True,fnct_inv=_data_set,string='File Content',type="binary"),
        'store_fname': fields.char('Stored Filename', size=200),
        'res_model': fields.char('Attached Model', size=64), #res_model
        'res_id': fields.integer('Attached ID'), #res_id
        'partner_id':fields.many2one('res.partner', 'Partner', select=1),
        'title': fields.char('Resource Title',size=64),
    }

    _defaults = {
Exemplo n.º 59
0
    def _get_extras(self, cr, uid, ids, *args, **kwargs):
        result = []
        if aeroo_ooo_test(cr):
            result.append('aeroo_ooo')
        ##### Check deferred_processing module #####
        cr.execute("SELECT id, state FROM ir_module_module WHERE name='deferred_processing'")
        deferred_proc_module = cr.dictfetchone()
        if deferred_proc_module and deferred_proc_module['state'] in ('installed', 'to upgrade'):
            result.append('deferred_processing')
        ############################################
        return dict.fromkeys(ids, ','.join(result))

    _columns = {
        'charset':fields.selection(_get_encodings, string='Charset', required=True),
        'content_fname': fields.char('Override Extension',size=64, help='Here you can override output file extension'),
        'styles_mode': fields.selection([
            ('default','Not used'),
            ('global', 'Global'),
            ('specified', 'Specified'),
            ], string='Stylesheet'),
        'stylesheet_id':fields.many2one('report.stylesheets', 'Template Stylesheet'),
        'preload_mode':fields.selection([
            ('static',_('Static')),
            ('preload',_('Preload')),
        ],'Preload Mode'),
        'tml_source':fields.selection([
            ('database','Database'),
            ('file','File'),
            ('parser','Parser'),
        ],'Template source', select=True),
Exemplo n.º 60
0
class project_issue(crm.crm_case, osv.osv):
    _name = "project.issue"
    _description = "Project Issue"
    _order = "priority, id desc"
    _inherit = ['mailgate.thread']

    def case_open(self, cr, uid, ids, *args):
        """
        @param self: The object pointer
        @param cr: the current row, from the database cursor,
        @param uid: the current user’s ID for security checks,
        @param ids: List of case's Ids
        @param *args: Give Tuple Value
        """

        res = super(project_issue, self).case_open(cr, uid, ids, *args)
        self.write(cr, uid, ids,
                   {'date_open': time.strftime('%Y-%m-%d %H:%M:%S')})
        for (id, name) in self.name_get(cr, uid, ids):
            message = _("Issue '%s' has been opened.") % name
            self.log(cr, uid, id, message)
        return res

    def case_close(self, cr, uid, ids, *args):
        """
        @param self: The object pointer
        @param cr: the current row, from the database cursor,
        @param uid: the current user’s ID for security checks,
        @param ids: List of case's Ids
        @param *args: Give Tuple Value
        """

        res = super(project_issue, self).case_close(cr, uid, ids, *args)
        for (id, name) in self.name_get(cr, uid, ids):
            message = _("Issue '%s' has been closed.") % name
            self.log(cr, uid, id, message)
        return res

    def _compute_day(self, cr, uid, ids, fields, args, context=None):
        """
        @param cr: the current row, from the database cursor,
        @param uid: the current user’s ID for security checks,
        @param ids: List of Openday’s IDs
        @return: difference between current date and log date
        @param context: A standard dictionary for contextual values
        """
        cal_obj = self.pool.get('resource.calendar')
        res_obj = self.pool.get('resource.resource')

        res = {}
        for issue in self.browse(cr, uid, ids, context=context):
            res[issue.id] = {}
            for field in fields:
                duration = 0
                ans = False
                hours = 0

                if field in ['working_hours_open', 'day_open']:
                    if issue.date_open:
                        date_create = datetime.strptime(
                            issue.create_date, "%Y-%m-%d %H:%M:%S")
                        date_open = datetime.strptime(issue.date_open,
                                                      "%Y-%m-%d %H:%M:%S")
                        ans = date_open - date_create
                        date_until = issue.date_open
                        #Calculating no. of working hours to open the issue
                        hours = cal_obj.interval_hours_get(
                            cr, uid, issue.project_id.resource_calendar_id.id,
                            datetime.strptime(issue.create_date,
                                              '%Y-%m-%d %H:%M:%S'),
                            datetime.strptime(issue.date_open,
                                              '%Y-%m-%d %H:%M:%S'))
                elif field in ['working_hours_close', 'day_close']:
                    if issue.date_closed:
                        date_create = datetime.strptime(
                            issue.create_date, "%Y-%m-%d %H:%M:%S")
                        date_close = datetime.strptime(issue.date_closed,
                                                       "%Y-%m-%d %H:%M:%S")
                        date_until = issue.date_closed
                        ans = date_close - date_create
                        #Calculating no. of working hours to close the issue
                        hours = cal_obj.interval_hours_get(
                            cr, uid, issue.project_id.resource_calendar_id.id,
                            datetime.strptime(issue.create_date,
                                              '%Y-%m-%d %H:%M:%S'),
                            datetime.strptime(issue.date_closed,
                                              '%Y-%m-%d %H:%M:%S'))
                if ans:
                    resource_id = False
                    if issue.user_id:
                        resource_ids = res_obj.search(
                            cr, uid, [('user_id', '=', issue.user_id.id)])
                        if resource_ids and len(resource_ids):
                            resource_id = resource_ids[0]
                    duration = float(ans.days)
                    if issue.project_id and issue.project_id.resource_calendar_id:
                        duration = float(ans.days) * 24
                        new_dates = cal_obj.interval_min_get(
                            cr,
                            uid,
                            issue.project_id.resource_calendar_id.id,
                            datetime.strptime(issue.create_date,
                                              '%Y-%m-%d %H:%M:%S'),
                            duration,
                            resource=resource_id)
                        no_days = []
                        date_until = datetime.strptime(date_until,
                                                       '%Y-%m-%d %H:%M:%S')
                        for in_time, out_time in new_dates:
                            if in_time.date() not in no_days:
                                no_days.append(in_time.date())
                            if out_time > date_until:
                                break
                        duration = len(no_days)
                if field in ['working_hours_open', 'working_hours_close']:
                    res[issue.id][field] = hours
                else:
                    res[issue.id][field] = abs(float(duration))
        return res

    def _get_issue_task(self, cr, uid, ids, context=None):
        issues = []
        issue_pool = self.pool.get('project.issue')
        for task in self.pool.get('project.task').browse(cr,
                                                         uid,
                                                         ids,
                                                         context=context):
            issues += issue_pool.search(cr, uid, [('task_id', '=', task.id)])
        return issues

    def _get_issue_work(self, cr, uid, ids, context=None):
        issues = []
        issue_pool = self.pool.get('project.issue')
        for work in self.pool.get('project.task.work').browse(cr,
                                                              uid,
                                                              ids,
                                                              context=context):
            if work.task_id:
                issues += issue_pool.search(
                    cr, uid, [('task_id', '=', work.task_id.id)])
        return issues

    def _hours_get(self, cr, uid, ids, field_names, args, context=None):
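        """The progress of an issue mirrors the progress of its linked task (0.0 when no task is linked)."""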
        task_pool = self.pool.get('project.task')
        res = {}
        for issue in self.browse(cr, uid, ids, context=context):
            progress = 0.0
            if issue.task_id:
                progress = task_pool._hours_get(
                    cr,
                    uid, [issue.task_id.id],
                    field_names,
                    args,
                    context=context)[issue.task_id.id]['progress']
            res[issue.id] = {'progress': progress}
        return res

    _columns = {
        'id': fields.integer('ID'),
        'name': fields.char('Issue', size=128, required=True),
        'active': fields.boolean('Active', required=False),
        'create_date': fields.datetime('Creation Date', readonly=True,select=True),
        'write_date': fields.datetime('Update Date', readonly=True),
        'date_deadline': fields.date('Deadline'),
        'section_id': fields.many2one('crm.case.section', 'Sales Team',
                        select=True, help='Sales team to which the case belongs. '
                                          'Define the responsible user and email account for the mail gateway.'),
        'user_id': fields.many2one('res.users', 'Responsible'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        'partner_address_id': fields.many2one('res.partner.address', 'Partner Contact', \
                                 domain="[('partner_id','=',partner_id)]"),
        'company_id': fields.many2one('res.company', 'Company'),
        'description': fields.text('Description'),
        'state': fields.selection([('draft', 'Draft'), ('open', 'To Do'), ('cancel', 'Cancelled'), ('done', 'Closed'), ('pending', 'Pending')], 'State', size=16, readonly=True,
                                  help='The state is set to \'Draft\' when a case is created.\
                                  \nIf the case is in progress, the state is set to \'Open\'.\
                                  \nWhen the case is over, the state is set to \'Done\'.\
                                  \nIf the case needs to be reviewed, the state is set to \'Pending\'.'),
        'email_from': fields.char('Email', size=128, help="These people will receive email."),
        'email_cc': fields.char('Watchers Emails', size=256, help="These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. Separate multiple email addresses with a comma"),
        'date_open': fields.datetime('Opened', readonly=True,select=True),
        # Project Issue fields
        'date_closed': fields.datetime('Closed', readonly=True,select=True),
        'date': fields.datetime('Date'),
        'canal_id': fields.many2one('res.partner.canal', 'Channel', help="The channels represent the different communication modes available with the customer." \
                                                                        " With each commercial opportunity, you can indicate the channel which is the source of this opportunity."),
        'categ_id': fields.many2one('crm.case.categ', 'Category', domain="[('object_id.model', '=', 'crm.project.bug')]"),
        'priority': fields.selection(crm.AVAILABLE_PRIORITIES, 'Priority'),
        'version_id': fields.many2one('project.issue.version', 'Version'),
        'partner_name': fields.char("Employee's Name", size=64),
        'partner_mobile': fields.char('Mobile', size=32),
        'partner_phone': fields.char('Phone', size=32),
        'type_id': fields.many2one ('project.task.type', 'Resolution'),
        'project_id':fields.many2one('project.project', 'Project'),
        'duration': fields.float('Duration'),
        'task_id': fields.many2one('project.task', 'Task', domain="[('project_id','=',project_id)]"),
        'day_open': fields.function(_compute_day, string='Days to Open', \
                                method=True, multi='day_open', type="float", store=True),
        'day_close': fields.function(_compute_day, string='Days to Close', \
                                method=True, multi='day_close', type="float", store=True),
        'assigned_to': fields.related('task_id', 'user_id', string='Assigned to', type="many2one", relation="res.users", store=True, help='This is the current user to whom the related task has been assigned'),
        'working_hours_open': fields.function(_compute_day, string='Working Hours to Open the Issue', \
                                method=True, multi='working_days_open', type="float", store=True),
        'working_hours_close': fields.function(_compute_day, string='Working Hours to Close the Issue', \
                                method=True, multi='working_days_close', type="float", store=True),
        'message_ids': fields.one2many('mailgate.message', 'res_id', 'Messages', domain=[('model','=',_name)]),
        'date_action_last': fields.datetime('Last Action', readonly=1),
        'date_action_next': fields.datetime('Next Action', readonly=1),
        'progress': fields.function(_hours_get, method=True, string='Progress (%)', multi='hours', group_operator="avg", help="Computed as: Time Spent / Total Time.",
            store = {
                'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['task_id'], 10),
                'project.task': (_get_issue_task, ['progress'], 10),
                'project.task.work': (_get_issue_work, ['hours'], 10),
            }),
    }

    def _get_project(self, cr, uid, context=None):
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        if user.context_project_id:
            return user.context_project_id.id
        return False

    _defaults = {
        'active': 1,
        'user_id': crm.crm_case._get_default_user,
        'partner_id': crm.crm_case._get_default_partner,
        'partner_address_id': crm.crm_case._get_default_partner_address,
        'email_from': crm.crm_case._get_default_email,
        'state': 'draft',
        'section_id': crm.crm_case._get_section,
        'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get(
            cr, uid, 'crm.helpdesk', context=c),
        'priority': crm.AVAILABLE_PRIORITIES[2][0],
        'project_id': _get_project,
    }

    def convert_issue_task(self, cr, uid, ids, context=None):
        case_obj = self.pool.get('project.issue')
        data_obj = self.pool.get('ir.model.data')
        task_obj = self.pool.get('project.task')

        if context is None:
            context = {}

        result = data_obj._get_id(cr, uid, 'project', 'view_task_search_form')
        res = data_obj.read(cr, uid, result, ['res_id'])
        id2 = data_obj._get_id(cr, uid, 'project', 'view_task_form2')
        id3 = data_obj._get_id(cr, uid, 'project', 'view_task_tree2')
        if id2:
            id2 = data_obj.browse(cr, uid, id2, context=context).res_id
        if id3:
            id3 = data_obj.browse(cr, uid, id3, context=context).res_id

        for bug in case_obj.browse(cr, uid, ids, context=context):
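            # Create a matching project.task for this issue, link it back through
            # task_id and move the issue to the 'pending' state.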
            new_task_id = task_obj.create(
                cr, uid, {
                    'name': bug.name,
                    'partner_id': bug.partner_id.id,
                    'description': bug.description,
                    'date': bug.date,
                    'project_id': bug.project_id.id,
                    'priority': bug.priority,
                    'user_id': bug.assigned_to.id,
                    'planned_hours': 0.0,
                })

            vals = {'task_id': new_task_id, 'state': 'pending'}
            case_obj.write(cr, uid, [bug.id], vals)

        return {
            'name': _('Tasks'),
            'view_type': 'form',
            'view_mode': 'form,tree',
            'res_model': 'project.task',
            'res_id': int(new_task_id),
            'view_id': False,
            'views': [(id2, 'form'), (id3, 'tree'), (False, 'calendar'),
                      (False, 'graph')],
            'type': 'ir.actions.act_window',
            'search_view_id': res['res_id'],
            'nodestroy': True
        }

    def _convert(self, cr, uid, ids, xml_id, context=None):
        data_obj = self.pool.get('ir.model.data')
        id2 = data_obj._get_id(cr, uid, 'project_issue', xml_id)
        categ_id = False
        if id2:
            categ_id = data_obj.browse(cr, uid, id2, context=context).res_id
        if categ_id:
            self.write(cr, uid, ids, {'categ_id': categ_id})
        return True

    def convert_to_feature(self, cr, uid, ids, context=None):
        return self._convert(cr,
                             uid,
                             ids,
                             'feature_request_categ',
                             context=context)

    def convert_to_bug(self, cr, uid, ids, context=None):
        return self._convert(cr, uid, ids, 'bug_categ', context=context)

    def next_type(self, cr, uid, ids, *args):
        for task in self.browse(cr, uid, ids):
            typeid = task.type_id.id
            types = map(lambda x: x.id, task.project_id.type_ids or [])
            if types:
                if not typeid:
                    self.write(cr, uid, task.id, {'type_id': types[0]})
                elif typeid and typeid in types and types.index(
                        typeid) != len(types) - 1:
                    index = types.index(typeid)
                    self.write(cr, uid, task.id, {'type_id': types[index + 1]})
        return True

    def prev_type(self, cr, uid, ids, *args):
        for task in self.browse(cr, uid, ids):
            typeid = task.type_id.id
            types = map(lambda x: x.id,
                        task.project_id and task.project_id.type_ids or [])
            if types:
                if typeid and typeid in types:
                    index = types.index(typeid)
                    self.write(
                        cr, uid, task.id,
                        {'type_id': index and types[index - 1] or False})
        return True

    def onchange_task_id(self, cr, uid, ids, task_id, context=None):
        result = {}
        if not task_id:
            return {'value': {}}
        task = self.pool.get('project.task').browse(cr,
                                                    uid,
                                                    task_id,
                                                    context=context)
        return {
            'value': {
                'assigned_to': task.user_id.id,
            }
        }

    def case_escalate(self, cr, uid, ids, *args):
        """Escalates case to top level
        @param self: The object pointer
        @param cr: the current row, from the database cursor,
        @param uid: the current user’s ID for security checks,
        @param ids: List of case Ids
        @param *args: Tuple Value for additional Params
        """
        cases = self.browse(cr, uid, ids)
        for case in cases:
            data = {}
            if case.project_id.project_escalation_id:
                data['project_id'] = case.project_id.project_escalation_id.id
                if case.project_id.project_escalation_id.user_id:
                    data['user_id'] = case.project_id.project_escalation_id.user_id.id
                if case.task_id:
                    self.pool.get('project.task').write(
                        cr, uid, [case.task_id.id], {
                            'project_id': data['project_id'],
                            'user_id': False
                        })
            else: