# -*- coding: utf-8 -*- from openerp import api, models, fields from openerp.tools import logging from yanp import nessus_parser _mylog = logging.getLogger('yustas##########') class rawscan_base(models.Model): _name = 'aol.rawscan.base' _inherits = {'ir.attachment': 'att_id'} # type_of_scan = fields.Many2one(comodel_name='aol.scantype', string="Source of the scan") create_date = fields.Datetime(string='Created on:', readonly=True) body = fields.Text('Raw scan body') assets_ids = fields.Many2many(comodel_name='res.partner', relation='scans_assets') risk_ids = fields.One2many(comodel_name='crm.lead', inverse_name='rawscan_id') def raw_risk_create(self): # pass @api.multi def get_att_location(self): # _mylog.info('file path is: %s' % (unicode(self.att_id._full_path(self.att_id.store_fname)))) return self.att_id._full_path(self.att_id.store_fname) @api.multi def parse_att(self):
def tmp_xls_import(cr, inst, source, fields, poss, sheet_line_poss, model_nm):
    """Import product rows from an XLS workbook into model ``model_nm``.

    Existing products (matched on ``default_code`` + supplier) are updated
    in place; unknown codes are collected into ``datas`` and bulk-loaded
    via ``import_data``; supplier products absent from the file get their
    quantity nullified.

    :param cr: database cursor
    :param inst: browse record of the import configuration; supplies the
        supplier partner (``inst.name``) and the ``etalon_catalog`` flag
    :param source: an already-opened xlrd workbook
    :param fields: ordered list of model field names to import; extended
        IN PLACE with ``supplier_id.id`` (and ``if_etalon`` for etalon
        catalogs) — callers must not reuse the list afterwards
    :param poss: sheet column positions, parallel to ``fields``
    :param sheet_line_poss: ``[sheet_index, first_data_row]`` (0-based)
    :param model_nm: name of the target model
    :return: tuple ``(import_data_result, report_text)``
    """
    pool = pooler.get_pool(cr.dbname)
    logger = logging.getLogger('imp')
    product_pool = pool.get(model_nm)
    uid = 1
    datas = []
    supplier = inst.name
    # All products currently attached to this supplier; whatever is not
    # touched by this import gets its quantity zeroed at the end.
    exist_ids_list = product_pool.search(
        cr, uid, [('supplier_id', '=', supplier.id)])
    updated_ids_list = []
    config_ids = pool.get('imp.config').search(cr, 1, [])
    config = pool.get('imp.config').browse(cr, 1, config_ids)[0]
    # NOTE(review): eval() of a stored configuration string — assumed to be
    # admin-maintained trusted data; ast.literal_eval would be safer.
    fields_types = eval(config.import_fields_types)
    sheet = source.sheets()[sheet_line_poss[0]]
    fields += ['supplier_id.id']

    # Positions of the required columns inside each converted row.
    qty_pos = fields.index('quantity')
    price_pos = fields.index('price')
    defcode_pos = fields.index('default_code')

    new_prod_count = 0
    exist_prod_count = 0
    error_rows_list = []

    for row in xrange(sheet_line_poss[1], sheet.nrows):
        vals = []
        itr = 0
        try:
            for col in poss:
                cell_value = sheet.cell(row, col).value
                cell_type = sheet.cell(row, col).ctype
                fld = fields[itr]
                if cell_type == 2:
                    # Numeric cell destined for a char field: strip the
                    # spurious ".0" xlrd adds to integral floats.
                    if fields_types[fld] == 'char':
                        if isinstance(cell_value, float):
                            if modf(cell_value)[0]:
                                cell_value = str(cell_value)
                            else:
                                cell_value = str(int(cell_value))
                else:
                    if fields_types[fld] == 'float':
                        try:
                            cell_value = float(cell_value) if cell_value else 0.0
                        except (TypeError, ValueError):
                            # Tolerate decimal commas and '<'/'>' markers
                            # (e.g. "<0,5") in textual numeric cells.
                            if isinstance(cell_value, (str, unicode)):
                                cell_value = cell_value.replace(',', '.')
                                for sign in ['<', '>']:
                                    if sign in cell_value:
                                        cell_value = cell_value.replace(sign, '')
                                cell_value = float(cell_value)
                    if fields_types[fld] == 'int':
                        cell_value = int(cell_value) if cell_value else 0
                vals.append(cell_value)
                itr += 1
        except Exception:
            error_rows_list.append(row + 1)
            logger.error("Cannot import the line #%s", row + 1)
            # Bug fix: a partially converted row must not fall through to
            # the import step — a truncated ``vals`` either mis-aligns
            # columns or raises IndexError on vals[defcode_pos] below.
            continue
        if any(vals):
            exist_id = product_pool.search(
                cr, uid,
                [('default_code', '=', vals[defcode_pos]),
                 ('supplier_id', '=', supplier.id)])
            if not exist_id:
                vals += [supplier]
                datas.append(vals)
                new_prod_count += 1
            else:
                updated_ids_list.append(exist_id[0])
                exist = product_pool.browse(cr, uid, exist_id[0])
                product_rewrite = product_pool.write(
                    cr, uid, exist.id, {
                        'quantity': vals[qty_pos],
                        'price': vals[price_pos],
                    })
                if product_rewrite:
                    logger.warning(
                        "\nProduct with ID = %s is rewrote with values:"
                        "\nQTY = %s\nPRICE = %s"
                        % (exist_id, vals[qty_pos], vals[price_pos]))
                exist_prod_count += 1

    # Replaced the former Python 2 ``print`` statements with lazy logging.
    logger.debug("existing ids: %s", exist_ids_list)
    logger.debug("updated ids: %s", updated_ids_list)
    # Supplier products that were absent from the file: zero their stock.
    nullify_ids_list = list(set(exist_ids_list) - set(updated_ids_list))
    product_pool.write(cr, uid, nullify_ids_list, {'quantity': 0})

    report = u'* %s : %d %s:' % (time.strftime('%d.%m.%y %H:%M:%S'),
                                 new_prod_count + exist_prod_count,
                                 ('records imported'))
    report += u'\n\t- %d %s' % (new_prod_count, ('records created'))
    report += u'\n\t- %d %s' % (exist_prod_count, ('records updated'))
    report += u'\n\t- %d %s' % (len(nullify_ids_list), ('records nullified'))
    if error_rows_list:
        report += u'\n\t- %s: %s' % (('could not import records on rows'),
                                     str(error_rows_list)[1:-1])
    if inst.etalon_catalog:
        fields += ['if_etalon']
        for l in datas:
            l.append('True')
    return product_pool.import_data(cr, uid, fields, datas), report
def imp_import(self, cr, uid, ids, context=None):
    """Run a catalog import for the first record in ``ids``.

    Validates the source (remote URL or uploaded file), builds an xlrd
    workbook or CSV reader, computes an MD5 of the raw payload to skip
    unchanged files, then delegates to ``tmp_xls_import``.

    :param ids: record ids; only ``ids[0]`` is processed
    :param context: optional context dict; may receive 'import_results'
    :return: an ``ir.actions.act_window`` dict when field types mismatch,
        otherwise the result of ``self.write`` with the import report
    :raise osv.except_osv: on unreachable URL, missing/corrupt file,
        unchanged file (unless ``force_import``), or missing field order
    """
    # Bug fix: ``context={}`` was a mutable default argument, shared and
    # mutated (context.update below) across calls.
    if context is None:
        context = {}
    logger = logging.getLogger('imp')
    inst = self.browse(cr, uid, ids[0])

    # Cheap sanity check of the source before any real work.
    if inst.imp_file == 'remote':
        try:
            sock = urllib2.urlopen(inst.url)
            del sock
        except Exception:
            raise osv.except_osv(
                'Warning',
                'URl is incorrect or wrong type or format of the file !')
    else:
        if not inst.loc:
            raise osv.except_osv('Warning', 'Please, provide a file !')

    remote = inst.imp_file == 'remote'
    md = md5()
    if remote:
        try:
            sock = urllib2.urlopen(inst.url)
            if inst.imp_type == 'csv':
                source = csv.reader(sock, quotechar='"', delimiter=',')
            elif inst.imp_type == 'xls':
                source = open_workbook(file_contents=sock.read())
        except Exception:
            logger.warning("Except my remote")
            raise osv.except_osv('Warning', 'Cant open Url !')
    else:
        try:
            if inst.imp_type == 'csv':
                source = csv.reader(
                    StringIO.StringIO(inst.loc.decode('base64')),
                    quotechar='"', delimiter=',')
            elif inst.imp_type == 'xls':
                source = open_workbook(
                    file_contents=StringIO.StringIO(
                        inst.loc.decode('base64')).read())
        except Exception:
            raise osv.except_osv(
                'Warning',
                'Corrupt Fie format or incorrect file format !')

    # Checksum of the raw payload — used to skip re-importing an
    # unchanged file unless force_import is set.  The remote URL is
    # fetched a second time because the first socket was consumed above.
    if remote:
        sock = urllib2.urlopen(inst.url)
        md.update(sock.read())
    else:
        md.update(StringIO.StringIO(inst.loc.decode('base64')).read())
    _md5 = md.hexdigest()

    if not inst.force_import:
        if inst.md5 and inst.md5 == _md5:
            raise osv.except_osv('Warning', 'Old version of the file !')

    fields_str = inst.import_order_main
    if not fields_str:
        raise osv.except_osv('Warning', 'Please, set fields order first !')
    # NOTE(review): eval() of a stored field-order string — trusted admin
    # data only; ast.literal_eval would be safer.
    fields_dict = eval(fields_str)
    # Field names and their column positions travel as two parallel
    # sequences (dict ordering is consistent between keys() and values()).
    poss = fields_dict.values()
    fields = fields_dict.keys()
    sheet_line_poss = [inst.sheet_num - 1, inst.line_start - 1]
    arrgs = (cr, inst, source, fields, poss, sheet_line_poss,
             inst.model_name)

    if inst.imp_type == 'xls':
        msg = check_fields_types(*arrgs)
        if msg:
            # Type mismatches: show them in a wizard instead of importing.
            context.update({'import_results': msg})
            return {
                'name': "Test Wizard",
                'view_mode': 'form',
                'view_type': 'form',
                'res_model': 'imp.wizard.fields',
                'target': 'new',
                'nodestroy': True,
                'type': 'ir.actions.act_window',
                'context': "%s" % context,
            }
        # Both etalon and non-etalon catalogs used the same call — the
        # duplicated if/else was collapsed.
        res = tmp_xls_import(*arrgs)
    else:
        # NOTE(review): the CSV path was never implemented (both branches
        # were ``pass``); falling through leaves ``res`` unbound and
        # raises NameError below — preserved as-is, flagged for follow-up.
        pass

    result, rows, warning_msg, dummy = res[0]
    report = res[1]
    if result < 0:
        # Replaced the former Python 2 ``print res`` debug statement.
        logger.error("Import failed: %s", warning_msg)
    else:
        if inst.description:
            # Keep at most ~50 previous reports in the description log.
            old_reports_list = inst.description.split('\n*')
            if len(old_reports_list) > 50:
                old_reports_list.pop()
            report = report + '\n' + '\n*'.join(old_reports_list)
    return self.write(
        cr, uid, inst.id, {
            'md5': _md5,
            'description': report,
            'last_import': str(time.strftime('%d.%m.%y %H:%M:%S')),
        })
# NOTE(review): this import/constant section belongs to the mozaik
# membership module and appears to have been pasted into this file —
# verify it really belongs here.  Several imports are unused in the
# visible code and `fields` is imported twice (from openerp.osv and,
# aliased, from openerp) — confirm against the rest of the file before
# pruning.
from operator import attrgetter
from uuid import uuid4, uuid1
from dateutil.relativedelta import relativedelta
from openerp.addons.mozaik_base.base_tools import format_email, check_email
from openerp.addons.mozaik_base.base_tools import get_age
from openerp.addons.mozaik_person.res_partner import AVAILABLE_GENDERS
from openerp.osv import orm, fields
from openerp.tools import SUPERUSER_ID
from openerp.tools import logging
from openerp.tools.misc import DEFAULT_SERVER_DATE_FORMAT
from openerp.exceptions import ValidationError
from openerp import _, api, fields as new_fields

_logger = logging.getLogger(__name__)

# Workflow states available on a membership request (selection values).
MEMBERSHIP_AVAILABLE_STATES = [
    ('draft', 'Draft'),
    ('confirm', 'Confirmed'),
    ('validate', 'Done'),
    ('cancel', 'Cancelled'),
]

# Sentinel for an address whose eight '#'-separated components are all
# empty/zero.
EMPTY_ADDRESS = '0#0#0#0#0#0#0#0'

# Kind of membership being requested (selection values).
MEMBERSHIP_REQUEST_TYPE = [
    ('m', 'Member'),
    ('s', 'Supporter'),
]

# Configuration parameter key holding the minimum age required to
# submit a membership request.
MR_REQUIRED_AGE_KEY = 'mr_required_age'
def imp_import(self, cr, uid, ids, context={}):
    """Run a catalog import for the first record in ``ids``.

    NOTE(review): this is a near-byte-identical duplicate of the other
    ``imp_import`` definition earlier in this file — only one of the two
    survives at import time; consolidate them.

    Validates the source (remote URL or uploaded file), builds an xlrd
    workbook or CSV reader, computes an MD5 of the raw payload to detect
    unchanged files, then delegates to ``tmp_xls_import``.

    WARNING: ``context={}`` is a mutable default argument shared across
    calls (it is mutated via ``context.update`` below) — known defect.
    """
    logger = logging.getLogger('imp')
    inst = self.browse(cr, uid, ids[0])
    # Cheap reachability/presence check of the source before real work.
    if inst.imp_file == 'remote':
        try:
            sock = urllib2.urlopen(inst.url)
            del sock
        except Exception:
            raise osv.except_osv(
                'Warning',
                'URl is incorrect or wrong type or format of the file !')
    else:
        if not inst.loc:
            raise osv.except_osv('Warning', 'Please, provide a file !')
    remote = inst.imp_file == 'remote'
    checkmd5 = False  # NOTE(review): assigned but never used
    md = md5()
    if remote:
        try:
            sock = urllib2.urlopen(inst.url)
            if inst.imp_type == 'csv':
                source = csv.reader(sock, quotechar='"', delimiter=',')
            elif inst.imp_type == 'xls':
                source = open_workbook(file_contents=sock.read())
        except Exception:
            logger.warning("Except my remote")
            raise osv.except_osv('Warning', 'Cant open Url !')
    else:
        # Uploaded file arrives base64-encoded in inst.loc.
        try:
            if inst.imp_type == 'csv':
                source = csv.reader(
                    StringIO.StringIO(inst.loc.decode('base64')),
                    quotechar='"', delimiter=',')
            elif inst.imp_type == 'xls':
                source = open_workbook(
                    file_contents=StringIO.StringIO(
                        inst.loc.decode('base64')).read())
        except Exception:
            raise osv.except_osv(
                'Warning',
                'Corrupt Fie format or incorrect file format !')
    # MD5 of the raw payload — used below to skip re-importing an
    # unchanged file.  The remote URL is fetched a second time because
    # the first socket was already consumed.
    if remote:
        sock = urllib2.urlopen(inst.url)
        md.update(sock.read())
        _md5 = md.hexdigest()
    else:
        md.update(StringIO.StringIO(inst.loc.decode('base64')).read())
        _md5 = md.hexdigest()
    if not inst.force_import:
        if inst.md5:
            if inst.md5 == _md5:
                raise osv.except_osv('Warning', 'Old version of the file !')
    fields_str = inst.import_order_main
    if not fields_str:
        raise osv.except_osv('Warning', 'Please, set fields order first !')
    # NOTE(review): eval() of a stored configuration string — trusted
    # admin data only; ast.literal_eval would be safer.  #yustas
    fields_dict = eval(fields_str)
    fields = []
    # Field names and their sheet column positions travel as two
    # parallel sequences taken from the same dict.
    poss = fields_dict.values()
    fields = fields_dict.keys()
    sheet_line_poss = [inst.sheet_num - 1, inst.line_start - 1]
    arrgs = (cr, inst, source, fields, poss, sheet_line_poss,
             inst.model_name)
    if inst.imp_type == 'xls':
        msg = check_fields_types(*arrgs)
        if msg:
            # Field-type mismatches: show them in a wizard, don't import.
            context.update({'import_results': msg})
            return {
                'name': "Test Wizard",
                'view_mode': 'form',
                'view_type': 'form',
                'res_model': 'imp.wizard.fields',
                'target': 'new',
                'nodestroy': True,
                'type': 'ir.actions.act_window',
                'context': "%s" % context
            }
        else:
            # NOTE(review): both branches are identical — the etalon
            # variant was never split out.
            if inst.etalon_catalog:
                res = tmp_xls_import(*arrgs)
            else:
                res = tmp_xls_import(*arrgs)
    else:
        # NOTE(review): CSV import is not implemented; ``res`` stays
        # unbound here and res[0] below raises NameError.
        if inst.etalon_catalog:
            pass
        else:
            pass
    result, rows, warning_msg, dummy = res[0]
    report = res[1]
    if result < 0:
        print res
    else:
        if inst.description:
            # Prepend the new report; cap the history at ~50 entries.
            old_reports_list = inst.description.split('\n*')
            if len(old_reports_list) > 50:
                old_reports_list.pop()
                report = report + '\n' + '\n*'.join(old_reports_list)
            else:
                report = report + '\n' + '\n*'.join(old_reports_list)
    return self.write(cr, uid, inst.id, {
        'md5': _md5,
        'description': report,
        'last_import': str(time.strftime('%d.%m.%y %H:%M:%S'))
    })
def tmp_xls_import(cr, inst, source, fields, poss, sheet_line_poss, model_nm):
    """Import product rows from an XLS workbook into model ``model_nm``.

    NOTE(review): this is a near-byte-identical duplicate of the other
    ``tmp_xls_import`` definition earlier in this file — only one of the
    two survives at import time; consolidate them.

    Existing products (matched on default_code + supplier) are updated in
    place; unknown codes are collected and bulk-loaded via import_data;
    supplier products absent from the file get their quantity nullified.
    Returns ``(import_data_result, report_text)``.
    """
    pool = pooler.get_pool(cr.dbname)
    logger = logging.getLogger('imp')
    product_pool = pool.get(model_nm)
    uid = 1
    datas = []
    nullify_ids_list = []
    supplier = inst.name
    # All products currently attached to this supplier.
    exist_ids_list = product_pool.search(
        cr, uid, [('supplier_id', '=', supplier.id)])
    updated_ids_list = []
    config_ids = pool.get('imp.config').search(cr, 1, [])
    config = pool.get('imp.config').browse(cr, 1, config_ids)[0]
    # NOTE(review): eval() of a stored configuration string — trusted
    # admin data only; ast.literal_eval would be safer.
    fields_types = eval(config.import_fields_types)
    sheet = source.sheets()[sheet_line_poss[0]]
    fields += ['supplier_id.id']
    # Positions of the required columns inside each converted row.
    # (hardcode by Sasha — required fields for non-etalon catalog)
    qty_pos = fields.index('quantity')
    price_pos = fields.index('price')
    defcode_pos = fields.index('default_code')
    name_pos = fields.index('name')  # NOTE(review): computed but unused
    new_prod_count = 0
    exist_prod_count = 0
    error_rows_list = []
    for row in xrange(sheet_line_poss[1], sheet.nrows):
        vals = []
        itr = 0
        try:
            for col in poss:
                cell_value = sheet.cell(row, col).value
                cell_type = sheet.cell(row, col).ctype
                fld = fields[itr]
                if cell_type == 2:
                    # Numeric cell destined for a char field: strip the
                    # spurious ".0" xlrd adds to integral floats.
                    if fields_types[fld] == 'char':
                        if isinstance(cell_value, float):
                            if modf(cell_value)[0]:
                                cell_value = str(cell_value)
                            else:
                                cell_value = str(int(cell_value))
                else:
                    if fields_types[fld] == 'float':
                        try:
                            if cell_value:
                                cell_value = float(cell_value)
                            else:
                                cell_value = 0.0
                        except:
                            # Tolerate decimal commas and '<'/'>' markers
                            # (e.g. "<0,5") in textual numeric cells.
                            if isinstance(cell_value, str) or isinstance(
                                    cell_value, unicode):
                                cell_value = cell_value.replace(',', '.')
                                for sign in ['<', '>']:
                                    if sign in cell_value:
                                        cell_value = cell_value.replace(
                                            sign, '')
                                cell_value = float(cell_value)
                    if fields_types[fld] == 'int':
                        if cell_value:
                            cell_value = int(cell_value)
                        else:
                            cell_value = 0
                vals.append(cell_value)
                itr += 1
        except:
            # NOTE(review): bare except, and ``vals`` is NOT reset here
            # (the reset was deliberately commented out), so a partially
            # converted row still falls through to the import step below.
            #vals = []
            error_rows_list.append(row + 1)
            logger.error("Cannot import the line #%s", row + 1)
        if any(vals):
            exist_id = product_pool.search(
                cr, uid,
                [('default_code', '=', vals[defcode_pos]),
                 ('supplier_id', '=', supplier.id)])
            if not exist_id:
                # New product: queue for bulk import with its supplier.
                vals += [supplier]
                datas.append(vals)
                new_prod_count += 1
            else:
                # Known product: update quantity/price in place.
                updated_ids_list.append(exist_id[0])
                exist = product_pool.browse(cr, uid, exist_id[0])
                product_rewrite = product_pool.write(
                    cr, uid, exist.id,
                    {'quantity': vals[qty_pos],
                     'price': vals[price_pos]})
                if product_rewrite:
                    logger.warning(
                        "\nProduct with ID = %s is rewrote with values:"
                        "\nQTY = %s\nPRICE = %s"
                        % (exist_id, vals[qty_pos], vals[price_pos]))
                exist_prod_count += 1
    # NOTE(review): Python 2 debug prints left in production code.
    print unicode(exist_ids_list)
    print unicode(updated_ids_list)
    # Supplier products that were absent from the file: zero their stock.
    nullify_ids_list = list(set(exist_ids_list) - set(updated_ids_list))
    product_pool.write(cr, uid, nullify_ids_list, {'quantity': 0, })
    report = u'* %s : %d %s:' % (time.strftime('%d.%m.%y %H:%M:%S'),
                                 new_prod_count + exist_prod_count,
                                 ('records imported'))
    report += u'\n\t- %d %s' % (new_prod_count, ('records created'))
    report += u'\n\t- %d %s' % (exist_prod_count, ('records updated'))
    report += u'\n\t- %d %s' % (len(nullify_ids_list), ('records nullified'))
    if error_rows_list:
        report += u'\n\t- %s: %s' % (('could not import records on rows'),
                                     str(error_rows_list)[1:-1])
    if inst.etalon_catalog:
        # Etalon catalogs additionally flag every new row.
        fields += ['if_etalon']
        for l in datas:
            l.append('True')
    return product_pool.import_data(cr, uid, fields, datas), report