rem (stringlengths 0–322k) | add (stringlengths 0–2.05M) | context (stringlengths 8–228k) |
---|---|---|
ref_acc_bank = data.res_id | view_id_cur = data.res_id | def execute(self, cr, uid, ids, context=None): obj_multi = self.browse(cr, uid, ids[0]) obj_acc = self.pool.get('account.account') obj_acc_tax = self.pool.get('account.tax') obj_journal = self.pool.get('account.journal') obj_sequence = self.pool.get('ir.sequence') obj_acc_template = self.pool.get('account.account.template') obj_fiscal_position_template = self.pool.get('account.fiscal.position.template') obj_fiscal_position = self.pool.get('account.fiscal.position') data_pool = self.pool.get('ir.model.data') |
tmp = self.pool.get('res.partner.bank').name_get(cr, uid, [line.acc_no.id])[0][1] | tmp = line.acc_name | def execute(self, cr, uid, ids, context=None): obj_multi = self.browse(cr, uid, ids[0]) obj_acc = self.pool.get('account.account') obj_acc_tax = self.pool.get('account.tax') obj_journal = self.pool.get('account.journal') obj_sequence = self.pool.get('ir.sequence') obj_acc_template = self.pool.get('account.account.template') obj_fiscal_position_template = self.pool.get('account.fiscal.position.template') obj_fiscal_position = self.pool.get('account.fiscal.position') data_pool = self.pool.get('ir.model.data') |
'name': line.acc_no.bank and line.acc_no.bank.name+' '+tmp or tmp, | 'name': tmp, | def execute(self, cr, uid, ids, context=None): obj_multi = self.browse(cr, uid, ids[0]) obj_acc = self.pool.get('account.account') obj_acc_tax = self.pool.get('account.tax') obj_journal = self.pool.get('account.journal') obj_sequence = self.pool.get('ir.sequence') obj_acc_template = self.pool.get('account.account.template') obj_fiscal_position_template = self.pool.get('account.fiscal.position.template') obj_fiscal_position = self.pool.get('account.fiscal.position') data_pool = self.pool.get('ir.model.data') |
st_line = account_bank_statement_line_obj.browse(cr, uid, st_line_id.id, context) | st_line = account_bank_statement_line_obj.browse(cr, uid, st_line_id, context) | def create_move_from_st_line(self, cr, uid, st_line_id, company_currency_id, st_line_number, context=None): res_currency_obj = self.pool.get('res.currency') res_users_obj = self.pool.get('res.users') account_move_obj = self.pool.get('account.move') account_move_line_obj = self.pool.get('account.move.line') account_analytic_line_obj = self.pool.get('account.analytic.line') account_bank_statement_line_obj = self.pool.get('account.bank.statement.line') st_line = account_bank_statement_line_obj.browse(cr, uid, st_line_id.id, context) st = st_line.statement_id |
res_id = email_tool.process_email(cr, uid, server.object_id.model, data[0][1], attach=server.attach, context=context) | res_id = email_tool.process_email(cr, uid, server.object_id.model, msg, attach=server.attach, context=context) | def fetch_mail(self, cr, uid, ids, context=None): if not context: context = {} email_tool = self.pool.get('email.server.tools') for server in self.browse(cr, uid, ids, context): logger.notifyChannel('imap', netsvc.LOG_INFO, 'fetchmail start checking for new emails on %s' % (server.name)) context.update({'server_id': server.id, 'server_type': server.type}) count = 0 try: if server.type == 'imap': imap_server = None if server.is_ssl: imap_server = IMAP4_SSL(server.server, int(server.port)) else: imap_server = IMAP4(server.server, int(server.port)) |
res.update({'subject': ustr(context.get('subject', case.name) or '')}) | res.update({'subject': tools.ustr(context.get('subject', case.name) or '')}) | def default_get(self, cr, uid, fields, context=None): """ This function gets default values """ if not context: context = {} |
'name': sale.name, | 'name': sale.client_order_ref or sale.name, | def create_invoices(self, cr, uid, ids, context={}): """ To create invoices. |
if mode in ('w', 'w+', 'r+'): self._need_index = True else: self._need_index = False | if mode.endswith('b'): mode = mode[:-1] self.mode = mode | def __init__(self, parent, path, mode): nodes.node_descriptor.__init__(self, parent) self.__file = open(path, mode) if mode in ('w', 'w+', 'r+'): self._need_index = True else: self._need_index = False for attr in ('closed', 'read', 'write', 'seek', 'tell'): setattr(self,attr, getattr(self.__file, attr)) |
if self._need_index: | if self.mode in ('w', 'w+', 'r+'): | def close(self): # TODO: locking in init, close() fname = self.__file.name self.__file.close() if self._need_index: par = self._get_parent() cr = pooler.get_db(par.context.dbname).cursor() icont = '' mime = '' filename = par.path if isinstance(filename, (tuple, list)): filename = '/'.join(filename) try: mime, icont = cntIndex.doIndex(None, filename=filename, content_type=None, realfname=fname) except Exception: logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True) pass |
cr.execute('UPDATE ir_attachment SET index_content = %s, file_type = %s WHERE id = %s', (icont_u, mime, par.file_id)) par.content_length = filesize | fsize = os.stat(fname).st_size cr.execute("UPDATE ir_attachment " \ " SET index_content = %s, file_type = %s, " \ " file_size = %s " \ " WHERE id = %s", (icont_u, mime, fsize, par.file_id)) par.content_length = fsize | def close(self): # TODO: locking in init, close() fname = self.__file.name self.__file.close() if self._need_index: par = self._get_parent() cr = pooler.get_db(par.context.dbname).cursor() icont = '' mime = '' filename = par.path if isinstance(filename, (tuple, list)): filename = '/'.join(filename) try: mime, icont = cntIndex.doIndex(None, filename=filename, content_type=None, realfname=fname) except Exception: logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True) pass |
logging.getLogger('document.storage').debug('Cannot save file indexed content:', exc_info=True) | logging.getLogger('document.storage').warning('Cannot save file indexed content:', exc_info=True) elif self.mode in ('a', 'a+' ): try: par = self._get_parent() cr = pooler.get_db(par.context.dbname).cursor() fsize = os.stat(fname).st_size cr.execute("UPDATE ir_attachment SET file_size = %s " \ " WHERE id = %s", (fsize, par.file_id)) par.content_length = fsize par.content_type = mime cr.commit() cr.close() except Exception: logging.getLogger('document.storage').warning('Cannot save file appended content:', exc_info=True) | def close(self): # TODO: locking in init, close() fname = self.__file.name self.__file.close() if self._need_index: par = self._get_parent() cr = pooler.get_db(par.context.dbname).cursor() icont = '' mime = '' filename = par.path if isinstance(filename, (tuple, list)): filename = '/'.join(filename) try: mime, icont = cntIndex.doIndex(None, filename=filename, content_type=None, realfname=fname) except Exception: logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True) pass |
if ira.file_size: self._doclog.warning( "ir.attachment raise IOError(errno.ENOENT, 'No file can be located') fpath = os.path.join(boo.path, ira.store_fname) | if mode in ('r','r+'): if ira.file_size: self._doclog.warning( "ir.attachment raise IOError(errno.ENOENT, 'No file can be located') else: store_fname = self.__get_random_fname(boo.path) cr.execute('UPDATE ir_attachment SET store_fname = %s WHERE id = %s', (store_fname, ira.id)) fpath = os.path.join(boo.path, store_fname) else: fpath = os.path.join(boo.path, ira.store_fname) | def get_file(self, cr, uid, id, file_node, mode, context=None): """ Return a file-like object for the contents of some node """ if context is None: context = {} boo = self.browse(cr, uid, id, context) if not boo.online: raise RuntimeError('media offline') ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context) if boo.type == 'filestore': if not ira.store_fname: # On a migrated db, some files may have the wrong storage type # try to fix their directory. if ira.file_size: self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id) raise IOError(errno.ENOENT, 'No file can be located') fpath = os.path.join(boo.path, ira.store_fname) return nodefd_file(file_node, path=fpath, mode=mode) |
if not os.path.exists(fpath): | if (not os.path.exists(fpath)) and mode in ('r','r+'): | def get_file(self, cr, uid, id, file_node, mode, context=None): """ Return a file-like object for the contents of some node """ if context is None: context = {} boo = self.browse(cr, uid, id, context) if not boo.online: raise RuntimeError('media offline') ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context) if boo.type == 'filestore': if not ira.store_fname: # On a migrated db, some files may have the wrong storage type # try to fix their directory. if ira.file_size: self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id) raise IOError(errno.ENOENT, 'No file can be located') fpath = os.path.join(boo.path, ira.store_fname) return nodefd_file(file_node, path=fpath, mode=mode) |
flag = None if os.path.isdir(path): for dirs in os.listdir(path): if os.path.isdir(os.path.join(path, dirs)) and len(os.listdir(os.path.join(path, dirs))) < 4000: flag = dirs break flag = flag or create_directory(path) filename = random_name() fname = os.path.join(path, flag, filename) | store_fname = self.__get_random_fname(path) fname = os.path.join(path, store_fname) | def set_data(self, cr, uid, id, file_node, data, context=None, fil_obj=None): """ store the data. This function MUST be used from an ir.attachment. It wouldn't make sense to store things persistently for other types (dynamic). """ if not context: context = {} boo = self.browse(cr, uid, id, context) if fil_obj: ira = fil_obj else: ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context) |
store_fname = os.path.join(flag, filename) | | def set_data(self, cr, uid, id, file_node, data, context=None, fil_obj=None): """ store the data. This function MUST be used from an ir.attachment. It wouldn't make sense to store things persistently for other types (dynamic). """ if not context: context = {} boo = self.browse(cr, uid, id, context) if fil_obj: ira = fil_obj else: ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context) |
for id in ids: res[id] = {}.fromkeys(name, 0) | | def _user_left_days(self, cr, uid, ids, name, args, context={}): return_false = False employee_id = False res = {} for id in ids: res[id] = {}.fromkeys(name, 0) if context and context.has_key('employee_id'): if not context['employee_id']: return_false = True employee_id = context['employee_id'] else: employee_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id','=',uid)]) if employee_ids: employee_id = employee_ids[0] else: return_false = True if employee_id: res = self.get_days(cr, uid, ids, employee_id, return_false, context=context) return res |
on (inv_line.product_id = pt.id) | on (inv_line.product_id = pp.id) | def init(self, cr): drop_view_if_exists(cr, 'report_intrastat') cr.execute(""" create or replace view report_intrastat as ( select to_char(inv.create_date, 'YYYY') as name, to_char(inv.create_date, 'MM') as month, min(inv_line.id) as id, intrastat.id as intrastat_id, upper(inv_country.code) as code, sum(case when inv_line.price_unit is not null then inv_line.price_unit * inv_line.quantity else 0 end) as value, sum( case when uom.category_id != puom.category_id then (pt.weight_net * inv_line.quantity) else (pt.weight_net * inv_line.quantity * uom.factor) end ) as weight, sum( case when uom.category_id != puom.category_id then inv_line.quantity else (inv_line.quantity * uom.factor) end ) as supply_units, |
'state_id':int(payload.country_id), | 'state_id':int(payload.state_id), | def execute(self, cr, uid, ids, context=None): assert len(ids) == 1, "We should only get one object from the form" payload = self.browse(cr, uid, ids[0], context=context) if not getattr(payload, 'company_id', None): raise ValueError('Case where no default main company is setup ' 'not handled yet') |
if move.move_dest_id.picking_id: | if context.get('call_unlink',False) and move.move_dest_id.picking_id: | def action_cancel(self, cr, uid, ids, context={}): if not len(ids): return True pickings = {} for move in self.browse(cr, uid, ids): if move.state in ('confirmed', 'waiting', 'assigned', 'draft'): if move.picking_id: pickings[move.picking_id.id] = True if move.move_dest_id and move.move_dest_id.state == 'waiting': self.write(cr, uid, [move.move_dest_id.id], {'state': 'assigned'}) if move.move_dest_id.picking_id: wf_service = netsvc.LocalService("workflow") wf_service.trg_write(uid, 'stock.picking', move.move_dest_id.picking_id.id, cr) self.write(cr, uid, ids, {'state': 'cancel', 'move_dest_id': False}) |
for pick in self.pool.get('stock.picking').browse(cr, uid, pickings.keys()): if all(move.state == 'cancel' for move in pick.move_lines): self.pool.get('stock.picking').write(cr, uid, [pick.id], {'state': 'cancel'}) | if not context.get('call_unlink',False): for pick in self.pool.get('stock.picking').browse(cr, uid, pickings.keys()): if all(move.state == 'cancel' for move in pick.move_lines): self.pool.get('stock.picking').write(cr, uid, [pick.id], {'state': 'cancel'}) | def action_cancel(self, cr, uid, ids, context={}): if not len(ids): return True pickings = {} for move in self.browse(cr, uid, ids): if move.state in ('confirmed', 'waiting', 'assigned', 'draft'): if move.picking_id: pickings[move.picking_id.id] = True if move.move_dest_id and move.move_dest_id.state == 'waiting': self.write(cr, uid, [move.move_dest_id.id], {'state': 'assigned'}) if move.move_dest_id.picking_id: wf_service = netsvc.LocalService("workflow") wf_service.trg_write(uid, 'stock.picking', move.move_dest_id.picking_id.id, cr) self.write(cr, uid, ids, {'state': 'cancel', 'move_dest_id': False}) |
'create_uid': fields.many2one('res.users', 'Author', select=True), 'create_date': fields.datetime("Created on", select=True), 'write_date': fields.datetime("Modification Date", select=True), | 'create_uid': fields.many2one('res.users', 'Author', select=True, readonly=True), 'create_date': fields.datetime("Created on", select=True, readonly=True), 'write_date': fields.datetime("Modification Date", select=True, readonly=True), | def open_wiki_page(self, cr, uid, ids, context): |
if 'create_uid' in vals: del vals['create_uid'] | | def create(self, cr, uid, vals, context=None): |
body=None | body=self._get_body() if self._l_isLocked(uri): return self.send_body(None, '423', 'Locked', 'Locked') ct=None if self.headers.has_key("Content-Type"): ct=self.headers['Content-Type'] try: location = dc.put(uri,body,ct) except DAV_Error, (ec,dd): return self.send_status(ec) headers = {} if location: headers['Location'] = location try: etag = dc.get_prop(location or uri, "DAV:", "getetag") headers['ETag'] = etag except: pass self.send_body(None, '201', 'Created', '', headers=headers) def _get_body(self): body = None | def do_PUT(self): dc=self.IFACE_CLASS uri=urlparse.urljoin(self.get_baseuri(dc), self.path) uri=urllib.unquote(uri) # Handle If-Match if self.headers.has_key('If-Match'): test = False etag = None for match in self.headers['If-Match'].split(','): if match == '*': if dc.exists(uri): test = True break else: if dc.match_prop(uri, match, "DAV:", "getetag"): test = True break if not test: self.send_status(412) return |
if self._l_isLocked(uri): return self.send_body(None, '423', 'Locked', 'Locked') ct=None if self.headers.has_key("Content-Type"): ct=self.headers['Content-Type'] try: location = dc.put(uri,body,ct) except DAV_Error, (ec,dd): return self.send_status(ec) headers = {} if location: headers['Location'] = location try: etag = dc.get_prop(location or uri, "DAV:", "getetag") headers['ETag'] = etag except: pass self.send_body(None, '201', 'Created', '', headers=headers) | return body | def do_PUT(self): dc=self.IFACE_CLASS uri=urlparse.urljoin(self.get_baseuri(dc), self.path) uri=urllib.unquote(uri) # Handle If-Match if self.headers.has_key('If-Match'): test = False etag = None for match in self.headers['If-Match'].split(','): if match == '*': if dc.exists(uri): test = True break else: if dc.match_prop(uri, match, "DAV:", "getetag"): test = True break if not test: self.send_status(412) return |
if not res: | if context.get('bank_statement', False) and not res: | def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False): res = super(account_coda, self).search(cr, user, args=args, offset=offset, limit=limit, order=order, context=context, count=count) if not res: raise osv.except_osv('Error', _('Coda file not found for bank statement !!')) return res |
childs = self.search(cr, uid, [('parent_id', '=', parent_id), ('active', 'in', [True, False])]) | childs = self.search(cr, uid, [('parent_id', '=', parent_id)]) | def on_change_parent(self, cr, uid, id, parent_id): if not parent_id: return {} parent = self.read(cr, uid, [parent_id], ['partner_id','code'])[0] childs = self.search(cr, uid, [('parent_id', '=', parent_id), ('active', 'in', [True, False])]) numchild = len(childs) if parent['partner_id']: partner = parent['partner_id'][0] else: partner = False res = {'value' : {'code' : '%s - %03d' % (parent['code'] or '', numchild + 1),}} if partner: res['value']['partner_id'] = partner return res |
if picking.sale_id and picking.sale_id.note: if picking.note: return picking.note + '\n' + picking.sale_id.note else: return picking.sale_id.note | if picking.note or (picking.sale_id and picking.sale_id.note): return picking.note or picking.sale_id.note | def _get_comment_invoice(self, cursor, user, picking): if picking.sale_id and picking.sale_id.note: if picking.note: return picking.note + '\n' + picking.sale_id.note else: return picking.sale_id.note return super(stock_picking, self)._get_comment_invoice(cursor, user, picking) |
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): res = super(account_bs_report, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=False) doc = etree.XML(res['arch']) nodes = doc.xpath("//field[@name='journal_ids']") for node in nodes: node.set('readonly', '1') node.set('required', '0') res['arch'] = etree.tostring(doc) return res | def onchange_chart_id(self, cr, uid, chart_id, context=None): if not chart_id: return False account = self.pool.get('account.account').browse(cr, uid, chart_id , context=context) if not account.company_id.property_reserve_and_surplus_account: return False return { 'value': {'reserve_account_id': account.company_id.property_reserve_and_surplus_account.id}} | def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): res = super(account_bs_report, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=False) doc = etree.XML(res['arch']) nodes = doc.xpath("//field[@name='journal_ids']") for node in nodes: node.set('readonly', '1') node.set('required', '0') res['arch'] = etree.tostring(doc) return res |
account = self.pool.get('account.account').browse(cr, uid, data['form']['chart_account_id'], context=context) if not account.company_id.property_reserve_and_surplus_account: raise osv.except_osv(_('Warning'),_('Please define the Reserve and Profit/Loss account for current user company !')) data['form']['reserve_account_id'] = account.company_id.property_reserve_and_surplus_account.id data['form'].update(self.read(cr, uid, ids, ['display_type'])[0]) | | def _print_report(self, cr, uid, ids, data, context=None): if context is None: context = {} data = self.pre_print_report(cr, uid, ids, data, context=context) account = self.pool.get('account.account').browse(cr, uid, data['form']['chart_account_id'], context=context) if not account.company_id.property_reserve_and_surplus_account: raise osv.except_osv(_('Warning'),_('Please define the Reserve and Profit/Loss account for current user company !')) data['form']['reserve_account_id'] = account.company_id.property_reserve_and_surplus_account.id data['form'].update(self.read(cr, uid, ids, ['display_type'])[0]) if data['form']['display_type']: return { 'type': 'ir.actions.report.xml', 'report_name': 'account.balancesheet.horizontal', 'datas': data, } else: return { 'type': 'ir.actions.report.xml', 'report_name': 'account.balancesheet', 'datas': data, } |
def _ellipsis(self, orig_str, maxlen=100, ellipsis='...'): maxlen = maxlen - len(ellipsis) if maxlen <= 0: maxlen = 1 new_str = orig_str[:maxlen] return new_str | def _ellipsis(self, char, size=100, truncation_str='...'): if len(char) <= size: return char return char[:size-len(truncation_str)] + truncation_str | def _ellipsis(self, orig_str, maxlen=100, ellipsis='...'): maxlen = maxlen - len(ellipsis) if maxlen <= 0: maxlen = 1 new_str = orig_str[:maxlen] return new_str |
return self._ellipsis(name, maxlen, ' ...') | return self._ellipsis(name, maxlen) | def _strip_name(self, name, maxlen=50): return self._ellipsis(name, maxlen, ' ...') |
return {} | return res res = {'account_id':False} | def onchange_payment(self, cr, uid, ids, pay_now, journal_id, partner_id, ttype='sale'): if not partner_id: return {} partner_pool = self.pool.get('res.partner') res = {'account_id':False} if pay_now == 'pay_later': partner = partner_pool.browse(cr, uid, partner_id) if ttype == 'sale': res.update({ 'account_id':partner.property_account_receivable.id, }) elif ttype == 'purchase': res.update({ 'account_id':partner.property_account_payable.id, }) return { 'value':res } |
res = {'account_id':False} | journal_pool = self.pool.get('account.journal') | def onchange_payment(self, cr, uid, ids, pay_now, journal_id, partner_id, ttype='sale'): if not partner_id: return {} partner_pool = self.pool.get('res.partner') res = {'account_id':False} if pay_now == 'pay_later': partner = partner_pool.browse(cr, uid, partner_id) if ttype == 'sale': res.update({ 'account_id':partner.property_account_receivable.id, }) elif ttype == 'purchase': res.update({ 'account_id':partner.property_account_payable.id, }) return { 'value':res } |
if ttype == 'sale': res.update({ 'account_id':partner.property_account_receivable.id, }) elif ttype == 'purchase': res.update({ 'account_id':partner.property_account_payable.id, }) return { 'value':res } | journal = journal_pool.browse(cr, uid, journal_id) if journal.type in ('sale','sale_refund'): account_id = partner.property_account_receivable.id elif journal.type in ('purchase', 'purchase_refund','expense'): account_id = partner.property_account_payable.id else: account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id res['account_id'] = account_id return {'value':res} | def onchange_payment(self, cr, uid, ids, pay_now, journal_id, partner_id, ttype='sale'): if not partner_id: return {} partner_pool = self.pool.get('res.partner') res = {'account_id':False} if pay_now == 'pay_later': partner = partner_pool.browse(cr, uid, partner_id) if ttype == 'sale': res.update({ 'account_id':partner.property_account_receivable.id, }) elif ttype == 'purchase': res.update({ 'account_id':partner.property_account_payable.id, }) return { 'value':res } |
'no_of_employee': fields.integer('No of Employees', help='Number of employee there are already in the department', readonly=True), 'no_of_recruitment': fields.integer('No of Recruitment'), | 'no_of_employee': fields.integer('No of Employees', help='Number of employee there are already in the department'), 'no_of_recruitment': fields.integer('No of Recruitment', readonly=True), | def _check_recursion(self, cr, uid, ids, context=None): level = 100 while len(ids): cr.execute('select distinct parent_id from hr_employee_category where id IN %s', (tuple(ids), )) ids = filter(None, map(lambda x:x[0], cr.fetchall())) if not level: return False level -= 1 return True |
def on_change_expected_employee(self, cr, uid, ids, expected_employee, context=None): | def on_change_expected_employee(self, cr, uid, ids, expected_employee, no_of_employee, context=None): | def on_change_expected_employee(self, cr, uid, ids, expected_employee, context=None): if context is None: context = {} result={} if expected_employee: xx = self.browse(cr, uid, ids, context)[0] result['no_of_recruitment'] = expected_employee - xx['no_of_employee'] return {'value': result} |
xx = self.browse(cr, uid, ids, context)[0] result['no_of_recruitment'] = expected_employee - xx['no_of_employee'] | result['no_of_recruitment'] = expected_employee - no_of_employee | def on_change_expected_employee(self, cr, uid, ids, expected_employee, context=None): if context is None: context = {} result={} if expected_employee: xx = self.browse(cr, uid, ids, context)[0] result['no_of_recruitment'] = expected_employee - xx['no_of_employee'] return {'value': result} |
ids_dept = obj_dept.search(cr, uid, [('member_ids', 'in', [user_id])], context=context) | emp_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', user_id)]) cr.execute('SELECT emp.department_id FROM hr_employee AS emp JOIN resource_resource AS res ON res.id = emp.resource_id \ WHERE res.user_id = %s AND emp.department_id IS NOT NULL', (user_id,)) ids_dept = [x[0] for x in cr.fetchall()] | def _parent_compute(self, cr, uid, ids, name, args, context=None): if context is None: context = {} result = {} obj_dept = self.pool.get('hr.department') for user_id in ids: ids_dept = obj_dept.search(cr, uid, [('member_ids', 'in', [user_id])], context=context) parent_ids = [] if ids_dept: data_dept = obj_dept.read(cr, uid, ids_dept, ['manager_id'], context=context) parent_ids = map(lambda x: x['manager_id'][0], data_dept) result[user_id] = parent_ids return result |
for manager_id in ids: | for user_id in ids: | def _child_compute(self, cr, uid, ids, name, args, context=None): if context is None: context = {} obj_dept = self.pool.get('hr.department') obj_user = self.pool.get('res.users') result = {} for manager_id in ids: child_ids = [] mgnt_dept_ids = obj_dept.search(cr, uid, [('manager_id', '=', manager_id)], context=context) ids_dept = obj_dept.search(cr, uid, [('id', 'child_of', mgnt_dept_ids)], context=context) if ids_dept: data_dept = obj_dept.read(cr, uid, ids_dept, ['member_ids'], context=context) childs = map(lambda x: x['member_ids'], data_dept) childs = tools.flatten(childs) childs = obj_user.search(cr, uid, [('id', 'in', childs),('active', '=', True)], context=context) if manager_id in childs: childs.remove(manager_id) child_ids.extend(tools.flatten(childs)) set = {} map(set.__setitem__, child_ids, []) child_ids = set.keys() else: child_ids = [] result[manager_id] = child_ids return result |
mgnt_dept_ids = obj_dept.search(cr, uid, [('manager_id', '=', manager_id)], context=context) | cr.execute('SELECT dept.id FROM hr_department AS dept \ LEFT JOIN hr_employee AS emp ON dept.manager_id = emp.id \ WHERE emp.id IN \ (SELECT emp.id FROM hr_employee \ JOIN resource_resource r ON r.id = emp.resource_id WHERE r.user_id=' + str(user_id) + ') ') mgnt_dept_ids = [x[0] for x in cr.fetchall()] | def _child_compute(self, cr, uid, ids, name, args, context=None): if context is None: context = {} obj_dept = self.pool.get('hr.department') obj_user = self.pool.get('res.users') result = {} for manager_id in ids: child_ids = [] mgnt_dept_ids = obj_dept.search(cr, uid, [('manager_id', '=', manager_id)], context=context) ids_dept = obj_dept.search(cr, uid, [('id', 'child_of', mgnt_dept_ids)], context=context) if ids_dept: data_dept = obj_dept.read(cr, uid, ids_dept, ['member_ids'], context=context) childs = map(lambda x: x['member_ids'], data_dept) childs = tools.flatten(childs) childs = obj_user.search(cr, uid, [('id', 'in', childs),('active', '=', True)], context=context) if manager_id in childs: childs.remove(manager_id) child_ids.extend(tools.flatten(childs)) set = {} map(set.__setitem__, child_ids, []) child_ids = set.keys() else: child_ids = [] result[manager_id] = child_ids return result |
if manager_id in childs: childs.remove(manager_id) | if user_id in childs: childs.remove(user_id) | def _child_compute(self, cr, uid, ids, name, args, context=None): if context is None: context = {} obj_dept = self.pool.get('hr.department') obj_user = self.pool.get('res.users') result = {} for manager_id in ids: child_ids = [] mgnt_dept_ids = obj_dept.search(cr, uid, [('manager_id', '=', manager_id)], context=context) ids_dept = obj_dept.search(cr, uid, [('id', 'child_of', mgnt_dept_ids)], context=context) if ids_dept: data_dept = obj_dept.read(cr, uid, ids_dept, ['member_ids'], context=context) childs = map(lambda x: x['member_ids'], data_dept) childs = tools.flatten(childs) childs = obj_user.search(cr, uid, [('id', 'in', childs),('active', '=', True)], context=context) if manager_id in childs: childs.remove(manager_id) child_ids.extend(tools.flatten(childs)) set = {} map(set.__setitem__, child_ids, []) child_ids = set.keys() else: child_ids = [] result[manager_id] = child_ids return result |
child_ids = set.keys() else: child_ids = [] result[manager_id] = child_ids | child_ids = set.keys() result[user_id] = child_ids | def _child_compute(self, cr, uid, ids, name, args, context=None): if context is None: context = {} obj_dept = self.pool.get('hr.department') obj_user = self.pool.get('res.users') result = {} for manager_id in ids: child_ids = [] mgnt_dept_ids = obj_dept.search(cr, uid, [('manager_id', '=', manager_id)], context=context) ids_dept = obj_dept.search(cr, uid, [('id', 'child_of', mgnt_dept_ids)], context=context) if ids_dept: data_dept = obj_dept.read(cr, uid, ids_dept, ['member_ids'], context=context) childs = map(lambda x: x['member_ids'], data_dept) childs = tools.flatten(childs) childs = obj_user.search(cr, uid, [('id', 'in', childs),('active', '=', True)], context=context) if manager_id in childs: childs.remove(manager_id) child_ids.extend(tools.flatten(childs)) set = {} map(set.__setitem__, child_ids, []) child_ids = set.keys() else: child_ids = [] result[manager_id] = child_ids return result |
fnode.set_data(cr, data, fil) | if data is not None: fnode.set_data(cr, data, fil) | def create_child(self, cr, path, data): """ API function to create a child file object and node Return the node_* created """ dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) fil_obj=dirobj.pool.get('ir.attachment') val = { 'name': path, 'datas_fname': path, 'parent_id': self.dir_id, # Datas are not set here } |
'vtimezone': fields.related('user_id', 'context_tz', type='char', size=24, \ string='Timezone', store=True), | 'vtimezone': fields.selection(_tz_get, size=64, string='Timezone'), | def _get_rulestring(self, cr, uid, ids, name, arg, context=None): """ Gets Recurrence rule string according to value type RECUR of iCalendar from the values given. @param self: The object pointer @param cr: the current row, from the database cursor, @param id: List of calendar event's ids. @param context: A standard dictionary for contextual values @return: dictionary of rrule value. """ result = {} for datas in self.read(cr, uid, ids, context=context): event = datas['id'] if datas.get('rrule_type'): if datas.get('rrule_type') == 'none': result[event] = False cr.execute("UPDATE %s set exrule=Null where id=%s" % (self._table, event)) elif datas.get('rrule_type') == 'custom': if datas.get('interval', 0) < 0: raise osv.except_osv('Warning!', 'Interval can not be Negative') if datas.get('count', 0) < 0: raise osv.except_osv('Warning!', 'Count can not be Negative') rrule_custom = self.compute_rule_string(cr, uid, datas, \ context=context) result[event] = rrule_custom else: result[event] = self.compute_rule_string(cr, uid, {'freq': datas.get('rrule_type').upper(), 'interval': 1}, context=context) |
value = {} | | def action_apply(self, cr, uid, ids, context=None): """ This converts lead to opportunity and opens Opportunity view @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of Lead to Opportunity IDs @param context: A standard dictionary for contextual values |
if record_id: lead_obj = self.pool.get('crm.lead') opp_obj = self. pool.get('crm.opportunity') data_obj = self.pool.get('ir.model.data') history_obj = self.pool.get('crm.case.history') model_obj = self.pool.get('ir.model') result = data_obj._get_id(cr, uid, 'crm', 'view_crm_case_opportunities_filter') res = data_obj.read(cr, uid, result, ['res_id']) id2 = data_obj._get_id(cr, uid, 'crm', 'crm_case_form_view_oppor') id3 = data_obj._get_id(cr, uid, 'crm', 'crm_case_tree_view_oppor') if id2: id2 = data_obj.browse(cr, uid, id2, context=context).res_id if id3: id3 = data_obj.browse(cr, uid, id3, context=context).res_id lead = lead_obj.browse(cr, uid, record_id, context=context) model_ids = model_obj.search(cr, uid, [('model', '=', 'crm.opportunity')]) for this in self.browse(cr, uid, ids, context=context): new_opportunity_id = opp_obj.create(cr, uid, { 'name': this.name, 'referred': this.referred, 'planned_revenue': this.planned_revenue, 'probability': this.probability, 'partner_id': lead.partner_id and lead.partner_id.id or False , 'section_id': lead.section_id and lead.section_id.id or False, 'description': lead.description or False, 'date_deadline': lead.date_deadline or False, 'partner_address_id': lead.partner_address_id and \ lead.partner_address_id.id or False , 'priority': lead.priority, 'phone': lead.phone, 'email_from': lead.email_from }) new_opportunity = opp_obj.browse(cr, uid, new_opportunity_id) vals = { 'partner_id': this.partner_id and this.partner_id.id or False, } if not lead.opportunity_id: vals.update({'opportunity_id' : new_opportunity.id}) lead_obj.write(cr, uid, [lead.id], vals) lead_obj.case_close(cr, uid, [lead.id]) for his_id in lead.history_line: history_ids = history_obj.copy(cr, uid, his_id.id, \ {'model_id': model_ids[0], \ 'res_id': new_opportunity_id}) opp_obj.case_open(cr, uid, [new_opportunity_id]) value = { 'name': _('Opportunity'), 'view_type': 'form', 'view_mode': 'form,tree', 'res_model': 'crm.opportunity', 'res_id': int(new_opportunity_id), 'view_id': False, 'views': [(id2, 'form'), (id3, 'tree'), (False, 'calendar'), (False, 'graph')], 'type': 'ir.actions.act_window', 'search_view_id': res['res_id'] } | if not record_id: return {} lead_obj = self.pool.get('crm.lead') opp_obj = self. pool.get('crm.opportunity') data_obj = self.pool.get('ir.model.data') history_obj = self.pool.get('crm.case.history') model_obj = self.pool.get('ir.model') result = data_obj._get_id(cr, uid, 'crm', 'view_crm_case_opportunities_filter') res = data_obj.read(cr, uid, result, ['res_id']) id2 = data_obj._get_id(cr, uid, 'crm', 'crm_case_form_view_oppor') id3 = data_obj._get_id(cr, uid, 'crm', 'crm_case_tree_view_oppor') if id2: id2 = data_obj.browse(cr, uid, id2, context=context).res_id if id3: id3 = data_obj.browse(cr, uid, id3, context=context).res_id lead = lead_obj.browse(cr, uid, record_id, context=context) model_ids = model_obj.search(cr, uid, [('model', '=', 'crm.opportunity')]) for this in self.browse(cr, uid, ids, context=context): new_opportunity_id = opp_obj.create(cr, uid, { 'name': this.name, 'referred': lead.referred, 'planned_revenue': this.planned_revenue, 'probability': this.probability, 'partner_id': lead.partner_id and lead.partner_id.id or False, 'section_id': lead.section_id and lead.section_id.id or False, 'description': lead.description or False, 'date_deadline': lead.date_deadline or False, 'partner_address_id': lead.partner_address_id and lead.partner_address_id.id or False , 'priority': lead.priority, 'phone': lead.phone, 'email_from': lead.email_from }) new_opportunity = opp_obj.browse(cr, uid, new_opportunity_id) vals = { 'partner_id': this.partner_id and this.partner_id.id or False, } if not lead.opportunity_id: vals.update({'opportunity_id' : new_opportunity.id}) model_opportunity_id = self.pool.get('ir.model').search(cr, uid, [('model', '=', 'crm.opportunity')], context=context)[0] for model in ('crm.case.log', 'crm.case.history'): log_proxy = self.pool.get(model) log_ids = log_proxy.search(cr, uid, [('model_id.model', '=', 'crm.lead'),('res_id', '=', lead.id)], context=context) for log_id in log_ids: log_proxy.copy(cr, uid, log_id, {'model_id':model_opportunity_id}, context=context) lead_obj.write(cr, uid, [lead.id], vals) lead_obj.case_close(cr, uid, [lead.id]) for his_id in lead.history_line: history_ids = history_obj.copy(cr, uid, his_id.id, \ {'model_id': model_ids[0], \ 'res_id': new_opportunity_id}) value = { 'name': _('Opportunity'), 'view_type': 'form', 'view_mode': 'form,tree', 'res_model': 'crm.opportunity', 'res_id': int(new_opportunity_id), 'view_id': False, 'views': [(id2, 'form'), (id3, 'tree'), (False, 'calendar'), (False, 'graph')], 'type': 'ir.actions.act_window', 'search_view_id': res['res_id'] } | def action_apply(self, cr, uid, ids, context=None): """ This converts lead to opportunity and opens Opportunity view @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of Lead to Opportunity IDs @param context: A standard dictionary for contextual values |
ids = [] cr.execute('select move_id,sum(debit) from account_move_line group by move_id') result = dict(cr.fetchall()) for item in args: if item[1] == '>=': res = [('id', 'in', [k for k,v in result.iteritems() if v >= item[2]])] | ids = set() for cond in args: amount = cond[2] if isinstance(cond[2],(list,tuple)): if cond[1] in ['in','not in']: amount = tuple(cond[2]) else: continue | def _search_amount(self, cr, uid, obj, name, args, context): ids = [] cr.execute('select move_id,sum(debit) from account_move_line group by move_id') result = dict(cr.fetchall()) |
res = [('id', 'in', [k for k,v in result.iteritems() if v <= item[2]])] ids += res if not ids: return [('id', '>', '0')] return ids | if cond[1] in ['=like', 'like', 'not like', 'ilike', 'not ilike', 'in', 'not in', 'child_of']: continue cr.execute("select move_id from account_move_line group by move_id having sum(debit) %s %%s" % (cond[1]) ,(amount,)) res_ids = set(id[0] for id in cr.fetchall()) ids = ids and (ids & res_ids) or res_ids if ids: return [('id','in',tuple(ids))] else: return [('id', '=', '0')] | def _search_amount(self, cr, uid, obj, name, args, context): ids = [] cr.execute('select move_id,sum(debit) from account_move_line group by move_id') result = dict(cr.fetchall()) |
amount_unit=move.product_id.price_get(cr, uid, ids, pricetype.field, context) | amount_unit=move.product_id.price_get(pricetype.field, context)[move.product_id.id] | def action_done(self, cr, uid, ids, context=None): track_flag = False for move in self.browse(cr, uid, ids): if move.move_dest_id.id and (move.state != 'done'): cr.execute('insert into stock_move_history_ids (parent_id,child_id) values (%s,%s)', (move.id, move.move_dest_id.id)) if move.move_dest_id.state in ('waiting', 'confirmed'): self.write(cr, uid, [move.move_dest_id.id], {'state': 'assigned'}) if move.move_dest_id.picking_id: wf_service = netsvc.LocalService("workflow") wf_service.trg_write(uid, 'stock.picking', move.move_dest_id.picking_id.id, cr) else: pass # self.action_done(cr, uid, [move.move_dest_id.id]) if move.move_dest_id.auto_validate: self.action_done(cr, uid, [move.move_dest_id.id], context=context) |
'get_end_date':self._get_end_date, 'get_currency ':self.get_currency | 'get_end_date':self._get_end_date, 'get_currency ':self._get_currency | def __init__(self, cr, uid, name, context=None): if context is None: context = {} super(journal_print, self).__init__(cr, uid, name, context=context) self.period_ids = [] self.journal_ids = [] self.localcontext.update( { 'time': time, 'lines': self.lines, 'periods': self.periods, 'sum_debit_period': self._sum_debit_period, 'sum_credit_period': self._sum_credit_period, 'sum_debit': self._sum_debit, 'sum_credit': self._sum_credit, 'get_fiscalyear': self._get_fiscalyear, 'get_account': self._get_account, 'get_start_period': self.get_start_period, 'get_end_period': self.get_end_period, 'get_sortby': self._get_sortby, 'sum_currency_amount_account': self._sum_currency_amount_account, 'get_filter': self._get_filter, 'get_journal': self._get_journal, 'get_start_date':self._get_start_date, 'get_end_date':self._get_end_date, 'get_currency ':self.get_currency }) |
self.list[obj.code]=str(obj.amount) | self.list[str(obj.code)]=str(obj.amount) | def find_child(obj): self.list[obj.code]=str(obj.amount) if obj.child_ids: for child in obj.child_ids: find_child(child) return True |
def _balance_search(self, cursor, user, obj, name, args): | def _balance_search(self, cursor, user, obj, name, args, domain=None, context=None): if context is None: context = {} | def _balance_search(self, cursor, user, obj, name, args): if not len(args): return [] where = ' and '.join(map(lambda x: '(abs(sum(debit-credit))'+x[1]+str(x[2])+')',args)) cursor.execute('select id, sum(debit-credit) from account_move_line \ group by id,debit,credit having '+where) res = cursor.fetchall() if not len(res): return [('id', '=', '0')] return [('id', 'in', [x[0] for x in res])] |
if form['journal_ids'][0][2]: journal = " in (" + ','.join(map(lambda x: str(x), form['journal_ids'][0][2])) + ")" | if form['journal_ids']: journal = " in (" + ','.join(map(lambda x: str(x), form['journal_ids'])) + ")" | def _ref_lines(self,form): result = [] res = {} acc_id = [] final = [] acc_pool = self.pool.get('account.analytic.account') line_pool = self.pool.get('account.analytic.line') |
if form['journal_ids'][0][2]: journal=" in (" + ','.join(map(lambda x: str(x), form['journal_ids'][0][2])) + ")" | if form['journal_ids']: journal=" in (" + ','.join(map(lambda x: str(x), form['journal_ids'])) + ")" | def _lines(self,form,ids={}): if not ids: ids = self.ids |
elif pick.state in ['confirmed','assigned']: | elif pick.state in ['confirmed','assigned', 'draft']: | def unlink(self, cr, uid, ids, context=None): for pick in self.browse(cr, uid, ids, context=context): if pick.state in ['done','cancel']: raise osv.except_osv(_('Error'), _('You cannot remove the picking which is in %s state !')%(pick.state,)) elif pick.state in ['confirmed','assigned']: ids2 = [move.id for move in pick.move_lines] context.update({'call_unlink':True}) self.pool.get('stock.move').action_cancel(cr, uid, ids2, context) else: continue return super(stock_picking, self).unlink(cr, uid, ids, context=context) |
context.update({'call_unlink':True}) self.pool.get('stock.move').action_cancel(cr, uid, ids2, context) else: continue | ctx = context.copy() ctx.update({'call_unlink':True}) if pick.state != 'draft': move_obj.action_cancel(cr, uid, ids2, ctx) move_obj.unlink(cr, uid, ids2, ctx) | def unlink(self, cr, uid, ids, context=None): for pick in self.browse(cr, uid, ids, context=context): if pick.state in ['done','cancel']: raise osv.except_osv(_('Error'), _('You cannot remove the picking which is in %s state !')%(pick.state,)) elif pick.state in ['confirmed','assigned']: ids2 = [move.id for move in pick.move_lines] context.update({'call_unlink':True}) self.pool.get('stock.move').action_cancel(cr, uid, ids2, context) else: continue return super(stock_picking, self).unlink(cr, uid, ids, context=context) |
data_of_file = '<?xml version="1.0"?>\n<VATSENDING xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="MultiDeclarationTVA-NoSignature-14.xml">' | data_of_file = '<?xml version="1.0"?>\n<VATSENDING xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="MultiDeclarationTVA-NoSignature-16.xml">' | def create_xml(self, cr, uid, ids, context=None): obj_fyear = self.pool.get('account.fiscalyear') obj_tax_code = self.pool.get('account.tax.code') obj_acc_period = self.pool.get('account.period') obj_user = self.pool.get('res.users') mod_obj = self.pool.get('ir.model.data') |
'dir_id': fields.many2one('document.directory', 'Directory', required=True), | 'dir_id': fields.many2one('document.directory', 'Directory', required=True, ondelete="cascade"), | # TODO def unlink(... |
task_obj.write(cr, uid, data['id'], { | task_obj.write(cr, uid, [data['id']], { | def _do_assign(self, cr, uid, data, context): task_obj = pooler.get_pool(cr.dbname).get('project.task') task = task_obj.browse(cr, uid, data['id'], context) newname = data['form']['prefix'] or '' task_obj.copy(cr, uid, data['id'], { 'name': data['form']['name'], 'user_id': data['form']['user_id'], 'planned_hours': data['form']['planned_hours'], 'remaining_hours': data['form']['planned_hours'], 'parent_id': data['id'], 'state': 'open', 'description': data['form']['new_task_description'] or '', 'child_ids': [], 'work_ids': [] }) task_obj.write(cr, uid, data['id'], { 'remaining_hours': data['form']['planned_hours_me'], 'name': newname }) if data['form']['state']=='pending': task_obj.do_pending(cr, uid, [data['id']]) else: task_obj.do_close(cr, uid, [data['id']]) return {} |
event_date = datetime.strptime(data['date'], "%Y-%m-%d %H:%M:%S") if start_date and start_date <= event_date: start_date = event_date | start_date = event_date | def get_recurrent_ids(self, cr, uid, select, base_start_date, base_until_date, limit=100): """ @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param base_start_date: Get Start Date @param base_until_date: Get End Date @param limit: The Number of Results to Return """ |
cr.execute("""select p.id, p.name from account_fiscalyear y, account_period p where y.id=p.fiscalyear_id \ | cr.execute("""select p.id from account_fiscalyear y, account_period p where y.id=p.fiscalyear_id \ | def compute_refund(self, cr, uid, ids, mode='refund', context=None): """ @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: the account invoice refund’s ID or list of IDs |
'manager_id2': fields.many2one('hr.employee', 'Second Approval', readonly=True, help='This area is automaticly filled by the user who validate the leave with second level (If Leave type need second validation)') | 'manager_id2': fields.many2one('hr.employee', 'Second Approval', readonly=True, help='This area is automaticly filled by the user who validate the leave with second level (If Leave type need second validation)'), 'category_holiday_id': fields.many2one('hr.holidays', 'Holiday', help='For allocation By Employee Category (Link between Employee Category holiday and related holidays for employees of that category)') | def _employee_get(obj, cr, uid, context=None): if context is None: context = {} ids = obj.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)], context=context) if ids: return ids[0] return False |
ids2 = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)]) | ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)]) | def holidays_validate2(self, cr, uid, ids, *args): vals = {'state':'validate1'} self.check_holidays(cr, uid, ids) ids2 = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)]) if ids2: vals['manager_id'] = ids2[0] else: raise osv.except_osv(_('Warning !'),_('No user related to the selected employee.')) return self.write(cr, uid, ids, vals) |
self._create_resource_leave(cr, uid, vals) | for leave_id in self.search(cr, uid, [('category_holiday_id', '=', record.id)]): wf_service.trg_validate(uid, 'hr.holidays', leave_id, 'validate', cr) | def holidays_validate(self, cr, uid, ids, *args): obj_emp = self.pool.get('hr.employee') data_holiday = self.browse(cr, uid, ids) self.check_holidays(cr, uid, ids) vals = {'state':'validate'} ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)]) if ids2: if data_holiday[0].state == 'validate1': vals['manager_id2'] = ids2[0] else: vals['manager_id'] = ids2[0] else: raise osv.except_osv(_('Warning !'), _('No user related to the selected employee.')) self.write(cr, uid, ids, vals) for record in data_holiday: if record.holiday_type == 'employee' and record.type == 'remove': vals = { 'name': record.name, 'date_from': record.date_from, 'date_to': record.date_to, 'calendar_id': record.employee_id.calendar_id.id, 'company_id': record.employee_id.company_id.id, 'resource_id': record.employee_id.resource_id.id, 'holiday_id': record.id } self._create_resource_leave(cr, uid, vals) elif record.holiday_type == 'category' and record.type == 'remove': emp_ids = obj_emp.search(cr, uid, [('category_ids', '=', record.category_id.id)]) for emp in obj_emp.browse(cr, uid, emp_ids): vals = { 'name': record.name, 'date_from': record.date_from, 'date_to': record.date_to, 'calendar_id': emp.calendar_id.id, 'company_id': emp.company_id.id, 'resource_id': emp.resource_id.id, 'holiday_id':record.id } self._create_resource_leave(cr, uid, vals) return True |
ids2 = self.pool.get('hr.employee').search(cr, uid, [('user_id','=', uid)]) | ids2 = obj_emp.search(cr, uid, [('user_id','=', uid)]) | def holidays_refuse(self, cr, uid, ids, *args): vals = {'state': 'refuse'} ids2 = self.pool.get('hr.employee').search(cr, uid, [('user_id','=', uid)]) if ids2: vals['manager_id'] = ids2[0] self.write(cr, uid, ids, vals) return True |
self._remove_resouce_leave(cr, uid, ids) | leave_ids = self._get_category_leave_ids(cr, uid, ids) if leave_ids: self.unlink(cr, uid, leave_ids) | def holidays_cancel(self, cr, uid, ids, *args): self._update_user_holidays(cr, uid, ids) self.write(cr, uid, ids, {'state': 'cancel'}) self._remove_resouce_leave(cr, uid, ids) return True |
res.update({'subject': str(context.get('subject', case.name) or '')}) | res.update({'subject': ustr(context.get('subject', case.name) or '')}) | def default_get(self, cr, uid, fields, context=None): """ This function gets default values """ if not context: context = {} |
if res and res[0][0] < 0: | if res and res[0][0] and res[0][0] < 0: | def _check_date(self, cr, uid, ids): if ids: cr.execute('select number_of_days_temp from hr_holidays where id in ('+','.join(map(str, ids))+')') res = cr.fetchall() if res and res[0][0] < 0: return False return True |
def _check_has_start(self, cr, uid, ids, context=None): for campaign in self.browse(cr, uid, ids, context=context): if not any(a.start for a in campaign.activity_ids): return False return True | | def __str__(self): return self._str |
_constraints = [(_check_has_start, 'Please mark at least one activity as a start activity', ['Activities'])] | | def _check_has_start(self, cr, uid, ids, context=None): for campaign in self.browse(cr, uid, ids, context=context): if not any(a.start for a in campaign.activity_ids): return False return True |
if not campaign.activity_ids : raise osv.except_osv("Error", "There is no activitity in the campaign") activity_ids = [ act_id.id for act_id in campaign.activity_ids] if not activity_ids: raise osv.except_osv(_("Error"), _("The campaign cannot be started : there are no activities in it")) act_obj = self.pool.get('marketing.campaign.activity') act_ids = act_obj.search(cr, uid, [('id', 'in', activity_ids), ('type', '=', 'email')]) for activity in act_obj.browse(cr, uid, act_ids): if not activity.email_template_id.enforce_from_account : | if not campaign.activity_ids: raise osv.except_osv(_("Error"), _("The campaign cannot be started: there are no activities in it")) has_start = False has_signal_without_from = False for activity in campaign.activity_ids: if activity.start: has_start = True if activity.signal and len(activity.from_ids) == 0: has_signal_without_from = True if activity.type != 'email': continue if not activity.email_template_id.enforce_from_account: | def state_running_set(self, cr, uid, ids, *args): # TODO check that all subcampaigns are running campaign = self.browse(cr, uid, ids[0]) if not campaign.activity_ids : raise osv.except_osv("Error", "There is no activitity in the campaign") activity_ids = [ act_id.id for act_id in campaign.activity_ids] if not activity_ids: raise osv.except_osv(_("Error"), _("The campaign cannot be started : there are no activities in it")) act_obj = self.pool.get('marketing.campaign.activity') act_ids = act_obj.search(cr, uid, [('id', 'in', activity_ids), ('type', '=', 'email')]) for activity in act_obj.browse(cr, uid, act_ids): if not activity.email_template_id.enforce_from_account : raise osv.except_osv(_("Error"), _("The campaign cannot be started: an email account is missing in the email activity '%s'")%activity.name) if activity.email_template_id.enforce_from_account.state != 'approved' : raise osv.except_osv(_("Error"), _("The campaign cannot be started: the email account is not approved in the email activity '%s'")%activity.name) self.write(cr, uid, ids, {'state': 'running'}) return True |
self.write(cr, uid, ids, {'state': 'running'}) return True | if not has_start and not has_signal_without_from: raise osv.except_osv(_("Error"), _("The campaign hasn't any starting activity nor any activity with a signal and no previous activity.")) return self.write(cr, uid, ids, {'state': 'running'}) | def state_running_set(self, cr, uid, ids, *args): # TODO check that all subcampaigns are running campaign = self.browse(cr, uid, ids[0]) if not campaign.activity_ids : raise osv.except_osv("Error", "There is no activitity in the campaign") activity_ids = [ act_id.id for act_id in campaign.activity_ids] if not activity_ids: raise osv.except_osv(_("Error"), _("The campaign cannot be started : there are no activities in it")) act_obj = self.pool.get('marketing.campaign.activity') act_ids = act_obj.search(cr, uid, [('id', 'in', activity_ids), ('type', '=', 'email')]) for activity in act_obj.browse(cr, uid, act_ids): if not activity.email_template_id.enforce_from_account : raise osv.except_osv(_("Error"), _("The campaign cannot be started: an email account is missing in the email activity '%s'")%activity.name) if activity.email_template_id.enforce_from_account.state != 'approved' : raise osv.except_osv(_("Error"), _("The campaign cannot be started: the email account is not approved in the email activity '%s'")%activity.name) self.write(cr, uid, ids, {'state': 'running'}) return True |
inv_amount_company_currency = invoice.move_id.amount | inv_amount_company_currency = 0 for aml in invoice.move_id.line_id: if aml.account_id.id == invoice.account_id.id or aml.account_id.type in ('receivable', 'payable'): inv_amount_company_currency += aml.debit inv_amount_company_currency -= aml.credit inv_amount_company_currency = abs(inv_amount_company_currency) | def _wo_check(self, cr, uid, data, context): pool = pooler.get_pool(cr.dbname) invoice = pool.get('account.invoice').browse(cr, uid, data['id'], context) journal = pool.get('account.journal').browse(cr, uid, data['form']['journal_id'], context) cur_obj = pool.get('res.currency') # Here we need that: # The invoice total amount in company's currency <> paid amount in company currency # (according to the correct day rate, invoicing rate and payment rate are may be different) # => Ask to a write-off of the difference. This could happen even if both amount are equal, # because if the currency rate # Get the amount in company currency for the invoice (according to move lines) inv_amount_company_currency = invoice.move_id.amount |
select min(pt.id) as id, a.name as project, sum(pt.remaining_hours) as remaining_hours, a.state, pu.uid from project_task as pt, project_project as p, account_analytic_account as a, project_user_rel as pu where pt.project_id=p.id and p.category_id = a.id and pu.project_id=p.id group by a.name,a.state,pu.uid | select min(pt.id) as id, aaa.name as project, CASE WHEN pu.uid is null THEN aaa.user_id ELSE pu.uid END as uid, sum(pt.remaining_hours) as remaining_hours, aaa.state from project_task pt left join project_project as pp ON (pt.project_id=pp.id) left join account_analytic_account as aaa ON (pp.category_id=aaa.id) left join project_user_rel as pu ON (pu.project_id=pp.id) where pt.create_uid=aaa.user_id group by aaa.name,aaa.state,pu.uid,aaa.user_id | def init(self, cr): tools.sql.drop_view_if_exists(cr, 'project_vs_remaining_hours') cr.execute(""" create or replace view project_vs_remaining_hours as ( select min(pt.id) as id, a.name as project, sum(pt.remaining_hours) as remaining_hours, a.state, pu.uid from project_task as pt, project_project as p, account_analytic_account as a, project_user_rel as pu where pt.project_id=p.id and p.category_id = a.id and pu.project_id=p.id group by a.name,a.state,pu.uid ) """) |
objects = self.pool.get('account.journal.period').browse(self.cr, self.uid, new_ids) self.query_get_clause = data['form']['query_line'] or '' super(journal_print, self).set_context(objects, data, new_ids, report_type) | self.query_get_clause = 'AND ' self.query_get_clause += data['form']['query_line'] or '' self.sort_selection = data['form']['sort_selection'] objects = self.pool.get('account.journal.period').browse(self.cr, self.uid, new_ids) self.cr.execute('SELECT period_id, journal_id FROM account_journal_period WHERE id IN %s', (tuple(new_ids),)) res = self.cr.fetchall() self.period_ids, self.journal_ids = zip(*res) return super(journal_print, self).set_context(objects, data, ids, report_type) | def set_context(self, objects, data, ids, report_type = None): new_ids = ids if (data['model'] == 'ir.ui.menu'): new_ids = 'active_ids' in data['form'] and data['form']['active_ids'] or [] objects = self.pool.get('account.journal.period').browse(self.cr, self.uid, new_ids) self.query_get_clause = data['form']['query_line'] or '' super(journal_print, self).set_context(objects, data, new_ids, report_type) |
def __init__(self, cr, uid, name, context={}): | def __init__(self, cr, uid, name, context=None): if context is None: context = {} | def __init__(self, cr, uid, name, context={}): super(journal_print, self).__init__(cr, uid, name, context=context) self.localcontext.update( { 'time': time, 'lines': self.lines, 'sum_debit': self._sum_debit, 'sum_credit': self._sum_credit, 'get_start_period': self.get_start_period, 'get_end_period': self.get_end_period, 'get_account': self.get_account }) |
def lines(self, period_id, journal_id, sort_selection='date', *args): | def lines(self, period_id, journal_id=[]): | def lines(self, period_id, journal_id, sort_selection='date', *args): obj_jperiod = self.pool.get('account.journal.period') obj_mline = self.pool.get('account.move.line') self.cr.execute('update account_journal_period set state=%s where journal_id=%s and period_id=%s and state=%s', ('printed',journal_id,period_id,'draft')) self.cr.commit() self.cr.execute('SELECT id FROM account_move_line l WHERE '+self.query_get_clause+' AND period_id=%s AND journal_id=%s ORDER BY %s', (period_id, journal_id, sort_selection)) ids = map(lambda x: x[0], self.cr.fetchall()) return obj_mline.browse(self.cr, self.uid, ids) |
self.cr.execute('update account_journal_period set state=%s where journal_id=%s and period_id=%s and state=%s', ('printed',journal_id,period_id,'draft')) | self.cr.execute('update account_journal_period set state=%s where journal_id IN %s and period_id=%s and state=%s', ('printed', self.journal_ids, period_id, 'draft')) | def lines(self, period_id, journal_id, sort_selection='date', *args): obj_jperiod = self.pool.get('account.journal.period') obj_mline = self.pool.get('account.move.line') self.cr.execute('update account_journal_period set state=%s where journal_id=%s and period_id=%s and state=%s', ('printed',journal_id,period_id,'draft')) self.cr.commit() self.cr.execute('SELECT id FROM account_move_line l WHERE '+self.query_get_clause+' AND period_id=%s AND journal_id=%s ORDER BY %s', (period_id, journal_id, sort_selection)) ids = map(lambda x: x[0], self.cr.fetchall()) return obj_mline.browse(self.cr, self.uid, ids) |
self.cr.execute('SELECT id FROM account_move_line l WHERE '+self.query_get_clause+' AND period_id=%s AND journal_id=%s ORDER BY %s', (period_id, journal_id, sort_selection)) | self.cr.execute('SELECT id FROM account_move_line l WHERE period_id=%s AND journal_id IN %s ' + self.query_get_clause + ' ORDER BY '+ self.sort_selection + '' ,(period_id, self.journal_ids )) | def lines(self, period_id, journal_id, sort_selection='date', *args): obj_jperiod = self.pool.get('account.journal.period') obj_mline = self.pool.get('account.move.line') self.cr.execute('update account_journal_period set state=%s where journal_id=%s and period_id=%s and state=%s', ('printed',journal_id,period_id,'draft')) self.cr.commit() self.cr.execute('SELECT id FROM account_move_line l WHERE '+self.query_get_clause+' AND period_id=%s AND journal_id=%s ORDER BY %s', (period_id, journal_id, sort_selection)) ids = map(lambda x: x[0], self.cr.fetchall()) return obj_mline.browse(self.cr, self.uid, ids) |
def _sum_debit(self, period_id, journal_id): self.cr.execute('SELECT SUM(debit) FROM account_move_line l WHERE '+self.query_get_clause+' AND period_id=%s AND journal_id=%s', (period_id, journal_id)) | def _sum_debit(self, period_id, journal_id=[]): self.cr.execute('SELECT SUM(debit) FROM account_move_line l WHERE period_id=%s AND journal_id IN %s '+ self.query_get_clause +'', (period_id, self.journal_ids)) | def _sum_debit(self, period_id, journal_id): self.cr.execute('SELECT SUM(debit) FROM account_move_line l WHERE '+self.query_get_clause+' AND period_id=%s AND journal_id=%s', (period_id, journal_id)) return self.cr.fetchone()[0] or 0.0 |
def _sum_credit(self, period_id, journal_id): self.cr.execute('SELECT SUM(credit) FROM account_move_line l WHERE '+self.query_get_clause+' AND period_id=%s AND journal_id=%s', (period_id, journal_id)) | def _sum_credit(self, period_id, journal_id=[]): self.cr.execute('SELECT SUM(credit) FROM account_move_line l WHERE period_id=%s AND journal_id IN %s '+ self.query_get_clause +'', (period_id, self.journal_ids)) | def _sum_credit(self, period_id, journal_id): self.cr.execute('SELECT SUM(credit) FROM account_move_line l WHERE '+self.query_get_clause+' AND period_id=%s AND journal_id=%s', (period_id, journal_id)) return self.cr.fetchone()[0] or 0.0 |
return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_from']).name | if 'period_from' in form and form['period_from']: return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_from']).name return '' | def get_start_period(self, form): return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_from']).name |
return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_to']).name | if 'period_to' in form and form['period_to']: return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_to']).name return '' | def get_end_period(self, form): return pooler.get_pool(self.cr.dbname).get('account.period').browse(self.cr,self.uid,form['period_to']).name |
return pooler.get_pool(self.cr.dbname).get('account.account').browse(self.cr,self.uid,form['chart_account_id']).name | if 'chart_account_id' in form and form['chart_account_id']: return pooler.get_pool(self.cr.dbname).get('account.account').browse(self.cr,self.uid,form['chart_account_id']).name return '' | def get_account(self, form): return pooler.get_pool(self.cr.dbname).get('account.account').browse(self.cr,self.uid,form['chart_account_id']).name |
_description = "NOte Type" | _description = "Note Type" | def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None,\ toolbar=False, submenu=False): """ Overrides orm field_view_get. @return: Dictionary of Fields, arch and toolbar. """ |
'share_root_url': lambda self, cr, uid, context, *a: context.get('share_root_url', _('Please specify "share_root_url" in server configuration or in context')), | 'share_root_url': lambda self, cr, uid, context, *a: context.get('share_root_url') or _('Please specify "share_root_url" in context'), | def generate_random_pass(): pass_chars = RANDOM_PASS_CHARACTERS[:] random.shuffle(pass_chars) return ''.join(pass_chars[0:10]) |
'audit':fields.boolean('Audit Complete ?', required=False), | 'audit': fields.related('move_id','to_check', type='boolean', relation='account.move', string='Audit Complete ?'), | def _get_currency(self, cr, uid, context): user = self.pool.get('res.users').browse(cr, uid, uid) if user.company_id: return user.company_id.currency_id.id else: return self.pool.get('res.currency').search(cr, uid, [('rate','=',1.0)])[0] |
'amount':total, 'state':'proforma' | 'amount':total | def open_voucher(self, dbcr, uid, ids, context={}): cr = 0.0 dr = 0.0 total = 0.0 new_line = [] position_pool = self.pool.get('account.fiscal.position') voucher_pool = self.pool.get('account.voucher') voucher_line_pool = self.pool.get('account.voucher.line') partner_pool = self.pool.get('res.partner') tax_pool = self.pool.get('account.tax') |
description = email_last | description = case.email_last | def add_reply(self, cursor, user, ids, context=None): for case in self.browse(cursor, user, ids, context=context): if case.email_last: description = email_last self.write(cursor, user, case.id, { 'description': '> ' + description.replace('\n','\n> '), }, context=context) return True |
val = self.pool.get("ir.property").browse(cr, uid, prop_ids[0]).value | val = self.pool.get("ir.property").browse(cr, uid, prop_ids[0]).value_reference | def create_picking(self, cr, uid, ids, context={}): """Create a picking for each order and validate it.""" picking_obj = self.pool.get('stock.picking') |
stock_dest_id = int(val.split(',')[1]) | stock_dest_id = val.id | def create_picking(self, cr, uid, ids, context={}): """Create a picking for each order and validate it.""" picking_obj = self.pool.get('stock.picking') |
args['account_id'] = order.partner_id and order.partner_id.property_account_receivable and order.partner_id.property_account_receivable.id or account_def or curr_c.account_receivable.id | args['account_id'] = order.partner_id and order.partner_id.property_account_receivable and order.partner_id.property_account_receivable.id or account_def.id or curr_c.account_receivable.id | args['account_id'] = order.partner_id and order.partner_id.property_account_receivable and order.partner_id.property_account_receivable.id or account_def or curr_c.account_receivable.id |
order_account = order.partner_id and order.partner_id.property_account_receivable and order.partner_id.property_account_receivable.id or account_def or curr_c.account_receivable.id | order_account = order.partner_id and order.partner_id.property_account_receivable and order.partner_id.property_account_receivable.id or account_def.id or curr_c.account_receivable.id | order_account = order.partner_id and order.partner_id.property_account_receivable and order.partner_id.property_account_receivable.id or account_def or curr_c.account_receivable.id |
def price_get_multi_old(self, cr, uid, product_map, context=None): """multi products 'price_get' @param context: { 'date': Date of the pricelist (%Y-%m-%d), @return: a dict with product_id as key and the product price as value } """ def _create_parent_category_list(id, lst): if not id: return [] parent = product_category_tree.get(id) if parent: lst.append(parent) return _create_parent_category_list(parent, lst) else: return lst context = context or {} date = time.strftime('%Y-%m-%d') if 'date' in context: date = context['date'] currency_obj = self.pool.get('res.currency') product_obj = self.pool.get('product.product') product_category_obj = self.pool.get('product.category') product_uom_obj = self.pool.get('product.uom') supplierinfo_obj = self.pool.get('product.supplierinfo') price_type_obj = self.pool.get('product.price.type') product_pricelist_version_obj = self.pool.get('product.pricelist.version') pricelist_version_ids = list(set([k[1] for k in product_map.keys()])) plversions_search_args = [ ('pricelist_id', 'in', pricelist_version_ids), '|', ('date_start', '=', False), ('date_start', '<=', date), '|', ('date_end', '=', False), ('date_end', '>=', date), ] plversion_ids = product_pricelist_version_obj.search(cr, uid, plversions_search_args) if len(pricelist_version_ids) != len(plversion_ids): msg = "At least one pricelist has no active version !\nPlease create or activate one." raise osv.except_osv(_('Warning !'), _(msg)) product_ids = list(set([k[0] for k in product_map.keys()])) products = dict([(item['id'], item) for item in product_obj.read(cr, uid, product_ids, ['categ_id', 'product_tmpl_id', 'uos_id', 'uom_id'])]) product_category_ids = product_category_obj.search(cr, uid, []) product_categories = product_category_obj.read(cr, uid, product_category_ids, ['parent_id']) product_category_tree = dict([(item['id'], item['parent_id'][0]) for item in product_categories if item['parent_id']]) results = {} for (product_id, pricelist_id), line in product_map.items(): price = False partner = line['partner_id'] qty = line['qty'] tmpl_id = products[product_id]['product_tmpl_id'] and products[product_id]['product_tmpl_id'][0] or False categ_id = products[product_id]['categ_id'] and products[product_id]['categ_id'][0] or False categ_ids = _create_parent_category_list(categ_id, [categ_id]) if categ_ids: categ_where = '(categ_id IN (' + ','.join(map(str, categ_ids)) + '))' else: categ_where = '(categ_id IS NULL)' cr.execute( 'SELECT i.*, pl.currency_id ' 'FROM product_pricelist_item AS i, ' 'product_pricelist_version AS v, product_pricelist AS pl ' 'WHERE (product_tmpl_id IS NULL OR product_tmpl_id = %s) ' 'AND (product_id IS NULL OR product_id = %s) ' 'AND (' + categ_where + ' OR (categ_id IS NULL)) ' 'AND price_version_id = %s ' 'AND (min_quantity IS NULL OR min_quantity <= %s) ' 'AND i.price_version_id = v.id AND v.pricelist_id = pl.id ' 'ORDER BY sequence', (tmpl_id, product_id, pricelist_id, qty)) res1 = cr.dictfetchall() for res in res1: if res: if res['base'] == -1: if not res['base_pricelist_id']: price = 0.0 else: price_tmp = self.price_get(cr, uid, [res['base_pricelist_id']], product_id, qty)[res['base_pricelist_id']] ptype_src = self.browse(cr, uid, res['base_pricelist_id']).currency_id.id price = currency_obj.compute(cr, uid, ptype_src, res['currency_id'], price_tmp, round=False) break elif res['base'] == -2: where = [] if partner: where = [('name', '=', partner) ] sinfo = supplierinfo_obj.search(cr, uid, [('product_id', '=', tmpl_id)] + where) price = 0.0 if sinfo: cr.execute('SELECT * ' \ 'FROM pricelist_partnerinfo ' \ 'WHERE suppinfo_id IN %s' \ 'AND min_quantity <= %s ' \ 'ORDER BY min_quantity DESC LIMIT 1', (tuple(sinfo),qty,)) res2 = cr.dictfetchone() if res2: price = res2['price'] break else: price_type = price_type_obj.browse(cr, uid, int(res['base'])) price = currency_obj.compute(cr, uid, price_type.currency_id.id, res['currency_id'], product_obj.price_get(cr, uid, [product_id], price_type.field)[product_id], round=False, context=context) if price: price_limit = price price = price * (1.0+(res['price_discount'] or 0.0)) price = rounding(price, res['price_round']) price += (res['price_surcharge'] or 0.0) if res['price_min_margin']: price = max(price, price_limit+res['price_min_margin']) if res['price_max_margin']: price = min(price, price_limit+res['price_max_margin']) break else: price = False if price: p_uom_id = products[product_id]['uos_id'] and products[product_id]['uos_id'][0] or products[product_id]['uom_id'] and products[product_id]['uom_id'][0] or False if p_uom_id: price = product_uom_obj._compute_price(cr, uid, p_uom_id, price, context.get('uom', False)) if results.get(product_id): results[product_id][pricelist_id] = price else: results[product_id] = {pricelist_id: price} return results | def price_get_multi_old(self, cr, uid, product_map, context=None): """multi products 'price_get' @param context: { 'date': Date of the pricelist (%Y-%m-%d), @return: a dict with product_id as key and the product price as value } """
break | def _create_parent_category_list(id, lst): if not id: return [] parent = product_category_tree.get(id) if parent: lst.append(parent) return _create_parent_category_list(parent, lst) else: return lst
cr.execute("SELECT * FROM account_analytic_line WHERE account_id = %s and id IN %s AND product_id=%s and to_invoice=%s", (account.id, tuple(data['ids']), product_id, factor_id)) | cr.execute("SELECT * FROM account_analytic_line WHERE account_id = %s and id IN %s AND product_id=%s and to_invoice=%s ORDER BY account_analytic_line.date", (account.id, tuple(data['ids']), product_id, factor_id)) | def do_create(self, cr, uid, ids, context=None): mod_obj = self.pool.get('ir.model.data') analytic_account_obj = self.pool.get('account.analytic.account') res_partner_obj = self.pool.get('res.partner') account_payment_term_obj = self.pool.get('account.payment.term') invoices = [] |
details.append("%s %s" % (line['unit_amount'], self.pool.get('product.uom').browse(cr, uid, [line['product_uom_id']])[0].name)) | details.append("%s %s" % (line['unit_amount'], self.pool.get('product.uom').browse(cr, uid, [line['product_uom_id']],context2)[0].name)) | def do_create(self, cr, uid, ids, context=None): mod_obj = self.pool.get('ir.model.data') analytic_account_obj = self.pool.get('account.analytic.account') res_partner_obj = self.pool.get('res.partner') account_payment_term_obj = self.pool.get('account.payment.term') invoices = [] |
html = body_mako_tpl.render( helper=helper, css=css, _=self.translate_call, **self.parser_instance.localcontext ) | try : html = body_mako_tpl.render( helper=helper, css=css, _=self.translate_call, **self.parser_instance.localcontext ) except : raise Exception(exceptions.html_error_template().render()) | def create_single_pdf(self, cursor, uid, ids, data, report_xml, context=None): """generate the PDF""" if context is None: context={} if report_xml.report_type != 'webkit': return super(WebKitParser,self).create_single_pdf(cursor, uid, ids, data, report_xml, context=context) |
head = head_mako_tpl.render( company=company, time=time, helper=helper, css=css, formatLang=self.formatLang, setLang=self.setLang, _=self.translate_call, _debug=False ) | try : head = head_mako_tpl.render( company=company, time=time, helper=helper, css=css, formatLang=self.formatLang, setLang=self.setLang, _=self.translate_call, _debug=False ) except : raise Exception(exceptions.html_error_template().render()) | def create_single_pdf(self, cursor, uid, ids, data, report_xml, context=None): """generate the PDF""" if context is None: context={} if report_xml.report_type != 'webkit': return super(WebKitParser,self).create_single_pdf(cursor, uid, ids, data, report_xml, context=context) |
foot = foot_mako_tpl.render( company=company, time=time, helper=helper, css=css, formatLang=self.formatLang, setLang=self.setLang, _=self.translate_call, ) | try : foot = foot_mako_tpl.render( company=company, time=time, helper=helper, css=css, formatLang=self.formatLang, setLang=self.setLang, _=self.translate_call, ) except: raise Exception(exceptions.html_error_template().render()) | def create_single_pdf(self, cursor, uid, ids, data, report_xml, context=None): """generate the PDF""" if context is None: context={} if report_xml.report_type != 'webkit': return super(WebKitParser,self).create_single_pdf(cursor, uid, ids, data, report_xml, context=context) |