Columns:
    rem      string, lengths 0 to 322k
    add      string, lengths 0 to 2.05M
    context  string, lengths 8 to 228k
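Each record below is a rem / add / context group: the code removed by a change, the code added in its place, and the enclosing function it came from. As a point of reference, here is a minimal sketch of walking such records as (rem, add, context) triples; the record shape, the dict keys and the `rows` variable are assumptions for illustration only, not part of the dataset itself.

def iter_changes(rows):
    # Yield (removed, added, context) triples from an iterable of dict records.
    # Assumes each record exposes the three columns described above.
    for row in rows:
        yield row.get('rem', ''), row.get('add', ''), row.get('context', '')

# Hypothetical usage with a record shaped like the entries below:
rows = [{
    'rem': "'amount': fields.float('Amount'),",
    'add': "'amount': fields.float('Amount', digits_compute=dp.get_precision('Account')),",
    'context': "def onchange_type(self, cr, uid, line_id, partner_id, type, context=None): ...",
}]
for removed, added, context in iter_changes(rows):
    print('- ' + removed)
    print('+ ' + added)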
context)['object_name']
context)['object_name'] or False
def _default_model(self, cursor, user, context=None):
    """
    Returns the default value for model field
    @param cursor: Database Cursor
    @param user: ID of current user
    @param context: OpenERP Context
    """
    return self.pool.get('email.template').read(
        cursor,
        user,
        context['template_id'],
        ['object_name'],
        context)['object_name']
'ref_template': lambda self, cr, uid, ctx:ctx['template_id'],
'ref_template': lambda self, cr, uid, ctx:ctx['template_id'] or False,
def _default_model(self, cursor, user, context=None):
    """
    Returns the default value for model field
    @param cursor: Database Cursor
    @param user: ID of current user
    @param context: OpenERP Context
    """
    return self.pool.get('email.template').read(
        cursor,
        user,
        context['template_id'],
        ['object_name'],
        context)['object_name']
'name': _("%s Mail Form") % template_obj.name,
'name': template_obj.name,
def create_action(self, cr, uid, ids, context):
    vals = {}
    template_obj = self.browse(cr, uid, ids)[0]
    src_obj = template_obj.object_name.model
    vals['ref_ir_act_window'] = self.pool.get('ir.actions.act_window').create(cr, uid, {
        'name': _("%s Mail Form") % template_obj.name,
        'type': 'ir.actions.act_window',
        'res_model': 'email_template.send.wizard',
        'src_model': src_obj,
        'view_type': 'form',
        'context': "{'src_model':'%s','template_id':'%d','src_rec_id':active_id,'src_rec_ids':active_ids}" % (src_obj, template_obj.id),
        'view_mode':'form,tree',
        'view_id': self.pool.get('ir.ui.view').search(cr, uid, [('name', '=', 'email_template.send.wizard.form')], context=context)[0],
        'target': 'new',
        'auto_refresh':1
    }, context)
    vals['ref_ir_value'] = self.pool.get('ir.values').create(cr, uid, {
        'name': _('Send Mail (%s)') % template_obj.name,
        'model': src_obj,
        'key2': 'client_action_multi',
        'value': "ir.actions.act_window," + str(vals['ref_ir_act_window']),
        'object': True,
    }, context)
    self.write(cr, uid, ids, {
        'ref_ir_act_window': vals['ref_ir_act_window'],
        'ref_ir_value': vals['ref_ir_value'],
    }, context)
    return True
'amount': fields.float('Amount'),
'amount': fields.float('Amount', digits_compute=dp.get_precision('Account')),
def onchange_type(self, cr, uid, line_id, partner_id, type, context=None):
    res = {'value': {}}
    obj_partner = self.pool.get('res.partner')
    if context is None:
        context = {}
    if not partner_id:
        return res
    account_id = False
    line = self.browse(cr, uid, line_id)
    if not line or (line and not line[0].account_id):
        part = obj_partner.browse(cr, uid, partner_id, context=context)
        if type == 'supplier':
            account_id = part.property_account_payable.id
        else:
            account_id = part.property_account_receivable.id
        res['value']['account_id'] = account_id
    return res
class contentIndex() :
class contentIndex():
    __logger = logging.getLogger('addons.document.content_index')
def mime_match(mime, mdict):
    if mdict.has_key(mime):
        return (mime, mdict[mime])
    if '/' in mime:
        mpat = mime.split('/')[0]+'/*'
        if mdict.has_key(mpat):
            return (mime, mdict[mpat])
    return (None, None)
log(netsvc.LOG_DEBUG, "Register content indexer: %r" % obj)
self.__logger.debug('Register content indexer: %s', obj)
def register(self, obj):
    f = False
    for mime in obj._getMimeTypes():
        self.mimes[mime] = obj
        f = True
    for ext in obj._getExtensions():
        self.exts[ext] = obj
        f = True
    if f:
        log(netsvc.LOG_DEBUG, "Register content indexer: %r" % obj)
    if not f:
        raise Exception("Your indexer should at least suport a mimetype or extension")
log(netsvc.LOG_DEBUG,"File gave us: %s" % mime2)
self.__logger.debug('File gave us: %s', mime2)
def doIndex(self,content, filename=None, content_type=None, realfname = None, debug=False): fobj = None fname = None mime = None if content_type and self.mimes.has_key(content_type): mime = content_type fobj = self.mimes[content_type] elif filename: bname,ext = os.path.splitext(filename) if self.exts.has_key(ext): fobj = self.exts[ext] mime = fobj._getDefMime(ext) if content_type and not fobj: mime,fobj = mime_match(content_type, self.mimes) if not fobj: try: if realfname : fname = realfname else: bname,ext = os.path.splitext(filename) fd, fname = tempfile.mkstemp(suffix=ext) os.write(fd, content) os.close(fd) fp = Popen(['file','-b','--mime-type',fname], shell=False, stdout=PIPE).stdout result = fp.read() fp.close() mime2 = result.strip() log(netsvc.LOG_DEBUG,"File gave us: %s" % mime2) # Note that the temporary file still exists now. mime,fobj = mime_match(mime2, self.mimes) if not mime: mime = mime2 except Exception, e: log(netsvc.LOG_WARNING,"Cannot determine mime type: %s" % str(e)) try: if fobj: res = (mime, fobj.indexContent(content,filename,fname or realfname) ) else: log(netsvc.LOG_DEBUG,"Have no object, return (%s, None)" % mime) res = (mime, None ) except Exception, e: log(netsvc.LOG_WARNING,"Could not index file, %s" % e) res = None # If we created a tmp file, unlink it now if not realfname and fname: try: os.unlink(fname) except Exception, e: log(netsvc.LOG_WARNING,"Could not unlink %s, %s" %(fname, e)) return res
except Exception, e: log(netsvc.LOG_WARNING,"Cannot determine mime type: %s" % str(e))
except Exception: self.__logger.exception('Cannot determine mime type')
def doIndex(self,content, filename=None, content_type=None, realfname = None, debug=False): fobj = None fname = None mime = None if content_type and self.mimes.has_key(content_type): mime = content_type fobj = self.mimes[content_type] elif filename: bname,ext = os.path.splitext(filename) if self.exts.has_key(ext): fobj = self.exts[ext] mime = fobj._getDefMime(ext) if content_type and not fobj: mime,fobj = mime_match(content_type, self.mimes) if not fobj: try: if realfname : fname = realfname else: bname,ext = os.path.splitext(filename) fd, fname = tempfile.mkstemp(suffix=ext) os.write(fd, content) os.close(fd) fp = Popen(['file','-b','--mime-type',fname], shell=False, stdout=PIPE).stdout result = fp.read() fp.close() mime2 = result.strip() log(netsvc.LOG_DEBUG,"File gave us: %s" % mime2) # Note that the temporary file still exists now. mime,fobj = mime_match(mime2, self.mimes) if not mime: mime = mime2 except Exception, e: log(netsvc.LOG_WARNING,"Cannot determine mime type: %s" % str(e)) try: if fobj: res = (mime, fobj.indexContent(content,filename,fname or realfname) ) else: log(netsvc.LOG_DEBUG,"Have no object, return (%s, None)" % mime) res = (mime, None ) except Exception, e: log(netsvc.LOG_WARNING,"Could not index file, %s" % e) res = None # If we created a tmp file, unlink it now if not realfname and fname: try: os.unlink(fname) except Exception, e: log(netsvc.LOG_WARNING,"Could not unlink %s, %s" %(fname, e)) return res
log(netsvc.LOG_DEBUG,"Have no object, return (%s, None)" % mime)
self.__logger.debug("Have no object, return (%s, None)", mime)
def doIndex(self,content, filename=None, content_type=None, realfname = None, debug=False): fobj = None fname = None mime = None if content_type and self.mimes.has_key(content_type): mime = content_type fobj = self.mimes[content_type] elif filename: bname,ext = os.path.splitext(filename) if self.exts.has_key(ext): fobj = self.exts[ext] mime = fobj._getDefMime(ext) if content_type and not fobj: mime,fobj = mime_match(content_type, self.mimes) if not fobj: try: if realfname : fname = realfname else: bname,ext = os.path.splitext(filename) fd, fname = tempfile.mkstemp(suffix=ext) os.write(fd, content) os.close(fd) fp = Popen(['file','-b','--mime-type',fname], shell=False, stdout=PIPE).stdout result = fp.read() fp.close() mime2 = result.strip() log(netsvc.LOG_DEBUG,"File gave us: %s" % mime2) # Note that the temporary file still exists now. mime,fobj = mime_match(mime2, self.mimes) if not mime: mime = mime2 except Exception, e: log(netsvc.LOG_WARNING,"Cannot determine mime type: %s" % str(e)) try: if fobj: res = (mime, fobj.indexContent(content,filename,fname or realfname) ) else: log(netsvc.LOG_DEBUG,"Have no object, return (%s, None)" % mime) res = (mime, None ) except Exception, e: log(netsvc.LOG_WARNING,"Could not index file, %s" % e) res = None # If we created a tmp file, unlink it now if not realfname and fname: try: os.unlink(fname) except Exception, e: log(netsvc.LOG_WARNING,"Could not unlink %s, %s" %(fname, e)) return res
except Exception, e: log(netsvc.LOG_WARNING,"Could not index file, %s" % e)
except Exception: self.__logger.exception("Could not index file %s (%s)", filename, fname or realfname)
def doIndex(self,content, filename=None, content_type=None, realfname = None, debug=False): fobj = None fname = None mime = None if content_type and self.mimes.has_key(content_type): mime = content_type fobj = self.mimes[content_type] elif filename: bname,ext = os.path.splitext(filename) if self.exts.has_key(ext): fobj = self.exts[ext] mime = fobj._getDefMime(ext) if content_type and not fobj: mime,fobj = mime_match(content_type, self.mimes) if not fobj: try: if realfname : fname = realfname else: bname,ext = os.path.splitext(filename) fd, fname = tempfile.mkstemp(suffix=ext) os.write(fd, content) os.close(fd) fp = Popen(['file','-b','--mime-type',fname], shell=False, stdout=PIPE).stdout result = fp.read() fp.close() mime2 = result.strip() log(netsvc.LOG_DEBUG,"File gave us: %s" % mime2) # Note that the temporary file still exists now. mime,fobj = mime_match(mime2, self.mimes) if not mime: mime = mime2 except Exception, e: log(netsvc.LOG_WARNING,"Cannot determine mime type: %s" % str(e)) try: if fobj: res = (mime, fobj.indexContent(content,filename,fname or realfname) ) else: log(netsvc.LOG_DEBUG,"Have no object, return (%s, None)" % mime) res = (mime, None ) except Exception, e: log(netsvc.LOG_WARNING,"Could not index file, %s" % e) res = None # If we created a tmp file, unlink it now if not realfname and fname: try: os.unlink(fname) except Exception, e: log(netsvc.LOG_WARNING,"Could not unlink %s, %s" %(fname, e)) return res
except Exception, e: log(netsvc.LOG_WARNING,"Could not unlink %s, %s" %(fname, e))
except Exception: self.__logger.exception("Could not unlink %s", fname)
def doIndex(self,content, filename=None, content_type=None, realfname = None, debug=False): fobj = None fname = None mime = None if content_type and self.mimes.has_key(content_type): mime = content_type fobj = self.mimes[content_type] elif filename: bname,ext = os.path.splitext(filename) if self.exts.has_key(ext): fobj = self.exts[ext] mime = fobj._getDefMime(ext) if content_type and not fobj: mime,fobj = mime_match(content_type, self.mimes) if not fobj: try: if realfname : fname = realfname else: bname,ext = os.path.splitext(filename) fd, fname = tempfile.mkstemp(suffix=ext) os.write(fd, content) os.close(fd) fp = Popen(['file','-b','--mime-type',fname], shell=False, stdout=PIPE).stdout result = fp.read() fp.close() mime2 = result.strip() log(netsvc.LOG_DEBUG,"File gave us: %s" % mime2) # Note that the temporary file still exists now. mime,fobj = mime_match(mime2, self.mimes) if not mime: mime = mime2 except Exception, e: log(netsvc.LOG_WARNING,"Cannot determine mime type: %s" % str(e)) try: if fobj: res = (mime, fobj.indexContent(content,filename,fname or realfname) ) else: log(netsvc.LOG_DEBUG,"Have no object, return (%s, None)" % mime) res = (mime, None ) except Exception, e: log(netsvc.LOG_WARNING,"Could not index file, %s" % e) res = None # If we created a tmp file, unlink it now if not realfname and fname: try: os.unlink(fname) except Exception, e: log(netsvc.LOG_WARNING,"Could not unlink %s, %s" %(fname, e)) return res
'section_id' : case.section_id and case.section_id or False,
'section_id' : case.section_id and case.section_id.id or False,
def _default_values(self, cr, uid, data, context):
    case_obj = pooler.get_pool(cr.dbname).get('crm.opportunity')
    categ_id = pooler.get_pool(cr.dbname).get('crm.phonecall.categ').search(cr, uid, [('name','=','Outbound')])
    case = case_obj.browse(cr, uid, data['id'])
    return {
        'user_id' : case.user_id and case.user_id.id,
        'category_id' : categ_id and categ_id[0] or case.categ_id and case.categ_id.id,
        'section_id' : case.section_id and case.section_id or False,
        'note' : case.description
    }
date_local = cal_data.value.astimezone(_server_tzinfo) self.ical_set(cal_data.name.lower(), date_local, 'value')
self.ical_set('vtimezone', cal_data.params.get('X-VOBJ-ORIGINAL-TZID'), 'value') date_utc = cal_data.value.astimezone(pytz.utc) self.ical_set(cal_data.name.lower(), date_utc, 'value')
def parse_ics(self, cr, uid, child, cal_children=None, context=None):
    """ parse calendaring and scheduling information
    @param self: The object pointer
    @param cr: the current row, from the database cursor,
    @param uid: the current user’s ID for security checks,
    @param context: A standard dictionary for contextual values
    """
self.ids[(rec[3], result)] = id
self.ids[(rec[2], result)] = id
def _generate_object_xml(self, cr, uid, rec, recv, doc, result=None):
    record_list = []
    noupdate = False
    if rec[4]=='write':
        for id in rec[5]:
            id,update = self._get_id(cr, uid, rec[3], id)
            noupdate = noupdate or update
            if not id:
                continue
            record,update = self._create_record(cr, uid, doc, rec[3], rec[6], id)
            noupdate = noupdate or update
            record_list += record
wkf.setAttribute("model", rec[1][3]) wkf.setAttribute("action", rec[1][4])
wkf.setAttribute("model", rec[1][2]) wkf.setAttribute("action", rec[1][3])
def generate_xml(self, cr, uid): # Create the minidom document if len(self.recording_data): self.ids = {} doc = minidom.Document() terp = doc.createElement("openerp") doc.appendChild(terp) for rec in self.recording_data: if rec[0]=='workflow': rec_id,noupdate = self._get_id(cr, uid, rec[1][2], rec[1][4]) if not rec_id: continue data = doc.createElement("data") terp.appendChild(data) wkf = doc.createElement('workflow') data.appendChild(wkf) wkf.setAttribute("model", rec[1][3]) wkf.setAttribute("action", rec[1][4]) if noupdate: data.setAttribute("noupdate", "1") wkf.setAttribute("ref", rec_id) if rec[0]=='query': res_list,noupdate = self._generate_object_xml(cr, uid, rec[1], rec[2], doc, rec[3]) data = doc.createElement("data") if noupdate: data.setAttribute("noupdate", "1") if res_list: terp.appendChild(data) for res in res_list: data.appendChild(res) elif rec[0]=='assert': pass return doc.toprettyxml(indent="\t").encode('utf-8')
yaml_file += str(object) + '''\n'''
yaml_file += str(object) + '''\n\n'''
def generate_yaml(self, cr, uid): self.ids = {} if len(self.recording_data): yaml_file='''\n''' for rec in self.recording_data: if rec[1][3] == 'create': self.mode="create" elif rec[1][3] == 'write': self.mode="write" elif rec[1][3] == 'copy': self.mode="copy" elif rec[0] == 'workflow': self.mode="workflow" else: continue if self.mode == "workflow": record= self._generate_object_yaml(cr, uid, rec[1],rec[0]) object=yaml.load(unicode('''\n !workflow %s \n'''%record,'iso-8859-1')) yaml_file += str(object) + '''\n''' else: record= self._generate_object_yaml(cr, uid, rec[1],rec[3]) object= yaml.load(unicode('''\n !record %s \n'''%record,'iso-8859-1')) yaml_file += str(object) + '''\n''' attrs=yaml.dump(object.attrs, default_flow_style=False) yaml_file += attrs + '''\n\n''' yaml_result='''''' for line in yaml_file.split('\n'): line=line.replace("''","'") if line.find('!record') == 0: line = "- \n" + " " + line elif line.find('!workflow') == 0: line = "- \n" + " " + line elif line.find('- -') != -1: line=line.replace('- -',' -') line = " " + line else: line = " " + line yaml_result += line + '''\n''' return yaml_result
line = "- \n" + " " + line
line = "- \n" + " " + line elif line.find('!comment') == 0: line=line.replace('!comment','- \n ')
def generate_yaml(self, cr, uid): self.ids = {} if len(self.recording_data): yaml_file='''\n''' for rec in self.recording_data: if rec[1][3] == 'create': self.mode="create" elif rec[1][3] == 'write': self.mode="write" elif rec[1][3] == 'copy': self.mode="copy" elif rec[0] == 'workflow': self.mode="workflow" else: continue if self.mode == "workflow": record= self._generate_object_yaml(cr, uid, rec[1],rec[0]) object=yaml.load(unicode('''\n !workflow %s \n'''%record,'iso-8859-1')) yaml_file += str(object) + '''\n''' else: record= self._generate_object_yaml(cr, uid, rec[1],rec[3]) object= yaml.load(unicode('''\n !record %s \n'''%record,'iso-8859-1')) yaml_file += str(object) + '''\n''' attrs=yaml.dump(object.attrs, default_flow_style=False) yaml_file += attrs + '''\n\n''' yaml_result='''''' for line in yaml_file.split('\n'): line=line.replace("''","'") if line.find('!record') == 0: line = "- \n" + " " + line elif line.find('!workflow') == 0: line = "- \n" + " " + line elif line.find('- -') != -1: line=line.replace('- -',' -') line = " " + line else: line = " " + line yaml_result += line + '''\n''' return yaml_result
amount2 = data['amount']
amount2 = data.get('amount', 0.0)
def _unit_compute(self, cr, uid, taxes, price_unit, address_id=None, product=None, partner=None, quantity=0):
    taxes = self._applicable(cr, uid, taxes, price_unit, address_id, product, partner)
    res = []
    cur_price_unit=price_unit
    for tax in taxes:
        # we compute the amount for the current tax object and append it to the result
print 'Error'
def geo_find(addr):
    import urllib,re
    regex = '<coordinates>([+-]?[0-9\.]+),([+-]?[0-9\.]+),([+-]?[0-9\.]+)</coordinates>'
    url = 'http://maps.google.com/maps/geo?q=' + urllib.quote(addr) + '&output=xml&oe=utf8&sensor=false'
    xml = urllib.urlopen(url).read()
    if '<error>' in xml:
        print 'Error'
        return None
    result = re.search(regex, xml, re.M|re.I)
    if not result:
        print 'No Regex', xml
        return None
    return float(result.group(1)),float(result.group(2))
print 'No Regex', xml
def geo_find(addr):
    import urllib,re
    regex = '<coordinates>([+-]?[0-9\.]+),([+-]?[0-9\.]+),([+-]?[0-9\.]+)</coordinates>'
    url = 'http://maps.google.com/maps/geo?q=' + urllib.quote(addr) + '&output=xml&oe=utf8&sensor=false'
    xml = urllib.urlopen(url).read()
    if '<error>' in xml:
        print 'Error'
        return None
    result = re.search(regex, xml, re.M|re.I)
    if not result:
        print 'No Regex', xml
        return None
    return float(result.group(1)),float(result.group(2))
}, {'active_id': lead.id, 'active_ids': [lead.id]})
}, context) fobj.action_forward(cr, uid, [forward], context)
def forward_to_partner(self, cr, uid, ids, context=None):
    fobj = self.pool.get('crm.lead.forward.to.partner')
    for lead in self.browse(cr, uid, ids, context=context):
        if lead.partner_assigned_id:
            email = False
            if lead.partner_assigned_id.address:
                email = lead.partner_assigned_id.address[0].email
            forward = fobj.create(cr, uid, {
                'name': 'email',
                'history': 'whole',
                'email_to': email,
                'message': fobj._get_case_history(cr, uid, 'whole', lead.id, context) or False
            }, {'active_id': lead.id, 'active_ids': [lead.id]})
        else:
            raise osv.except_osv(_('Error !'), _('No partner assigned to this opportunity'))
msg['From'] = tools.ustr(self.server[serverid]['from_email'])
if context.get('email_from', self.server[serverid]['from_email']):
    msg['From'] = context.get('email_from', self.server[serverid]['from_email'])
elif tools.config['email_from']:
    msg['From'] = tools.config['email_from']
else:
    raise osv.except_osv(_('Error'), _("Please specify server option --email-from !"))
def test_verify_email(self, cr, uid, ids, toemail, test=False, code=False): serverid = ids[0] self.open_connection(cr, uid, ids, serverid) key = False if test and self.server[serverid]['state'] == 'confirm': body = self.server[serverid]['test_email'] or '' else: body = self.server[serverid]['verify_email'] or '' #ignore the code key = self.gen_private_key(cr, uid, ids) #md5(time.strftime('%Y-%m-%d %H:%M:%S') + toemail).hexdigest(); body = body.replace("__code__", key) user = pooler.get_pool(cr.dbname).get('res.users').browse(cr, uid, [uid])[0] body = body.replace("__user__", user.name) if len(body.strip()) <= 0: raise osv.except_osv(_('Message Error!'), _('Please configure Email Server Messages [Verification / Test]')) try: msg = MIMEText(body.encode('utf8') or '',_subtype='plain',_charset='utf-8') except: msg = MIMEText(body or '',_subtype='plain',_charset='utf-8') if not test and not self.server[serverid]['state'] == 'confirm': msg['Subject'] = _('OpenERP SMTP server Email Registration Code!') else: msg['Subject'] = _('OpenERP Test Email!') msg['To'] = toemail msg['From'] = tools.ustr(self.server[serverid]['from_email']) message = msg.as_string() if self.server[serverid]['disclaimers']: body = body + "\n" + self.server[serverid]['disclaimers'] queue = pooler.get_pool(cr.dbname).get('email.smtpclient.queue') queue.create(cr, uid, { 'to':toemail, 'server_id':serverid, 'name':msg['Subject'], 'body':body, 'serialized_message':message, 'priority':1, 'type':'system' }) if self.server[serverid]['state'] != 'confirm': self.write(cr, uid, ids, {'state':'waiting', 'code':key}) return True
msg['From'] = context.get('email_from', smtp_server.from_email)
print "::context.get('email_from', smtp_server.from_email):::",context.get('email_from', smtp_server.from_email)
print "::TOOOLL::",tools.config['email_from']
if context.get('email_from', smtp_server.from_email):
    msg['From'] = context.get('email_from', smtp_server.from_email)
elif tools.config['email_from']:
    msg['From'] = tools.config['email_from']
else:
    raise osv.except_osv(_('Error'), _("Please specify server option --email-from !"))
print "::<ES::",msg['From']
def createReport(cr, uid, report, ids, name=False):
    files = []
    for id in ids:
        try:
            service = netsvc.LocalService(report)
            (result, format) = service.create(cr, uid, [id], {}, {})
            if not name:
                report_file = '/tmp/reports'+ str(id) + '.pdf'
            else:
                report_file = name
            fp = open(report_file,'wb+')
            fp.write(result);
            fp.close();
            files += [report_file]
        except Exception,e:
            continue
    return files
template = self.browse(cursor, user, template_id, context=ctx)
template = self.browse(cursor, user, template.id, context=ctx)
def _generate_mailbox_item_from_template(self, cursor, user, template, record_id, context=None): """ Generates an email from the template for record record_id of target object @param cursor: Database Cursor @param user: ID of User @param template: Browse record of template @param record_id: ID of the target model for which this mail has to be generated @return: ID of created object """ if context is None: context = {} #If account to send from is in context select it, else use enforced account if 'account_id' in context.keys(): from_account = self.pool.get('email_template.account').read( cursor, user, context.get('account_id'), ['name', 'email_id'], context ) else: from_account = { 'id':template.enforce_from_account.id, 'name':template.enforce_from_account.name, 'email_id':template.enforce_from_account.email_id } lang = get_value(cursor, user, record_id, template.lang, template, context) if lang: ctx = context.copy() ctx.update({'lang':lang}) template = self.browse(cursor, user, template_id, context=ctx) mailbox_values = { 'email_from': tools.ustr(from_account['name']) + \ "<" + tools.ustr(from_account['email_id']) + ">", 'email_to':get_value(cursor, user, record_id, template.def_to, template, context), 'email_cc':get_value(cursor, user, record_id, template.def_cc, template, context), 'email_bcc':get_value(cursor, user, record_id, template.def_bcc, template, context), 'subject':get_value(cursor, user, record_id, template.def_subject, template, context), 'body_text':get_value(cursor, user, record_id, template.def_body_text, template, context), 'body_html':get_value(cursor, user, record_id, template.def_body_html, template, context), 'account_id' :from_account['id'], #This is a mandatory field when automatic emails are sent 'state':'na', 'folder':'drafts', 'mail_type':'multipart/alternative' } if not mailbox_values['account_id']: raise Exception("Unable to send the mail. No account linked to the template.") #Use signatures if allowed if template.use_sign: sign = self.pool.get('res.users').read(cursor, user, user, ['signature'], context)['signature'] if mailbox_values['body_text']: mailbox_values['body_text'] += sign if mailbox_values['body_html']: mailbox_values['body_html'] += sign print 'Creating', mailbox_values mailbox_id = self.pool.get('email_template.mailbox').create( cursor, user, mailbox_values, context)
self._generate_attach_reports(
self.generate_attach_reports(
def generate_mail(self, cursor, user, template_id, record_ids, context=None): if context is None: context = {} template = self.browse(cursor, user, template_id, context=context) if not template: raise Exception("The requested template could not be loaded") print 'loaded', record_ids result = True for record_id in record_ids: mailbox_id = self._generate_mailbox_item_from_template( cursor, user, template, record_id, context) print 'loaded' mail = self.pool.get('email_template.mailbox').browse( cursor, user, mailbox_id, context=context ) if template.report_template: self._generate_attach_reports( cursor, user, template, record_id, mail, context ) self.pool.get('email_template.mailbox').write( cursor, user, mailbox_id, {'folder':'outbox'}, context=context ) # TODO : manage return value of all the records result = self.pool.get('email_template.mailbox').send_this_mail(cursor, user, [mailbox_id], context) return result
acc_variation = accounts['property_stock_variation']
acc_variation = accounts.get('property_stock_variation', False)
def _get_accounting_values(self, cr, uid, move, context=None):
    product_obj=self.pool.get('product.product')
    product_uom_obj = self.pool.get('product.uom')
    price_type_obj = self.pool.get('product.price.type')
    accounts = product_obj.get_product_accounts(cr,uid,move.product_id.id,context)
    acc_src = accounts['stock_account_input']
    acc_dest = accounts['stock_account_output']
    acc_variation = accounts['property_stock_variation']
    journal_id = accounts['stock_journal']
"WHERE partner_id = %s " \
"WHERE l.partner_id = %s " \
def _get_intial_balance(self, partner):
    move_state = ['draft','posted']
    if self.target_move == 'posted':
        move_state = ['posted']
"WHERE partner_id = %s" \
"WHERE l.partner_id = %s" \
def _sum_debit_partner(self, partner):
    move_state = ['draft','posted']
    if self.target_move == 'posted':
        move_state = ['posted']
"WHERE partner_id = %s" \
"WHERE l.partner_id = %s" \
def _sum_credit_partner(self, partner):
    move_state = ['draft','posted']
    if self.target_move == 'posted':
        move_state = ['posted']
for inv in inv_obj.browse(cr, uid, context['active_ids'], context=context):
company = self.pool.get('res.users').browse(cr, uid, uid).company_id
for inv in inv_obj.browse(cr, uid, context.get('active_ids'), context=context):
def compute_refund(self, cr, uid, ids, mode='refund', context=None):
    """
    @param cr: the current row, from the database cursor,
    @param uid: the current user’s ID for security checks,
    @param ids: the account invoice refund’s ID or list of IDs
cr.execute("""SELECT id from account_period where date(%s) between date_start AND date_stop \ and company_id = %s limit 1 """, (date, self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id,))
cr.execute("""select p.id, p.name from account_fiscalyear y, account_period p where y.id=p.fiscalyear_id \ and date(%s) between p.date_start AND p.date_stop and y.company_id = %s limit 1""", (date, company.id,))
def compute_refund(self, cr, uid, ids, mode='refund', context=None):
    """
    @param cr: the current row, from the database cursor,
    @param uid: the current user’s ID for security checks,
    @param ids: the account invoice refund’s ID or list of IDs
return context['move_line'][0][2] and context['move_line'][0][2]['location_dest_id'] or False
return context['move_line'][0][2] and context['move_line'][0][2].get('location_dest_id',False)
def _default_location_destination(self, cr, uid, context=None):
    """ Gets default address of partner for destination location
    @return: Address id or False
    """
    if context.get('move_line', []):
        if context['move_line'][0]:
            if isinstance(context['move_line'][0], (tuple, list)):
                return context['move_line'][0][2] and context['move_line'][0][2]['location_dest_id'] or False
            else:
                move_list = self.pool.get('stock.move').read(cr, uid, context['move_line'][0], ['location_dest_id'])
                return move_list and move_list['location_dest_id'][0] or False
    if context.get('address_out_id', False):
        property_out = self.pool.get('res.partner.address').browse(cr, uid, context['address_out_id'], context).partner_id.property_stock_customer
        return property_out and property_out.id or False
    return False
if not context:
if context is None:
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
    if not args:
        args=[]
    if not context:
        context={}
    ids = []
    if name:
        ids = self.search(cr, user, [('code','ilike',name)]+ args, limit=limit)
        if not ids:
            ids = self.search(cr, user, [('name',operator,name)]+ args, limit=limit)
    return self.name_get(cr, user, ids, context=context)
ids = self.search(cr, user, [('code','ilike',name)]+ args, limit=limit)
ids = self.search(cr, user, [('code','ilike',name)]+ args, limit=limit, context=context)
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
    if not args:
        args=[]
    if not context:
        context={}
    ids = []
    if name:
        ids = self.search(cr, user, [('code','ilike',name)]+ args, limit=limit)
        if not ids:
            ids = self.search(cr, user, [('name',operator,name)]+ args, limit=limit)
    return self.name_get(cr, user, ids, context=context)
ids = self.search(cr, user, [('name',operator,name)]+ args, limit=limit)
ids = self.search(cr, user, [('name',operator,name)]+ args, limit=limit, context=context)
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
    if not args:
        args=[]
    if not context:
        context={}
    ids = []
    if name:
        ids = self.search(cr, user, [('code','ilike',name)]+ args, limit=limit)
        if not ids:
            ids = self.search(cr, user, [('name',operator,name)]+ args, limit=limit)
    return self.name_get(cr, user, ids, context=context)
res['fields']['qty_available']['string'] = _('P&L Qty')
if fields.get('qty_available'):
    res['fields']['qty_available']['string'] = _('P&L Qty')
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
    res = super(product_product,self).fields_view_get(cr, uid, view_id, view_type, context, toolbar=toolbar, submenu=submenu)
    if context == None:
        context = {}
    if ('location' in context) and context['location']:
        location_info = self.pool.get('stock.location').browse(cr, uid, context['location'])
        fields=res.get('fields',{})
        if fields:
            if location_info.usage == 'supplier':
                if fields.get('virtual_available'):
                    res['fields']['virtual_available']['string'] = _('Future Receptions')
                if fields.get('qty_available'):
                    res['fields']['qty_available']['string'] = _('Received Qty')
if ('name' not in default) or (picking_obj.get('name')=='/'):
if ('name' not in default) or (picking_obj.name=='/'):
def copy(self, cr, uid, id, default=None, context={}):
    if default is None:
        default = {}
    default = default.copy()
    picking_obj = self.browse(cr, uid, [id], context)[0]
    if ('name' not in default) or (picking_obj.get('name')=='/'):
        seq_obj_name = 'stock.picking.' + picking_obj.type
        default['name'] = self.pool.get('ir.sequence').get(cr, uid, seq_obj_name)
    return super(stock_picking, self).copy(cr, uid, id, default, context)
new_end_date = date(*time.strptime(vals['date'],'%Y-%m-%d')[:3])
if isinstance(vals['date'], datetime):
    new_end_date = vals['date'].date()
elif isinstance(vals['date'], date):
    new_end_date = vals['date']
else:
    new_end_date = date(*time.strptime(vals['date'],'%Y-%m-%d')[:3])
def write(self, cr, uid, ids, vals, *args, **kwargs):
    if isinstance(ids, (int, long)):
        ids = [ids]
    if vals.get('date', False):
        data_project = self.browse(cr, uid, ids)
        for prj in data_project:
            new_end_date = date(*time.strptime(vals['date'],'%Y-%m-%d')[:3])
            if prj.date:
                old_end_date = date(*time.strptime(prj.date,'%Y-%m-%d')[:3])
                for task in prj.tasks:
                    if task.date_start:
                        start_dt = (date(*time.strptime(str(task.date_start),'%Y-%m-%d %H:%M:%S')[:3])+(new_end_date-old_end_date)).strftime('%Y-%m-%d %H:%M:%S')
                        if task.date_deadline:
                            deadline_dt = (datetime(*time.strptime(str(task.date_deadline),'%Y-%m-%d')[:3])+(new_end_date-old_end_date)).strftime('%Y-%m-%d')
                            self.pool.get('project.task').write(cr, uid, [task.id], {'date_start':start_dt, 'date_deadline':deadline_dt})
                        else:
                            self.pool.get('project.task').write(cr, uid, [task.id], {'date_start':start_dt})
    return super(project_project,self).write(cr, uid, ids, vals, *args, **kwargs)
res_id = email_tool.process_email(cr, uid, server.object_id.model, data[0][1], attach=server.attach, context=context)
res_id = email_tool.process_email(cr, user, server.object_id.model, data[0][1], attach=server.attach, context=context)
def fetch_mail(self, cr, uid, ids, context={}):
    email_tool = self.pool.get('email.server.tools')
    action_pool = self.pool.get('ir.actions.server')
    context.update({'get_server': True})
    for server in self.browse(cr, uid, ids, context):
        count = 0
        try:
            if server.type == 'imap':
                imap_server = self.button_confirm_login(cr, uid, [server.id], context=context)
                imap_server.select()
                result, data = imap_server.search(None, '(UNSEEN)')
                for num in data[0].split():
                    result, data = imap_server.fetch(num, '(RFC822)')
                    res_id = email_tool.process_email(cr, uid, server.object_id.model, data[0][1], attach=server.attach, context=context)
                    if res_id and server.action_id:
                        action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids':[res_id]})
res_id = email_tool.process_email(cr, uid, server.object_id.model, msg, attach=server.attach, context=context)
res_id = email_tool.process_email(cr, user, server.object_id.model, msg, attach=server.attach, context=context)
def fetch_mail(self, cr, uid, ids, context={}):
    email_tool = self.pool.get('email.server.tools')
    action_pool = self.pool.get('ir.actions.server')
    context.update({'get_server': True})
    for server in self.browse(cr, uid, ids, context):
        count = 0
        try:
            if server.type == 'imap':
                imap_server = self.button_confirm_login(cr, uid, [server.id], context=context)
                imap_server.select()
                result, data = imap_server.search(None, '(UNSEEN)')
                for num in data[0].split():
                    result, data = imap_server.fetch(num, '(RFC822)')
                    res_id = email_tool.process_email(cr, uid, server.object_id.model, data[0][1], attach=server.attach, context=context)
                    if res_id and server.action_id:
                        action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids':[res_id]})
<separator colspan="4">
<separator colspan="4"/>
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False,submenu=False): result = super(stock_partial_move, self).fields_view_get(cr, uid, view_id, view_type, context, toolbar,submenu) move_obj = self.pool.get('stock.move') move_ids = context.get('active_ids', False) move_ids = move_obj.search(cr, uid, [('id','in',move_ids)]) _moves_arch_lst = """<form string="Deliver Products"> <separator colspan="4" string="Delivery Information"/> <field name="date" /> <separator colspan="4"> <group colspan="4" attrs="{'invisible':[('type','=','in')]}"> <field name="partner_id" attrs="{'required':[('type','!=','in')]}" /> <field name="address_id" attrs="{'required':[('type','!=','in')]}"/> <field name="type" invisible="1"/> <newline/> </group> <separator colspan="4" string="Move Detail"/> """ _moves_fields = result['fields'] if move_ids and view_type in ['form']: for m in move_obj.browse(cr, uid, move_ids, context): if m.state in ('done', 'cancel'): continue _moves_fields.update({ 'move%s_product_id'%(m.id) : { 'string': _('Product'), 'type' : 'many2one', 'relation': 'product.product', 'required' : True, 'readonly' : True, }, 'move%s_product_qty'%(m.id) : { 'string': _('Quantity'), 'type' : 'float', 'required': True, }, 'move%s_product_uom'%(m.id) : { 'string': _('Product UOM'), 'type' : 'many2one', 'relation': 'product.uom', 'required' : True, 'readonly' : True, } }) _moves_arch_lst += """ <group colspan="4" col="10"> <field name="move%s_product_id" nolabel="1"/> <field name="move%s_product_qty" string="Qty" /> <field name="move%s_product_uom" nolabel="1" /> """%(m.id, m.id, m.id) if (m.picking_id.type == 'in') and (m.product_id.cost_method == 'average'): _moves_fields.update({ 'move%s_product_price'%(m.id) : { 'string': _('Price'), 'type' : 'float', }, 'move%s_product_currency'%(m.id): { 'string': _('Currency'), 'type' : 'float', 'type' : 'many2one', 'relation': 'res.currency', } }) _moves_arch_lst += """ <field name="move%s_product_price" /> <field name="move%s_product_currency" nolabel="1"/> """%(m.id, m.id) _moves_arch_lst += """ </group> """ _moves_arch_lst += """ <separator string="" colspan="4" /> <label string="" colspan="2"/> <group col="2" colspan="2"> <button icon='gtk-cancel' special="cancel" string="_Cancel" /> <button name="do_partial" string="_Deliver" colspan="1" type="object" icon="gtk-apply" /> </group> </form>""" result['arch'] = _moves_arch_lst result['fields'] = _moves_fields return result
'section_id' : case.section_id and case.section_id.id or False,
def _default_values(self, cr, uid, data, context):
'section_id' : form['section_id'],
def _doIt(self, cr, uid, data, context): form = data['form'] pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'crm', 'view_crm_case_phonecalls_filter') res = mod_obj.read(cr, uid, result, ['res_id']) phonecall_case_obj = pool.get('crm.phonecall') job_case_obj = pool.get('hr.applicant') # Select the view
def check(db, uid, passwd): if security._uid_cache.has_key( uid ) and (security._uid_cache[uid]==passwd):
def check_creds(db, uid, passwd):
    if security._uid_cache.get(db, {}).get(uid) and security._uid_cache.get(db, {}).get(uid) == passwd:
def check(db, uid, passwd):
    if security._uid_cache.has_key( uid ) and (security._uid_cache[uid]==passwd):
        return True
    cr = pooler.get_db(db).cursor()
    if passwd not in _salt_cache:
        cr.execute( 'select login from res_users where id=%s', (uid,) )
        stored_login = cr.fetchone()
        if stored_login:
            stored_login = stored_login[0]
        if not login(db,stored_login,passwd):
            return False
    salt = _salt_cache[passwd]
    cr.execute(' select count(*) from res_users where id=%s and password=%s', (int(uid), encrypt_md5( passwd, salt )) )
    res = cr.fetchone()[0]
    cr.close()
    if not bool(res):
        raise Exception('AccessDenied')
    if res:
        security._uid_cache[uid] = passwd
    return bool(res)
raise Exception('AccessDenied')
raise security.ExceptionNoTb('AccessDenied')
def check(db, uid, passwd):
    if security._uid_cache.has_key( uid ) and (security._uid_cache[uid]==passwd):
        return True
    cr = pooler.get_db(db).cursor()
    if passwd not in _salt_cache:
        cr.execute( 'select login from res_users where id=%s', (uid,) )
        stored_login = cr.fetchone()
        if stored_login:
            stored_login = stored_login[0]
        if not login(db,stored_login,passwd):
            return False
    salt = _salt_cache[passwd]
    cr.execute(' select count(*) from res_users where id=%s and password=%s', (int(uid), encrypt_md5( passwd, salt )) )
    res = cr.fetchone()[0]
    cr.close()
    if not bool(res):
        raise Exception('AccessDenied')
    if res:
        security._uid_cache[uid] = passwd
    return bool(res)
raise Exception('Bad username or password')
raise security.ExceptionNoTb('Bad username or password')
def access(db, uid, passwd, sec_level, ids):
    cr = pooler.get_db(db).cursor()
    salt = _salt_cache[passwd]
    cr.execute('select id from res_users where id=%s and password=%s', (uid, encrypt_md5( passwd, salt )) )
    res = cr.fetchone()
    cr.close()
    if not res:
        raise Exception('Bad username or password')
    return res[0]
security.check=check
security.check_creds=check_creds
def access(db, uid, passwd, sec_level, ids):
    cr = pooler.get_db(db).cursor()
    salt = _salt_cache[passwd]
    cr.execute('select id from res_users where id=%s and password=%s', (uid, encrypt_md5( passwd, salt )) )
    res = cr.fetchone()
    cr.close()
    if not res:
        raise Exception('Bad username or password')
    return res[0]
self.write(cr, uid, [res_id], {})
def create(self, cr, uid, vals, context=None):
    sql = [
        ('journal_id', '=', vals['journal_id']),
        ('state', '=', 'open')
    ]
    open_jrnl = self.search(cr, uid, sql)
    if open_jrnl:
        raise osv.except_osv('Error', _('You can not have two open register for the same journal'))
term_id = False
def onchange_partner_id(self, cr, uid, ids, partner_id, journal_id=False, price=0.0, currency_id=False, ttype=False, context={}): """price Returns a dict that contains new values and context @param partner_id: latest value from user input for field partner_id @param args: other arguments @param context: context arguments, like lang, time zone @return: Returns a dict which contains new values, and context """ if not journal_id: return {} currency_pool = self.pool.get('res.currency') move_pool = self.pool.get('account.move') line_pool = self.pool.get('account.voucher.line') move_line_pool = self.pool.get('account.move.line') partner_pool = self.pool.get('res.partner') journal_pool = self.pool.get('account.journal') default = { 'value':{'line_ids':[], 'line_dr_ids':[], 'line_cr_ids':[], 'pre_line': False}, }
default['value'].update({ 'account_id':account_id, 'term_id':term_id })
default['value']['account_id'] = account_id
def onchange_partner_id(self, cr, uid, ids, partner_id, journal_id=False, price=0.0, currency_id=False, ttype=False, context={}): """price Returns a dict that contains new values and context @param partner_id: latest value from user input for field partner_id @param args: other arguments @param context: context arguments, like lang, time zone @return: Returns a dict which contains new values, and context """ if not journal_id: return {} currency_pool = self.pool.get('res.currency') move_pool = self.pool.get('account.move') line_pool = self.pool.get('account.voucher.line') move_line_pool = self.pool.get('account.move.line') partner_pool = self.pool.get('res.partner') journal_pool = self.pool.get('account.journal') default = { 'value':{'line_ids':[], 'line_dr_ids':[], 'line_cr_ids':[], 'pre_line': False}, }
case_obj = pooler.get_pool(cr.dbname).get('hr.applicant')
categ_id=pooler.get_pool(cr.dbname).get('crm.case.categ').search(cr, uid, [('name','=','Outbound')])
case = case_obj.browse(cr, uid, data['id'])
return {
    'user_id' : case.user_id and case.user_id.id,
    'category_id' : categ_id and categ_id[0] or case.categ_id and case.categ_id.id,
    'section_id' : case.section_id and case.section_id.id or False,
    'note' : case.description
}
def _get_note(self, cr, uid, context=None):
    case_obj = self.pool.get('hr.applicant')
    case = case_obj.browse(cr, uid, context['active_id'])
    return case.description or ''
def _default_values(self, cr, uid, data, context):
def _doIt(self, cr, uid, data, context): form = data['form'] pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'hr', 'view_hr_case_phonecalls_filter')
_defaults = { 'user_id': _date_user, 'category_id': _date_category, 'note': _get_note } def make_phonecall(self, cr, uid, ids, context=None): mod_obj = self.pool.get('ir.model.data') job_case_obj = self.pool.get('hr.applicant') data_obj = self.pool.get('ir.model.data') phonecall_case_obj = self.pool.get('crm.phonecall') form = self.read(cr, uid, ids, [], context=context)[0] result = mod_obj._get_id(cr, uid, 'crm', 'view_crm_case_phonecalls_filter')
def _doIt(self, cr, uid, data, context): form = data['form'] pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'hr', 'view_hr_case_phonecalls_filter') res = mod_obj.read(cr, uid, result, ['res_id']) phonecall_case_obj = pool.get('hr.phonecall') job_case_obj = pool.get('hr.applicant') # Select the view
phonecall_case_obj = pool.get('hr.phonecall') job_case_obj = pool.get('hr.applicant')
def _doIt(self, cr, uid, data, context): form = data['form'] pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'hr', 'view_hr_case_phonecalls_filter') res = mod_obj.read(cr, uid, result, ['res_id']) phonecall_case_obj = pool.get('hr.phonecall') job_case_obj = pool.get('hr.applicant') # Select the view
data_obj = pool.get('ir.model.data') id2 = data_obj._get_id(cr, uid, 'hr', 'hr_case_phone_tree_view') id3 = data_obj._get_id(cr, uid, 'hr', 'hr_case_phone_form_view')
id2 = data_obj._get_id(cr, uid, 'crm', 'crm_case_phone_tree_view') id3 = data_obj._get_id(cr, uid, 'crm', 'crm_case_phone_form_view')
def _doIt(self, cr, uid, data, context): form = data['form'] pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'hr', 'view_hr_case_phonecalls_filter') res = mod_obj.read(cr, uid, result, ['res_id']) phonecall_case_obj = pool.get('hr.phonecall') job_case_obj = pool.get('hr.applicant') # Select the view
for job in job_case_obj.browse(cr, uid, data['ids']):
for job in job_case_obj.browse(cr, uid, context['active_ids']):
def _doIt(self, cr, uid, data, context): form = data['form'] pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'hr', 'view_hr_case_phonecalls_filter') res = mod_obj.read(cr, uid, result, ['res_id']) phonecall_case_obj = pool.get('hr.phonecall') job_case_obj = pool.get('hr.applicant') # Select the view
'section_id' : form['section_id'],
def _doIt(self, cr, uid, data, context): form = data['form'] pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'hr', 'view_hr_case_phonecalls_filter') res = mod_obj.read(cr, uid, result, ['res_id']) phonecall_case_obj = pool.get('hr.phonecall') job_case_obj = pool.get('hr.applicant') # Select the view
if not job.case_id: vals.update({'phonecall_id' : new_phonecall.id})
def _doIt(self, cr, uid, data, context): form = data['form'] pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'hr', 'view_hr_case_phonecalls_filter') res = mod_obj.read(cr, uid, result, ['res_id']) phonecall_case_obj = pool.get('hr.phonecall') job_case_obj = pool.get('hr.applicant') # Select the view
'res_model': 'hr.phonecall',
'res_model': 'crm.phonecall',
def _doIt(self, cr, uid, data, context): form = data['form'] pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'hr', 'view_hr_case_phonecalls_filter') res = mod_obj.read(cr, uid, result, ['res_id']) phonecall_case_obj = pool.get('hr.phonecall') job_case_obj = pool.get('hr.applicant') # Select the view
states = { 'init': { 'actions': [_default_values], 'result': {'type': 'form', 'arch': case_form, 'fields': case_fields, 'state' : [('end', 'Cancel','gtk-cancel'),('order', 'Schedule Phone Call','gtk-go-forward')]} }, 'order': { 'actions': [], 'result': {'type': 'action', 'action': _doIt, 'state': 'end'} } }
job2phonecall()
def _doIt(self, cr, uid, data, context): form = data['form'] pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'hr', 'view_hr_case_phonecalls_filter') res = mod_obj.read(cr, uid, result, ['res_id']) phonecall_case_obj = pool.get('hr.phonecall') job_case_obj = pool.get('hr.applicant') # Select the view
job2phonecall('hr.applicant.reschedule_phone_call') class job2meeting(wizard.interface): def _makeMeeting(self, cr, uid, data, context): pool = pooler.get_pool(cr.dbname) job_case_obj = pool.get('hr.applicant') meeting_case_obj = pool.get('hr.meeting') for job in job_case_obj.browse(cr, uid, data['ids']): new_meeting_id = meeting_case_obj.create(cr, uid, { 'name': job.name, 'date': job.date, 'duration': job.duration, }) new_meeting = meeting_case_obj.browse(cr, uid, new_meeting_id) vals = {} job_case_obj.write(cr, uid, [job.id], vals) job_case_obj.case_cancel(cr, uid, [job.id]) meeting_case_obj.case_open(cr, uid, [new_meeting_id]) data_obj = pool.get('ir.model.data') result = data_obj._get_id(cr, uid, 'hr', 'view_hr_case_meetings_filter') id = data_obj.read(cr, uid, result, ['res_id']) id1 = data_obj._get_id(cr, uid, 'hr', 'hr_case_calendar_view_meet') id2 = data_obj._get_id(cr, uid, 'hr', 'hr_case_form_view_meet') id3 = data_obj._get_id(cr, uid, 'hr', 'hr_case_tree_view_meet') if id1: id1 = data_obj.browse(cr, uid, id1, context=context).res_id if id2: id2 = data_obj.browse(cr, uid, id2, context=context).res_id if id3: id3 = data_obj.browse(cr, uid, id3, context=context).res_id return { 'name': _('Meetings'), 'view_type': 'form', 'view_mode': 'calendar,form,tree', 'res_model': 'hr.meeting', 'view_id': False, 'views': [(id1,'calendar'),(id2,'form'),(id3,'tree'),(False,'graph')], 'type': 'ir.actions.act_window', 'search_view_id': id['res_id'] } states = { 'init': { 'actions': [], 'result': {'type': 'action', 'action': _makeMeeting, 'state': 'order'} }, 'order': { 'actions': [], 'result': {'type': 'state', 'state': 'end'} } } job2meeting('hr.applicant.meeting_set') class partner_create(wizard.interface): case_form = """<?xml version="1.0"?> <form string="Convert To Partner"> <label string="Are you sure you want to create a partner based on this job request ?" colspan="4"/> <label string="You may have to verify that this partner does not exist already." 
colspan="4"/> <!--field name="close"/--> </form>""" case_fields = { 'close': {'type':'boolean', 'string':'Close job request'} } def _selectPartner(self, cr, uid, data, context): pool = pooler.get_pool(cr.dbname) case_obj = pool.get('hr.applicant') for case in case_obj.browse(cr, uid, data['ids']): if case.partner_id: raise wizard.except_wizard(_('Warning !'), _('A partner is already defined on this job request.')) return {} def _makeOrder(self, cr, uid, data, context): pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'base', 'view_res_partner_filter') res = mod_obj.read(cr, uid, result, ['res_id']) case_obj = pool.get('hr.applicant') partner_obj = pool.get('res.partner') contact_obj = pool.get('res.partner.address') for case in case_obj.browse(cr, uid, data['ids']): partner_id = partner_obj.search(cr, uid, [('name', '=', case.partner_name or case.name)]) if partner_id: raise wizard.except_wizard(_('Warning !'),_('A partner is already existing with the same name.')) else: partner_id = partner_obj.create(cr, uid, { 'name': case.partner_name or case.name, 'user_id': case.user_id.id, 'comment': case.description, }) contact_id = contact_obj.create(cr, uid, { 'partner_id': partner_id, 'name': case.partner_name2, 'phone': case.partner_phone, 'mobile': case.partner_mobile, 'email': case.email_from }) case_obj.write(cr, uid, data['ids'], { 'partner_id': partner_id, 'partner_address_id': contact_id }) if data['form']['close']: case_obj.case_close(cr, uid, data['ids']) value = { 'domain': "[]", 'view_type': 'form', 'view_mode': 'form,tree', 'res_model': 'res.partner', 'res_id': int(partner_id), 'view_id': False, 'type': 'ir.actions.act_window', 'search_view_id': res['res_id'] } return value states = { 'init': { 'actions': [_selectPartner], 'result': {'type': 'form', 'arch': case_form, 'fields': case_fields, 'state' : [('end', 'Cancel', 'gtk-cancel'),('confirm', 'Create Partner', 'gtk-go-forward')]} }, 'confirm': { 'actions': [], 'result': {'type': 'action', 'action': _makeOrder, 'state': 'end'} } } partner_create('hr.applicant.partner_create')
def _doIt(self, cr, uid, data, context): form = data['form'] pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'hr', 'view_hr_case_phonecalls_filter') res = mod_obj.read(cr, uid, result, ['res_id']) phonecall_case_obj = pool.get('hr.phonecall') job_case_obj = pool.get('hr.applicant') # Select the view
assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
if groupby:
    assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
    groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
    assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
    """
    Get the list of records in list view grouped by the given ``groupby`` fields
'product_id': fields.many2one('product.product', 'Product', required=True),
'product_id': fields.many2one('product.product', 'Product', required=True, states={'draft':[('readonly',False)]}, readonly=True),
def force_production(self, cr, uid, ids, *args):
    pick_obj = self.pool.get('stock.picking')
    pick_obj.force_assign(cr, uid, [prod.picking_id.id for prod in self.browse(cr, uid, ids)])
    return True
'location_id': fields.many2one('stock.location', 'Location', required=True),
'location_id': fields.many2one('stock.location', 'Location', required=True, states={'draft':[('readonly',False)]}, readonly=True),
def force_production(self, cr, uid, ids, *args):
    pick_obj = self.pool.get('stock.picking')
    pick_obj.force_assign(cr, uid, [prod.picking_id.id for prod in self.browse(cr, uid, ids)])
    return True
self.pool.get('purchase.requisition').write(cr, uid, [po.requisition_id.id], {'state':'close','date_end':time.strftime('%Y-%m-%d %H:%M:%S')})
self.pool.get('purchase.requisition').write(cr, uid, [po.requisition_id.id], {'state':'done','date_end':time.strftime('%Y-%m-%d %H:%M:%S')})
def wkf_confirm_order(self, cr, uid, ids, context={}):
    res = super(purchase_order, self).wkf_confirm_order(cr, uid, ids, context)
    for po in self.browse(cr, uid, ids, context):
        if po.requisition_id and (po.requisition_id.exclusive=='exclusive'):
            for order in po.requisition_id.purchase_ids:
                if order.id<>po.id:
                    wf_service = netsvc.LocalService("workflow")
                    wf_service.trg_validate(uid, 'purchase.order', order.id, 'purchase_cancel', cr)
                self.pool.get('purchase.requisition').write(cr, uid, [po.requisition_id.id], {'state':'close','date_end':time.strftime('%Y-%m-%d %H:%M:%S')})
new_header['debit'] = tot_debit new_header['credit'] = tot_credit
new_header['debit'] = r['credit'] new_header['credit'] = r['debit']
def _add_subtotal(self, cleanarray): i = 0 completearray = [] tot_debit = 0.0 tot_credit = 0.0 tot_scredit = 0.0 tot_sdebit = 0.0 tot_enlitige = 0.0 for r in cleanarray: # For the first element we always add the line # type = 1 is the line is the first of the account # type = 2 is an other line of the account if i==0: # We add the first as the header # ## new_header = {} new_header['ref'] = '' new_header['name'] = r['account_name'] new_header['code'] = r['code'] new_header['debit'] = tot_debit new_header['credit'] = tot_credit new_header['scredit'] = tot_scredit new_header['sdebit'] = tot_sdebit new_header['enlitige'] = tot_enlitige new_header['balance'] = float(tot_sdebit) - float(tot_scredit) new_header['type'] = 3 ## completearray.append(new_header) # r['type'] = 1 r['balance'] = float(r['sdebit']) - float(r['scredit'])
def action_in_production(self, cr, uid, ids):
    """ Changes state to In Production and writes starting date.
    @return: True
    """
    obj = self.browse(cr, uid, ids)[0]
    workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
    for workcenter_line in obj.workcenter_lines:
        workcenter_line_obj.action_start_working(cr, uid, [workcenter_line.id])
    return super(mrp_production,self).action_in_production(cr, uid, ids)
def action_production_end(self, cr, uid, ids):
    obj=self.browse(cr,uid,ids)[0]
    for workcenter_line in obj.workcenter_lines:
        tmp=self.pool.get('mrp.production.workcenter.line').action_done(cr,uid,[workcenter_line.id])
    return super(mrp_production,self).action_production_end(cr,uid,ids)
'name': 'Customer Invoices',
'name': _('Customer Invoices'),
def make_invoice(self, cr, uid, ids, context=None):
    reg_obj = self.pool.get('event.registration')
    mod_obj = self.pool.get('ir.model.data')
    newinv = []
    if context is None:
        context = {}
'location_id': dest,
valdef = {
    'picking_id': move.picking_id.id,
    'product_id': line['product_id'],
    'product_uom': line['product_uom'],
    'product_qty': line['product_qty'],
    'product_uos': line['product_uos'],
    'product_uos_qty': line['product_uos_qty'],
    'move_dest_id': move.id,
    'state': state,
    'name': line['name'],
    'location_dest_id': dest,
    'move_history_ids': [(6,0,[move.id])],
    'move_history_ids2': [(6,0,[])],
    'procurements': [],
picking_id=super(mrp_production,self).action_confirm(cr, uid, ids)
for production in self.browse(cr, uid, ids):
    source = production.product_id.product_tmpl_id.property_stock_production.id
    for sub_product in production.bom_id.sub_products:
        qty1 = sub_product.product_qty
        qty2 = production.product_uos and production.product_uos_qty or False
        if sub_product.subproduct_type=='variable':
picking_id=super(mrp_production,self).action_confirm(cr, uid, ids)
for production in self.browse(cr, uid, ids):
    source = production.product_id.product_tmpl_id.property_stock_production.id
    if not production.bom_id:
        continue
    for sub_product in production.bom_id.sub_products:
        qty1 = sub_product.product_qty
        qty2 = production.product_uos and production.product_uos_qty or False
        if sub_product.subproduct_type=='variable':
def action_confirm(self, cr, uid, ids):
    picking_id=super(mrp_production,self).action_confirm(cr, uid, ids)
    for production in self.browse(cr, uid, ids):
        source = production.product_id.product_tmpl_id.property_stock_production.id
        for sub_product in production.bom_id.sub_products:
            qty1 = sub_product.product_qty
            qty2 = production.product_uos and production.product_uos_qty or False
            if sub_product.subproduct_type=='variable':
                if production.product_qty:
                    qty1 *= production.product_qty / (production.bom_id.product_qty or 1.0)
                if production.product_uos_qty:
                    qty2 *= production.product_uos_qty / (production.bom_id.product_uos_qty or 1.0)
            data = {
                'name':'PROD:'+production.name,
                'date_planned': production.date_planned,
                'product_id': sub_product.product_id.id,
                'product_qty': qty1,
                'product_uom': sub_product.product_uom.id,
                'product_uos_qty': qty2,
                'product_uos': production.product_uos and production.product_uos.id or False,
                'location_id': source,
                'location_dest_id': production.location_dest_id.id,
                'move_dest_id': production.move_prod_id.id,
                'state': 'waiting',
                'production_id':production.id
            }
            sub_prod_ids=self.pool.get('stock.move').create(cr, uid,data)
    return picking_id
self.pool.get('account.voucher').unlink(cr, uid, voucher_ids, context)
def button_cancel(self, cr, uid, ids, context=None):
    done = []
    for st in self.browse(cr, uid, ids, context):
        voucher_ids = []
        for line in st.line_ids:
            if line.voucher_id:
                voucher_ids.append(line.voucher_id.id)
        self.pool.get('account.voucher').cancel_voucher(cr, uid, voucher_ids, context)
        self.pool.get('account.voucher').unlink(cr, uid, voucher_ids, context)
    return super(account_bank_statement, self).button_cancel(cr, uid, ids, context=context)
else:
    partner_id = partner_obj.create(cr, uid, {
        'name': case.partner_name or case.name,
        'user_id': case.user_id.id,
        'comment': case.description,
    })
partner_id = partner_obj.create(cr, uid, {
    'name': case.partner_name or case.name,
    'user_id': case.user_id.id,
    'comment': case.description,
})
def make_order(self, cr, uid, ids, context=None):
    mod_obj = self.pool.get('ir.model.data')
    partner_obj = self.pool.get('res.partner')
    contact_obj = self.pool.get('res.partner.address')
    case_obj = self.pool.get('hr.applicant')
case_obj.write(cr, uid, context['active_ids'], {
    'partner_id': partner_id,
    'partner_address_id': contact_id
})
case_obj.write(cr, uid, case.id, {
    'partner_id': partner_id,
    'partner_address_id': contact_id
})
def make_order(self, cr, uid, ids, context=None):
    mod_obj = self.pool.get('ir.model.data')
    partner_obj = self.pool.get('res.partner')
    contact_obj = self.pool.get('res.partner.address')
    case_obj = self.pool.get('hr.applicant')
pricelist_version_ids = pricelist_ids
plversions_search_ids = pricelist_ids
def _create_parent_category_list(id, lst):
    if not id:
        return []
    parent = product_category_tree.get(id)
    if parent:
        lst.append(parent)
        return _create_parent_category_list(parent, lst)
    else:
        return lst
pricelist_version_ids = product_pricelist_version_obj.search(cr, uid, [])
pricelist_version_ids = list(set(pricelist_version_ids))
plversions_search_ids = product_pricelist_version_obj.search(cr, uid, [])
plversions_search_ids = list(set(plversions_search_ids))
def _create_parent_category_list(id, lst):
    if not id:
        return []
    parent = product_category_tree.get(id)
    if parent:
        lst.append(parent)
        return _create_parent_category_list(parent, lst)
    else:
        return lst
('pricelist_id', 'in', pricelist_version_ids),
('pricelist_id', 'in', plversions_search_ids),
def _create_parent_category_list(id, lst):
    if not id:
        return []
    parent = product_category_tree.get(id)
    if parent:
        lst.append(parent)
        return _create_parent_category_list(parent, lst)
    else:
        return lst
plversion_ids = product_pricelist_version_obj.search(cr, uid, plversions_search_args)
if len(pricelist_version_ids) != len(plversion_ids):
pricelist_version_ids = product_pricelist_version_obj.search(cr, uid, plversions_search_args)
if len(plversions_search_ids) != len(pricelist_version_ids):
def _create_parent_category_list(id, lst):
    if not id:
        return []
    parent = product_category_tree.get(id)
    if parent:
        lst.append(parent)
        return _create_parent_category_list(parent, lst)
    else:
        return lst
for pricelist_id in pricelist_version_ids:
for pricelist_id in plversions_search_ids:
def _create_parent_category_list(id, lst):
    if not id:
        return []
    parent = product_category_tree.get(id)
    if parent:
        lst.append(parent)
        return _create_parent_category_list(parent, lst)
    else:
        return lst
(tmpl_id, product_id, plversion_ids[0], qty))
(tmpl_id, product_id, pricelist_version_ids[0], qty))
def _create_parent_category_list(id, lst):
    if not id:
        return []
    parent = product_category_tree.get(id)
    if parent:
        lst.append(parent)
        return _create_parent_category_list(parent, lst)
    else:
        return lst
if new_qty <= 0.0: return {}
def _change_prod_qty(self, cr, uid, ids, new_qty, context={}):
    move_obj = self.pool.get('stock.move')
move_obj.unlink(cr, uid, moves[product_id][0])
move_obj.unlink(cr, uid, moves[product_id])
def change(product_id, product_qty, qty_vals, qty_vals_done, moves, moves_done, field):
    if not moves.get(product_id) and moves_done.get(product_id):
        new_qty = (product_qty - qty_vals_done.get(product_id, 0.0))
        new_move = move_obj.copy(cr, uid, moves_done.get(product_id), default={'product_qty': new_qty})
        self.write(cr, uid, prod.id, {field: [(4, new_move)]})
        return
    to_add = (product_qty - qty_vals_done.get(product_id, 0.0)) - \
        qty_vals.get(product_id, 0.0)
    avail_qty = move_obj.browse(cr, uid, moves[product_id][0]).product_qty
    new_qty = avail_qty + to_add
    if new_qty == 0:
        move_obj.write(cr, uid, moves[product_id][0], {'state': 'draft'})
        move_obj.unlink(cr, uid, moves[product_id])
    elif new_qty < 0:
        avail_qty = move_obj.browse(cr, uid, moves_done[product_id][0]).product_qty
        move_obj.unlink(cr, uid, moves[product_id][0])
        move_obj.write(cr, uid, moves_done[product_id][0], {'product_qty': avail_qty + new_qty})
    else:
        move_obj.write(cr, uid, moves[product_id][0], {'product_qty': avail_qty + to_add})
    return
sql = [
    """SELECT l2.id, SUM(l1.debit-l1.credit) FROM account_move_line l1, account_move_line l2""",
    """WHERE l2.account_id = l1.account_id""",
    """AND""",
    """l1.id <= l2.id""",
    """AND""",
    """l2.id IN %s""",
    """AND""",
    self._query_get(cr, uid, obj='l1', context=c),
    """ GROUP BY l2.id""",
]
cr.execute('\n'.join(sql), [tuple(ids)])
sql = """SELECT l2.id, SUM(l1.debit-l1.credit) FROM account_move_line l1, account_move_line l2 WHERE l2.account_id = l1.account_id AND l1.id <= l2.id AND l2.id IN %%s AND """ + \
    self._query_get(cr, uid, obj='l1', context=c) + \
    " GROUP BY l2.id"
cr.execute(sql, [tuple(ids)])
def _balance(self, cr, uid, ids, name, arg, context=None):
    if context is None:
        context = {}
    c = context.copy()
    c['initital_bal'] = True
    sql = [
        """SELECT l2.id, SUM(l1.debit-l1.credit) FROM account_move_line l1, account_move_line l2""",
        """WHERE l2.account_id = l1.account_id""",
        """AND""",
        """l1.id <= l2.id""",
        """AND""",
        """l2.id IN %s""",
        """AND""",
        self._query_get(cr, uid, obj='l1', context=c),
        """ GROUP BY l2.id""",
    ]
'function_id': fields.related('job_ids','function_id',type='many2one', \
    relation='res.partner.function', string='Main Function'),
'function': fields.related('job_ids', 'function', type='char', \
    string='Main Function'),
def _main_job(self, cr, uid, ids, fields, arg, context=None):
    """
    @param self: The object pointer
    @param cr: the current row, from the database cursor,
    @param uid: the current user’s ID for security checks,
    @param ids: List of partner contact’s IDs
    @fields: Get Fields
    @param context: A standard dictionary for contextual values
    @param arg: list of tuples of form [(‘name_of_the_field’, ‘operator’, value), ...].
    """
funct = r.function_id and (", " + r.function_id.name) or ""
funct = r.function and (", " + r.function) or ""
def name_get(self, cr, uid, ids, context={}):
    """
    @param self: The object pointer
    @param cr: the current row, from the database cursor,
    @param user: the current user,
    @param ids: List of partner address’s IDs
    @param context: A standard dictionary for contextual values
    """
'function_id': fields.many2one('res.partner.function','Partner Function', \
    help="Function of this contact with this partner"),
'function': fields.char('Partner Function', size=34, help="Function of this contact with this partner"),
def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
    """
    search parnter job
    @param self: The object pointer
    @param cr: the current row, from the database cursor,
    @param user: the current user
    @param args: list of tuples of form [(‘name_of_the_field’, ‘operator’, value), ...].
    @param offset: The Number of Results to Pass
    @param limit: The Number of Results to Return
    @param context: A standard dictionary for contextual values
    """
for tax in tax_obj.compute(cr, uid, [tax_id], total, 1.00):
for tax in tax_obj.compute_all(cr, uid, [tax_id], total, 1.00).get('taxes'):
def create(self, cr, uid, vals, context=None, check=True):
    account_obj = self.pool.get('account.account')
    tax_obj=self.pool.get('account.tax')
    if context is None:
        context = {}
    self._check_date(cr, uid, vals, context, check)
    if ('account_id' in vals) and not account_obj.read(cr, uid, vals['account_id'], ['active'])['active']:
        raise osv.except_osv(_('Bad account!'), _('You can not use an inactive account!'))
    if 'journal_id' in vals:
        context['journal_id'] = vals['journal_id']
    if 'period_id' in vals:
        context['period_id'] = vals['period_id']
    if ('journal_id' not in context) and ('move_id' in vals) and vals['move_id']:
        m = self.pool.get('account.move').browse(cr, uid, vals['move_id'])
        context['journal_id'] = m.journal_id.id
        context['period_id'] = m.period_id.id
if type(ids) == int:
if isinstance(ids, (int, long)):
def write(self, cr, uid, ids, vals, context=None):
    resource_calendar_obj = self.pool.get('resource.calendar')
    resource_obj = self.pool.get('resource.resource')
    uom_obj = self.pool.get('product.uom')
    if context is None:
        context = {}
    if context.get('scheduler',False):
        return super(project_phase, self).write(cr, uid, ids, vals, context=context)
    # Consider calendar and efficiency if the phase is performed by a resource
    # otherwise consider the project's working calendar
    if type(ids) == int:
        ids = [ids]
    phase = self.browse(cr, uid, ids[0], context=context)
    calendar_id = phase.project_id.resource_calendar_id and phase.project_id.resource_calendar_id.id or False
    resource_id = resource_obj.search(cr, uid, [('user_id', '=', phase.responsible_id.id)],context=context)
    if resource_id:
        cal_id = resource_obj.browse(cr, uid, resource_id[0], context=context).calendar_id.id
        if cal_id:
            calendar_id = cal_id
    default_uom_id = self._get_default_uom_id(cr, uid)
    avg_hours = uom_obj._compute_qty(cr, uid, phase.product_uom.id, phase.duration, default_uom_id)
if tasks2[i][0][:10]<=current_date:
if tasks2[i][0] and tasks2[i][0][:10]<=current_date:
def compute_burndown(cr, uid, tasks_id, date_start, date_stop):
    latest = False
    if len(tasks_id):
        cr.execute('select id,create_date,state,planned_hours from project_task where id = ANY(%s) order by create_date',(tasks_id,))
        tasks = cr.fetchall()
        cr.execute('select w.date,w.hours from project_task_work w left join project_task t on (t.id=w.task_id) where t.id = ANY(%s) and t.state in (%s,%s) order by date',(tasks_id,'open','progress',))
        tasks2 = cr.fetchall()
        cr.execute('select date_end,planned_hours from project_task where id =ANY(%s) and state in (%s,%s) order by date_end' ,(tasks_id,'cancelled','done',))
        tasks2 += cr.fetchall()
        tasks2.sort()
    else:
        tasks = []
        tasks2 = []
    current_date = date_start
    total = 0
    done = 0
    result = []
    while current_date<=date_stop:
        while len(tasks) and tasks[0][1] and tasks[0][1][:10]<=current_date:
            latest = tasks.pop(0)
            total += latest[3]
        i = 0
        while i<len(tasks2):
            if tasks2[i][0][:10]<=current_date:
                t = tasks2.pop(i)
                done += t[1]
            else:
                i+=1
        result.append( (int(time.mktime(time.strptime(current_date,'%Y-%m-%d'))), total-done) )
        current_date = (DateTime.strptime(current_date, '%Y-%m-%d') + DateTime.RelativeDateTime(days=1)).strftime('%Y-%m-%d')
        if not len(tasks) and not len(tasks2):
            break
    result.append( (int(time.mktime(time.strptime(date_stop,'%Y-%m-%d'))), 0) )
    return result
'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'account.model', context=c),
def compute_inv(self, cr, uid, taxes, price_unit, quantity, address_id=None, product=None, partner=None):
    """
    Compute tax values for given PRICE_UNIT, QUANTITY and a buyer/seller ADDRESS_ID.
    Price Unit is a VAT included price
raise RuntimeError('media offline')
raise IOError(errno.EREMOTE, 'medium offline')
if boo.readonly and mode not in ('r', 'rb'):
    raise IOError(errno.EPERM, "Readonly medium")
def get_file(self, cr, uid, id, file_node, mode, context=None):
    """ Return a file-like object for the contents of some node
    """
    if context is None:
        context = {}
    boo = self.browse(cr, uid, id, context)
    if not boo.online:
        raise RuntimeError('media offline')
    ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
    if boo.type == 'filestore':
        if not ira.store_fname:
            # On a migrated db, some files may have the wrong storage type
            # try to fix their directory.
            if mode in ('r','r+'):
                if ira.file_size:
                    self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
                raise IOError(errno.ENOENT, 'No file can be located')
            else:
                store_fname = self.__get_random_fname(boo.path)
                cr.execute('UPDATE ir_attachment SET store_fname = %s WHERE id = %s', (store_fname, ira.id))
                fpath = os.path.join(boo.path, store_fname)
        else:
            fpath = os.path.join(boo.path, ira.store_fname)
        return nodefd_file(file_node, path=fpath, mode=mode)
if not boo.online: raise RuntimeError('media offline')
def __get_data_3(self, cr, uid, boo, ira, context): if not boo.online: raise RuntimeError('media offline') if boo.type == 'filestore': if not ira.store_fname: # On a migrated db, some files may have the wrong storage type # try to fix their directory. if ira.file_size: self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id) return None fpath = os.path.join(boo.path, ira.store_fname) return file(fpath, 'rb').read() elif boo.type == 'db64': # TODO: we need a better api for large files if ira.db_datas: out = base64.decodestring(ira.db_datas) else: out = '' return out elif boo.type == 'db': # We do an explicit query, to avoid type transformations. cr.execute('SELECT db_datas FROM ir_attachment WHERE id = %s', (ira.id,)) res = cr.fetchone() if res: return res[0] else: return '' elif boo.type == 'realstore': if not ira.store_fname: # On a migrated db, some files may have the wrong storage type # try to fix their directory. if ira.file_size: self._doclog.warning("ir.attachment #%d does not have a filename, trying the name." %ira.id) sfname = ira.name fpath = os.path.join(boo.path,ira.store_fname or ira.name) if os.path.exists(fpath): return file(fpath,'rb').read() elif not ira.store_fname: return None else: raise IOError("File not found: %s" % fpath)
raise IOError("File not found: %s" % fpath)
raise IOError(errno.ENOENT, "File not found: %s" % fpath)
def __get_data_3(self, cr, uid, boo, ira, context): if not boo.online: raise RuntimeError('media offline') if boo.type == 'filestore': if not ira.store_fname: # On a migrated db, some files may have the wrong storage type # try to fix their directory. if ira.file_size: self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id) return None fpath = os.path.join(boo.path, ira.store_fname) return file(fpath, 'rb').read() elif boo.type == 'db64': # TODO: we need a better api for large files if ira.db_datas: out = base64.decodestring(ira.db_datas) else: out = '' return out elif boo.type == 'db': # We do an explicit query, to avoid type transformations. cr.execute('SELECT db_datas FROM ir_attachment WHERE id = %s', (ira.id,)) res = cr.fetchone() if res: return res[0] else: return '' elif boo.type == 'realstore': if not ira.store_fname: # On a migrated db, some files may have the wrong storage type # try to fix their directory. if ira.file_size: self._doclog.warning("ir.attachment #%d does not have a filename, trying the name." %ira.id) sfname = ira.name fpath = os.path.join(boo.path,ira.store_fname or ira.name) if os.path.exists(fpath): return file(fpath,'rb').read() elif not ira.store_fname: return None else: raise IOError("File not found: %s" % fpath)
raise RuntimeError('media offline')
raise IOError(errno.EREMOTE, 'medium offline')
if boo.readonly:
    raise IOError(errno.EPERM, "Readonly medium")
def set_data(self, cr, uid, id, file_node, data, context=None, fil_obj=None):
    """ store the data.
        This function MUST be used from an ir.attachment. It wouldn't make sense
        to store things persistently for other types (dynamic).
    """
    if not context:
        context = {}
    boo = self.browse(cr, uid, id, context)
    if fil_obj:
        ira = fil_obj
    else:
        ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
raise RuntimeError('media offline')
raise IOError(errno.EREMOTE, 'medium offline')
if storage_bo.readonly:
    raise IOError(errno.EPERM, "Readonly medium")
def prepare_unlink(self, cr, uid, storage_bo, fil_bo):
    """ Before we unlink a file (fil_boo), prepare the list of real
        files that have to be removed, too.
    """
if not sbro.online:
    raise IOError(errno.EREMOTE, 'medium offline')
if sbro.readonly:
    raise IOError(errno.EPERM, "Readonly medium")
def simple_rename(self, cr, uid, file_node, new_name, context=None):
    """ A preparation for a file rename.
        It will not affect the database, but merely check and perhaps
        rename the realstore file.
        @return the dict of values that can safely be be stored in the db.
    """
    sbro = self.browse(cr, uid, file_node.storage_id, context=context)
    assert sbro, "The file #%d didn't provide storage" % file_node.file_id
    if sbro.type in ('filestore', 'db', 'db64'):
        # nothing to do for a rename, allow to change the db field
        return { 'name': new_name, 'datas_fname': new_name }
    elif sbro.type == 'realstore':
        ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
result.append((sur.id, sur.title))
res = True
break
def _get_survey(self, cr, uid, context=None):
    """
    Set the value In survey_id field.
result.append((sur.id, sur.title))
        res = True
        break
if res:
    result.append((sur.id, sur.title))
def _get_survey(self, cr, uid, context=None):
    """
    Set the value In survey_id field.
if context is None:
    context = {}
id = self.search(cr, uid, [('user_id', '=', context.get('user_id', uid))], context=context)
if id:
    journal = self.browse(cr, uid, id[0], context=context)
    if journal.journal_id:
        return journal.journal_id.id
md = self.pool.get('ir.model.data')
try:
    result = md.get_object_reference(cr, uid, 'hr_timesheet', 'analytic_journal')
    return result[1]
except ValueError, e:
    pass
def _getAnalyticJournal(self, cr, uid, context=None):
    if context is None:
        context = {}
    id = self.search(cr, uid, [('user_id', '=', context.get('user_id', uid))], context=context)
    if id:
        journal = self.browse(cr, uid, id[0], context=context)
        if journal.journal_id:
            return journal.journal_id.id
    return False
if context is None:
    context = {}
id = self.search(cr, uid, [('user_id', '=', context.get('user_id', uid))], context=context)
if id:
    prod = self.browse(cr, uid, id[0], context=context)
    if prod.product_id:
        return prod.product_id.id
md = self.pool.get('ir.model.data')
try:
    result = md.get_object_reference(cr, uid, 'hr_timesheet', 'product_consultant')
    return result[1]
except ValueError, e:
    pass
def _getEmployeeProduct(self, cr, uid, context=None):
    if context is None:
        context = {}
    id = self.search(cr, uid, [('user_id', '=', context.get('user_id', uid))], context=context)
    if id:
        prod = self.browse(cr, uid, id[0], context=context)
        if prod.product_id:
            return prod.product_id.id
    return False
fp = open(addons.get_module_resource('survey', 'report') + file_name + '.pdf', 'wb+');
ret_file_name = addons.get_module_resource('survey', 'report') + file_name + '.pdf'
fp = open(ret_file_name, 'wb+');
def create_report(self, cr, uid, res_ids, report_name=False, file_name=False, context=None):
    """ If any user give answer of survey then last create report of this answer
    and if 'E-mail Notification on Answer' set True in survey then send mail on
    responsible person of this survey and attach survey answer report in pdf format.
account_move_line_obj.reconcile(cr, uid, torec, 'statement', writeoff_period_id=st.period_id.id, writeoff_journal_id=st.journal_id.id, context=context)
writeoff_acc_id = False
for entry in move.reconcile_id.line_new_ids:
    writeoff_acc_id = entry.account_id.id
    break
account_move_line_obj.reconcile(cr, uid, torec, 'statement', writeoff_acc_id=writeoff_acc_id, writeoff_period_id=st.period_id.id, writeoff_journal_id=st.journal_id.id, context=context)
def button_confirm(self, cr, uid, ids, context={}):
    done = []
    res_currency_obj = self.pool.get('res.currency')
    res_users_obj = self.pool.get('res.users')
    account_move_obj = self.pool.get('account.move')
    account_move_line_obj = self.pool.get('account.move.line')
    account_bank_statement_line_obj = \
        self.pool.get('account.bank.statement.line')