Columns: rem (string, length 0–322k), add (string, length 0–2.05M), context (string, length 8–228k)
return [(x['id'], tools.ustr(x['name']) + ' (' + tools.ustr(x['symbol']) + ')') for x in reads]
return [(x['id'], tools.ustr(x['name']) + (x['symbol'] and (' (' + tools.ustr(x['symbol']) + ')') or '')) for x in reads]
def name_get(self, cr, uid, ids, context=None): if context is None: context = {}
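Note: the replacement line above appends the currency symbol only when it is actually set, so records with an empty or unset symbol no longer render a spurious suffix. A minimal standalone sketch of the same guard, using hypothetical record dicts and no OpenERP dependencies:

def format_currency_name(rec):
    # Append the symbol only when it is truthy; '' and None are skipped.
    name = rec['name']
    if rec.get('symbol'):
        name += ' (' + rec['symbol'] + ')'
    return (rec['id'], name)

records = [{'id': 1, 'name': 'Euro', 'symbol': 'EUR'},
           {'id': 2, 'name': 'Yen', 'symbol': None}]
print([format_currency_name(r) for r in records])
# [(1, 'Euro (EUR)'), (2, 'Yen')]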
@param fields: List of fields for which we want default values @param context: A standard dictionary @return: A dictionary which of fields with values. """
@param fields: List of fields for which we want default values @param context: A standard dictionary @return: A dictionary with default values for all field in ``fields`` """
def default_get(self, cr, uid, fields, context): """ To get default values for the object. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param fields: List of fields for which we want default values @param context: A standard dictionary @return: A dictionary which of fields with values. """ res = super(stock_return_picking, self).default_get(cr, uid, fields, context=context) record_id = context and context.get('active_id', False) or False pick_obj = self.pool.get('stock.picking') pick = pick_obj.browse(cr, uid, record_id) for m in [line for line in pick.move_lines]: res['return%s'%(m.id)] = m.product_qty if pick.invoice_state=='invoiced': res['invoice_state'] = '2binvoiced' else: res['invoice_state'] = 'none' return res
for m in [line for line in pick.move_lines]: res['return%s'%(m.id)] = m.product_qty if pick.invoice_state=='invoiced': res['invoice_state'] = '2binvoiced' else: res['invoice_state'] = 'none'
if pick: if 'invoice_state' in fields: if pick.invoice_state=='invoiced': res['invoice_state'] = '2binvoiced' else: res['invoice_state'] = 'none' for line in pick.move_lines: return_id = 'return%s'%(line.id) if return_id in fields: res[return_id] = line.product_qty
if abs(amount) < 10 ** -(int(config['price_accuracy'])):
if abs(amount) < 10 ** -(int(config['price_accuracy'])+1):
def validate(self, cr, uid, ids, context={}): if context and ('__last_update' in context): del context['__last_update'] ok = True for move in self.browse(cr, uid, ids, context): #unlink analytic lines on move_lines for obj_line in move.line_id: for obj in obj_line.analytic_lines: self.pool.get('account.analytic.line').unlink(cr,uid,obj.id)
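Note: the one-character change above tightens the zero test from 10 ** -precision to 10 ** -(precision + 1), so a residual amount must fall an order of magnitude below the smallest representable price unit before it is treated as zero. A small sketch of the idea, assuming a hypothetical price_accuracy of 2:

def is_zero(amount, price_accuracy=2):
    # Amounts below one tenth of the smallest price unit count as zero.
    epsilon = 10 ** -(price_accuracy + 1)
    return abs(amount) < epsilon

assert is_zero(0.0001)        # below 0.001, treated as zero
assert not is_zero(0.01)      # a real cent survives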
self.email_send(cr, uid, ids, meeting_id.sprint_id.scrum_master_id.user_email)
res = self.email_send(cr, uid, ids, meeting_id.sprint_id.scrum_master_id.user_email) if not res: raise osv.except_osv(_('Error !'), _(' Email Not send to the scrum master %s!' % meeting_id.sprint_id.scrum_master_id.name))
def button_send_to_master(self, cr, uid, ids, context=None): if context is None: context = {} meeting_id = self.browse(cr, uid, ids)[0] user = self.pool.get('res.users').browse(cr, uid, uid, context=context) if meeting_id and meeting_id.sprint_id.scrum_master_id.user_email: self.email_send(cr, uid, ids, meeting_id.sprint_id.scrum_master_id.user_email) else: raise osv.except_osv(_('Error !'), _('Please provide email address for scrum master defined on sprint.')) return True
self.email_send(cr,uid,ids,meeting_id.sprint_id.product_owner_id.user_email)
res = self.email_send(cr,uid,ids,meeting_id.sprint_id.product_owner_id.user_email) if not res: raise osv.except_osv(_('Error !'), _(' Email Not send to the product owner %s!' % meeting_id.sprint_id.product_owner_id.name))
def button_send_product_owner(self, cr, uid, ids, context=None): if context is None: context = {} context.update({'button_send_product_owner': True}) meeting_id = self.browse(cr, uid, ids)[0] if meeting_id.sprint_id.product_owner_id.user_email: self.email_send(cr,uid,ids,meeting_id.sprint_id.product_owner_id.user_email) else: raise osv.except_osv(_('Error !'), _('Please provide email address for product owner defined on sprint.')) return True
if context.get('button_send_product_owner', False): raise osv.except_osv(_('Error !'), _(' Email Not send to the product owner %s!' % meeting_id.sprint_id.product_owner_id.name)) raise osv.except_osv(_('Error !'), _(' Email Not send to the scrum master %s!' % meeting_id.sprint_id.scrum_master_id.name)) if context.get('button_send_product_owner', False): raise osv.except_osv(_('Information !'), _(' Email send successfully to product owner %s!' % meeting_id.sprint_id.product_owner_id.name)) raise osv.except_osv(_('Information!'), _(' Email send successfully to scrum master %s!'% meeting_id.sprint_id.scrum_master_id.name))
return False
def email_send(self, cr, uid, ids, email, context=None): if context is None: context = {} email_from = tools.config.get('email_from', False) meeting_id = self.browse(cr,uid,ids)[0] user = self.pool.get('res.users').browse(cr, uid, uid, context=context) user_email = email_from or user.address_id.email or email_from body = "Hello " + meeting_id.sprint_id.scrum_master_id.name+",\n" +" \nI am sending you Daily Meeting Details of date %s for the Sprint %s \n" % (meeting_id.date, meeting_id.sprint_id.name) body += '\n*Tasks since yesterday: \n_______________________%s' % (meeting_id.question_yesterday) + '\n*Task for Today :\n_______________________ %s\n' % (meeting_id.question_today )+ '\n*Blocks encountered: \n_______________________ %s' % (meeting_id.question_blocks or 'No Blocks') body += "\n\nThank you,\n"+ user.name sub_name = meeting_id.name or 'Scrum Meeting of %s '%meeting_id.date flag = tools.email_send(user_email , [email], sub_name, body, reply_to=None, openobject_id=str(meeting_id.id)) if not flag: if context.get('button_send_product_owner', False): raise osv.except_osv(_('Error !'), _(' Email Not send to the product owner %s!' % meeting_id.sprint_id.product_owner_id.name)) raise osv.except_osv(_('Error !'), _(' Email Not send to the scrum master %s!' % meeting_id.sprint_id.scrum_master_id.name)) if context.get('button_send_product_owner', False): raise osv.except_osv(_('Information !'), _(' Email send successfully to product owner %s!' % meeting_id.sprint_id.product_owner_id.name)) raise osv.except_osv(_('Information!'), _(' Email send successfully to scrum master %s!'% meeting_id.sprint_id.scrum_master_id.name)) return True
if purchase.invoice_id and purchase.invoice_id.state not in ('draft','cancel'): tot += purchase.invoice_id.amount_untaxed
for invoice in purchase.invoice_ids: if invoice.state not in ('draft','cancel'): tot += invoice.amount_untaxed
def _invoiced_rate(self, cursor, user, ids, name, arg, context=None): res = {} for purchase in self.browse(cursor, user, ids, context=context): tot = 0.0 if purchase.invoice_id and purchase.invoice_id.state not in ('draft','cancel'): tot += purchase.invoice_id.amount_untaxed if purchase.amount_untaxed: res[purchase.id] = tot * 100.0 / purchase.amount_untaxed else: res[purchase.id] = 0.0 return res
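Note: the reworked computation sums every non-draft, non-cancelled invoice linked to the order (invoice_ids) instead of reading a single invoice_id. The percentage arithmetic can be sketched without the ORM; the states and amounts here are hypothetical:

def invoiced_rate(order_untaxed, invoices):
    # invoices: list of (state, amount_untaxed) pairs
    tot = sum(amount for state, amount in invoices
              if state not in ('draft', 'cancel'))
    if not order_untaxed:
        return 0.0
    return tot * 100.0 / order_untaxed

print(invoiced_rate(200.0, [('open', 50.0), ('draft', 150.0)]))  # 25.0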
if purchase.invoice_id.reconciled: res[purchase.id] = purchase.invoice_id.reconciled else: res[purchase.id] = False
invoiced = [] for invoice in purchase.invoice_ids: invoiced.append(invoice.reconciled) res[purchase.id] = invoiced
def _invoiced(self, cursor, user, ids, name, arg, context=None): res = {} for purchase in self.browse(cursor, user, ids, context=context): if purchase.invoice_id.reconciled: res[purchase.id] = purchase.invoice_id.reconciled else: res[purchase.id] = False return res
'invoice_id': fields.many2one('account.invoice', 'Invoice', readonly=True, help="An invoice generated for a purchase order"),
'invoice_ids': fields.function(_get_invoices, method=True, type='many2many', relation="account.invoice", string="Invoice", help="Invoices generated for a purchase order"),
self.write(cr, uid, [o.id], {'invoice_id': inv_id})
self.write(cr, uid, [o.id], {'invoice_ids': [4, inv_id]})
def action_invoice_create(self, cr, uid, ids, *args): res = False
inv = purchase.invoice_id if inv and inv.state not in ('cancel','draft'): raise osv.except_osv( _('Could not cancel this purchase order !'), _('You must first cancel all invoices attached to this purchase order.')) if inv: wf_service = netsvc.LocalService("workflow") wf_service.trg_validate(uid, 'account.invoice', inv.id, 'invoice_cancel', cr)
for inv in purchase.invoice_ids: if inv and inv.state not in ('cancel','draft'): raise osv.except_osv( _('Could not cancel this purchase order !'), _('You must first cancel all invoices attached to this purchase order.')) if inv: wf_service = netsvc.LocalService("workflow") wf_service.trg_validate(uid, 'account.invoice', inv.id, 'invoice_cancel', cr)
def action_cancel(self, cr, uid, ids, context={}): for purchase in self.browse(cr, uid, ids): for pick in purchase.picking_ids: if pick.state not in ('draft','cancel'): raise osv.except_osv( _('Could not cancel purchase order !'), _('You must first cancel all picking attached to this purchase order.')) for pick in purchase.picking_ids: wf_service = netsvc.LocalService("workflow") wf_service.trg_validate(uid, 'stock.picking', pick.id, 'button_cancel', cr) inv = purchase.invoice_id if inv and inv.state not in ('cancel','draft'): raise osv.except_osv( _('Could not cancel this purchase order !'), _('You must first cancel all invoices attached to this purchase order.')) if inv: wf_service = netsvc.LocalService("workflow") wf_service.trg_validate(uid, 'account.invoice', inv.id, 'invoice_cancel', cr) self.write(cr,uid,ids,{'state':'cancel'}) for (id,name) in self.name_get(cr, uid, ids): message = _("Purchase order '%s' is cancelled.") % name self.log(cr, uid, id, message) return True
'invoice_id':False,
'invoice_ids':{},
def copy(self, cr, uid, id, default=None,context={}): if not default: default = {} default.update({ 'state':'draft', 'shipped':False, 'invoiced':False, 'invoice_id':False, 'picking_ids':[], 'name': self.pool.get('ir.sequence').get(cr, uid, 'purchase.order'), }) return super(purchase_order, self).copy(cr, uid, id, default, context)
'grows': fields.function(_calculate, method=True, store=True, multi='dc', string='Gross Salary', type='float', digits=(16, int(config['price_accuracy']))), 'net': fields.function(_calculate, method=True, store=True, multi='dc', string='Net Salary', digits=(16, int(config['price_accuracy']))), 'allounce': fields.function(_calculate, method=True, store=True, multi='dc', string='Allowance', digits=(16, int(config['price_accuracy']))), 'deduction': fields.function(_calculate, method=True, store=True, multi='dc', string='Deduction', digits=(16, int(config['price_accuracy']))),
'grows': fields.function(_calculate, method=True, store=True, multi='dc', string='Gross Salary', type='float', digits=(16, 4)), 'net': fields.function(_calculate, method=True, store=True, multi='dc', string='Net Salary', digits=(16, 4)), 'allounce': fields.function(_calculate, method=True, store=True, multi='dc', string='Allowance', digits=(16, 4)), 'deduction': fields.function(_calculate, method=True, store=True, multi='dc', string='Deduction', digits=(16, 4)),
def _calculate(self, cr, uid, ids, field_names, arg, context): res = {} allounce = 0.0 deduction = 0.0 net = 0.0 grows = 0.0 for register in self.browse(cr, uid, ids, context): for slip in register.line_ids: allounce += slip.allounce deduction += slip.deduction net += slip.net grows += slip.grows
'leaves': fields.float('Leaved Deduction', readonly=True, digits=(16, 2)), 'basic': fields.float('Basic Salary - Leaves', readonly=True, digits=(16, 2)),
'leaves': fields.float('Leave Deductions', readonly=True, digits=(16, 2)), 'basic': fields.float('Net Basic', readonly=True, digits=(16, 2)),
def _calculate(self, cr, uid, ids, field_names, arg, context): res = {} for rs in self.browse(cr, uid, ids, context): allow = 0.0 deduct = 0.0 others = 0.0
for product_id in data['form']['products'][0][2]: value = sale_line_obj.product_id_change(cr, uid, [], pricelist, product_id, qty=1, partner_id=partner_id, fiscal_position=fpos)['value'] value['product_id'] = product_id value['order_id'] = new_id value['tax_id'] = [(6,0,value['tax_id'])] sale_line_obj.create(cr, uid, value)
if data['form']['products']: for product_id in data['form']['products'][0][2]: value = { 'price_unit': 0.0, 'product_id': product_id, 'order_id': new_id, } value.update( sale_line_obj.product_id_change(cr, uid, [], pricelist,product_id, qty=1, partner_id=partner_id, fiscal_position=fpos)['value'] ) value['tax_id'] = [(6,0,value['tax_id'])] sale_line_obj.create(cr, uid, value)
def _makeOrder(self, cr, uid, data, context): pool = pooler.get_pool(cr.dbname) mod_obj = pool.get('ir.model.data') result = mod_obj._get_id(cr, uid, 'sale', 'view_sales_order_filter') id = mod_obj.read(cr, uid, result, ['res_id']) case_obj = pool.get('crm.opportunity') sale_obj = pool.get('sale.order') partner_obj = pool.get('res.partner') sale_line_obj = pool.get('sale.order.line')
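Note: two idioms are at work in the replacement code: the values dict is seeded with product_id/order_id before merging the onchange result, so dict.update() lets the onchange output override only the keys it returns, and the tax ids are wrapped in (6, 0, ids), the OpenERP many2many command that replaces the whole linked set. A plain-dict sketch with hypothetical values:

value = {'price_unit': 0.0, 'product_id': 42, 'order_id': 7}
onchange_result = {'value': {'price_unit': 19.99, 'tax_id': [1, 3]}}

value.update(onchange_result['value'])       # onchange output overrides the seeds
value['tax_id'] = [(6, 0, value['tax_id'])]  # replace the m2m set wholesale
print(value)
# price_unit is now 19.99 and tax_id is [(6, 0, [1, 3])]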
cr, uid = self.get_node_cr(node)
cr = self.get_node_cr(node)
def mkdir(self, node, basename): """Create the specified directory.""" cr = False if not node: raise OSError(1, 'Operation not permited.') cr, uid = self.get_node_cr(node) try: basename =_to_unicode(basename) cdir = node.create_child_collection(cr, basename) self._log.debug("Created child dir: %r", cdir) except Exception,e: self._log.exception('Cannot create dir "%s" at node %s', basename, repr(node)) raise OSError(1, 'Operation not permited.') finally: if cr: cr.close()
res = [] conn = xmlrpclib.ServerProxy(self._uri+ '/xmlrpc/object') for obj in search_list: object_ids = execute ( conn,'execute',self._dbname,int(self._uid),self._pwd,'ir.model','search',[('model','=',obj)]) object_name = execute( conn,'execute',self._dbname,int(self._uid),self._pwd,'ir.model','read',object_ids,['name'])[0]['name'] if obj == "res.partner.address": ids = execute(conn,'execute',self._dbname,int(self._uid),self._pwd,obj,'search',['|',('name','ilike',ustr(search_text)),('email','ilike',ustr(search_text))]) recs = execute(conn,'execute',self._dbname,int(self._uid),self._pwd,obj,'read',ids,['id','name','street','city']) for rec in recs: name = ustr(rec['name']) if rec['street']: name += ', ' + ustr(rec['street']) if rec['city']: name += ', ' + ustr(rec['city']) res.append((obj,rec['id'],name,object_name)) else: ids = execute(conn,'execute',self._dbname,int(self._uid),self._pwd,obj,'search',[('name','ilike',ustr(search_text))]) recs = execute(conn,'execute',self._dbname,int(self._uid),self._pwd,obj,'read',ids,['id','name']) for rec in recs: name = ustr(rec['name']) res.append((obj,rec['id'],name,object_name)) return res
res = [] conn = xmlrpclib.ServerProxy(self._uri+ '/xmlrpc/object') for obj in search_list: object_ids = execute ( conn,'execute',self._dbname,int(self._uid),self._pwd,'ir.model','search',[('model','=',obj)]) object_name = execute( conn,'execute',self._dbname,int(self._uid),self._pwd,'ir.model','read',object_ids,['name'])[0]['name'] if obj == "res.partner.address": ids = execute(conn,'execute',self._dbname,int(self._uid),self._pwd,obj,'search',['|',('name','ilike',ustr(search_text)),('email','ilike',ustr(search_text))]) recs = execute(conn,'execute',self._dbname,int(self._uid),self._pwd,obj,'read',ids,['id','name','street','city']) for rec in recs: name = ustr(rec['name']) if rec['street']: name += ', ' + ustr(rec['street']) if rec['city']: name += ', ' + ustr(rec['city']) res.append((obj,rec['id'],name,object_name)) else: ids = execute(conn,'execute',self._dbname,int(self._uid),self._pwd,obj,'search',[('name','ilike',ustr(search_text))]) recs = execute(conn,'execute',self._dbname,int(self._uid),self._pwd,obj,'read',ids,['id','name']) for rec in recs: name = ustr(rec['name']) res.append((obj,rec['id'],name,object_name)) return res_id
def GetObjectItems(self, search_list=[], search_text=''): res = [] conn = xmlrpclib.ServerProxy(self._uri+ '/xmlrpc/object') for obj in search_list: object_ids = execute ( conn,'execute',self._dbname,int(self._uid),self._pwd,'ir.model','search',[('model','=',obj)]) object_name = execute( conn,'execute',self._dbname,int(self._uid),self._pwd,'ir.model','read',object_ids,['name'])[0]['name'] if obj == "res.partner.address": ids = execute(conn,'execute',self._dbname,int(self._uid),self._pwd,obj,'search',['|',('name','ilike',ustr(search_text)),('email','ilike',ustr(search_text))]) recs = execute(conn,'execute',self._dbname,int(self._uid),self._pwd,obj,'read',ids,['id','name','street','city']) for rec in recs: name = ustr(rec['name']) if rec['street']: name += ', ' + ustr(rec['street']) if rec['city']: name += ', ' + ustr(rec['city']) res.append((obj,rec['id'],name,object_name)) else: ids = execute(conn,'execute',self._dbname,int(self._uid),self._pwd,obj,'search',[('name','ilike',ustr(search_text))]) recs = execute(conn,'execute',self._dbname,int(self._uid),self._pwd,obj,'read',ids,['id','name']) for rec in recs: name = ustr(rec['name']) res.append((obj,rec['id'],name,object_name)) return res
conn = xmlrpclib.ServerProxy(self._uri+ '/xmlrpc/object') res_vals = [] mail_id = execute( conn, 'execute', self._dbname, int(self._uid), self._pwd, 'mailgate.message', 'search', [('message_id','=',message_id)]) if not mail_id: return None address = execute( conn, 'execute', self._dbname, int(self._uid), self._pwd, 'mailgate.message','read',mail_id[0],['model','res_id']) for key, vals in address.items(): res_vals.append([key,vals]) return res_vals
connector = xmlrpclib.ServerProxy(self._uri+ '/xmlrpc/object') res_vals = [] mail_id = execute( conn, 'execute', self._dbname, int(self._uid), self._pwd, 'mailgate.message', 'search', [('message_id','=',message_id)]) if not mail_id: return None address = execute( conn, 'execute', self._dbname, int(self._uid), self._pwd, 'mailgate.message','read',mail_id[0],['model','res_id']) for key, vals in address.items(): res_vals.append([key,vals]) return res_vals
def SearchEmailResources(self, message_id): conn = xmlrpclib.ServerProxy(self._uri+ '/xmlrpc/object') res_vals = [] mail_id = execute( conn, 'execute', self._dbname, int(self._uid), self._pwd, 'mailgate.message', 'search', [('message_id','=',message_id)]) if not mail_id: return None address = execute( conn, 'execute', self._dbname, int(self._uid), self._pwd, 'mailgate.message','read',mail_id[0],['model','res_id']) for key, vals in address.items(): res_vals.append([key,vals]) return res_vals
'price_subtotal': fields.function(_amount_line, method=True, string='Subtotal'),
'price_subtotal': fields.function(_amount_line, method=True, string='Subtotal', digits=(16, int(config['price_accuracy']))),
def _number_packages(self, cr, uid, ids, field_name, arg, context): res = {} for line in self.browse(cr, uid, ids): try: res[line.id] = int(line.product_uom_qty / line.product_packaging.qty) except: res[line.id] = 1 return res
title = link.renderContents()
title = unicode(link)
def html2plaintext(html, body_id=None, encoding='utf-8'): ## (c) Fry-IT, www.fry-it.com, 2007 ## <peter@fry-it.com> ## download here: http://www.peterbe.com/plog/html2plaintext """ from an HTML text, convert the HTML to plain text. If @body_id is provided then this is the tag where the body (not necessarily <body>) starts. """ try: from BeautifulSoup import BeautifulSoup, SoupStrainer, Comment except: return html urls = [] if body_id is not None: strainer = SoupStrainer(id=body_id) else: strainer = SoupStrainer('body') soup = BeautifulSoup(html, parseOnlyThese=strainer, fromEncoding=encoding) for link in soup.findAll('a'): title = link.renderContents() for url in [x[1] for x in link.attrs if x[0]=='href']: urls.append(dict(url=url, tag=str(link), title=title)) html = unicode(soup) url_index = [] i = 0 for d in urls: if d['title'] == d['url'] or 'http://'+d['title'] == d['url']: html = html.replace(d['tag'], d['url']) else: i += 1 html = html.replace(d['tag'], '%s [%s]' % (d['title'], i)) url_index.append(d['url']) html = html.replace('<strong>', '*').replace('</strong>', '*') html = html.replace('<b>', '*').replace('</b>', '*') html = html.replace('<h3>', '*').replace('</h3>', '*') html = html.replace('<h2>', '**').replace('</h2>', '**') html = html.replace('<h1>', '**').replace('</h1>', '**') html = html.replace('<em>', '/').replace('</em>', '/') # the only line breaks we respect is those of ending tags and # breaks html = html.replace('\n', ' ') html = html.replace('<br>', '\n') html = html.replace('<tr>', '\n') html = html.replace('</p>', '\n\n') html = re.sub('<br\s*/>', '\n', html) html = html.replace(' ' * 2, ' ') # for all other tags we failed to clean up, just remove then and # complain about them on the stderr def desperate_fixer(g): #print >>sys.stderr, "failed to clean up %s" % str(g.group()) return ' ' html = re.sub('<.*?>', desperate_fixer, html) # lstrip all lines html = '\n'.join([x.lstrip() for x in html.splitlines()]) for i, url in enumerate(url_index): if i == 0: html += '\n\n' html += '[%s] %s\n' % (i+1, url) return html
urls.append(dict(url=url, tag=str(link), title=title))
urls.append(dict(url=url, tag=unicode(link), title=title))
except UnicodeError: pass try: return s.decode('ascii') except UnicodeError: return s
except UnicodeError: pass return s.decode('latin1')
def _to_decode(self, s, charsets): for charset in charsets: if charset: try: return s.decode(charset) except UnicodeError: pass try: return s.decode('ascii') except UnicodeError: return s
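Note: the fallback change above stops trying ascii and returning raw bytes when every declared charset fails; it decodes as latin-1 instead, which maps all 256 byte values and therefore cannot raise. A standalone sketch of the cascade:

def to_unicode(raw, charsets=('utf-8',)):
    for charset in charsets:
        if not charset:
            continue
        try:
            return raw.decode(charset)
        except UnicodeError:
            pass
    # latin-1 accepts any byte sequence, so this fallback never fails.
    return raw.decode('latin1')

print(to_unicode(b'caf\xe9'))  # falls through utf-8, decodes via latin-1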
obj_payment_order.set_done(cr, uid, context['active_id'], context)
obj_payment_order.set_done(cr, uid, [context['active_id']], context)
def launch_wizard(self, cr, uid, ids, context=None): """ Search for a wizard to launch according to the type. If type is manual. just confirm the order. """ obj_payment_order = self.pool.get('payment.order')
if not journal_id or not journal_id:
if not partner_id or not journal_id:
def onchange_journal_voucher(self, cr, uid, ids, partner_id=False, journal_id=False, context={}): """price Returns a dict that contains new values and context @param partner_id: latest value from user input for field partner_id @param args: other arguments @param context: context arguments, like lang, time zone @return: Returns a dict which contains new values, and context """ default = { 'value':{}, } if not journal_id or not journal_id: return default partner_pool = self.pool.get('res.partner') journal_pool = self.pool.get('account.journal')
'description': body_data, 'history_line': [(0, 0, {'description': body_data, 'email': msg['From']})],
'description': body_data,
def msg_update(self, cr, uid, ids, msg, data={}, default_act='pending'): mailgate_obj = self.pool.get('mail.gateway') msg_actions, body_data = mailgate_obj.msg_act_get(msg) data.update({ 'description': body_data, 'history_line': [(0, 0, {'description': body_data, 'email': msg['From']})], }) act = 'case_'+default_act if 'state' in msg_actions: if msg_actions['state'] in ['draft','close','cancel','open','pending']: act = 'case_' + msg_actions['state'] for k1,k2 in [('cost','planned_cost'),('revenue','planned_revenue'),('probability','probability')]: try: data[k2] = float(msg_actions[k1]) except: pass
res = self.write(cr, uid, ids, data)
res = self.write(cr, uid, ids, data) cases = self.browse(cr, uid, [res]) self.__history(cr, uid, cases, _('Receive'), history=True, email=msg['From'])
cr.execute('SELECT p.id FROM account_fiscalyear AS f \ LEFT JOIN account_period AS p on p.fiscalyear_id=f.id \ WHERE p.id IN \ (SELECT id FROM account_period \ WHERE p.fiscalyear_id = f.id \ AND p.date_start IN \ (SELECT max(date_start) from account_period WHERE p.fiscalyear_id = f.id)\ OR p.date_stop IN \ (SELECT min(date_stop) from account_period WHERE p.fiscalyear_id = f.id)) \ AND f.id = ' + str(fiscalyear_id) + ' order by p.date_start')
cr.execute(''' SELECT * FROM (SELECT p.id FROM account_period p LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id) WHERE f.id = %s ORDER BY p.date_start ASC LIMIT 1) AS period_start UNION SELECT * FROM (SELECT p.id FROM account_period p LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id) WHERE f.id = %s AND p.date_start < NOW() ORDER BY p.date_stop DESC LIMIT 1) AS period_stop''', (fiscalyear_id, fiscalyear_id))
def onchange_filter(self, cr, uid, ids, filter='filter_no', fiscalyear_id=False, context=None): res = {} if filter == 'filter_no': res['value'] = {'period_from': False, 'period_to': False, 'date_from': False ,'date_to': False} if filter == 'filter_date': res['value'] = {'period_from': False, 'period_to': False, 'date_from': time.strftime('%Y-01-01'), 'date_to': time.strftime('%Y-%m-%d')} if filter == 'filter_period' and fiscalyear_id: start_period = end_period = False cr.execute('SELECT p.id FROM account_fiscalyear AS f \ LEFT JOIN account_period AS p on p.fiscalyear_id=f.id \ WHERE p.id IN \ (SELECT id FROM account_period \ WHERE p.fiscalyear_id = f.id \ AND p.date_start IN \ (SELECT max(date_start) from account_period WHERE p.fiscalyear_id = f.id)\ OR p.date_stop IN \ (SELECT min(date_stop) from account_period WHERE p.fiscalyear_id = f.id)) \ AND f.id = ' + str(fiscalyear_id) + ' order by p.date_start') periods = [i[0] for i in cr.fetchall()] if periods: start_period = periods[0] end_period = periods[1] res['value'] = {'period_from': start_period, 'period_to': end_period, 'date_from': False, 'date_to': False} return res
if periods:
if periods and len(periods) > 1:
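Note: the rewritten query binds fiscalyear_id through execute()'s parameter list instead of concatenating it into the SQL string, which closes an injection hole and leaves quoting to the driver; the follow-up change also checks that more than one period was returned before indexing both ends of the result. A runnable sketch of parameter binding, using sqlite3 in place of OpenERP's PostgreSQL cursor (sqlite3 uses ? placeholders where the server-side cursor uses %s; the table and rows are hypothetical):

import sqlite3

conn = sqlite3.connect(':memory:')
cr = conn.cursor()
cr.execute('CREATE TABLE account_period '
           '(id INTEGER, fiscalyear_id INTEGER, date_start TEXT)')
cr.executemany('INSERT INTO account_period VALUES (?, ?, ?)',
               [(1, 10, '2010-01-01'), (2, 10, '2010-02-01')])

fiscalyear_id = 10
# Never build the statement with string concatenation.
cr.execute('SELECT id FROM account_period WHERE fiscalyear_id = ? '
           'ORDER BY date_start ASC LIMIT 1', (fiscalyear_id,))
print(cr.fetchall())  # [(1,)]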
def onchange_company_id(self, cr, uid, ids, company_id, context=None):
def onchange_company_id(self, cr, uid, ids, company_id=False, context=None):
def onchange_company_id(self, cr, uid, ids, company_id, context=None): res = {} if context is None: context = {} if company_id: company = self.pool.get('res.company').browse(cr, uid, company_id, context=context) res.update({'bank': company.partner_id.bank_ids[0].bank.name}) return { 'value':res }
res.update({'bank': company.partner_id.bank_ids[0].bank.name})
if company.partner_id.bank_ids: res.update({'bank': company.partner_id.bank_ids[0].bank.name})
(ca_obj.campaign_id.fixed_cost / len(wi_ids))
((ca_obj.campaign_id.fixed_cost or 0.00) / len(wi_ids))
def _total_cost(self, cr, uid, ids, field_name, arg, context={}): """ @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of case and section Data’s IDs @param context: A standard dictionary for contextual values """ result = {} for ca_obj in self.browse(cr, uid, ids, context): wi_ids = self.pool.get('marketing.campaign.workitem').search(cr, uid, [('segment_id.campaign_id', '=', ca_obj.campaign_id.id)]) total_cost = ca_obj.activity_id.variable_cost + \ (ca_obj.campaign_id.fixed_cost / len(wi_ids)) result[ca_obj.id] = total_cost return result
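Note: unset float fields come back from browse() as False in this codebase, so fixed_cost / len(wi_ids) raised a TypeError whenever no fixed cost was entered; (fixed_cost or 0.00) substitutes a real float. A sketch with hypothetical numbers (the division would still fail on an empty workitem list, so the sketch guards that too):

def cost_per_workitem(fixed_cost, workitem_ids):
    if not workitem_ids:
        return 0.0
    # False/None coalesce to 0.0 before the division.
    return (fixed_cost or 0.0) / len(workitem_ids)

print(cost_per_workitem(False, [1, 2]))    # 0.0 instead of a TypeError
print(cost_per_workitem(90.0, [1, 2, 3]))  # 30.0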
if current.child_id: sums[current.id][fn] += sum(sums[child.id][fn] for child in current.child_id)
for child in current.child_id: if child.company_id.currency_id.id == current.company_id.currency_id.id: sums[current.id][fn] += sums[child.id][fn] else: sums[current.id][fn] += currency_obj.compute(cr, uid, child.company_id.currency_id.id, current.company_id.currency_id.id, sums[child.id][fn], context=context)
def __compute(self, cr, uid, ids, field_names, arg=None, context=None, query='', query_params=()): """ compute the balance, debit and/or credit for the provided account ids Arguments: `ids`: account ids `field_names`: the fields to compute (a list of any of 'balance', 'debit' and 'credit') `arg`: unused fields.function stuff `query`: additional query filter (as a string) `query_params`: parameters for the provided query string (__compute will handle their escaping) as a tuple """ mapping = { 'balance': "COALESCE(SUM(l.debit),0) " \ "- COALESCE(SUM(l.credit), 0) as balance", 'debit': "COALESCE(SUM(l.debit), 0) as debit", 'credit': "COALESCE(SUM(l.credit), 0) as credit" } #get all the necessary accounts children_and_consolidated = self._get_children_and_consol(cr, uid, ids, context=context) #compute for each account the balance/debit/credit from the move lines accounts = {} if children_and_consolidated: aml_query = self.pool.get('account.move.line')._query_get(cr, uid, context=context)
query = "l.date >= '%s' AND l.date <= '%s'" (st_date, end_date)
query = "l.date >= '%s' AND l.date <= '%s'" % (st_date, end_date)
def compute_total(self, cr, uid, ids, yr_st_date, yr_end_date, st_date, end_date, field_names, context={}): if not (st_date >= yr_st_date and end_date <= yr_end_date): return {} query = "l.date >= '%s' AND l.date <= '%s'" (st_date, end_date) return self.__compute(cr, uid, ids, field_names, context=context, query=query)
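Note: the removed line simply lost the % operator, so Python parsed "..." (st_date, end_date) as a call on a string object and raised TypeError: 'str' object is not callable. A two-line reproduction:

template = "l.date >= '%s' AND l.date <= '%s'"
# template('2010-01-01', '2010-12-31')   -> TypeError: 'str' object is not callable
query = template % ('2010-01-01', '2010-12-31')  # intended interpolation

Interpolating values into SQL this way still carries the injection risk fixed in the earlier onchange_filter record; bound parameters remain the safer pattern.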
def copy(self, cr, uid, id, default=None,context={}): raise osv.except_osv(_('Warning !'),_('You cannot duplicate the resource!'))
def copy(self, cr, uid, ids, default=None, context={}): vals = {} current_rec = self.read(cr, uid, ids, context=context) title = current_rec.get('title') + ' (Copy)' vals.update({'title':title}) return super(survey, self).copy(cr, uid, ids, vals, context=context)
def survey_cancel(self, cr, uid, ids, arg): self.write(cr, uid, ids, {'state': 'cancel' }) return True
def copy(self, cr, uid, id, default=None, context={}): raise osv.except_osv(_('Warning !'),_('You cannot duplicate the resource!'))
def copy(self, cr, uid, ids, default=None, context={}): vals = {} current_rec = self.read(cr, uid, ids, context=context) title = current_rec.get('title') + ' (Copy)' vals.update({'title':title}) return super(survey_page, self).copy(cr, uid, ids, vals, context=context)
def copy(self, cr, uid, id, default=None, context={}): raise osv.except_osv(_('Warning !'),_('You cannot duplicate the resource!'))
def set_context(self, objects, data, ids, report_type=None): super(pos_invoice, self).set_context(objects, data, ids, report_type)
def set_context(self, order, data, ids, report_type=None): super(pos_invoice, self).set_context(order, data, ids, report_type)
def set_context(self, objects, data, ids, report_type=None): super(pos_invoice, self).set_context(objects, data, ids, report_type) iids = [] nids = []
for order in objects: order.write({'nb_print': order.nb_print + 1})
if order.invoice_id and order.invoice_id not in iids: if not order.invoice_id: raise osv.except_osv(_('Error !'), _('Please create an invoice for this sale.')) iids.append(order.invoice_id) nids.append(order.invoice_id.id)
order.write({'nb_print': order.nb_print + 1}) if order.invoice_id and order.invoice_id not in iids: if not order.invoice_id: raise osv.except_osv(_('Error !'), _('Please create an invoice for this sale.')) iids.append(order.invoice_id) nids.append(order.invoice_id.id)
'company_id' : fields.many2one('res.company', 'Company', required=True),
'company_id' : fields.many2one('res.company', 'Company', required=False),
def interval_get(self, cr, uid, id, dt_from, hours, resource=False, byday=True): resource_cal_leaves = self.pool.get('resource.calendar.leaves') dt_leave = [] if not id: return [(dt_from,dt_from + mx.DateTime.RelativeDateTime(hours=int(hours)*3))] resource_leave_ids = resource_cal_leaves.search(cr, uid, [('calendar_id','=',id), '|', ('resource_id','=',False), ('resource_id','=',resource)]) res_leaves = resource_cal_leaves.read(cr, uid, resource_leave_ids, ['date_from', 'date_to']) for leave in res_leaves: dtf = mx.DateTime.strptime(leave['date_from'], '%Y-%m-%d %H:%M:%S') dtt = mx.DateTime.strptime(leave['date_to'], '%Y-%m-%d %H:%M:%S') no = dtt - dtf [dt_leave.append((dtf + mx.DateTime.RelativeDateTime(days=x)).strftime('%Y-%m-%d')) for x in range(int(no.days + 1))] dt_leave.sort() todo = hours cycle = 0 result = [] maxrecur = 100 current_hour = dt_from.hour while (todo>0) and maxrecur: cr.execute("select hour_from,hour_to from resource_calendar_week where dayofweek='%s' and calendar_id=%s order by hour_from", (dt_from.day_of_week,id)) for (hour_from,hour_to) in cr.fetchall(): leave_flag = False if (hour_to>current_hour) and (todo>0): m = max(hour_from, current_hour) if (hour_to-m)>todo: hour_to = m+todo dt_check = dt_from.strftime('%Y-%m-%d') for leave in dt_leave: if dt_check == leave: dt_check = mx.DateTime.strptime(dt_check, "%Y-%m-%d") + mx.DateTime.RelativeDateTime(days=1) leave_flag = True if leave_flag: break else: d1 = mx.DateTime.DateTime(dt_from.year, dt_from.month, dt_from.day, int(math.floor(m)), int((m%1) * 60)) d2 = mx.DateTime.DateTime(dt_from.year, dt_from.month, dt_from.day, int(math.floor(hour_to)), int((hour_to%1) * 60)) result.append((d1, d2)) current_hour = hour_to todo -= (hour_to - m) dt_from += mx.DateTime.RelativeDateTime(days=1) current_hour = 0 maxrecur -= 1 return result
if basic_alarm and meeting.state in ('open'):
if basic_alarm:
def do_alarm_create(self, cr, uid, ids, context={}): alarm_obj = self.pool.get('calendar.alarm') model_obj = self.pool.get('ir.model') model_id = model_obj.search(cr, uid, [('model','=',self._name)])[0] for meeting in self.browse(cr, uid, ids): self.do_alarm_unlink(cr, uid, [meeting.id]) basic_alarm = meeting.alarm_id if basic_alarm and meeting.state in ('open'): vals = { 'action': 'display', 'description': meeting.description, 'name': meeting.name, 'attendee_ids': [(6,0, map(lambda x:x.id, meeting.attendee_ids))], 'trigger_related': basic_alarm.trigger_related, 'trigger_duration': basic_alarm.trigger_duration, 'trigger_occurs': basic_alarm.trigger_occurs, 'trigger_interval': basic_alarm.trigger_interval, 'duration': basic_alarm.duration, 'repeat': basic_alarm.repeat, 'state' : 'run', 'event_date' : meeting.date, 'res_id' : meeting.id, 'model_id' : model_id, 'user_id' : uid } alarm_id = alarm_obj.create(cr, uid, vals) cr.execute('Update crm_meeting set caldav_alarm_id=%s where id=%s' % (alarm_id, meeting.id)) cr.commit() return True
val['alarm_id'] = self.browse(cr, uid, is_exists).caldav_alarm_id.alarm_id.id
cal_alarm = self.browse(cr, uid, is_exists).caldav_alarm_id val['alarm_id'] = cal_alarm.alarm_id and cal_alarm.alarm_id.id or False
def import_cal(self, cr, uid, data, context={}): file_content = base64.decodestring(data) event_obj = self.pool.get('basic.calendar.event') event_obj.__attribute__.update(self.__attribute__)
if caldav_alarm_id: alarm_id = self.browse(cr, uid, is_exists).caldav_alarm_id.alarm_id self.write(cr, uid, case_id, {'alarm_id': alarm_id})
if val['caldav_alarm_id']: cal_alarm = self.browse(cr, uid, case_id).caldav_alarm_id alarm_id = cal_alarm.alarm_id and cal_alarm.alarm_id.id or False self.write(cr, uid, [case_id], {'alarm_id': alarm_id})
id = isinstance(domain[2], list) and int(domain[2][0]) or int(domain[2]) if id: if self.pool.get('project.project').read(cr, user, id, ['state'])['state'] == 'template': args.append(['active', '=', False])
id = isinstance(domain[2], list) and domain[2][0] or domain[2] if id and isinstance(id, (long, int)): if obj_project.read(cr, user, id, ['state'])['state'] == 'template': args.append(('active', '=', False))
def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False): for domain in args: if domain[0] == 'project_id' and (not isinstance(domain[2], str)): id = isinstance(domain[2], list) and int(domain[2][0]) or int(domain[2]) if id: if self.pool.get('project.project').read(cr, user, id, ['state'])['state'] == 'template': args.append(['active', '=', False]) return super(task, self).search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
pick_name=self.pool.get('ir.sequence').get(cr, uid, 'stock.picking.out')
def create_picking(self, cr, uid, ids, context={}): """Create a picking for each order and validate it.""" picking_obj = self.pool.get('stock.picking')
'name': fields.char('Last Name', size=30, required=True), 'first_name': fields.char('First Name', size=30), 'mobile': fields.char('Mobile', size=30),
'name': fields.char('Last Name', size=64, required=True), 'first_name': fields.char('First Name', size=64), 'mobile': fields.char('Mobile', size=64),
def _main_job(self, cr, uid, ids, fields, arg, context=None): """ @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of partner contact’s IDs @fields: Get Fields @param context: A standard dictionary for contextual values @param arg: list of tuples of form [(‘name_of_the_field’, ‘operator’, value), ...]. """
'function': fields.char('Partner Function', size=34, help="Function of this contact with this partner"),
'function': fields.char('Partner Function', size=64, help="Function of this contact with this partner"),
def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False): """ search parnter job @param self: The object pointer @param cr: the current row, from the database cursor, @param user: the current user @param args: list of tuples of form [(‘name_of_the_field’, ‘operator’, value), ...]. @param offset: The Number of Results to Pass @param limit: The Number of Results to Return @param context: A standard dictionary for contextual values """
price_type.field, context=context)[prod_id], round=False, context=context)
price_type.field)[prod_id], round=False, context=context)
def price_get(self, cr, uid, ids, prod_id, qty, partner=None, context=None): ''' context = { 'uom': Unit of Measure (int), 'partner': Partner ID (int), 'date': Date of the pricelist (%Y-%m-%d), } ''' context = context or {} currency_obj = self.pool.get('res.currency') product_obj = self.pool.get('product.product') supplierinfo_obj = self.pool.get('product.supplierinfo') price_type_obj = self.pool.get('product.price.type')
self.log.debug('get_objects() model_list: %s', ','.join(model_list))
self.log.debug('get_objects() model_list: %s', ','.join(map(str, model_list)))
def get_objects(self, cr, uid, module): # This function returns all object of the given module.. pool = pooler.get_pool(cr.dbname) ids2 = pool.get('ir.model.data').search(cr, uid, [('module', '=', module), ('model', '=', 'ir.model')]) model_list = [] model_data = pool.get('ir.model.data').browse(cr, uid, ids2) for model in model_data: model_list.append(model.res_id) self.log.debug('get_objects() model_list: %s', ','.join(model_list)) obj_list = [] for mod in pool.get('ir.model').browse(cr, uid, model_list): obj_list.append(str(mod.model)) self.log.debug('get_objects() obj_list: %s', ','.join(obj_list)) return obj_list
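Note: str.join only accepts strings, and model_list holds integer res_ids, so the logging line raised a TypeError; mapping str over the list first is the fix. In isolation:

model_list = [12, 17, 23]
# ','.join(model_list)                  -> TypeError: expected a string
print(','.join(map(str, model_list)))   # '12,17,23'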
obj = self.pool.get(cal_children[child.name.lower()]) if hasattr(obj, 'check_import'): obj.check_import(cr, uid, vals, context=context) else:
objs.append(cal_children[child.name.lower()]) for obj_name in list(set(objs)): obj = self.pool.get(obj_name) if hasattr(obj, 'check_import'): obj.check_import(cr, uid, vals, context=context) checked = True if not checked:
def import_cal(self, cr, uid, content, data_id=None, context=None): """ Import Calendar @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param data_id: Get Data’s ID or False @param context: A standard dictionary for contextual values """
progress = dict(map(lambda x: (x[0], (x[1], x[2], x[3])), cr.fetchall()))
progress = dict(map(lambda x: (x[0], (x[1] or 0.0, x[2] or 0.0, x[3] or 0.0)), cr.fetchall()))
def _get_all_child_projects(ids): """Recursively get child project ids""" child_ids = flatten([project_hierarchy.get(idn, []) for idn in ids]) if child_ids: child_ids = _get_all_child_projects(child_ids) return ids + child_ids
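Note: SQL aggregates such as SUM return NULL (fetched as None) for projects with no matching tasks, and None later poisons the hour arithmetic; the fix coalesces each value to 0.0 while the progress dict is built. A sketch with hypothetical fetchall() rows:

rows = [(1, 10.0, 4.0, 2.0), (2, None, None, None)]  # e.g. cr.fetchall()
progress = dict((row[0], (row[1] or 0.0, row[2] or 0.0, row[3] or 0.0))
                for row in rows)
print(progress)  # {1: (10.0, 4.0, 2.0), 2: (0.0, 0.0, 0.0)}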
'planned_hours': fields.function(_progress_rate, multi="progress", method=True, string='Planned Time', help="Sum of planned hours of all tasks related to this project and its child projects.", store=True), 'effective_hours': fields.function(_progress_rate, multi="progress", method=True, string='Time Spent', help="Sum of spent hours of all tasks related to this project and its child projects.", store=True), 'total_hours': fields.function(_progress_rate, multi="progress", method=True, string='Total Time', help="Sum of total hours of all tasks related to this project and its child projects.", store=True), 'progress_rate': fields.function(_progress_rate, multi="progress", method=True, string='Progress', type='float', group_operator="avg", help="Percent of tasks closed according to the total of tasks todo.", store=True),
'planned_hours': fields.function(_progress_rate, multi="progress", method=True, string='Planned Time', help="Sum of planned hours of all tasks related to this project and its child projects.", store = { 'project.project': (lambda self, cr, uid, ids, c={}: ids, ['tasks'], 10), 'project.task': (_get_project_task, ['planned_hours', 'effective_hours', 'remaining_hours', 'total_hours', 'progress', 'delay_hours'], 10), 'project.task.work': (_get_project_work, ['hours'], 10), }), 'effective_hours': fields.function(_progress_rate, multi="progress", method=True, string='Time Spent', help="Sum of spent hours of all tasks related to this project and its child projects.", store = { 'project.project': (lambda self, cr, uid, ids, c={}: ids, ['tasks'], 10), 'project.task': (_get_project_task, ['planned_hours', 'effective_hours', 'remaining_hours', 'total_hours', 'progress', 'delay_hours'], 10), 'project.task.work': (_get_project_work, ['hours'], 10), }), 'total_hours': fields.function(_progress_rate, multi="progress", method=True, string='Total Time', help="Sum of total hours of all tasks related to this project and its child projects.", store = { 'project.project': (lambda self, cr, uid, ids, c={}: ids, ['tasks'], 10), 'project.task': (_get_project_task, ['planned_hours', 'effective_hours', 'remaining_hours', 'total_hours', 'progress', 'delay_hours'], 10), 'project.task.work': (_get_project_work, ['hours'], 10), }), 'progress_rate': fields.function(_progress_rate, multi="progress", method=True, string='Progress', type='float', group_operator="avg", help="Percent of tasks closed according to the total of tasks todo.", store = { 'project.project': (lambda self, cr, uid, ids, c={}: ids, ['tasks'], 10), 'project.task': (_get_project_task, ['planned_hours', 'effective_hours', 'remaining_hours', 'total_hours', 'progress', 'delay_hours'], 10), 'project.task.work': (_get_project_work, ['hours'], 10), }),
def unlink(self, cr, uid, ids, *args, **kwargs): for proj in self.browse(cr, uid, ids): if proj.tasks: raise osv.except_osv(_('Operation Not Permitted !'), _('You can not delete a project with tasks. I suggest you to deactivate it.')) return super(project, self).unlink(cr, uid, ids, *args, **kwargs)
'effective_hours': fields.function(_hours_get, method=True, string='Hours Spent', multi='hours', store=True, help="Computed using the sum of the task work done."),
'effective_hours': fields.function(_hours_get, method=True, string='Hours Spent', multi='hours', help="Computed using the sum of the task work done.", store = { 'project.task': (lambda self, cr, uid, ids, c={}: ids, ['work_ids'], 10), 'project.task.work': (_get_task, ['hours'], 10), }),
def _is_template(self, cr, uid, ids, field_name, arg, context=None): res = {} for task in self.browse(cr, uid, ids, context=context): res[task.id] = True if task.project_id: if task.project_id.active == False or task.project_id.state == 'template': res[task.id] = False return res
'total_hours': fields.function(_hours_get, method=True, string='Total Hours', multi='hours', store=True, help="Computed as: Time Spent + Remaining Time."), 'progress': fields.function(_hours_get, method=True, string='Progress (%)', multi='hours', group_operator="avg", store=True, help="Computed as: Time Spent / Total Time."), 'delay_hours': fields.function(_hours_get, method=True, string='Delay Hours', multi='hours', store=True, help="Computed as difference of the time estimated by the project manager and the real time to close the task."),
'total_hours': fields.function(_hours_get, method=True, string='Total Hours', multi='hours', help="Computed as: Time Spent + Remaining Time.", store = { 'project.task': (lambda self, cr, uid, ids, c={}: ids, ['work_ids'], 10), 'project.task.work': (_get_task, ['hours'], 10), }), 'progress': fields.function(_hours_get, method=True, string='Progress (%)', multi='hours', group_operator="avg", help="Computed as: Time Spent / Total Time.", store = { 'project.task': (lambda self, cr, uid, ids, c={}: ids, ['work_ids'], 10), 'project.task.work': (_get_task, ['hours'], 10), }), 'delay_hours': fields.function(_hours_get, method=True, string='Delay Hours', multi='hours', help="Computed as difference of the time estimated by the project manager and the real time to close the task.", store = { 'project.task': (lambda self, cr, uid, ids, c={}: ids, ['work_ids'], 10), 'project.task.work': (_get_task, ['hours'], 10), }),
dt_start = datetime.datetime.strptime(leaves[i]['date_from'], '%Y-%m-%d %H:%M:%S') dt_end = datetime.datetime.strptime(leaves[i]['date_to'], '%Y-%m-%d %H:%M:%S')
dt_start = datetime.strptime(leaves[i]['date_from'], '%Y-%m-%d %H:%M:%S') dt_end = datetime.strptime(leaves[i]['date_to'], '%Y-%m-%d %H:%M:%S')
def compute_vacation(self, cr, uid, calendar_id, resource_id=False, resource_calendar=False, context=None): """ Compute the vacation from the working calendar of the resource. @param calendar_id : working calendar of the project @param resource_id : resource working on phase/task @param resource_calendar : working calendar of the resource """ if context is None: context = {} resource_calendar_leaves_pool = self.pool.get('resource.calendar.leaves') leave_list = [] if resource_id: leave_ids = resource_calendar_leaves_pool.search(cr, uid, ['|', ('calendar_id', '=', calendar_id), ('calendar_id', '=', resource_calendar), ('resource_id', '=', resource_id) ], context=context) else: leave_ids = resource_calendar_leaves_pool.search(cr, uid, [('calendar_id', '=', calendar_id), ('resource_id', '=', False) ], context=context) leaves = resource_calendar_leaves_pool.read(cr, uid, leave_ids, ['date_from', 'date_to'], context=context) for i in range(len(leaves)): dt_start = datetime.datetime.strptime(leaves[i]['date_from'], '%Y-%m-%d %H:%M:%S') dt_end = datetime.datetime.strptime(leaves[i]['date_to'], '%Y-%m-%d %H:%M:%S') no = dt_end - dt_start [leave_list.append((dt_start + datetime.timedelta(days=x)).strftime('%Y-%m-%d')) for x in range(int(no.days + 1))] leave_list.sort() return leave_list
[leave_list.append((dt_start + datetime.timedelta(days=x)).strftime('%Y-%m-%d')) for x in range(int(no.days + 1))]
[leave_list.append((dt_start + timedelta(days=x)).strftime('%Y-%m-%d')) for x in range(int(no.days + 1))]
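Note: the two replacements in this record only make sense together with a class-style import: once the module does from datetime import datetime, timedelta, the module-qualified spellings datetime.datetime.strptime and datetime.timedelta no longer resolve. Both styles side by side (a sketch; the module header itself is not shown in the record):

import datetime as _dt                       # module-style import
d1 = _dt.datetime.strptime('2010-06-01 08:00:00', '%Y-%m-%d %H:%M:%S')

from datetime import datetime, timedelta     # class-style import, as the fix assumes
d2 = datetime.strptime('2010-06-01 08:00:00', '%Y-%m-%d %H:%M:%S')
assert d1 == d2
assert d1 + timedelta(days=1) > d1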
return super(account_invoice,self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
res = super(account_invoice,self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu) for field in res['fields']: type = context.get('journal_type', 'sale') if field == 'journal_id': journal_select = self.pool.get('account.journal')._name_search(cr, uid, '', [('type', '=', type)], context=context, limit=None, name_get_uid=1) res['fields'][field]['selection'] = journal_select return res
def fields_view_get(self, cr, uid, view_id=None, view_type=False, context=None, toolbar=False, submenu=False): if context.get('active_model','') in ['res.partner']: partner = self.pool.get(context['active_model']).read(cr,uid,context['active_ids'],['supplier','customer'])[0] if not view_type: view_id = self.pool.get('ir.ui.view').search(cr,uid,[('name','=','account.invoice.tree')])[0] view_type = 'tree' if view_type == 'form': if partner['supplier'] and not partner['customer']: view_id = self.pool.get('ir.ui.view').search(cr,uid,[('name','=','account.invoice.supplier.form')])[0] else: view_id = self.pool.get('ir.ui.view').search(cr,uid,[('name','=','account.invoice.form')])[0] return super(account_invoice,self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
def read_group(self, cr, uid, domain, *args, **kwargs): todel=[]
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
def read_group(self, cr, uid, domain, *args, **kwargs): todel=[] fiscalyear_obj = self.pool.get('account.fiscalyear') period_obj = self.pool.get('account.period') for arg in domain: if arg[0] == 'period_id' and arg[2] == 'current_period': current_period = period_obj.find(cr, uid)[0] domain.append(['period_id','in',[current_period]]) todel.append(arg) break elif arg[0] == 'period_id' and arg[2] == 'current_year': current_year = fiscalyear_obj.find(cr, uid) ids = fiscalyear_obj.read(cr, uid, [current_year], ['period_ids'])[0]['period_ids'] domain.append(['period_id','in',ids]) todel.append(arg) for a in [['period_id','in','current_year'], ['period_id','in','current_period']]: if a in domain: domain.remove(a) return super(account_entries_report, self).read_group(cr, uid, domain, *args, **kwargs)
for arg in domain: if arg[0] == 'period_id' and arg[2] == 'current_period': current_period = period_obj.find(cr, uid)[0] domain.append(['period_id','in',[current_period]]) todel.append(arg) break elif arg[0] == 'period_id' and arg[2] == 'current_year': current_year = fiscalyear_obj.find(cr, uid) ids = fiscalyear_obj.read(cr, uid, [current_year], ['period_ids'])[0]['period_ids'] domain.append(['period_id','in',ids]) todel.append(arg) for a in [['period_id','in','current_year'], ['period_id','in','current_period']]: if a in domain: domain.remove(a) return super(account_entries_report, self).read_group(cr, uid, domain, *args, **kwargs)
if context.get('period', False) == 'current_period': current_period = period_obj.find(cr, uid)[0] domain.append(['period_id','in',[current_period]]) elif context.get('year', False) == 'current_year': current_year = fiscalyear_obj.find(cr, uid) ids = fiscalyear_obj.read(cr, uid, [current_year], ['period_ids'])[0]['period_ids'] domain.append(['period_id','in',ids]) else: domain = domain return super(account_entries_report, self).read_group(cr, uid, domain, fields, groupby, offset, limit, context, orderby)
(context: identical to the read_group method shown above)
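The pair of changes above swaps the symbolic tokens 'current_period' and 'current_year' in the search domain for concrete period ids before handing off to the parent read_group. A minimal standalone sketch of that rewriting, with find_current_period and find_current_year_periods as hypothetical stand-ins for the account.period and account.fiscalyear lookups:

def rewrite_period_domain(domain, find_current_period, find_current_year_periods):
    """Replace symbolic period filters with concrete id lists.

    `domain` is a list of [field, operator, value] triples; the two
    lookup callables are hypothetical stand-ins for the ORM calls.
    """
    rewritten = []
    for field, op, value in domain:
        if field == 'period_id' and value == 'current_period':
            rewritten.append(['period_id', 'in', [find_current_period()]])
        elif field == 'period_id' and value == 'current_year':
            rewritten.append(['period_id', 'in', find_current_year_periods()])
        else:
            rewritten.append([field, op, value])
    return rewritten

# usage:
# domain = [['period_id', 'in', 'current_period'], ['state', '=', 'posted']]
# rewrite_period_domain(domain, lambda: 7, lambda: [1, 2, 3])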
view_id = self.pool.get('ir.ui.view').search(cr, uid, [('name', '=', 'account.invoice.tree')])[0]
view_id = self.pool.get('ir.ui.view').search(cr, uid, [('name', '=', 'account.invoice.tree')])
def fields_view_get(self, cr, uid, view_id=None, view_type=False, context=None, toolbar=False, submenu=False):
    journal_obj = self.pool.get('account.journal')
    if context is None:
        context = {}
    if context.get('active_model', '') in ['res.partner'] and context.get('active_ids', False) and context['active_ids']:
        partner = self.pool.get(context['active_model']).read(cr, uid, context['active_ids'], ['supplier','customer'])[0]
        if not view_type:
            view_id = self.pool.get('ir.ui.view').search(cr, uid, [('name', '=', 'account.invoice.tree')])[0]
            view_type = 'tree'
        if view_type == 'form':
            if partner['supplier'] and not partner['customer']:
                view_id = self.pool.get('ir.ui.view').search(cr, uid, [('name', '=', 'account.invoice.supplier.form')])[0]
            else:
                view_id = self.pool.get('ir.ui.view').search(cr, uid, [('name', '=', 'account.invoice.form')])[0]
    res = super(account_invoice, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
    type = context.get('journal_type', 'sale')
    for field in res['fields']:
        if field == 'journal_id':
            journal_select = journal_obj._name_search(cr, uid, '', [('type', '=', type)], context=context, limit=None, name_get_uid=1)
            res['fields'][field]['selection'] = journal_select
view_id = self.pool.get('ir.ui.view').search(cr,uid,[('name', '=', 'account.invoice.supplier.form')])[0]
view_id = self.pool.get('ir.ui.view').search(cr,uid,[('name', '=', 'account.invoice.supplier.form')])
(context: identical to the fields_view_get method shown above)
view_id = self.pool.get('ir.ui.view').search(cr,uid,[('name', '=', 'account.invoice.form')])[0]
view_id = self.pool.get('ir.ui.view').search(cr, uid, [('name', '=', 'account.invoice.form')])
if view_id and isinstance(view_id, (list, tuple)):
    view_id = view_id[0]
(context: identical to the fields_view_get method shown above)
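The replacement above stops blindly indexing the result of search with [0], which raises IndexError when no view named account.invoice.form exists. A small sketch of the same guard as a reusable helper (first_id is my own name, not part of the framework):

def first_id(result):
    """Return the first id from an ORM search result, or False if empty.

    `search` returns a list of ids; indexing an empty list with [0]
    raises IndexError, so fall back to False (the ORM's "no id" value).
    """
    if result and isinstance(result, (list, tuple)):
        return result[0]
    return False

# usage:
# view_id = first_id(view_obj.search(cr, uid, [('name', '=', 'account.invoice.form')]))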
if not context:
if context is None:
def write(self, cr, uid, ids, vals, context=None, check=True, update_check=True):
    if not context:
        context = {}
    if vals.get('account_tax_id', False):
        raise osv.except_osv(_('Unable to change tax !'), _('You can not change the tax, you should remove and recreate lines !'))
cr.execute('select id, state, name from account_move where journal_id=%s and period_id=%s order by id limit 1', (context['journal_id'], context['period_id']))
res = cr.fetchone()
res = self._check_moves(cr, uid, context)
def create(self, cr, uid, vals, context=None, check=True):
    if not context:
        context = {}
    account_obj = self.pool.get('account.account')
    tax_obj = self.pool.get('account.tax')
    if ('account_id' in vals) and not account_obj.read(cr, uid, vals['account_id'], ['active'])['active']:
        raise osv.except_osv(_('Bad account!'), _('You can not use an inactive account!'))
    if 'journal_id' in vals and 'journal_id' not in context:
        context['journal_id'] = vals['journal_id']
    if 'period_id' in vals and 'period_id' not in context:
        context['period_id'] = vals['period_id']
    if ('journal_id' not in context) and ('move_id' in vals) and vals['move_id']:
        m = self.pool.get('account.move').browse(cr, uid, vals['move_id'])
        context['journal_id'] = m.journal_id.id
        context['period_id'] = m.period_id.id
if res[1] != 'draft':
    raise osv.except_osv(_('UserError'),
            _('The Ledger Posting (%s) for centralisation has been confirmed!') % res[2])
(context: identical to the create method shown above)
journal_list = journal_pool.name_search(cr, uid, '', [], context=context)
type_search = {
    'bank': [('type','in',['bank','cash'])],
    'cash': [('type','in',['bank','cash'])],
    'sale': [('type','in',['sale','purchase_refund'])],
    'purchase': [('type','in',['purchase','sale_refund'])],
    'expense': [('type','in',['purchase'])],
    'sale_refund': [('type','in',['sale','purchase_refund'])],
    'purchase_refund': [('type','in',['purchase','sale_refund'])],
}
domain = type_search.get(context.get('journal_type'))
journal_list = journal_pool.name_search(cr, uid, '', domain)
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
    """
    Returns views and fields for the current model, where the view returned
    depends on {view_type}.
    @param view_id: id of the view to load, if any
    @param view_type: view type; one of (form, tree, graph, calendar, gantt, search, mdx)
    @param context: context arguments, like lang, time zone
    @param toolbar: if True, include the reports, wizards and links related to the current model
    @return: a dict with the definition of fields, views and toolbars
    """
    data_pool = self.pool.get('ir.model.data')
    journal_pool = self.pool.get('account.journal')
    voucher_type = {
        'sale': 'view_sale_receipt_form',
        'purchase': 'view_purchase_receipt_form',
        'payment': 'view_vendor_payment_form',
        'receipt': 'view_vendor_receipt_form',
    }
    if view_type == 'form':
        tview = voucher_type.get(context.get('type'))
        tview = tview or 'view_voucher_form'
        result = data_pool._get_id(cr, uid, 'account_voucher', tview)
        view_id = data_pool.browse(cr, uid, result, context=context).res_id
    res = super(account_voucher, self).fields_view_get(cr, uid, view_id, view_type, context, toolbar, submenu)
    # Restrict the list of journals shown in the search view
    if view_type == 'search':
        journal_list = journal_pool.name_search(cr, uid, '', [], context=context)
        res['fields']['journal_id']['selection'] = journal_list
    return res
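The new search-view branch above filters the selectable journals through a dict keyed by the journal_type in the context; an unknown type falls back to an unrestricted lookup because dict.get returns None. A standalone sketch of that dispatch over plain dicts, with a list filter standing in for name_search:

TYPE_SEARCH = {
    'bank': ['bank', 'cash'],
    'cash': ['bank', 'cash'],
    'sale': ['sale', 'purchase_refund'],
    'purchase': ['purchase', 'sale_refund'],
}

def journals_for(journal_type, journals):
    """Filter `journals` (dicts with a 'type' key) by the context type.

    Unknown types fall through to the full list, mirroring
    type_search.get() returning None and name_search getting no domain.
    """
    allowed = TYPE_SEARCH.get(journal_type)
    if allowed is None:
        return list(journals)
    return [j for j in journals if j['type'] in allowed]

# journals_for('sale', [{'type': 'sale'}, {'type': 'bank'}])  -> [{'type': 'sale'}]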
price_type.field)[prod_id], round=False)
price_type.field, context=context)[prod_id], round=False, context=context)
def price_get(self, cr, uid, ids, prod_id, qty, partner=None, context=None):
    '''
    context = {
        'uom': Unit of Measure (int),
        'partner': Partner ID (int),
        'date': Date of the pricelist (%Y-%m-%d),
    }
    '''
    context = context or {}
    currency_obj = self.pool.get('res.currency')
    product_obj = self.pool.get('product.product')
    supplierinfo_obj = self.pool.get('product.supplierinfo')
    price_type_obj = self.pool.get('product.price.type')
'bank_account': fields.char('Bank Account', size=64),
'partner_id': fields.related('company_id', 'partner_id', type='many2one', relation='res.partner', readonly=True),
def job_open(self, cr, uid, ids, *args):
    self.write(cr, uid, ids, {'state': 'open'})
    return True
res = self.onchange_chart_id(cr, uid, chart_id, context=context)
res = self.onchange_chart_id(cr, uid, [], chart_id, context=context)
def _get_def_reserve_account(self, cr, uid, context=None):
    chart_id = self._get_account(cr, uid, context=context)
    # Reuse the onchange function, for symmetry
    res = self.onchange_chart_id(cr, uid, chart_id, context=context)
    if not res:
        return False
    return res['value']['reserve_account_id']
def onchange_chart_id(self, cr, uid, chart_id, context=None):
def onchange_chart_id(self, cr, uid, ids, chart_id, context=None):
def onchange_chart_id(self, cr, uid, chart_id, context=None):
    if not chart_id:
        return False
    account = self.pool.get('account.account').browse(cr, uid, chart_id, context=context)
    if not account.company_id.property_reserve_and_surplus_account:
        return False  # We cannot raise an exception, because that's before the wizard
    return {'value': {'reserve_account_id': account.company_id.property_reserve_and_surplus_account.id}}
return False
return {}
(context: identical to the onchange_chart_id method shown above)
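These rows change the onchange's empty result from False to a dict so every caller can treat the return value uniformly. A minimal sketch of the convention (lookup_reserve_account is a hypothetical stand-in for the browse chain on the chart's company):

def onchange_chart(chart_id, lookup_reserve_account):
    """Always return a dict, so callers never have to type-check."""
    if not chart_id:
        return {}
    reserve = lookup_reserve_account(chart_id)
    if not reserve:
        # cannot raise here: this runs before the wizard is displayed
        return {'value': {'reserve_account_id': False}}
    return {'value': {'reserve_account_id': reserve}}

# callers can now write:
# res = onchange_chart(chart_id, lookup)
# account_id = res.get('value', {}).get('reserve_account_id', False)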
return False
return { 'value': {'reserve_account_id': False}}
(context: identical to the onchange_chart_id method shown above)
progress = len(update_docs)/len(proc_change.process_document_ids)
if proc_change.process_document_ids:
    progress = len(update_docs)/len(proc_change.process_document_ids)
def _get_progress(self, cr, uid, ids, field_name, arg, context={}):
    result = {}
    update_docs = []
    for proc_change in self.browse(cr, uid, ids):
        for doc in proc_change.process_document_ids:
            if doc.state in ('to_update', 'change_propose'):
                update_docs.append(doc)
        progress = len(update_docs)/len(proc_change.process_document_ids)
        result[proc_change.id] = progress
    return result
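The guard added above only prevents the ZeroDivisionError. Two more problems remain in the surrounding method: under Python 2, len(...)/len(...) is integer division and almost always yields 0, and update_docs is accumulated across records instead of per record (so progress is also left unbound when the guard skips the division). A hedged corrected sketch over plain data, not the ORM method:

def get_progress(changes):
    """Fraction of documents still pending, per change.

    `changes` maps change id -> list of document states; this is a
    standalone sketch of the intended computation.
    """
    result = {}
    for change_id, states in changes.items():
        pending = [s for s in states if s in ('to_update', 'change_propose')]
        progress = 0.0                 # defined even when there are no documents
        if states:                     # avoid ZeroDivisionError
            progress = float(len(pending)) / len(states)  # float division under Python 2
        result[change_id] = progress
    return result

# get_progress({1: ['to_update', 'done'], 2: []})  -> {1: 0.5, 2: 0.0}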
phase_type_obj = self.pool.get('document.change.process.phase.type')
document_type_obj = self.pool.get('document.change.type')
def generate_phases(self, cr, uid, ids, *args):
    phase_obj = self.pool.get('document.change.process.phase')
    phase_type_obj = self.pool.get('document.change.process.phase.type')
    document_type_obj = self.pool.get('document.change.type')
    directory_obj = self.pool.get('document.directory')
    document_obj = self.pool.get('ir.attachment')
    new_doc_ids = []
    for process in self.browse(cr, uid, ids):
        if process.process_model_id:
            directory_ids = directory_obj.search(cr, uid, [('parent_id','child_of',process.structure_id and process.structure_id.id)])
            for phase_type_id in process.process_model_id.phase_type_ids:
                phase_value = {
                    'name': '%s-%s' % (phase_type_id.name, process.name),
                    'phase_type_id': phase_type_id.id,
                    'process_id': process.id
                }
                phase_id = phase_obj.create(cr, uid, phase_value)
                cr.execute('select document_type_id from document_type_phase_type_rel where phase_type_id = %s' % phase_type_id.id)
                document_type_ids = map(lambda x: x[0], cr.fetchall())
                document_ids = document_obj.search(cr, uid, [('parent_id','in',directory_ids), ('change_type_id','in',document_type_ids)])
                for document_id in document_ids:
                    vals = {'process_phase_id': phase_id}
                    if process.pending_directory_id:
                        vals.update({'parent_id': process.pending_directory_id.id})
                    new_doc_ids.append(document_obj.copy(cr, uid, document_id, vals))
                phase_obj.write(cr, uid, [phase_id], {'phase_document_ids': [(6,0,document_ids)]})
        self.write(cr, uid, [process.id], {'process_document_ids': [(6,0,new_doc_ids)]})
event.add_callback(self.on_playback_start, 'playback_player_start')
event.add_callback(self.on_playback_end, 'playback_player_end')
event.add_callback(self.on_playback_start, 'playback_track_start')
event.add_callback(self.on_playback_end, 'playback_track_end')
def _bind_events(self):
    event.add_callback(self.on_playback_start, 'playback_player_start')
    event.add_callback(self.on_playback_end, 'playback_player_end')
    event.add_callback(self.on_playback_toggle_pause, 'playback_toggle_pause')
@dbus.service.method(dbus.PROPERTIES_IFACE, out_signature='a{sv}')
def Rate(self): pass
return True
track = self.exaile.player.current
playlist = self.exaile.queue.current_playlist
return not ((len(playlist)-1) == playlist.index(track))
def CanGoNext(self):
    return True
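The hard-coded return True is replaced with a real check: there is a next track unless the current one is the playlist's last entry. The same logic as a standalone function over a plain list:

def can_go_next(playlist, track):
    """True unless `track` is the last entry of `playlist`.

    Mirrors the MPRIS CanGoNext property; assumes `track` is present in
    the playlist (the player's current track always is while playing).
    """
    return playlist.index(track) != len(playlist) - 1

# can_go_next(['a', 'b', 'c'], 'b') -> True
# can_go_next(['a', 'b', 'c'], 'c') -> False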
return True
return not (playlist.index(track) == 0)
def CanGoPrevious(self):
    return True
return True
return not self.exaile.player.is_playing()
def CanPlay(self):
    return True
return True
return self.exaile.player.is_playing()
def CanPause(self):
    return True
props['CanPause'] = self.CanPause()
props['CanPlay'] = self.CanPlay()
def on_playback_start(self, evt, exaile, data):
    props = {}
    props['PlaybackStatus'] = self.PlaybackStatus()
    props['Metadata'] = self.Metadata()
    props['CanGoNext'] = self.CanGoNext()
    props['CanGoPrevious'] = self.CanGoPrevious()
    props['CanPause'] = self.CanPause()
    props['CanPlay'] = self.CanPlay()
    self.PropertiesChanged(ORG_MPRIS_MEDIAPLAYER2_PLAYER, props, [])
props['Metadata'] = self.Metadata()
props['CanPause'] = self.CanPause()
props['CanPlay'] = self.CanPlay()
def on_playback_end(self, evt, exaile, data):
    props = {}
    props['Metadata'] = self.Metadata()
    props['CanPause'] = self.CanPause()
    props['CanPlay'] = self.CanPlay()
    props['PlaybackStatus'] = self.PlaybackStatus()
    self.PropertiesChanged(ORG_MPRIS_MEDIAPLAYER2_PLAYER, props, [])
props['CanPause'] = self.CanPause()
props['CanPlay'] = self.CanPlay()
def on_playback_toggle_pause(self, evt, exaile, data):
    props = {}
    props['PlaybackStatus'] = self.PlaybackStatus()
    props['CanPause'] = self.CanPause()
    props['CanPlay'] = self.CanPlay()
    self.PropertiesChanged(ORG_MPRIS_MEDIAPLAYER2_PLAYER, props, [])
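Each of these handlers gathers every property touched by the event into one dict and emits a single PropertiesChanged signal, which is how org.freedesktop.DBus.Properties watchers expect batched updates. A sketch of the batching pattern, with a generic emit callable standing in for the dbus signal:

def emit_changed(emit, interface, getters):
    """Read each property once and emit a single batched change signal.

    `getters` maps property name -> zero-argument callable; `emit` is a
    stand-in for the PropertiesChanged signal, which takes (interface,
    changed_properties, invalidated_properties).
    """
    props = dict((name, get()) for name, get in getters.items())
    emit(interface, props, [])

# usage sketch:
# emit_changed(self.PropertiesChanged, ORG_MPRIS_MEDIAPLAYER2_PLAYER,
#              {'PlaybackStatus': self.PlaybackStatus,
#               'CanPause': self.CanPause, 'CanPlay': self.CanPlay})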
meta['mpris:length'] = dbus.types.Int64(int(track.get_tag_raw('__length'))*1000)
meta['mpris:length'] = dbus.types.Int64(int(track.get_tag_raw('__length') or 0)*1000)
def _get_metadata(self, track):
    ## mpris2.0 meta map, defined at http://xmms2.org/wiki/MPRIS_Metadata
    meta = {}
    meta['xesam:title'] = unicode(track.get_tag_raw('title')[0])
    meta['xesam:album'] = unicode(track.get_tag_raw('album')[0])
    meta['xesam:artist'] = dbus.types.Array([unicode(track.get_tag_raw('artist')[0])], signature='s')
    meta['mpris:length'] = dbus.types.Int64(int(track.get_tag_raw('__length'))*1000)
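get_tag_raw('__length') can return None when a track's length is unknown, and int(None) raises TypeError; the fix substitutes 0 via `or`. The same fallback, generalized to any possibly-missing tag over a plain dict:

def tag_or_default(track_tags, name, default):
    """Fetch a raw tag value, falling back when it is missing or None.

    `track_tags` is a plain dict standing in for track.get_tag_raw.
    """
    return track_tags.get(name) or default

# a missing length becomes 0 rather than a TypeError, scaled as above:
# length = int(tag_or_default(tags, '__length', 0)) * 1000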
def load(self):
    stream = file('config', 'rU')
    d = yaml.load(stream)
    for key in d.keys():
        setattr(self, key, d[key])
def load(self):
    try:
        stream = file('config', 'rU')
    except:
        pass
    else:
        d = yaml.load(stream)
        for key in d.keys():
            setattr(self, key, d[key])
def load(self):
    stream = file('config', 'rU')
    d = yaml.load(stream)
    for key in d.keys():
        setattr(self, key, d[key])
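The patched loader swallows a missing config file, but it still uses a bare except and yaml.load, which can construct arbitrary Python objects. A tighter sketch of the same idea, assuming PyYAML is installed:

import yaml

def load_config(path='config'):
    """Return the config mapping, or {} when the file is absent."""
    try:
        stream = open(path)
    except IOError:                       # only the expected failure, not everything
        return {}
    try:
        return yaml.safe_load(stream) or {}   # safe_load: no arbitrary objects
    finally:
        stream.close()

# for key, value in load_config().items():
#     setattr(self, key, value)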
setattr(self.loadobj, field, self.fields[field].get())
fld = self.fields[field].get()
try:
    fld = eval(fld)
except:
    pass
setattr(self.loadobj, field, fld)
def saveFile(self, *args):
    print os.path.join(self.config.path, self.loaded)
    for field in self.fields:
        if hasattr(self.loadobj, field):
            setattr(self.loadobj, field, self.fields[field].get())
    fileObj = open(os.path.join(self.config.path, self.loaded), "w")
    fileObj.write(self.loadobj.dump())
    fileObj.close()
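The change above coerces widget strings back to Python values with eval, keeping the raw string when evaluation fails. ast.literal_eval offers the same convenience for literals (numbers, lists, dicts, booleans) without executing arbitrary code; a sketch:

import ast

def coerce_field(raw):
    """Parse a widget string back to a Python literal when possible.

    literal_eval only accepts literals, so a string like
    "os.remove('x')" stays a string instead of being executed.
    """
    try:
        return ast.literal_eval(raw)
    except (ValueError, SyntaxError):
        return raw

# coerce_field('42') -> 42, coerce_field('[1, 2]') -> [1, 2]
# coerce_field('hello') -> 'hello'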
elif y > 1.0 - prob_fix(p, N, t):
elif y > 1.0 - prob_fix_leg(leg_r, N, t):
def sample_freq_CDF(p, N, t):
    """
    Takes an allele frequency p, a population size N, and a time period t.
    Samples from the CDF derived from Kimura to get a new allele frequency.
    N.B.: The current version fails sometimes (on some N, t pairs),
    presumably due to errors in freq_CDF_leg.  These need to be fixed.
    """
    import scipy.optimize  #, random
    y = random.random()
    leg_r = legendre(1.0-2*p)
    extinction = prob_fix(1.0-p, N, t)  # probability of allele extinction
    if y < extinction:
        return 0.0  # sample an extinction event
    elif y > 1.0 - prob_fix(p, N, t):
        return 1.0  # sample a fixation event
    else:
        def f(T):
            return freq_CDF_legs_noends(leg_r, legendre(1.0-2*T), N, t) \
                - y + extinction  # trims extinction probability, assures brentq works
        return scipy.optimize.brentq(f, 0.0, 1.0, disp=False)
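The sampler draws y uniformly, handles the point masses at 0 and 1 (extinction and fixation) explicitly, and inverts only the continuous part of the CDF numerically with brentq. The same scheme on a self-contained toy distribution:

import random
import scipy.optimize

def sample_mixed(cdf, p0, p1):
    """Inverse-CDF sample from a distribution on [0, 1] with point
    masses p0 at 0 and p1 at 1, and continuous CDF `cdf` in between.

    `cdf(x)` must give the probability of the continuous part up to x,
    so cdf(0) == 0 and cdf(1) == 1 - p0 - p1.
    """
    y = random.random()
    if y < p0:
        return 0.0                      # atom at 0 (extinction)
    if y > 1.0 - p1:
        return 1.0                      # atom at 1 (fixation)
    # invert the continuous part: find x with p0 + cdf(x) == y
    f = lambda x: p0 + cdf(x) - y       # f(0) < 0 and f(1) > 0, so brentq brackets a root
    return scipy.optimize.brentq(f, 0.0, 1.0)

# toy continuous part: mass 0.6 spread uniformly over (0, 1)
# sample_mixed(lambda x: 0.6 * x, 0.2, 0.2)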
def prob_fix(p, n, t, k=100, esp=0.000001):
def prob_fix(p, n, t, k=50, esp=0.000001):
def prob_fix(p, n, t, k=100, esp=0.000001):
    """Probability of fixation"""
    r = 1 - 2*p
    leg = legendre(r)
    prob = p
    for i in xrange(1, k+1):
        term = (.5 * (-1)**i * (leg(i-1) - leg(i+1)) *
                exp(-t * i * (i+1) / (4 * n)))
        if term != 0.0 and abs(term) < esp:
            return prob + term
        prob += term
    return prob
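prob_fix sums its Legendre series term by term and stops early once a term falls below esp, so k only caps the worst case; the change above halves that cap from 100 to 50. A runnable sketch of the same truncation using scipy's eval_legendre, which plays the role of the legendre(r) closure here:

from math import exp
from scipy.special import eval_legendre

def prob_fix_sketch(p, n, t, k=50, esp=1e-6):
    """Kimura fixation probability via a truncated Legendre series."""
    r = 1 - 2.0 * p
    prob = p
    for i in range(1, k + 1):
        term = (0.5 * (-1) ** i
                * (eval_legendre(i - 1, r) - eval_legendre(i + 1, r))
                * exp(-t * i * (i + 1) / (4.0 * n)))
        if term != 0.0 and abs(term) < esp:
            return prob + term          # series has converged
        prob += term
    return prob                         # hit the term cap k

# prob_fix_sketch(0.1, 1000, 500)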
tree2.writeNewick(filename)
tree2.write(filename)
def write_in_tree(filename, tree, labels):
    tree2 = tree.copy()
    rename_tree_with_ids(tree2, labels)
    for node in tree2.nodes.values():
        node.dist = 0
    tree2.writeNewick(filename)
proposal = self.proposer.next_proposal()
def recon(self, nsearch=1000):
    """Perform reconciliation"""
    self.init_search()
    for i in xrange(nsearch):
        print "search", i
        proposal = self.proposer.next_proposal()
        p = self.eval_proposal(proposal)
        self.eval_search(p, proposal)
    # rename locus tree nodes
    rename_nodes(self.maxrecon["locus_tree"], self.name_internal)
    return self.maxp, self.maxrecon
return dl_prob + d_prob + util.safelog(prob / nsamples, -util.INF)
return dl_prob + d_prob + util.safelog(prob / nsamples)
def prob_dlcoal_recon_topology(coal_tree, coal_recon,
                               locus_tree, locus_recon, locus_events,
                               daughters,
                               stree, n, duprate, lossrate,
                               pretime=None, premean=None,
                               maxdoom=20, nsamples=100,
                               add_spec=True):
    """
    Probability of a reconciled gene tree in the DLCoal model.

    coal_tree    -- coalescent tree
    coal_recon   -- reconciliation of coalescent tree to locus tree
    locus_tree   -- locus tree (has dup-loss)
    locus_recon  -- reconciliation of locus tree to species tree
    locus_events -- events dict for locus tree
    stree        -- species tree
    n            -- population sizes in species tree
    duprate      -- duplication rate
    lossrate     -- loss rate

    You must also specify one of the following
    pretime      -- starting time before species tree
    premean      -- mean starting time before species tree

    Note: locus tree must have implied speciation nodes present
    """
    dups = phylo.count_dup(locus_tree, locus_events)

    # ensure implicit speciations are present
    if add_spec:
        phylo.add_implied_spec_nodes(locus_tree, stree,
                                     locus_recon, locus_events)

    # init popsizes for locus tree
    stree_popsizes = coal.init_popsizes(stree, n)
    popsizes = {}
    for node in locus_tree:
        popsizes[node.name] = stree_popsizes[locus_recon[node].name]

    # duploss probability
    util.tic("top")
    dl_prob = spidir.calc_birth_death_prior(locus_tree, stree, locus_recon,
                                            duprate, lossrate,
                                            maxdoom=maxdoom)
    util.toc()

    # daughters probability
    d_prob = dups * log(.5)

    # integrate over duplication times using sampling
    prob = 0.0
    #util.tic("int")
    for i in xrange(nsamples):
        # sample duplication times
        locus_times = spidir.topology_prior.sample_dup_times(
            locus_tree, stree, locus_recon, duprate, lossrate, pretime,
            premean, events=locus_events)
        assert len(locus_times) == len(locus_tree.nodes), (
            len(locus_times), len(locus_tree.nodes))
        birthdeath.set_dists_from_timestamps(locus_tree, locus_times)

        # coal topology probability
        coal_prob = prob_coal_recon_topology(coal_tree, coal_recon,
                                             locus_tree, popsizes, daughters)
        prob += exp(coal_prob)
        print coal_prob
    #util.toc()

    return dl_prob + d_prob + util.safelog(prob / nsamples, -util.INF)
popsizes[snode.name]), -util.INF)
popsizes[snode.name]))
def prob_coal_recon_topology(tree, recon, locus_tree, n, daughters):
    """
    Returns the log probability of a reconciled gene tree ('tree', 'recon')
    from the coalescent model given a locus_tree 'locus_tree',
    population sizes 'n', and daughters set 'daughters'
    """
    # init population sizes
    popsizes = coal.init_popsizes(locus_tree, n)

    # log probability
    lnp = 0.0

    nodes = set(tree.postorder())

    # init reverse reconciliation
    rev_recon = {}
    for node, snode in recon.iteritems():
        if node not in nodes:
            raise Exception("node '%s' not in tree" % node.name)
        rev_recon.setdefault(snode, []).append(node)

    # init lineage counts
    lineages = {}
    for snode in locus_tree:
        if snode.is_leaf():
            lineages[snode] = len([x for x in rev_recon[snode] if x.is_leaf()])
        else:
            lineages[snode] = 0

    # iterate through species tree branches
    for snode in locus_tree.postorder():
        if snode.parent:
            # non root branch
            u = lineages[snode]
            # subtract number of coals in branch
            v = u - len([x for x in rev_recon.get(snode, []) if not x.is_leaf()])
            lineages[snode.parent] += v

            if snode not in daughters:
                try:
                    lnp += util.safelog(
                        coal.prob_coal_counts(u, v, snode.dist,
                                              popsizes[snode.name]), -util.INF)
                except:
                    print u, v, snode.dist, popsizes[snode.name]
                    raise
            else:
                assert v == 1
            lnp -= util.safelog(coal.num_labeled_histories(u, v), -util.INF)
        else:
            # normal coalescent
            u = lineages[snode]
            lnp -= util.safelog(coal.num_labeled_histories(u, 1), -util.INF)

    # correct for topologies H(T)
    # find connected subtrees that are in the same species branch
    subtrees = []
    subtree_root = {}
    for node in tree.preorder():
        if node.parent and recon[node] == recon[node.parent]:
            subtree_root[node] = subtree_root[node.parent]
        else:
            subtrees.append(node)
            subtree_root[node] = node

    # find leaves through recursion
    def walk(node, subtree, leaves):
        if node.is_leaf():
            leaves.append(node)
        elif (subtree_root[node.children[0]] != subtree and
              subtree_root[node.children[1]] != subtree):
            leaves.append(node)
        else:
            for child in node.children:
                walk(child, subtree, leaves)

    # apply correction for each subtree
    for subtree in subtrees:
        leaves = []
        for child in subtree.children:
            walk(subtree, subtree, leaves)
        if len(leaves) > 2:
            lnp += util.safelog(
                birthdeath.num_topology_histories(subtree, leaves), -util.INF)

    return lnp
lnp -= util.safelog(coal.num_labeled_histories(u, v), -util.INF)
lnp -= util.safelog(coal.num_labeled_histories(u, v))
(context: identical to the prob_coal_recon_topology function shown above)
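The safelog rows in this stretch all drop the explicit -util.INF argument because that value is already the helper's default: the log of a non-positive number returns the floor instead of raising. A sketch of such a helper, assuming that contract:

from math import log

INF = float('inf')

def safelog(x, default=-INF):
    """log(x), but return `default` instead of raising on x <= 0.

    Lets log-probability code treat "probability underflowed to 0"
    as log-probability -inf rather than a ValueError.
    """
    if x <= 0.0:
        return default
    return log(x)

# safelog(0.5) -> -0.693..., safelog(0.0) -> -inf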
lnp -= util.safelog(coal.num_labeled_histories(u, 1), -util.INF)
lnp -= util.safelog(coal.num_labeled_histories(u, 1))
(context: identical to the prob_coal_recon_topology function shown above)
birthdeath.num_topology_histories(subtree, leaves), -util.INF)
birthdeath.num_topology_histories(subtree, leaves))
def walk(node, subtree, leaves):
    if node.is_leaf():
        leaves.append(node)
    elif (subtree_root[node.children[0]] != subtree and
          subtree_root[node.children[1]] != subtree):
        leaves.append(node)
    else:
        for child in node.children:
            walk(child, subtree, leaves)
rep.append(' %s'%v)
rep.append(' %s'%i)
def __str__(self): rep = ["""
return None
raise Warning, "input to __patchFrameTypeDef__ included a \ gps time argument specified as None\n" return frametype
def __patchFrameTypeDef__(frametype=None, ifo=None, gpstime=None):
    """
    Temporary patch function, to adjust the specified frame type used in
    searching the filesystem for files to display in followup.
    """
    if frametype == None:
        return None
    if gpstime == None:
        return None
    if ifo == None:
        return None
    endOfS5 = int(875232014)
    new = None
    if int(gpstime) <= endOfS5:
        if not frametype.lower().startswith(ifo.lower()):
            orig = frametype
            new = ifo + "_" + frametype
            return new
return None
raise Warning, "input to __patchFrameTypeDef__ included an \ ifo argument specified as None\n" return frametype
(context: identical to the __patchFrameTypeDef__ function shown above)
new=None
(context: identical to the __patchFrameTypeDef__ function shown above)
orig=frametype new=ifo+"_"+frametype return new
return ifo+"_"+frametype return frametype
(context: identical to the __patchFrameTypeDef__ function shown above)
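Reading the rem/add pairs together, the patched function raises on missing arguments instead of silently returning None, and returns the frametype unchanged when no prefix is needed. A hedged standalone reconstruction of that post-change behaviour (using ValueError rather than the old string-exception raise syntax, so the sketch runs on modern Python):

def patch_frame_type(frametype=None, ifo=None, gpstime=None):
    """Prefix S5-era frame types with the ifo name when it is missing.

    Standalone sketch of the patched behaviour; raises instead of
    returning None so callers notice bad inputs.
    """
    if frametype is None:
        raise ValueError("frametype argument specified as None")
    if gpstime is None:
        raise ValueError("gps time argument specified as None")
    if ifo is None:
        raise ValueError("ifo argument specified as None")
    end_of_s5 = 875232014
    if int(gpstime) <= end_of_s5 and not frametype.lower().startswith(ifo.lower()):
        return ifo + "_" + frametype
    return frametype

# patch_frame_type("RDS_R_L3", "H1", 874000000)    -> "H1_RDS_R_L3"
# patch_frame_type("H1_RDS_R_L3", "H1", 874000000) -> "H1_RDS_R_L3"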