Columns: rem (string lengths 0 to 322k), add (string lengths 0 to 2.05M), context (string lengths 8 to 228k)
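The records that follow pair code removed by a patch (rem) with the code added in its place (add) and the enclosing definition (context), one cell per line. As a minimal sketch, a single record could be held in a plain dict keyed by the column names above; the dict layout and the name `first_row` are illustrative assumptions rather than the dataset's actual storage format, and the values are copied from the first record below.

# Hypothetical in-memory layout of one record; the keys come from the
# column header above, the dict representation itself is an assumption.
first_row = {
    # code removed by the patch
    "rem": "val = property_obj.browse(cr, uid,prop_ids[0]).value",
    # code added in its place
    "add": "val = property_obj.browse(cr, uid,prop_ids[0]).value_reference",
    # surrounding code, here the signature of the enclosing method
    "context": "def close_action(self, cr, uid, ids, context):",
}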
val = property_obj.browse(cr, uid,prop_ids[0]).value
val = property_obj.browse(cr, uid,prop_ids[0]).value_reference
def close_action(self, cr, uid, ids, context):
stock_dest_id = int(val.split(',')[1])
stock_dest_id = val.id
def close_action(self, cr, uid, ids, context):
if not (line.account_id.parent_left > parent_left and line.account_id.parent_left < parent_right):
if not (line.account_id.parent_left >= parent_left and line.account_id.parent_left <= parent_right):
def _get_chart_account(cursor, user, account): if account.parent_id: chart_account = _get_chart_account(cursor, user, account.parent_id) else: chart_account = account return chart_account
'date_start': fields.datetime('Start Date', help="Starting Date of the phase"), 'date_end': fields.datetime('End Date', help="Ending Date of the phase"),
'date_start': fields.date('Start Date', help="Starting Date of the phase"), 'date_end': fields.date('End Date', help="Ending Date of the phase"),
def _get_default_uom_id(self, cr, uid): model_data_obj = self.pool.get('ir.model.data') model_data_id = model_data_obj._get_id(cr, uid, 'product', 'uom_hour') return model_data_obj.read(cr, uid, [model_data_id], ['res_id'])[0]['res_id']
'product_uom': lambda self,cr,uid,c: self.pool.get('product.uom').search(cr, uid, [('name', '=', 'day')], context=c)[0]
'product_uom': lambda self,cr,uid,c: self.pool.get('product.uom').search(cr, uid, [('name', '=', _('Day'))], context=c)[0]
def _get_default_uom_id(self, cr, uid): model_data_obj = self.pool.get('ir.model.data') model_data_id = model_data_obj._get_id(cr, uid, 'product', 'uom_hour') return model_data_obj.read(cr, uid, [model_data_id], ['res_id'])[0]['res_id']
next_date += relativedelta(day=line.days2)
next_date += relativedelta(day=31)
def compute(self, cr, uid, id, value, date_ref=False, context={}): if not date_ref: date_ref = datetime.now().strftime('%Y-%m-%d') pt = self.browse(cr, uid, id, context) amount = value result = [] for line in pt.line_ids: prec = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account') if line.value == 'fixed': amt = round(line.value_amount, prec) elif line.value == 'procent': amt = round(value * line.value_amount, prec) elif line.value == 'balance': amt = round(amount, prec) if amt: next_date = datetime.strptime(date_ref, '%Y-%m-%d') + relativedelta(days=line.days) if line.days2 < 0: next_date += relativedelta(day=line.days2) if line.days2 > 0: next_date += relativedelta(day=line.days2, months=1) result.append( (next_date.strftime('%Y-%m-%d'), amt) ) amount -= amt return result
pick = pick_obj.browse(cr, uid, record_id)
pick = pick_obj.browse(cr, uid, record_id, context=context)
def default_get(self, cr, uid, fields, context): """ To get default values for the object. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param fields: List of fields for which we want default values @param context: A standard dictionary @return: A dictionary which of fields with values. """ res = super(stock_split_move_line, self).default_get(cr, uid, fields, context=context) record_id = context and context.get('active_id', False) or False pick_obj = self.pool.get('stock.picking') pick = pick_obj.browse(cr, uid, record_id) for m in [line for line in pick.move_lines]: res['move%s'%(m.id)] = m.product_qty return res
pick = pick_obj.browse(cr, uid, record_id)
pick = pick_obj.browse(cr, uid, record_id, context=context)
def view_init(self, cr, uid, fields_list, context=None): """ Creates view dynamically and adding fields at runtime. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param context: A standard dictionary @return: New arch of view with new columns. """ res = super(stock_split_move_line, self).view_init(cr, uid, fields_list, context=context) record_id = context and context.get('active_id', False) or False if record_id: pick_obj = self.pool.get('stock.picking') try: pick = pick_obj.browse(cr, uid, record_id) for m in [line for line in pick.move_lines]: if 'move%s' % m.id not in self._columns: self._columns['move%s' % m.id] = fields.float(string=m.product_id.name) except: return res return res
pick = pick_obj.browse(cr, uid, record_id)
pick = pick_obj.browse(cr, uid, record_id, context=context)
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): """ Changes the view dynamically @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param context: A standard dictionary @return: New arch of view. """ res = super(stock_split_move_line, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar,submenu=False) record_id = context and context.get('active_id', False) or False assert record_id,'Active ID not found' pick_obj = self.pool.get('stock.picking') pick = pick_obj.browse(cr, uid, record_id) arch_lst = ['<?xml version="1.0"?>', '<form string="Split lines">', '<label string="Indicate here the quantity of the new line. A quantity of zero will not split the line." colspan="4"/>'] for m in [line for line in pick.move_lines]: quantity = m.product_qty arch_lst.append('<field name="move%s" />\n<newline />' % (m.id,)) res['fields']['move%s' % m.id] = {'string' : m.product_id.name, 'type' : 'float', 'required' : True} arch_lst.append('<group col="2" colspan="4">') arch_lst.append('<button icon="gtk-cancel" special="cancel" string="Cancel" />') arch_lst.append('<button name="split_lines" string="Split" colspan="1" type="object" icon="gtk-apply" />') arch_lst.append('</group>') arch_lst.append('</form>') res['arch'] = '\n'.join(arch_lst) return res
pick = pick_obj.browse(cr, uid, record_id)
pick = pick_obj.browse(cr, uid, record_id, context=context)
def split_lines(self, cr, uid, ids, context): """ Splits moves in quantity given in the wizard. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param ids: List of ids selected @param context: A standard dictionary @return: A dictionary which of fields with values. """ move_obj = self.pool.get('stock.move') record_id = context and context.get('active_id', False) or False pick_obj = self.pool.get('stock.picking') pick = pick_obj.browse(cr, uid, record_id) data = self.read(cr, uid, ids[0]) move_ids = [m.id for m in [line for line in pick.move_lines]] for move in move_obj.browse(cr, uid, move_ids): quantity = data['move%s' % move.id] if 0 < quantity < move.product_qty: new_qty = move.product_qty - quantity new_uos_qty = new_qty / move.product_qty * move.product_uos_qty new_obj = move_obj.copy(cr, uid, move.id, {'product_qty' : new_qty, 'product_uos_qty': new_uos_qty, 'state':move.state}) uos_qty = quantity / move.product_qty * move.product_uos_qty move_obj.write(cr, uid, [move.id], {'product_qty' : quantity, 'product_uos_qty': uos_qty}) return {}
for move in move_obj.browse(cr, uid, move_ids):
for move in move_obj.browse(cr, uid, move_ids, context=context):
def split_lines(self, cr, uid, ids, context): """ Splits moves in quantity given in the wizard. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param ids: List of ids selected @param context: A standard dictionary @return: A dictionary which of fields with values. """ move_obj = self.pool.get('stock.move') record_id = context and context.get('active_id', False) or False pick_obj = self.pool.get('stock.picking') pick = pick_obj.browse(cr, uid, record_id) data = self.read(cr, uid, ids[0]) move_ids = [m.id for m in [line for line in pick.move_lines]] for move in move_obj.browse(cr, uid, move_ids): quantity = data['move%s' % move.id] if 0 < quantity < move.product_qty: new_qty = move.product_qty - quantity new_uos_qty = new_qty / move.product_qty * move.product_uos_qty new_obj = move_obj.copy(cr, uid, move.id, {'product_qty' : new_qty, 'product_uos_qty': new_uos_qty, 'state':move.state}) uos_qty = quantity / move.product_qty * move.product_uos_qty move_obj.write(cr, uid, [move.id], {'product_qty' : quantity, 'product_uos_qty': uos_qty}) return {}
result['res_id'] = created_inv
invoice_domain = eval(result['domain']) invoice_domain.append(('id', '=', created_inv)) result['domain'] = invoice_domain
def compute_refund(self, cr, uid, ids, mode='refund', context=None): """ @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: the account invoice refund’s ID or list of IDs
limit = datetime.strptime(move.date_expected, '%Y-%m-%d %H:%M:%S') + relativedelta(months=product.warranty)
limit = datetime.strptime(move.date_expected, '%Y-%m-%d %H:%M:%S') + relativedelta(months=int(product.warranty))
def onchange_move_id(self, cr, uid, ids, prod_id=False, move_id=False): """ On change of move id sets values of guarantee limit, source location, destination location, partner and partner address. @param prod_id: Id of product in current record. @param move_id: Changed move. @return: Dictionary of values. """ data = {} data['value'] = {} if not prod_id: return data if move_id: move = self.pool.get('stock.move').browse(cr, uid, move_id) product = self.pool.get('product.product').browse(cr, uid, prod_id) limit = datetime.strptime(move.date_expected, '%Y-%m-%d %H:%M:%S') + relativedelta(months=product.warranty) data['value']['guarantee_limit'] = limit.strftime('%Y-%m-%d') data['value']['location_id'] = move.location_dest_id.id data['value']['location_dest_id'] = move.location_dest_id.id if move.address_id: data['value']['partner_id'] = move.address_id.partner_id and move.address_id.partner_id.id else: data['value']['partner_id'] = False data['value']['address_id'] = move.address_id and move.address_id.id d = self.onchange_partner_id(cr, uid, ids, data['value']['partner_id'], data['value']['address_id']) data['value'].update(d['value']) return data
exval = map(lambda x: str(x), cal_data.value)
exdates += cal_data.value exval = map(lambda x: x.strftime('%Y%m%dT%H%M%SZ'), exdates)
def parse_ics(self, cr, uid, child, cal_children=None, context=None): """ parse calendaring and scheduling information @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """
elif field == 'vtimezone' and data[map_field] and data[map_field] not in timezones:
elif field == 'vtimezone' and data[map_field]:
def create_ics(self, cr, uid, datas, name, ical, context=None): """ create calendaring and scheduling information @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """
tz_obj = self.pool.get('basic.calendar.timezone') ical = tz_obj.export_cal(cr, uid, None, \ data[map_field], ical, context=context) timezones.append(data[map_field])
if tzval not in timezones: tz_obj = self.pool.get('basic.calendar.timezone') ical = tz_obj.export_cal(cr, uid, None, \ data[map_field], ical, context=context) timezones.append(data[map_field])
def create_ics(self, cr, uid, datas, name, ical, context=None): """ create calendaring and scheduling information @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """
dtfield.value = parser.parse(data[map_field])
def create_ics(self, cr, uid, datas, name, ical, context=None): """ create calendaring and scheduling information @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """
self.cr.execute('SELECT SUM(debit) FROM account_move_line ' 'WHERE period_id=%s AND journal_id IN %s '
self.cr.execute('SELECT SUM(debit) FROM account_move_line l ' 'WHERE period_id=%s AND journal_id IN %s ' + self.query_get_clause + ' ' \
def _sum_debit_period(self, period_id, journal_id=False):
self.cr.execute('SELECT SUM(credit) FROM account_move_line ' 'WHERE period_id=%s AND journal_id IN %s '
self.cr.execute('SELECT SUM(credit) FROM account_move_line l ' 'WHERE period_id=%s AND journal_id IN %s '+ self.query_get_clause + ' ' \
def _sum_credit_period(self, period_id, journal_id=None): if journal_id: journals = [journal_id] else: journals = self.journal_ids if not journals: return 0.0 self.cr.execute('SELECT SUM(credit) FROM account_move_line ' 'WHERE period_id=%s AND journal_id IN %s ' 'AND state<>\'draft\'', (period_id, tuple(journals))) return self.cr.fetchone()[0] or 0.0
if not context.get('alarm_id', False): self.do_alarm_unlink(cr, uid, [data.id], model) return True
def do_alarm_create(self, cr, uid, ids, model, date, context=None): """ Create Alarm for event. @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of res alarm’s IDs. @param model: Model name. @param date: Event date @param context: A standard dictionary for contextual values @return: True """ if not context: context = {} alarm_obj = self.pool.get('calendar.alarm') res_alarm_obj = self.pool.get('res.alarm') ir_obj = self.pool.get('ir.model') model_id = ir_obj.search(cr, uid, [('model', '=', model)])[0]
datas['end_date'] = ''.join((re.compile('\d')).findall(datas.get('end_date'))) + '235959Z'
datas['end_date'] = ''.join((re.compile('\d')).findall(datas.get('end_date'))) + 'T235959Z'
def compute_rule_string(self, cr, uid, datas, context=None, *args): """ Compute rule string according to value type RECUR of iCalendar from the values given. @param self: the object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param datas: dictionary of freq and interval value. @param context: A standard dictionary for contextual values @return: String value of the format RECUR of iCalendar """
ls = base_calendar_id2real_id(base_calendar_id, with_date=res.get('duration', 0))
ls = base_calendar_id2real_id(base_calendar_id, with_date=res and res.get('duration', 0) or 0)
def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'): """ Overrides orm Read method.Read List of fields for calendar event. @param cr: the current row, from the database cursor, @param user: the current user’s ID for security checks, @param ids: List of calendar event's id. @param fields: List of fields. @param context: A standard dictionary for contextual values @return: List of Dictionary of form [{‘name_of_the_field’: value, ...}, ...] """ if not context: context = {}
for id in ids: ls = base_calendar_id2real_id(id) if not isinstance(ls, (str, int, long)) and len(ls) >= 2: date_new = ls[1] for record in self.read(cr, uid, [base_calendar_id2real_id(id)], \ ['date', 'rrule', 'exdate']): if record['rrule']: exdate = (record['exdate'] and (record['exdate'] + ',') or '') + ''.join((re.compile('\d')).findall(date_new)) + 'Z' if record['date'] == date_new: res = self.write(cr, uid, [base_calendar_id2real_id(id)], {'exdate': exdate}) else: ids = map(lambda x: base_calendar_id2real_id(x), ids) res = super(calendar_event, self).unlink(cr, uid, \ base_calendar_id2real_id(ids)) alarm_obj = self.pool.get('res.alarm') alarm_obj.do_alarm_unlink(cr, uid, ids, self._name) else: ids = map(lambda x: base_calendar_id2real_id(x), ids) res = super(calendar_event, self).unlink(cr, uid, ids) alarm_obj = self.pool.get('res.alarm') alarm_obj.do_alarm_unlink(cr, uid, ids, self._name)
for event_id in ids: if isinstance(event_id, (int, long)): res = super(calendar_event, self).unlink(cr, uid, event_id) self.pool.get('res.alarm').do_alarm_unlink(cr, uid, [event_id], self._name) continue event_id, date_new = event_id.split('-') event_id = [int(event_id)] for record in self.read(cr, uid, event_id, ['date', 'rrule', 'exdate']): if record['rrule']: date_new = time.strftime("%Y-%m-%d %H:%M:%S", \ time.strptime(date_new, "%Y%m%d%H%M%S")) exdate = (record['exdate'] and (record['exdate'] + ',') or '') + ''.join((re.compile('\d')).findall(date_new)) + 'Z' res = self.write(cr, uid, event_id, {'exdate': exdate}) else: res = super(calendar_event, self).unlink(cr, uid, event_id) self.pool.get('res.alarm').do_alarm_unlink(cr, uid, event_id, self._name)
def unlink(self, cr, uid, ids, context=None): """ Deletes records specified in ids. @param self: the object pointer. @param cr: the current row, from the database cursor, @param id: List of calendar event's id. @param context: A standard dictionary for contextual values @return: True """ res = False for id in ids: ls = base_calendar_id2real_id(id) if not isinstance(ls, (str, int, long)) and len(ls) >= 2: date_new = ls[1] for record in self.read(cr, uid, [base_calendar_id2real_id(id)], \ ['date', 'rrule', 'exdate']): if record['rrule']: exdate = (record['exdate'] and (record['exdate'] + ',') or '') + ''.join((re.compile('\d')).findall(date_new)) + 'Z' if record['date'] == date_new: res = self.write(cr, uid, [base_calendar_id2real_id(id)], {'exdate': exdate}) else: ids = map(lambda x: base_calendar_id2real_id(x), ids) res = super(calendar_event, self).unlink(cr, uid, \ base_calendar_id2real_id(ids)) alarm_obj = self.pool.get('res.alarm') alarm_obj.do_alarm_unlink(cr, uid, ids, self._name) else: ids = map(lambda x: base_calendar_id2real_id(x), ids) res = super(calendar_event, self).unlink(cr, uid, ids) alarm_obj = self.pool.get('res.alarm') alarm_obj.do_alarm_unlink(cr, uid, ids, self._name) return res
description += "\n" + "=======================" + "\n" + data['description']
description += "\n" + "=======================" + "\n" + notes
def close(self, cr, uid, ids, context=None): data = self.read(cr,uid,ids)[0] task_pool = self.pool.get('project.task') user_name = self.pool.get('res.users').browse(cr, uid, uid).name description = _("Closed By ") + user_name + _(" At ") + time.strftime('%Y-%m-%d %H:%M:%S') description += "\n" + "=======================" + "\n" + data['description'] if 'task_id' in context: task = task_pool.browse(cr, uid, context['task_id']) description = task.description + "\n\n" + description task_pool.write(cr, uid, [task.id], { 'description': description, 'state': 'done', 'date_end':time.strftime('%Y-%m-%d %H:%M:%S'), 'remaining_hours': 0.0 }) return {}
description = task.description + "\n\n" + description
description = task.description and task.description + "\n\n" + description
def close(self, cr, uid, ids, context=None): data = self.read(cr,uid,ids)[0] task_pool = self.pool.get('project.task') user_name = self.pool.get('res.users').browse(cr, uid, uid).name description = _("Closed By ") + user_name + _(" At ") + time.strftime('%Y-%m-%d %H:%M:%S') description += "\n" + "=======================" + "\n" + data['description'] if 'task_id' in context: task = task_pool.browse(cr, uid, context['task_id']) description = task.description + "\n\n" + description task_pool.write(cr, uid, [task.id], { 'description': description, 'state': 'done', 'date_end':time.strftime('%Y-%m-%d %H:%M:%S'), 'remaining_hours': 0.0 }) return {}
for item in obj.user_id.child_ids: list_ids.append(item.id)
children = obj.pool.get('report_account_analytic.planning')._child_compute(cr, user, [obj.user_id.id], '', []) for u_id in children.get(obj.user_id.id, []): list_ids.append(u_id)
def get(self, cr, obj, ids, name, user=None, offset=0, context=None, values=None): if not context: context = context res = {} for obj in obj.browse(cr, user, ids, context=context): res[obj.id] = [] list_ids = [] for item in obj.user_id.child_ids: list_ids.append(item.id) list_ids.append(obj.user_id.id) ids2 = obj.pool.get(self._obj).search(cr, user, ['&',(self._fields_id,'=',obj.id),'|',('user_id','in',list_ids),('user_id','=',False)], limit=self._limit) for r in obj.pool.get(self._obj)._read_flat(cr, user, ids2, [self._fields_id], context=context, load='_classic_write'): if r[self._fields_id] not in res: res[r[self._fields_id]] = [] res[r[self._fields_id]].append( r['id'] ) return res
j_id = j_ids and j_ids[0] or False
journal_id = j_ids and j_ids[0] or False
def on_change_unit_amount(self, cr, uid, id, prod_id, quantity, company_id, unit=False, journal_id=False, context=None): if context==None: context={} if not journal_id: j_ids = self.pool.get('account.analytic.journal').search(cr, uid, [('type','=','purchase')]) j_id = j_ids and j_ids[0] or False if not journal_id or not prod_id: return {} product_obj = self.pool.get('product.product') analytic_journal_obj =self.pool.get('account.analytic.journal')
objname = uri2[-1]
objname = misc.ustr(uri2[-1])
def put(self, uri, data, content_type=None): """ put the object into the filesystem """ self.parent.log_message('Putting %s (%d), %s'%( misc.ustr(uri), data and len(data) or 0, content_type)) cr, uid, pool,dbname, uri2 = self.get_cr(uri) if not dbname: if cr: cr.close() raise DAV_Forbidden try: node = self.uri2object(cr, uid, pool, uri2[:]) except Exception: node = False objname = uri2[-1] ret = None if not node: dir_node = self.uri2object(cr, uid, pool, uri2[:-1]) if not dir_node: cr.close() raise DAV_NotFound('Parent folder not found')
ret = (hurl, etag)
ret = (str(hurl), etag)
def put(self, uri, data, content_type=None): """ put the object into the filesystem """ self.parent.log_message('Putting %s (%d), %s'%( misc.ustr(uri), data and len(data) or 0, content_type)) cr, uid, pool,dbname, uri2 = self.get_cr(uri) if not dbname: if cr: cr.close() raise DAV_Forbidden try: node = self.uri2object(cr, uid, pool, uri2[:]) except Exception: node = False objname = uri2[-1] ret = None if not node: dir_node = self.uri2object(cr, uid, pool, uri2[:-1]) if not dir_node: cr.close() raise DAV_NotFound('Parent folder not found')
def onchange_journal(self, cr, uid, ids, journal_id):
def onchange_journal(self, cr, uid, ids, journal_id, line_ids, tax_id, partner_id, context={}):
def onchange_journal(self, cr, uid, ids, journal_id): if not journal_id: return False journal_pool = self.pool.get('account.journal') journal = journal_pool.browse(cr, uid, journal_id) account_id = journal.default_credit_account_id or journal.default_debit_account_id tax_id = False if account_id and account_id.tax_ids: tax_id = account_id.tax_ids[0].id return {'value':{'tax_id':tax_id}}
return {'value':{'tax_id':tax_id}}
vals = self.onchange_price(cr, uid, ids, line_ids, tax_id, partner_id, context) vals['value'].update({'tax_id':tax_id}) return vals
def onchange_journal(self, cr, uid, ids, journal_id): if not journal_id: return False journal_pool = self.pool.get('account.journal') journal = journal_pool.browse(cr, uid, journal_id) account_id = journal.default_credit_account_id or journal.default_debit_account_id tax_id = False if account_id and account_id.tax_ids: tax_id = account_id.tax_ids[0].id return {'value':{'tax_id':tax_id}}
proj_name = tools.ustr(procurement.name) proj_exist_id = self.pool.get('project.project').search(cr, uid, [('name','=',proj_name)], context=context) if not proj_exist_id: project_id = self.pool.get('project.project').create(cr, uid, {'name':proj_name}) else: project_id = proj_exist_id[0]
def action_produce_assign_service(self, cr, uid, ids, context={}): for procurement in self.browse(cr, uid, ids): sline = self.pool.get('sale.order.line') sale_ids = sline.search(cr, uid, [('procurement_id','=',procurement.id)], context) content = '' l = None project_id = None for line in sline.browse(cr, uid, sale_ids, context=context): content += (line.notes or '') l = line if line.order_id.project_id: content+="\n\n"+line.order_id.project_id.complete_name
'name': (procurement.origin or procurement.product_id.name) +': '+(procurement.name or ''),
'name': '%s:%s' %(procurement.product_id.name or procurement.origin, procurement.name or ''),
def action_produce_assign_service(self, cr, uid, ids, context={}): for procurement in self.browse(cr, uid, ids): sline = self.pool.get('sale.order.line') sale_ids = sline.search(cr, uid, [('procurement_id','=',procurement.id)], context) content = '' l = None project_id = None for line in sline.browse(cr, uid, sale_ids, context=context): content += (line.notes or '') l = line if line.order_id.project_id: content+="\n\n"+line.order_id.project_id.complete_name
'partner_id': l and l.order_id.partner_id.id or False })
'partner_id': l and l.order_id.partner_id.id or False, 'project_id': project_id, },context=context)
def action_produce_assign_service(self, cr, uid, ids, context={}): for procurement in self.browse(cr, uid, ids): sline = self.pool.get('sale.order.line') sale_ids = sline.search(cr, uid, [('procurement_id','=',procurement.id)], context) content = '' l = None project_id = None for line in sline.browse(cr, uid, sale_ids, context=context): content += (line.notes or '') l = line if line.order_id.project_id: content+="\n\n"+line.order_id.project_id.complete_name
'name': fields.many2one('ir.model', 'Object', required=True),
'name': fields.char('Rule Name', size=64, required=True), 'model_id': fields.many2one('ir.model', 'Object', required=True),
def priority_get(self, cr, uid, context={}): """ Get Priority @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """ return [('', '')]
LEFT JOIN ir_model model on (model.id = rule.name) \
LEFT JOIN ir_model model on (model.id = rule.model_id) \
def pre_action(self, cr, uid, ids, model, context=None): # Searching for action rules cr.execute("SELECT model.model, rule.id FROM base_action_rule rule \ LEFT JOIN ir_model model on (model.id = rule.name) \ where active") res = cr.fetchall() # Check if any rule matching with current object for obj_name, rule_id in res: if not (model == obj_name): continue else: obj = self.pool.get(obj_name) self._action(cr, uid, [rule_id], obj.browse(cr, uid, ids, context=context)) return True
model = action_rule.name.model
model = action_rule.model_id.model
def _register_hook(self, cr, uid, ids, context=None): if not context: context = {} for action_rule in self.browse(cr, uid, ids, context=context): model = action_rule.name.model obj_pool = self.pool.get(model) obj_pool.__setattr__('create', self._create(obj_pool.create, model, context=context)) obj_pool.__setattr__('write', self._write(obj_pool.write, model, context=context)) return True
if action.name.model == action.filter_id.model_id:
if action.model_id.model == action.filter_id.model_id:
def do_check(self, cr, uid, action, obj, context={}): """ check Action @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """ ok = True if action.filter_id: if action.name.model == action.filter_id.model_id: context.update(eval(action.filter_id.context)) obj_ids = obj._table.search(cr, uid, eval(action.filter_id.domain), context=context) if not obj.id in obj_ids: ok = False else: ok = False if hasattr(obj, 'user_id'): ok = ok and (not action.trg_user_id.id or action.trg_user_id.id==obj.user_id.id) if hasattr(obj, 'partner_id'): ok = ok and (not action.trg_partner_id.id or action.trg_partner_id.id==obj.partner_id.id) ok = ok and ( not action.trg_partner_categ_id.id or ( obj.partner_id.id and (action.trg_partner_categ_id.id in map(lambda x: x.id, obj.partner_id.category_id or [])) ) ) state_to = context.get('state_to', False) if hasattr(obj, 'state'): ok = ok and (not action.trg_state_from or action.trg_state_from==obj.state) if state_to: ok = ok and (not action.trg_state_to or action.trg_state_to==state_to) elif action.trg_state_to: ok = False reg_name = action.regex_name result_name = True if reg_name: ptrn = re.compile(str(reg_name)) _result = ptrn.search(str(obj.name)) if not _result: result_name = False regex_n = not reg_name or result_name ok = ok and regex_n return ok
elif action.trg_state_to: ok = False
def do_check(self, cr, uid, action, obj, context={}): """ check Action @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """ ok = True if action.filter_id: if action.name.model == action.filter_id.model_id: context.update(eval(action.filter_id.context)) obj_ids = obj._table.search(cr, uid, eval(action.filter_id.domain), context=context) if not obj.id in obj_ids: ok = False else: ok = False if hasattr(obj, 'user_id'): ok = ok and (not action.trg_user_id.id or action.trg_user_id.id==obj.user_id.id) if hasattr(obj, 'partner_id'): ok = ok and (not action.trg_partner_id.id or action.trg_partner_id.id==obj.partner_id.id) ok = ok and ( not action.trg_partner_categ_id.id or ( obj.partner_id.id and (action.trg_partner_categ_id.id in map(lambda x: x.id, obj.partner_id.category_id or [])) ) ) state_to = context.get('state_to', False) if hasattr(obj, 'state'): ok = ok and (not action.trg_state_from or action.trg_state_from==obj.state) if state_to: ok = ok and (not action.trg_state_to or action.trg_state_to==state_to) elif action.trg_state_to: ok = False reg_name = action.regex_name result_name = True if reg_name: ptrn = re.compile(str(reg_name)) _result = ptrn.search(str(obj.name)) if not _result: result_name = False regex_n = not reg_name or result_name ok = ok and regex_n return ok
model_obj = self.pool.get(action.name.model)
model_obj = self.pool.get(action.model_id.model)
def _action(self, cr, uid, ids, objects, scrit=None, context={}): """ Do Action @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of Basic Action Rule’s IDs, @param objects: pass objects @param context: A standard dictionary for contextual values """ context.update({'action': True}) if not scrit: scrit = [] for action in self.browse(cr, uid, ids): model_obj = self.pool.get(action.name.model) for obj in objects: ok = self.do_check(cr, uid, action, obj, context=context) if not ok: continue
if not val: cr.execute("select max(date) from account_bank_statement_line l, account_bank_statement_reconcile s where l.pos_statement_id=%d and l.reconcile_id=s.id"%(order.id)) val=cr.fetchone() val=val and val[0] or None
def _get_date_payment2(self, cr, uid, ids, context, *a):
ok = True
valid_moves = []
def validate(self, cr, uid, ids, context={}): if context and ('__last_update' in context): del context['__last_update'] ok = True for move in self.browse(cr, uid, ids, context): #unlink analytic lines on move_lines for obj_line in move.line_id: for obj in obj_line.analytic_lines: self.pool.get('account.analytic.line').unlink(cr,uid,obj.id)
todo = []
def validate(self, cr, uid, ids, context={}): if context and ('__last_update' in context): del context['__last_update'] ok = True for move in self.browse(cr, uid, ids, context): #unlink analytic lines on move_lines for obj_line in move.line_id: for obj in obj_line.analytic_lines: self.pool.get('account.analytic.line').unlink(cr,uid,obj.id)
if journal.type not in ('purchase','sale'): continue for line in move.line_id: code = amount = 0 key = (line.account_id.id, line.tax_code_id.id) if key in account2: code = account2[key][0] amount = account2[key][1] * (line.debit + line.credit) elif line.account_id.id in account: code = account[line.account_id.id][0] amount = account[line.account_id.id][1] * (line.debit + line.credit) if (code or amount) and not (line.tax_code_id or line.tax_amount): self.pool.get('account.move.line').write(cr, uid, [line.id], { 'tax_code_id': code, 'tax_amount': amount }, context, check=False)
if journal.type in ('purchase','sale'): for line in move.line_id: code = amount = 0 key = (line.account_id.id, line.tax_code_id.id) if key in account2: code = account2[key][0] amount = account2[key][1] * (line.debit + line.credit) elif line.account_id.id in account: code = account[line.account_id.id][0] amount = account[line.account_id.id][1] * (line.debit + line.credit) if (code or amount) and not (line.tax_code_id or line.tax_amount): self.pool.get('account.move.line').write(cr, uid, [line.id], { 'tax_code_id': code, 'tax_amount': amount }, context, check=False) elif journal.centralisation: valid_moves.append(move)
def validate(self, cr, uid, ids, context={}): if context and ('__last_update' in context): del context['__last_update'] ok = True for move in self.browse(cr, uid, ids, context): #unlink analytic lines on move_lines for obj_line in move.line_id: for obj in obj_line.analytic_lines: self.pool.get('account.analytic.line').unlink(cr,uid,obj.id)
continue if journal.centralisation:
def validate(self, cr, uid, ids, context={}): if context and ('__last_update' in context): del context['__last_update'] ok = True for move in self.browse(cr, uid, ids, context): #unlink analytic lines on move_lines for obj_line in move.line_id: for obj in obj_line.analytic_lines: self.pool.get('account.analytic.line').unlink(cr,uid,obj.id)
continue
def validate(self, cr, uid, ids, context={}): if context and ('__last_update' in context): del context['__last_update'] ok = True for move in self.browse(cr, uid, ids, context): #unlink analytic lines on move_lines for obj_line in move.line_id: for obj in obj_line.analytic_lines: self.pool.get('account.analytic.line').unlink(cr,uid,obj.id)
ok = False if ok: list_ids = [] for tmp in move.line_id: list_ids.append(tmp.id) self.pool.get('account.move.line').create_analytic_lines(cr, uid, list_ids, context) return ok
for record in valid_moves: self.pool.get('account.move.line').create_analytic_lines(cr, uid, [line.id for line in record.line_id], context) return len(valid_moves) > 0
def validate(self, cr, uid, ids, context={}): if context and ('__last_update' in context): del context['__last_update'] ok = True for move in self.browse(cr, uid, ids, context): #unlink analytic lines on move_lines for obj_line in move.line_id: for obj in obj_line.analytic_lines: self.pool.get('account.analytic.line').unlink(cr,uid,obj.id)
return context['project_id']
return int(context['project_id'])
def _default_project(self, cr, uid, context={}): if 'project_id' in context and context['project_id']: return context['project_id'] return False
states=('paid')
states=('paid',)
def _product_margin(self, cr, uid, ids, field_names, arg, context=None): res = {} for val in self.browse(cr, uid, ids,context=context): res[val.id] = {} date_from=context.get('date_from', time.strftime('%Y-01-01')) date_to=context.get('date_to', time.strftime('%Y-12-31')) invoice_state=context.get('invoice_state', 'open_paid') if 'date_from' in field_names: res[val.id]['date_from']=date_from if 'date_to' in field_names: res[val.id]['date_to']=date_to if 'invoice_state' in field_names: res[val.id]['invoice_state']=invoice_state invoice_types=() states=() if invoice_state=='paid': states=('paid') elif invoice_state=='open_paid': states=('open','paid') elif invoice_state=='draft_open_paid': states=('draft','open','paid')
start_date = base_start_date and datetime.strptime(base_start_date[:10], "%Y-%m-%d") or False until_date = base_until_date and datetime.strptime(base_until_date[:10], "%Y-%m-%d") or False
start_date = base_start_date and datetime.strptime(base_start_date[:10]+ ' 00:00:00' , "%Y-%m-%d %H:%M:%S") or False until_date = base_until_date and datetime.strptime(base_until_date[:10]+ ' 23:59:59', "%Y-%m-%d %H:%M:%S") or False
def get_recurrent_ids(self, cr, uid, select, base_start_date, base_until_date, limit=100): """Gives virtual event ids for recurring events based on value of Recurrence Rule This method gives ids of dates that comes between start date and end date of calendar views @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param base_start_date: Get Start Date @param base_until_date: Get End Date @param limit: The Number of Results to Return """
if arg[0] not in ('date', unicode('date')):
if arg[0] not in ('date', unicode('date'), 'date_deadline', unicode('date_deadline')):
def search(self, cr, uid, args, offset=0, limit=100, order=None, context=None, count=False): """ Overrides orm search method. @param cr: the current row, from the database cursor, @param user: the current user’s ID for security checks, @param args: list of tuples of form [(‘name_of_the_field’, ‘operator’, value), ...]. @param offset: The Number of Results to Pass @param limit: The Number of Results to Return @param context: A standard dictionary for contextual values @param count: If its True the method returns number of records instead of ids @return: List of id """ args_without_date = [] start_date = False until_date = False for arg in args: if arg[0] not in ('date', unicode('date')): args_without_date.append(arg) else: if arg[1] in ('>', '>='): if start_date: continue start_date = arg[2] elif arg[1] in ('<', '<='): if until_date: continue until_date = arg[2] res = super(calendar_event, self).search(cr, uid, args_without_date, \ offset, limit, order, context, count)
def onchange_journal_id(self, cursor, user, statement_id, journal_id, context=None): cursor.execute('SELECT balance_end_real \
def onchange_journal_id(self, cr, uid, statement_id, journal_id, context=None): cr.execute('SELECT balance_end_real \
def onchange_journal_id(self, cursor, user, statement_id, journal_id, context=None): cursor.execute('SELECT balance_end_real \ FROM account_bank_statement \ WHERE journal_id = %s AND NOT state = %s \ ORDER BY date DESC,id DESC LIMIT 1', (journal_id, 'draft')) res = cursor.fetchone() balance_start = res and res[0] or 0.0 return {'value': {'balance_start': balance_start}}
res = cursor.fetchone()
res = cr.fetchone()
def onchange_journal_id(self, cursor, user, statement_id, journal_id, context=None): cursor.execute('SELECT balance_end_real \ FROM account_bank_statement \ WHERE journal_id = %s AND NOT state = %s \ ORDER BY date DESC,id DESC LIMIT 1', (journal_id, 'draft')) res = cursor.fetchone() balance_start = res and res[0] or 0.0 return {'value': {'balance_start': balance_start}}
return {'value': {'balance_start': balance_start}}
account_id = self.pool.get('account.journal').read(cr, uid, journal_id, ['default_debit_account_id'], context=context)['default_debit_account_id'] return {'value': {'balance_start': balance_start, 'account_id': account_id}}
def onchange_journal_id(self, cursor, user, statement_id, journal_id, context=None): cursor.execute('SELECT balance_end_real \ FROM account_bank_statement \ WHERE journal_id = %s AND NOT state = %s \ ORDER BY date DESC,id DESC LIMIT 1', (journal_id, 'draft')) res = cursor.fetchone() balance_start = res and res[0] or 0.0 return {'value': {'balance_start': balance_start}}
'section_id' : opp.section_id and opp.section_id.id or False,
'section_id' : this.section_id.id or opp.section_id.id or False,
def action_apply(self, cr, uid, ids, context=None): """ This converts Opportunity to Phonecall and opens Phonecall view @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of Opportunity to Phonecall IDs @param context: A standard dictionary for contextual values
s = decode_header(s)
s = decode_header(s.replace('\r', ''))
def _decode_header(self, s): from email.Header import decode_header s = decode_header(s) return ''.join(map(lambda x:self._to_decode(x[0], [x[1]]), s or []))
nctx = nodes.get_node_context(cr, uid, context={})
nctx = nodes.get_node_context(cr, uid, context=context)
def _data_get(self, cr, uid, ids, name, arg, context=None): if context is None: context = {} fbrl = self.browse(cr, uid, ids, context=context) nctx = nodes.get_node_context(cr, uid, context={}) # nctx will /not/ inherit the caller's context. Most of # it would be useless, anyway (like active_id, active_model, # bin_size etc.) result = {} bin_size = context.get('bin_size', False) for fbro in fbrl: if not fbro.parent_id: cr.execute("select db_datas from ir_attachment where id = %s" ,(fbro.id,)) res = cr.fetchone() datas = res[0] or '' size = len(datas) else: fnode = nodes.node_file(None, None, nctx, fbro) datas = fnode.get_data(cr, fbro) datas = base64.encodestring(datas or '') size = fnode.get_data_len(cr, fbro) if not bin_size: result[fbro.id] = datas else: result[fbro.id] = size
if not fbro.parent_id: cr.execute("select db_datas from ir_attachment where id = %s" ,(fbro.id,)) res = cr.fetchone() datas = res[0] or '' size = len(datas)
fnode = nodes.node_file(None, None, nctx, fbro) if not bin_size: data = fnode.get_data(cr, fbro) result[fbro.id] = base64.encodestring(data or '')
def _data_get(self, cr, uid, ids, name, arg, context=None): if context is None: context = {} fbrl = self.browse(cr, uid, ids, context=context) nctx = nodes.get_node_context(cr, uid, context={}) # nctx will /not/ inherit the caller's context. Most of # it would be useless, anyway (like active_id, active_model, # bin_size etc.) result = {} bin_size = context.get('bin_size', False) for fbro in fbrl: if not fbro.parent_id: cr.execute("select db_datas from ir_attachment where id = %s" ,(fbro.id,)) res = cr.fetchone() datas = res[0] or '' size = len(datas) else: fnode = nodes.node_file(None, None, nctx, fbro) datas = fnode.get_data(cr, fbro) datas = base64.encodestring(datas or '') size = fnode.get_data_len(cr, fbro) if not bin_size: result[fbro.id] = datas else: result[fbro.id] = size
fnode = nodes.node_file(None, None, nctx, fbro) datas = fnode.get_data(cr, fbro) datas = base64.encodestring(datas or '') size = fnode.get_data_len(cr, fbro) if not bin_size: result[fbro.id] = datas else: result[fbro.id] = size
result[fbro.id] = fnode.get_data_len(cr, fbro)
def _data_get(self, cr, uid, ids, name, arg, context=None): if context is None: context = {} fbrl = self.browse(cr, uid, ids, context=context) nctx = nodes.get_node_context(cr, uid, context={}) # nctx will /not/ inherit the caller's context. Most of # it would be useless, anyway (like active_id, active_model, # bin_size etc.) result = {} bin_size = context.get('bin_size', False) for fbro in fbrl: if not fbro.parent_id: cr.execute("select db_datas from ir_attachment where id = %s" ,(fbro.id,)) res = cr.fetchone() datas = res[0] or '' size = len(datas) else: fnode = nodes.node_file(None, None, nctx, fbro) datas = fnode.get_data(cr, fbro) datas = base64.encodestring(datas or '') size = fnode.get_data_len(cr, fbro) if not bin_size: result[fbro.id] = datas else: result[fbro.id] = size
and ('name' not in vals or fbro.name == vals['name']) :
and ('name' not in vals or fbro.name == vals['name']) or not fbro.parent_id:
def write(self, cr, uid, ids, vals, context=None): result = False if not isinstance(ids, list): ids = [ids] res = self.search(cr, uid, [('id', 'in', ids)]) if not len(res): return False if not self._check_duplication(cr, uid, vals, ids, 'write'): raise osv.except_osv(_('ValidateError'), _('File name must be unique!'))
'description': fields.text('Note'),
'description': fields.text('Notes'),
def _compute_day(self, cr, uid, ids, fields, args, context={}): """ @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of Openday’s IDs @return: difference between current date and log date @param context: A standard dictionary for contextual values """ cal_obj = self.pool.get('resource.calendar') res_obj = self.pool.get('resource.resource')
if (vals.has_key('alarm_id') or vals.has_key('base_calendar_alarm_id'))\ or (vals.has_key('date') or vals.has_key('duration') or vals.has_key('date_deadline')):
if ('alarm_id' in vals or 'base_calendar_alarm_id' in vals)\ or ('date' in vals or 'duration' in vals or 'date_deadline' in vals):
def write(self, cr, uid, ids, vals, context=None, check=True, update_check=True): """ Overrides orm write method. @param self: the object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of crm meeting's ids @param vals: Dictionary of field value. @param context: A standard dictionary for contextual values @return: True """ if context is None: context = {} if isinstance(ids, (str, int, long)): select = [ids] else: select = ids new_ids = [] res = False for event_id in select: real_event_id = base_calendar_id2real_id(event_id) if len(str(event_id).split('-')) > 1: data = self.read(cr, uid, event_id, ['date', 'date_deadline', \ 'rrule', 'duration']) if data.get('rrule'): data.update({ 'recurrent_uid': real_event_id, 'recurrent_id': data.get('date'), 'rrule_type': 'none', 'rrule': '' }) data.update(vals) new_id = self.copy(cr, uid, real_event_id, default=data, context=context) context.update({'active_id': new_id, 'active_ids': [new_id]}) continue if not real_event_id in new_ids: new_ids.append(real_event_id)
alarm_obj.do_alarm_create(cr, uid, new_ids, self._name, 'date', \ context=context)
alarm_obj.do_alarm_create(cr, uid, new_ids, self._name, 'date', context=context)
def write(self, cr, uid, ids, vals, context=None, check=True, update_check=True): """ Overrides orm write method. @param self: the object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of crm meeting's ids @param vals: Dictionary of field value. @param context: A standard dictionary for contextual values @return: True """ if context is None: context = {} if isinstance(ids, (str, int, long)): select = [ids] else: select = ids new_ids = [] res = False for event_id in select: real_event_id = base_calendar_id2real_id(event_id) if len(str(event_id).split('-')) > 1: data = self.read(cr, uid, event_id, ['date', 'date_deadline', \ 'rrule', 'duration']) if data.get('rrule'): data.update({ 'recurrent_uid': real_event_id, 'recurrent_id': data.get('date'), 'rrule_type': 'none', 'rrule': '' }) data.update(vals) new_id = self.copy(cr, uid, real_event_id, default=data, context=context) context.update({'active_id': new_id, 'active_ids': [new_id]}) continue if not real_event_id in new_ids: new_ids.append(real_event_id)
'state': fields.selection([('change_request', 'Change Request'),('change_proposed', 'Change Proposed'), ('in_production', 'In Production'), ('to_update', 'To Update'), ('validate', 'To Validate'), ('cancel', 'Cancel')], 'Status'), 'target_document_id': fields.many2one('document.directory', 'Target Document'), 'target':fields.binary('Target'),
'state': fields.selection([('in_production', 'In Production'), ('requested', 'Change Request'),('proposed', 'Change Proposed'), ('validated', 'To Validate'), ('cancel', 'Cancel')], 'State'), 'target_directory_id': fields.many2one('document.directory', 'Target Document'), 'target_document_id':fields.binary('Target'),
def state_done_set(self, cr, uid, ids, *args): self.write(cr, uid, ids, {'state':'done'}) return True
'state': lambda *a: 'change_request',
'state': lambda *a: 'in_production',
def state_done_set(self, cr, uid, ids, *args): self.write(cr, uid, ids, {'state':'done'}) return True
def state_set_request(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'change_request'},context=context)
def do_request(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'requested'},context=context)
def state_set_request(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'change_request'},context=context) return True
def state_set_proposed(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'change_proposed'},context=context) return True def state_set_in_production(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'in_production'}) return True def state_set_update(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'to_update'}) return True def state_set_validated(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'validate'},context=context) return True def state_set_cancel(self, cr, uid, ids, context={}):
def do_propose(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'proposed'},context=context) return True def do_validate(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'validated'},context=context) return True def do_production(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'in_production'},context=context) return True def do_cancel(self, cr, uid, ids, context={}):
def state_set_proposed(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state':'change_proposed'},context=context) return True
if abs(amount) < 10 ** -(int(config['price_accuracy'])+1):
if abs(amount) < 10 ** -(int(config['price_accuracy'])):
def validate(self, cr, uid, ids, context={}): if context and ('__last_update' in context): del context['__last_update'] ok = True for move in self.browse(cr, uid, ids, context): #unlink analytic lines on move_lines for obj_line in move.line_id: for obj in obj_line.analytic_lines: self.pool.get('account.analytic.line').unlink(cr,uid,obj.id)
password = base64.b64decode(password)
try: password = base64.b64decode(password) except: pass
def open_connection(self, cr, uid, ids, serverid=False, permission=True): if serverid: self.server[serverid] = self.getpassword(cr, uid, [serverid])[0] else: raise osv.except_osv(_('Read Error!'), _('Unable to read Server Settings')) if permission: if not self.check_permissions(cr, uid, [serverid]): raise osv.except_osv(_('Permission Error!'), _('You have no permission to access SMTP Server : %s ') % (self.server[serverid]['name'],) ) if self.server[serverid]: try: self.smtpServer[serverid] = smtplib.SMTP() self.smtpServer[serverid].debuglevel = 0 self.smtpServer[serverid].connect(str(self.server[serverid]['server']),str(self.server[serverid]['port'])) if self.server[serverid]['ssl']: self.smtpServer[serverid].ehlo() self.smtpServer[serverid].starttls() self.smtpServer[serverid].ehlo() if self.server[serverid]['auth']: password = self.server[serverid]['password'] password = base64.b64decode(password) self.smtpServer[serverid].login(str(self.server[serverid]['user']), password)
raise osv.except_osv(_('Error !'), _('You can not modify Project Time Unit as there are open or pending tasks created with current time unit.))
raise osv.except_osv(_('Error !'), _('You cannot modify Project Time Unit as there are open or pending tasks created with current time unit.'))
def write(self, cr, uid, ids,vals, context={}): task_ids=self.pool.get('project.task').search(cr, uid, [('state','in',['open', 'pending'])]) if ('project_time_mode_id' in vals) and task_ids: raise osv.except_osv(_('Error !'), _('You can not modify Project Time Unit as there are open or pending tasks created with current time unit.)) return super(res_company,self).write(cr, uid, ids, vals, context=context)
values['section_id']=case.section_id and case.section_id.id or False,
values['section_id']=case.section_id and case.section_id.id
def action_apply(self, cr, uid, ids, context=None): this = self.browse(cr, uid, ids)[0] values={} record_id = context and context.get('record_id', False) or False if record_id: for case in self.pool.get('crm.phonecall').browse(cr, uid, [record_id], context=context): values['name']=this.name values['user_id']=this.user_id and this.user_id.id values['categ_id']=case.categ_id and case.categ_id.id or False values['section_id']=case.section_id and case.section_id.id or False, values['description']=case.description or '' values['partner_id']=case.partner_id.id values['partner_address_id']=case.partner_address_id.id values['partner_mobile']=case.partner_mobile or False values['priority']=case.priority values['partner_phone']=case.partner_phone or False values['date']=this.date phonecall_proxy = self.pool.get('crm.phonecall') phonecall_id = phonecall_proxy.create(cr, uid, values, context=context) value = { 'name': _('Phone Call'), 'view_type': 'form', 'view_mode': 'form', 'res_model': 'crm.phonecall', 'view_id': False, 'type': 'ir.actions.act_window', 'res_id': phonecall_id } return value
'db_datas': fields.binary('Data', oldname='datas'), 'index_content': fields.text('Indexed Content'), 'write_date': fields.datetime('Date Modified', readonly=True), 'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True), 'create_date': fields.datetime('Date Created', readonly=True), 'create_uid': fields.many2one('res.users', 'Creator', readonly=True), 'store_method': fields.selection([('db', 'Database'), ('fs', 'Filesystem'), ('link', 'Link')], "Storing Method"), 'datas': fields.function(_data_get, method=True, fnct_inv=_data_set, string='File Content', type="binary", nodrop=True), 'url': fields.char('File URL',size=64),
def _data_set(self, cr, uid, id, name, value, arg, context): if not value: return True fbro = self.browse(cr, uid, id, context=context) nctx = nodes.get_node_context(cr, uid, context) fnode = nodes.node_file(None, None, nctx, fbro) res = fnode.set_data(cr, base64.decodestring(value), fbro) return res
'res_model': fields.char('Attached Model', size=64), 'res_id': fields.integer('Attached ID'), 'partner_id':fields.many2one('res.partner', 'Partner', select=1), 'type':fields.selection([ ('url','URL'), ('binary','Binary'), ],'Type', help="Type is used to separate URL and binary File"), 'company_id': fields.many2one('res.company', 'Company'),
def _data_set(self, cr, uid, id, name, value, arg, context): if not value: return True fbro = self.browse(cr, uid, id, context=context) nctx = nodes.get_node_context(cr, uid, context) fnode = nodes.node_file(None, None, nctx, fbro) res = fnode.set_data(cr, base64.decodestring(value), fbro) return res
'store_method': lambda *args: 'db', 'type': 'binary',
def __get_def_directory(self, cr, uid, context=None): dirobj = self.pool.get('document.directory') return dirobj._get_root_directory(cr, uid, context)
for i in data_l:
for record in data_l:
def export_data(self, cr, uid, ids, fields_to_export, context=None):
for key, value in i.items(): if key not in fields_to_export: continue
for key in fields_to_export: value = record.get(key,'')
def export_data(self, cr, uid, ids, fields_to_export, context=None):
def _get_fiscalyear(self, form):
def _get_fiscalyear(self, data):
def _get_fiscalyear(self, form): if data.get('form', False) and data['form'].get('fiscalyear_id', False): return pooler.get_pool(self.cr.dbname).get('account.fiscalyear').browse(self.cr, self.uid, data['form']['fiscalyear_id']).name return ''
def _get_company(self, form):
def _get_company(self, data):
def _get_company(self, form): if data.get('form', False) and data['form'].get('company_id', False): comp_obj = pooler.get_pool(self.cr.dbname).get('res.company').browse(self.cr, self.uid, data['form']['company_id']) return comp_obj.name
return super(scrum_product_backlog, self).name_search(cr, uid, name, args, operator,context, limit=limit)
return super(project_scrum_product_backlog, self).name_search(cr, uid, name, args, operator,context, limit=limit)
def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100): if not args: args=[] if not context: context={} match = re.match('^S\(([0-9]+)\)$', name) if match: ids = self.search(cr, uid, [('sprint_id','=', int(match.group(1)))], limit=limit, context=context) return self.name_get(cr, uid, ids, context=context) return super(scrum_product_backlog, self).name_search(cr, uid, name, args, operator,context, limit=limit)
result.append(_name_get(self.read(cr, user, product.id, ['variants','name','default_code'], context=context)))
mydict = { 'id': product.id, 'name': product.name, 'default_code': product.default_code, 'variants': product.variants } result.append(_name_get(mydict))
def _name_get(d): name = d.get('name','') code = d.get('default_code',False) if code: name = '[%s] %s' % (code,name) if d.get('variants'): name = name + ' - %s' % (d['variants'],) return (d['id'], name)
cr.execute('select id from %s where recurrent_uid=%s' , (self._table, event_id))
cr.execute("select id from %s where recurrent_uid=%%s" % (self._table), (event_id,))
def unlink_events(self, cr, uid, ids, context=None): """ This function deletes event which are linked with the event with recurrent_uid (Removes the events which refers to the same UID value) """ if not context: context = {} for event_id in ids: cr.execute('select id from %s where recurrent_uid=%s' , (self._table, event_id)) r_ids = map(lambda x: x[0], cr.fetchall()) self.unlink(cr, uid, r_ids, context=context) return True
valss['journal_id'] = data['journal_id']
vals['journal_id'] = data['journal_id']
def get_in(self, cr, uid, ids, context=None): """ Create the entry of statement in journal. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param context: A standard dictionary @return :Return of operation of product """ statement_obj = self.pool.get('account.bank.statement') res_obj = self.pool.get('res.users') product_obj = self.pool.get('product.product') bank_statement = self.pool.get('account.bank.statement.line') for data in self.read(cr, uid, ids, context=context): vals = {} curr_company = res_obj.browse(cr, uid, uid, context=context).company_id.id statement_id = statement_obj.search(cr, uid, [('journal_id', '=', data['journal_id']), ('company_id', '=', curr_company), ('user_id', '=', uid), ('state', '=', 'open')], context=context) if not statement_id: raise osv.except_osv(_('Error !'), _('You have to open at least one cashbox'))
pick_name = picking.name or ''
def create_chained_picking(self, cr, uid, moves, context=None): new_moves = [] if context is None: context = {} for picking, todo in self._chain_compute(cr, uid, moves, context=context).items(): ptype = todo[0][1][5] and todo[0][1][5] or location_obj.picking_type_get(cr, uid, todo[0][0].location_dest_id, todo[0][1][0]) pick_name = picking.name or '' if picking: pickid = self._create_chained_picking(cr, uid, pick_name,picking,ptype,todo,context) else: pickid = False for move, (loc, dummy, delay, dummy, company_id, ptype) in todo: new_id = move_obj.copy(cr, uid, move.id, { 'location_id': move.location_dest_id.id, 'location_dest_id': loc.id, 'date_moved': time.strftime('%Y-%m-%d'), 'picking_id': pickid, 'state': 'waiting', 'company_id': company_id or res_obj._company_default_get(cr, uid, 'stock.company', context=context) , 'move_history_ids': [], 'date': (datetime.strptime(move.date, '%Y-%m-%d %H:%M:%S') + relativedelta(days=delay or 0)).strftime('%Y-%m-%d'), 'move_history_ids2': []} ) move_obj.write(cr, uid, [move.id], { 'move_dest_id': new_id, 'move_history_ids': [(4, new_id)] }) new_moves.append(self.browse(cr, uid, [new_id])[0]) if pickid: wf_service.trg_validate(uid, 'stock.picking', pickid, 'button_confirm', cr) if new_moves: new_moves += create_chained_picking(self, cr, uid, new_moves, context) return new_moves
pickid = self._create_chained_picking(cr, uid, pick_name,picking,ptype,todo,context)
new_pick_name = seq_obj.get(cr, uid, 'stock.picking.' + ptype) pickid = self._create_chained_picking(cr, uid, new_pick_name, picking, ptype, todo, context=context) old_ptype = location_obj.picking_type_get(cr, uid, picking.move_lines[0].location_id, picking.move_lines[0].location_dest_id) if old_ptype != picking.type: old_pick_name = seq_obj.get(cr, uid, 'stock.picking.' + old_ptype) self.pool.get('stock.picking').write(cr, uid, picking.id, {'name': old_pick_name}, context=context)
def create_chained_picking(self, cr, uid, moves, context=None): new_moves = [] if context is None: context = {} for picking, todo in self._chain_compute(cr, uid, moves, context=context).items(): ptype = todo[0][1][5] and todo[0][1][5] or location_obj.picking_type_get(cr, uid, todo[0][0].location_dest_id, todo[0][1][0]) pick_name = picking.name or '' if picking: pickid = self._create_chained_picking(cr, uid, pick_name,picking,ptype,todo,context) else: pickid = False for move, (loc, dummy, delay, dummy, company_id, ptype) in todo: new_id = move_obj.copy(cr, uid, move.id, { 'location_id': move.location_dest_id.id, 'location_dest_id': loc.id, 'date_moved': time.strftime('%Y-%m-%d'), 'picking_id': pickid, 'state': 'waiting', 'company_id': company_id or res_obj._company_default_get(cr, uid, 'stock.company', context=context) , 'move_history_ids': [], 'date': (datetime.strptime(move.date, '%Y-%m-%d %H:%M:%S') + relativedelta(days=delay or 0)).strftime('%Y-%m-%d'), 'move_history_ids2': []} ) move_obj.write(cr, uid, [move.id], { 'move_dest_id': new_id, 'move_history_ids': [(4, new_id)] }) new_moves.append(self.browse(cr, uid, [new_id])[0]) if pickid: wf_service.trg_validate(uid, 'stock.picking', pickid, 'button_confirm', cr) if new_moves: new_moves += create_chained_picking(self, cr, uid, new_moves, context) return new_moves
def name_search(self, cr, uid, name='', args=None, operator='ilike', context=None, limit=None): if not args: args = [] if context is None: context = {} if name: ids = self.search(cr, uid, ['|',('name', operator, name),('first_name', operator, name)] + args, limit=limit, context=context) else: ids = self.search(cr, uid, args, limit=limit, context=context) return self.name_get(cr, uid, ids, context=context)
def name_get(self, cr, user, ids, context={}):
return res def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False): """ search partner job @param self: The object pointer @param cr: the current row, from the database cursor, @param user: the current user @param args: list of tuples of form [(‘name_of_the_field’, ‘operator’, value), ...]. @param offset: The Number of Results to Pass @param limit: The Number of Results to Return @param context: A standard dictionary for contextual values """ job_ids = [] for arg in args: if arg[0] == 'address_id': self._order = 'sequence_partner' elif arg[0] == 'contact_id': self._order = 'sequence_contact' contact_obj = self.pool.get('res.partner.contact') if arg[2] and not count: search_arg = ['|', ('first_name', 'ilike', arg[2]), ('name', 'ilike', arg[2])] contact_ids = contact_obj.search(cr, user, search_arg, offset=offset, limit=limit, order=order, context=context, count=count) if not contact_ids: continue contacts = contact_obj.browse(cr, user, contact_ids, context=context) for contact in contacts: job_ids.extend([item.id for item in contact.job_ids]) res = super(res_partner_job,self).search(cr, user, args, offset=offset,\ limit=limit, order=order, context=context, count=count) if job_ids: res = list(set(res + job_ids))
def name_get(self, cr, uid, ids, context=None): """ @param self: The object pointer @param cr: the current row, from the database cursor, @param user: the current user, @param ids: List of partner address’s IDs @param context: A standard dictionary for contextual values """ if context is None: context = {}
produc_id = self.pool.get('stock.location').search(cr, uid, [('name','=','Production')])[0]
product_id = self.pool.get('stock.location').search(cr, uid, [('name','=','Production')])[0]
def onchange_operation_type(self, cr, uid, ids, type, guarantee_limit): """ On change of operation type it sets source location, destination location and to invoice field. @param product: Changed operation type. @param guarantee_limit: Guarantee limit of current record. @return: Dictionary of values. """ if not type: return {'value': { 'location_id': False, 'location_dest_id': False } } produc_id = self.pool.get('stock.location').search(cr, uid, [('name','=','Production')])[0]
'location_id': produc_id,
'location_id': product_id,
def onchange_operation_type(self, cr, uid, ids, type, guarantee_limit): """ On change of operation type it sets source location, destination location and to invoice field. @param product: Changed operation type. @param guarantee_limit: Guarantee limit of current record. @return: Dictionary of values. """ if not type: return {'value': { 'location_id': False, 'location_dest_id': False } } produc_id = self.pool.get('stock.location').search(cr, uid, [('name','=','Production')])[0]
'location_dest_id': produc_id
'location_dest_id': product_id
def onchange_operation_type(self, cr, uid, ids, type, guarantee_limit): """ On change of operation type it sets source location, destination location and to invoice field. @param product: Changed operation type. @param guarantee_limit: Guarantee limit of current record. @return: Dictionary of values. """ if not type: return {'value': { 'location_id': False, 'location_dest_id': False } } produc_id = self.pool.get('stock.location').search(cr, uid, [('name','=','Production')])[0]
if not pick.move_lines: return False
def test_cancel(self, cr, uid, ids, context=None): """ Test whether the move lines are canceled or not. @return: True or False """ for pick in self.browse(cr, uid, ids, context=context): if not pick.move_lines: return False for move in pick.move_lines: if move.state not in ('cancel',): return False return True
create or replace view report_stock_move as ( select
CREATE OR REPLACE view report_stock_move AS ( SELECT
def init(self, cr): tools.drop_view_if_exists(cr, 'report_stock_move') cr.execute(""" create or replace view report_stock_move as ( select min(sm_id) as id, sum(value) as value, al.dp as date, al.curr_year as year, al.curr_month as month, al.curr_day as day, al.curr_day_diff as day_diff, al.curr_day_diff1 as day_diff1, al.curr_day_diff2 as day_diff2, al.location_id as location_id, al.picking_id as picking_id, al.company_id as company_id, al.location_dest_id as location_dest_id, al.product_qty, al.out_qty as product_qty_out, al.in_qty as product_qty_in, al.address_id as partner_id, al.product_id as product_id, al.state as state , al.product_uom as product_uom, al.categ_id as categ_id, coalesce(al.type, 'other') as type, al.stock_journal as stock_journal FROM (SELECT
sum(value) as value,
def init(self, cr): tools.drop_view_if_exists(cr, 'report_stock_move') cr.execute(""" create or replace view report_stock_move as ( select min(sm_id) as id, sum(value) as value, al.dp as date, al.curr_year as year, al.curr_month as month, al.curr_day as day, al.curr_day_diff as day_diff, al.curr_day_diff1 as day_diff1, al.curr_day_diff2 as day_diff2, al.location_id as location_id, al.picking_id as picking_id, al.company_id as company_id, al.location_dest_id as location_dest_id, al.product_qty, al.out_qty as product_qty_out, al.in_qty as product_qty_in, al.address_id as partner_id, al.product_id as product_id, al.state as state , al.product_uom as product_uom, al.categ_id as categ_id, coalesce(al.type, 'other') as type, al.stock_journal as stock_journal FROM (SELECT
al.stock_journal as stock_journal
al.stock_journal as stock_journal, sum(al.in_value - al.out_value) as value
def init(self, cr): tools.drop_view_if_exists(cr, 'report_stock_move') cr.execute(""" create or replace view report_stock_move as ( select min(sm_id) as id, sum(value) as value, al.dp as date, al.curr_year as year, al.curr_month as month, al.curr_day as day, al.curr_day_diff as day_diff, al.curr_day_diff1 as day_diff1, al.curr_day_diff2 as day_diff2, al.location_id as location_id, al.picking_id as picking_id, al.company_id as company_id, al.location_dest_id as location_dest_id, al.product_qty, al.out_qty as product_qty_out, al.in_qty as product_qty_in, al.address_id as partner_id, al.product_id as product_id, al.state as state , al.product_uom as product_uom, al.categ_id as categ_id, coalesce(al.type, 'other') as type, al.stock_journal as stock_journal FROM (SELECT
sum(sm.product_qty) END AS out_qty,
sum(sm.product_qty * pu.factor) ELSE 0.0 END AS out_qty,
def init(self, cr): tools.drop_view_if_exists(cr, 'report_stock_move') cr.execute(""" create or replace view report_stock_move as ( select min(sm_id) as id, sum(value) as value, al.dp as date, al.curr_year as year, al.curr_month as month, al.curr_day as day, al.curr_day_diff as day_diff, al.curr_day_diff1 as day_diff1, al.curr_day_diff2 as day_diff2, al.location_id as location_id, al.picking_id as picking_id, al.company_id as company_id, al.location_dest_id as location_dest_id, al.product_qty, al.out_qty as product_qty_out, al.in_qty as product_qty_in, al.address_id as partner_id, al.product_id as product_id, al.state as state , al.product_uom as product_uom, al.categ_id as categ_id, coalesce(al.type, 'other') as type, al.stock_journal as stock_journal FROM (SELECT
sum(sm.product_qty) END AS in_qty,
sum(sm.product_qty * pu.factor) ELSE 0.0 END AS in_qty, CASE WHEN sp.type in ('out','delivery') THEN sum(sm.product_qty * pu.factor) * pt.standard_price ELSE 0.0 END AS out_value, CASE WHEN sp.type in ('in') THEN sum(sm.product_qty * pu.factor) * pt.standard_price ELSE 0.0 END AS in_value,
def init(self, cr): tools.drop_view_if_exists(cr, 'report_stock_move') cr.execute(""" create or replace view report_stock_move as ( select min(sm_id) as id, sum(value) as value, al.dp as date, al.curr_year as year, al.curr_month as month, al.curr_day as day, al.curr_day_diff as day_diff, al.curr_day_diff1 as day_diff1, al.curr_day_diff2 as day_diff2, al.location_id as location_id, al.picking_id as picking_id, al.company_id as company_id, al.location_dest_id as location_dest_id, al.product_qty, al.out_qty as product_qty_out, al.in_qty as product_qty_in, al.address_id as partner_id, al.product_id as product_id, al.state as state , al.product_uom as product_uom, al.categ_id as categ_id, coalesce(al.type, 'other') as type, al.stock_journal as stock_journal FROM (SELECT
sum(sm.product_qty) as product_qty , (pt.standard_price *pu.factor* sum(sm.product_qty)) as value,
sum(sm.product_qty) as product_qty,
def init(self, cr): tools.drop_view_if_exists(cr, 'report_stock_move') cr.execute(""" create or replace view report_stock_move as ( select min(sm_id) as id, sum(value) as value, al.dp as date, al.curr_year as year, al.curr_month as month, al.curr_day as day, al.curr_day_diff as day_diff, al.curr_day_diff1 as day_diff1, al.curr_day_diff2 as day_diff2, al.location_id as location_id, al.picking_id as picking_id, al.company_id as company_id, al.location_dest_id as location_dest_id, al.product_qty, al.out_qty as product_qty_out, al.in_qty as product_qty_in, al.address_id as partner_id, al.product_id as product_id, al.state as state , al.product_uom as product_uom, al.categ_id as categ_id, coalesce(al.type, 'other') as type, al.stock_journal as stock_journal FROM (SELECT
sp.stock_journal_id as stock_journal from
sp.stock_journal_id AS stock_journal FROM
def init(self, cr): tools.drop_view_if_exists(cr, 'report_stock_move') cr.execute(""" create or replace view report_stock_move as ( select min(sm_id) as id, sum(value) as value, al.dp as date, al.curr_year as year, al.curr_month as month, al.curr_day as day, al.curr_day_diff as day_diff, al.curr_day_diff1 as day_diff1, al.curr_day_diff2 as day_diff2, al.location_id as location_id, al.picking_id as picking_id, al.company_id as company_id, al.location_dest_id as location_dest_id, al.product_qty, al.out_qty as product_qty_out, al.in_qty as product_qty_in, al.address_id as partner_id, al.product_id as product_id, al.state as state , al.product_uom as product_uom, al.categ_id as categ_id, coalesce(al.type, 'other') as type, al.stock_journal as stock_journal FROM (SELECT
left join stock_picking sp on (sm.picking_id=sp.id) left join product_product pp on (sm.product_id=pp.id) left join product_uom pu on (sm.product_uom=pu.id) left join product_template pt on (pp.product_tmpl_id=pt.id) left join stock_location sl on (sm.location_id = sl.id) group by
LEFT JOIN stock_picking sp ON (sm.picking_id=sp.id) LEFT JOIN product_product pp ON (sm.product_id=pp.id) LEFT JOIN product_uom pu ON (sm.product_uom=pu.id) LEFT JOIN product_template pt ON (pp.product_tmpl_id=pt.id) LEFT JOIN stock_location sl ON (sm.location_id = sl.id) GROUP BY
def init(self, cr): tools.drop_view_if_exists(cr, 'report_stock_move') cr.execute(""" create or replace view report_stock_move as ( select min(sm_id) as id, sum(value) as value, al.dp as date, al.curr_year as year, al.curr_month as month, al.curr_day as day, al.curr_day_diff as day_diff, al.curr_day_diff1 as day_diff1, al.curr_day_diff2 as day_diff2, al.location_id as location_id, al.picking_id as picking_id, al.company_id as company_id, al.location_dest_id as location_dest_id, al.product_qty, al.out_qty as product_qty_out, al.in_qty as product_qty_in, al.address_id as partner_id, al.product_id as product_id, al.state as state , al.product_uom as product_uom, al.categ_id as categ_id, coalesce(al.type, 'other') as type, al.stock_journal as stock_journal FROM (SELECT
as al group by
AS al GROUP BY
def init(self, cr): tools.drop_view_if_exists(cr, 'report_stock_move') cr.execute(""" create or replace view report_stock_move as ( select min(sm_id) as id, sum(value) as value, al.dp as date, al.curr_year as year, al.curr_month as month, al.curr_day as day, al.curr_day_diff as day_diff, al.curr_day_diff1 as day_diff1, al.curr_day_diff2 as day_diff2, al.location_id as location_id, al.picking_id as picking_id, al.company_id as company_id, al.location_dest_id as location_dest_id, al.product_qty, al.out_qty as product_qty_out, al.in_qty as product_qty_in, al.address_id as partner_id, al.product_id as product_id, al.state as state , al.product_uom as product_uom, al.categ_id as categ_id, coalesce(al.type, 'other') as type, al.stock_journal as stock_journal FROM (SELECT
create or replace view report_stock_inventory as ( (select
CREATE OR REPLACE view report_stock_inventory AS ( (SELECT
def init(self, cr): tools.drop_view_if_exists(cr, 'report_stock_inventory') cr.execute("""