[ADD] v8 api

commit 4d24e983c1
parent 1f486da4cd
Author: Holger Brunn
Date:   2017-04-02 17:38:28 +02:00


@@ -22,20 +22,20 @@ import pytz
 import logging
 from datetime import datetime
 from dateutil.relativedelta import relativedelta
-from openerp.osv.orm import Model
-from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
+from openerp import api, fields, models
 
 
-class IrAttachment(Model):
+class IrAttachment(models.Model):
     _inherit = 'ir.attachment'
 
-    def _attachments_to_filesystem_init(self, cr, uid, context=None):
+    @api.model
+    def _attachments_to_filesystem_init(self):
         """Set up config parameter and cron job"""
         module_name = __name__.split('.')[-3]
-        ir_model_data = self.pool['ir.model.data']
-        ir_cron = self.pool['ir.cron']
-        location = self.pool['ir.config_parameter'].get_param(
-            cr, uid, 'ir_attachment.location')
+        ir_model_data = self.env['ir.model.data']
+        location = self.env['ir.config_parameter'].get_param(
+            'ir_attachment.location'
+        )
         if location:
             # we assume the user knows what she's doing. Might be file:, but
             # also whatever other scheme shouldn't matter. We want to bring
@@ -43,22 +43,23 @@ class IrAttachment(Model):
             pass
         else:
             ir_model_data._update(
-                cr, uid, 'ir.config_parameter', module_name,
+                'ir.config_parameter', module_name,
                 {
                     'key': 'ir_attachment.location',
                     'value': 'file',
                 },
-                xml_id='config_parameter_ir_attachment_location',
-                context=context)
+                xml_id='config_parameter_ir_attachment_location'
+            )
 
         # synchronous behavior
-        if self.pool['ir.config_parameter'].get_param(
-                cr, uid, 'attachments_to_filesystem.move_during_init'):
-            self._attachments_to_filesystem_cron(cr, uid, context, limit=None)
+        if self.env['ir.config_parameter'].get_param(
+                'attachments_to_filesystem.move_during_init'
+        ):
+            self._attachments_to_filesystem_cron(limit=None)
             return
 
         # otherwise, configure our cronjob to run next night
-        user = self.pool['res.users'].browse(cr, uid, uid, context=context)
+        user = self.env.user
         next_night = datetime.now() + relativedelta(
             hour=01, minute=42, second=0)
         user_tz = user.tz or 'UTC'
@@ -66,37 +67,29 @@ class IrAttachment(Model):
             pytz.utc).replace(tzinfo=None)
         if next_night < datetime.now():
             next_night += relativedelta(days=1)
-        ir_cron.write(
-            cr, uid,
-            [
-                ir_model_data.get_object_reference(
-                    cr, uid, module_name, 'cron_move_attachments')[1],
-            ],
-            {
-                'nextcall':
-                next_night.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
-                'doall': True,
-                'interval_type': 'days',
-                'interval_number': 1,
-            },
-            context=context)
+        self.env.ref('%s.cron_move_attachments' % module_name).write({
+            'nextcall': fields.Datetime.to_string(next_night),
+            'doall': True,
+            'interval_type': 'days',
+            'interval_number': 1,
+        })
 
-    def _attachments_to_filesystem_cron(self, cr, uid, context=None,
-                                        limit=10000):
+    @api.model
+    def _attachments_to_filesystem_cron(self, limit=10000):
         """Do the actual moving"""
         limit = int(
-            self.pool['ir.config_parameter'].get_param(
-                cr, uid, 'attachments_to_filesystem.limit', '0')) or limit
-        ir_attachment = self.pool['ir.attachment']
+            self.env['ir.config_parameter'].get_param(
+                'attachments_to_filesystem.limit', '0')) or limit
+        ir_attachment = self.env['ir.attachment']
         attachment_ids = ir_attachment.search(
-            cr, uid, [('db_datas', '!=', False)], limit=limit, context=context)
+            [('db_datas', '!=', False)], limit=limit)
         logging.info('moving %d attachments to filestore', len(attachment_ids))
         # attachments can be big, so we read every attachment on its own
         for counter, attachment_id in enumerate(attachment_ids, start=1):
             attachment_data = ir_attachment.read(
-                cr, uid, [attachment_id], ['datas', 'res_model'],
-                context=context)[0]
-            if attachment_data['res_model'] and not self.pool.get(
+                [attachment_id], ['datas', 'res_model']
+            )[0]
+            if attachment_data['res_model'] and not self.env.registry.get(
                     attachment_data['res_model']):
                 logging.warning(
                     'not moving attachment %d because it links to unknown '
@@ -104,12 +97,11 @@ class IrAttachment(Model):
                 continue
             try:
                 ir_attachment.write(
-                    cr, uid, [attachment_id],
+                    [attachment_id],
                     {
                         'datas': attachment_data['datas'],
                         'db_datas': False,
-                    },
-                    context=context)
+                    })
             except Exception:
                 logging.exception('Error moving attachment #%d', attachment_id)
             if not counter % (len(attachment_ids) / 100 or limit):
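
The pattern applied throughout this commit is the usual v7-to-v8 API conversion: the explicit cr, uid, context plumbing and self.pool registry lookups are replaced by self.env, and model-level methods gain the @api.model decorator. Below is a minimal sketch of that conversion, assuming an OpenERP/Odoo 8 installation; the model and method names are illustrative only and are not part of this module.

# Illustrative sketch only (not part of this module): the same old-API ->
# new-API conversion this commit applies, shown on a hypothetical method.
# Requires an OpenERP/Odoo 8 environment to import.
from openerp import api, models


class ResPartner(models.Model):
    _inherit = 'res.partner'

    # v7 style, kept as a comment for contrast:
    # def _count_companies(self, cr, uid, context=None):
    #     partner_pool = self.pool['res.partner']
    #     ids = partner_pool.search(
    #         cr, uid, [('is_company', '=', True)], context=context)
    #     return len(ids)

    @api.model
    def _count_companies(self):
        # v8 style: no cr/uid/context arguments, records come from self.env
        return self.env['res.partner'].search_count(
            [('is_company', '=', True)])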