Skip to content

Commit

Permalink
Introduce a 30-second buffer between 2 batches of imports to avoid missing records.
Browse files Browse the repository at this point in the history

The 'created_at' date on Magento is set at the beginning of a transaction, and
the transaction may be committed seconds later. The buffer allows these
records to be included in the imports.
  • Loading branch information
guewen committed May 26, 2014
1 parent a4b2648 commit b3e3910
Show file tree
Hide file tree
Showing 2 changed files with 62 additions and 10 deletions.
54 changes: 45 additions & 9 deletions magentoerpconnect/magento_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
##############################################################################

import logging
from datetime import datetime
from datetime import datetime, timedelta
from openerp.osv import fields, orm
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
import openerp.addons.connector as connector
Expand All @@ -44,6 +44,8 @@

_logger = logging.getLogger(__name__)

IMPORT_DELTA_BUFFER = 30 # seconds


class magento_backend(orm.Model):
_name = 'magento.backend'
Expand Down Expand Up @@ -231,7 +233,7 @@ def _import_from_date(self, cr, uid, ids, model, from_date_field, context=None):
ids = [ids]
self.check_magento_structure(cr, uid, ids, context=context)
session = ConnectorSession(cr, uid, context=context)
import_start_time = datetime.now().strftime(DEFAULT_SERVER_DATETIME_FORMAT)
import_start_time = datetime.now()
for backend in self.browse(cr, uid, ids, context=context):
from_date = getattr(backend, from_date_field)
if from_date:
Expand All @@ -241,8 +243,18 @@ def _import_from_date(self, cr, uid, ids, model, from_date_field, context=None):
from_date = None
import_batch.delay(session, model,
backend.id, filters={'from_date': from_date})
self.write(cr, uid, ids,
{from_date_field: import_start_time})
# Records from Magento are imported based on their `created_at`
# date. This date is set on Magento at the beginning of a
# transaction, so if the import is run between the beginning and
# the end of a transaction, the import of a record may be
# missed. That's why we add a small buffer back in time where
# the eventually missed records will be retrieved. This also
# means that we'll have jobs that import twice the same records,
# but this is not a big deal because they will be skipped when
# the last `sync_date` is the same.
next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER)
next_time = next_time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
self.write(cr, uid, ids, {from_date_field: next_time}, context=context)

def import_product_categories(self, cr, uid, ids, context=None):
self._import_from_date(cr, uid, ids, 'magento.product.category',
Expand Down Expand Up @@ -345,7 +357,7 @@ def import_partners(self, cr, uid, ids, context=None):
if not hasattr(ids, '__iter__'):
ids = [ids]
session = ConnectorSession(cr, uid, context=context)
import_start_time = datetime.now().strftime(DEFAULT_SERVER_DATETIME_FORMAT)
import_start_time = datetime.now()
for website in self.browse(cr, uid, ids, context=context):
backend_id = website.backend_id.id
if website.import_partners_from_date:
Expand All @@ -358,8 +370,19 @@ def import_partners(self, cr, uid, ids, context=None):
session, 'magento.res.partner', backend_id,
{'magento_website_id': website.magento_id,
'from_date': from_date})
self.write(cr, uid, ids,
{'import_partners_from_date': import_start_time})
# Records from Magento are imported based on their `created_at`
# date. This date is set on Magento at the beginning of a
# transaction, so if the import is run between the beginning and
# the end of a transaction, the import of a record may be
# missed. That's why we add a small buffer back in time where
# the eventually missed records will be retrieved. This also
# means that we'll have jobs that import twice the same records,
# but this is not a big deal because they will be skipped when
# the last `sync_date` is the same.
next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER)
next_time = next_time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
self.write(cr, uid, ids, {'import_partners_from_date': next_time},
context=context)
return True


Expand Down Expand Up @@ -502,7 +525,7 @@ class magento_storeview(orm.Model):

def import_sale_orders(self, cr, uid, ids, context=None):
session = ConnectorSession(cr, uid, context=context)
import_start_time = datetime.now().strftime(DEFAULT_SERVER_DATETIME_FORMAT)
import_start_time = datetime.now()
for storeview in self.browse(cr, uid, ids, context=context):
if storeview.no_sales_order_sync:
_logger.debug("The storeview '%s' is active in Magento "
Expand All @@ -523,7 +546,20 @@ def import_sale_orders(self, cr, uid, ids, context=None):
{'magento_storeview_id': storeview.magento_id,
'from_date': from_date},
priority=1) # executed as soon as possible
self.write(cr, uid, ids, {'import_orders_from_date': import_start_time})
# Records from Magento are imported based on their `created_at`
# date. This date is set on Magento at the beginning of a
# transaction, so if the import is run between the beginning and
# the end of a transaction, the import of a record may be
# missed. That's why we add a small buffer back in time where
# the eventually missed records will be retrieved. This also
# means that we'll have jobs that import twice the same records,
# but this is not a big deal because the sales orders will be
# imported the first time and the jobs will be skipped on the
# subsequent imports
next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER)
next_time = next_time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
self.write(cr, uid, ids, {'import_orders_from_date': next_time},
context=context)
return True


Expand Down
18 changes: 17 additions & 1 deletion magentoerpconnect/sale.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,9 @@
import logging
import xmlrpclib
from datetime import datetime, timedelta
from openerp.osv import fields, orm
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, orm
from openerp.tools.translate import _
from openerp.addons.connector.connector import ConnectorUnit
from openerp.addons.connector.exception import (NothingToDoJob,
FailedJobError,
Expand Down Expand Up @@ -373,6 +374,21 @@ def mapper(self):
self._mapper = self.environment.get_connector_unit(SaleOrderImportMapper)
return self._mapper

def _must_skip(self):
""" Hook called right after we read the data from the backend.
If the method returns a message giving a reason for the
skipping, the import will be interrupted and the message
recorded in the job (if the import is called directly by the
job, not by dependencies).
If it returns None, the import will continue normally.
:returns: None | str | unicode
"""
if self.binder.to_openerp(self.magento_id):
return _('Already imported')

def _clean_magento_items(self, resource):
"""
Method that clean the sale order line given by magento before importing it
Expand Down

0 comments on commit b3e3910

Please sign in to comment.