Skip to content

Commit

Permalink
Merge pull request #27 from ITVaan/dev_item_status_change_while_processing
Browse files Browse the repository at this point in the history

Added processing of the case where item status was changed while the bot was processing it
  • Loading branch information
IrynaPomazan authored May 25, 2017
2 parents 7c22ad8 + 85a3738 commit 915ba7d
Show file tree
Hide file tree
Showing 3 changed files with 163 additions and 34 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -39,3 +39,4 @@
# Journal MESSAGE_ID constants used as structured-logging identifiers
# (passed via journal_context(...) in upload_file.py).
DATABRIDGE_DOC_SERVICE_CONN_ERROR = 'edr_databridge_doc_service_conn_error'
DATABRIDGE_PROXY_SERVER_CONN_ERROR = 'edr_databridge_proxy_server_conn_error'
DATABRIDGE_422_UPLOAD_TO_TENDER = 'edr_databridge_422_upload_to_tender'
# Logged when the tender API answers 403 during document upload, i.e. the
# award/qualification item changed status while the bot was processing it.
DATABRIDGE_ITEM_STATUS_CHANGED_WHILE_PROCESSING = 'edr_databridge_item_status_changed_while_processing'
56 changes: 42 additions & 14 deletions openprocurement/bot/identification/databridge/upload_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,7 @@
DATABRIDGE_SUCCESS_UPLOAD_TO_DOC_SERVICE, DATABRIDGE_UNSUCCESS_UPLOAD_TO_DOC_SERVICE,
DATABRIDGE_UNSUCCESS_RETRY_UPLOAD_TO_DOC_SERVICE, DATABRIDGE_SUCCESS_UPLOAD_TO_TENDER,
DATABRIDGE_UNSUCCESS_UPLOAD_TO_TENDER, DATABRIDGE_UNSUCCESS_RETRY_UPLOAD_TO_TENDER, DATABRIDGE_START_UPLOAD,
DATABRIDGE_422_UPLOAD_TO_TENDER
)
DATABRIDGE_422_UPLOAD_TO_TENDER, DATABRIDGE_ITEM_STATUS_CHANGED_WHILE_PROCESSING)
from openprocurement.bot.identification.databridge.constants import file_name

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -53,7 +52,7 @@ def upload_to_doc_service(self):
object to upload_file_to_tender, otherwise put Data to retry_upload_file_queue."""
while not self.exit:
try:
tender_data = self.upload_to_doc_service_queue.get()
tender_data = self.upload_to_doc_service_queue.peek()
document_id = tender_data.file_content.get('meta', {}).get('id')
except LoopExit:
gevent.sleep(0)
Expand All @@ -70,11 +69,13 @@ def upload_to_doc_service(self):
"ITEM_ID": tender_data.item_id, "DOCUMENT_ID": document_id}))
logger.exception(e)
self.retry_upload_to_doc_service_queue.put(tender_data)
self.upload_to_doc_service_queue.get()
else:
if response.status_code == 200:
data = Data(tender_data.tender_id, tender_data.item_id, tender_data.code,
tender_data.item_name, tender_data.edr_ids, dict(response.json(), **{'meta': {'id': document_id}}))
self.upload_to_tender_queue.put(data)
self.upload_to_doc_service_queue.get()
logger.info('Successfully uploaded file to doc service {} {} {} {}'.format(
tender_data.tender_id, tender_data.item_name, tender_data.item_id, document_id),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_SUCCESS_UPLOAD_TO_DOC_SERVICE},
Expand All @@ -86,14 +87,15 @@ def upload_to_doc_service(self):
params={"TENDER_ID": tender_data.tender_id,
"ITEM_ID": tender_data.item_id, "DOCUMENT_ID": document_id}))
self.retry_upload_to_doc_service_queue.put(tender_data)
self.upload_to_doc_service_queue.get()
gevent.sleep(0)

def retry_upload_to_doc_service(self):
"""Get data from retry_upload_to_doc_service_queue; If upload were successful put Data obj to
upload_to_tender_queue, otherwise put Data obj back to retry_upload_file_queue"""
while not self.exit:
try:
tender_data = self.retry_upload_to_doc_service_queue.get()
tender_data = self.retry_upload_to_doc_service_queue.peek()
document_id = tender_data.file_content.get('meta', {}).get('id')
except LoopExit:
gevent.sleep(0)
Expand All @@ -110,12 +112,15 @@ def retry_upload_to_doc_service(self):
params={"TENDER_ID": tender_data.tender_id,
"ITEM_ID": tender_data.item_id, "DOCUMENT_ID": document_id}))
logger.exception(e)
self.retry_upload_to_doc_service_queue.get()
self.update_processing_items(tender_data.tender_id, tender_data.item_id)
raise e
else:
if response.status_code == 200:
data = Data(tender_data.tender_id, tender_data.item_id, tender_data.code,
tender_data.item_name, tender_data.edr_ids, dict(response.json(), **{'meta': {'id': document_id}}))
self.upload_to_tender_queue.put(data)
self.retry_upload_to_doc_service_queue.get()
logger.info('Successfully uploaded file to doc service {} {} {} {}'.format(
tender_data.tender_id, tender_data.item_name, tender_data.item_id, document_id),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_SUCCESS_UPLOAD_TO_DOC_SERVICE},
Expand All @@ -126,7 +131,6 @@ def retry_upload_to_doc_service(self):
extra=journal_context({"MESSAGE_ID": DATABRIDGE_UNSUCCESS_RETRY_UPLOAD_TO_DOC_SERVICE},
params={"TENDER_ID": tender_data.tender_id,
"ITEM_ID": tender_data.item_id, "DOCUMENT_ID": document_id}))
self.retry_upload_to_doc_service_queue.put(tender_data)
gevent.sleep(0)

@retry(stop_max_attempt_number=5, wait_exponential_multiplier=1000)
Expand All @@ -141,7 +145,7 @@ def upload_to_tender(self):
award/qualification from processing_items."""
while not self.exit:
try:
tender_data = self.upload_to_tender_queue.get()
tender_data = self.upload_to_tender_queue.peek()
except LoopExit:
gevent.sleep(0)
continue
Expand All @@ -156,36 +160,52 @@ def upload_to_tender(self):
tender_data.item_id))
except ResourceError as re:
if re.status_int == 422: # WARNING and don't retry
logger.warning("Accept 422, skip tender {} {} {} {}.".format(tender_data.tender_id,
logger.warning("Accept 422, skip tender {} {} {} {}. Message: {}".format(tender_data.tender_id,
tender_data.item_name,
tender_data.item_id, document_id),
tender_data.item_id, document_id, re.msg),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_422_UPLOAD_TO_TENDER},
{"TENDER_ID": tender_data.tender_id, "DOCUMENT_ID": document_id}))
self.update_processing_items(tender_data.tender_id, tender_data.item_id)
self.upload_to_tender_queue.get()
continue
elif re.status_int == 403:
logger.warning("Accept 403 while uploading to tender {} {} {} doc_id: {}. Message {}".format(
tender_data.tender_id, tender_data.item_name, tender_data.item_id, document_id, re.msg),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_ITEM_STATUS_CHANGED_WHILE_PROCESSING},
{"TENDER_ID": tender_data.tender_id, "DOCUMENT_ID": document_id})
)
self.update_processing_items(tender_data.tender_id, tender_data.item_id)
self.upload_to_tender_queue.get()
else:
logger.warning('Exception while retry uploading file to tender {} {} {} {}. Message: {}'.format(
tender_data.tender_id, tender_data.item_name, tender_data.item_id, document_id, re.message),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_UNSUCCESS_RETRY_UPLOAD_TO_TENDER},
params={"TENDER_ID": tender_data.tender_id, "DOCUMENT_ID": document_id}))
self.retry_upload_to_tender_queue.put(tender_data)
self.upload_to_tender_queue.get()
except Exception as e:
logger.info('Exception while uploading file to tender {} {} {} {}. Message: {}'.format(
tender_data.tender_id, tender_data.item_name, tender_data.item_id, document_id, e.message),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_UNSUCCESS_UPLOAD_TO_TENDER},
params={"TENDER_ID": tender_data.tender_id, "DOCUMENT_ID": document_id}))
self.retry_upload_to_tender_queue.put(tender_data)
self.upload_to_tender_queue.get()
else:
logger.info('Successfully uploaded file to tender {} {} {} {}'.format(
tender_data.tender_id, tender_data.item_name, tender_data.item_id, document_id),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_SUCCESS_UPLOAD_TO_TENDER},
params={"TENDER_ID": tender_data.tender_id, "DOCUMENT_ID": document_id}))
# delete current tender after successful upload file (to avoid reloading file)
self.update_processing_items(tender_data.tender_id, tender_data.item_id)
self.upload_to_tender_queue.get()
gevent.sleep(0)

def retry_upload_to_tender(self):
"""Get data from retry_upload_to_tender_queue; If upload was unsuccessful put Data obj back to
retry_upload_to_tender_queue"""
while not self.exit:
try:
tender_data = self.retry_upload_to_tender_queue.get()
tender_data = self.retry_upload_to_tender_queue.peek()
except LoopExit:
gevent.sleep(0)
continue
Expand All @@ -195,33 +215,41 @@ def retry_upload_to_tender(self):
self.client_upload_to_tender(tender_data)
except ResourceError as re:
if re.status_int == 422: # WARNING and don't retry
logger.warning("Accept 422, skip tender {} {} {} {}.".format(tender_data.tender_id, tender_data.item_name,
tender_data.item_id, document_id),
logger.warning("Accept 422, skip tender {} {} {} {}. Message {}".format(tender_data.tender_id, tender_data.item_name,
tender_data.item_id, document_id, re.msg),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_422_UPLOAD_TO_TENDER},
{"TENDER_ID": tender_data.tender_id, "DOCUMENT_ID": document_id}))
self.update_processing_items(tender_data.tender_id, tender_data.item_id)
self.retry_upload_to_tender_queue.get()
continue
elif re.status_int == 403:
logger.warning("Accept 403 while uploading to tender {} {} {} doc_id: {}. Message {}".format(
tender_data.tender_id, tender_data.item_name, tender_data.item_id, document_id, re.msg),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_ITEM_STATUS_CHANGED_WHILE_PROCESSING},
{"TENDER_ID": tender_data.tender_id, "DOCUMENT_ID": document_id})
)
self.update_processing_items(tender_data.tender_id, tender_data.item_id)
self.retry_upload_to_tender_queue.get()
continue
else:
logger.info('Exception while retry uploading file to tender {} {} {} {}. Message: {}'.format(
tender_data.tender_id, tender_data.item_name, tender_data.item_id, document_id, re.message),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_UNSUCCESS_RETRY_UPLOAD_TO_TENDER},
params={"TENDER_ID": tender_data.tender_id, "DOCUMENT_ID": document_id}))
logger.exception(re)
self.retry_upload_to_tender_queue.put(tender_data)
except Exception as e:
logger.info('Exception while retry uploading file to tender {} {} {} {}. Message: {}'.format(
tender_data.tender_id, tender_data.item_name, tender_data.item_id, document_id, e.message),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_UNSUCCESS_RETRY_UPLOAD_TO_TENDER},
params={"TENDER_ID": tender_data.tender_id, "DOCUMENT_ID": document_id}))
logger.exception(e)
self.retry_upload_to_tender_queue.put(tender_data)
else:
logger.info('Successfully uploaded file to tender {} {} {} {} in retry'.format(
tender_data.tender_id, tender_data.item_name, tender_data.item_id, document_id),
extra=journal_context({"MESSAGE_ID": DATABRIDGE_SUCCESS_UPLOAD_TO_TENDER},
params={"TENDER_ID": tender_data.tender_id, "DOCUMENT_ID": document_id}))
# delete current tender after successful upload file (to avoid reloading file)
self.update_processing_items(tender_data.tender_id, tender_data.item_id)
self.retry_upload_to_tender_queue.get()
gevent.sleep(0)

@retry(stop_max_attempt_number=5, wait_exponential_multiplier=1000)
Expand Down
Loading

0 comments on commit 915ba7d

Please sign in to comment.