Class: Edi::Amazon::FeedSubmissionResultProcessor
- Inherits:
-
BaseEdiService
- Object
- BaseService
- BaseEdiService
- Edi::Amazon::FeedSubmissionResultProcessor
- Defined in:
- app/services/edi/amazon/feed_submission_result_processor.rb
Constant Summary
Constants included from Edi::AddressAbbreviator
Edi::AddressAbbreviator::MAX_LENGTH
Instance Attribute Summary
Attributes inherited from BaseEdiService
Instance Method Summary collapse
- #get_errors_from_feed_result(upload) ⇒ Object
- #instantiate_transporter(transporter, transporter_profile = nil) ⇒ Object
- #process(edi_communication_logs = nil) ⇒ Object
Methods inherited from BaseEdiService
#duplicate_po_already_notified?, #initialize, #mark_duplicate_po_as_notified, #report_order_creation_issues, #safe_process_edi_communication_log
Methods included from Edi::AddressAbbreviator
#abbreviate_street, #collect_street_originals, #record_address_abbreviation_notes
Methods inherited from BaseService
#initialize, #log_debug, #log_error, #log_info, #log_warning, #logger, #options, #tagged_logger
Constructor Details
This class inherits a constructor from Edi::BaseEdiService
Instance Method Details
#get_errors_from_feed_result(upload) ⇒ Object
81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 |
# File 'app/services/edi/amazon/feed_submission_result_processor.rb', line 81

# Extracts human-readable error messages from an Amazon feed-submission
# result document.
#
# Two document shapes are supported:
# * category 'feed_document_result_xml' — legacy XML processing report,
#   parsed with Nokogiri and read via XPath.
# * anything else — SP-API JSON listings-feed processing report
#   (see the schema links in the else branch).
#
# @param upload [Upload] stored result document; #category selects the
#   parser and #data holds the raw payload
# @return [Array<String>] one formatted message per reported error;
#   empty when the report shows no processed messages or no errors
def get_errors_from_feed_result(upload)
  errs = []
  # NOTE(review): the recovered listing read `upload.&.data` (invalid Ruby,
  # almost certainly an extraction artifact) — assuming safe navigation
  # `upload&.data`; confirm against the original file.
  if upload.category == 'feed_document_result_xml'
    xml = Nokogiri::XML(upload&.data)
    num_processed = xml.xpath('//AmazonEnvelope/Message/ProcessingReport/ProcessingSummary/MessagesProcessed').text.to_i
    num_errors = xml.xpath('//AmazonEnvelope/Message/ProcessingReport/ProcessingSummary/MessagesWithError').text.to_i
    if num_processed > 0 && num_errors > 0
      errs += xml.xpath('//AmazonEnvelope/Message/ProcessingReport/Result')
                 .select { |r| r.xpath('ResultCode').text == 'Error' }
                 .map { |r| "SKU: #{r.xpath('AdditionalInfo/SKU').text}, error code: #{r.xpath('ResultMessageCode').text}: #{r.xpath('ResultDescription').text}" }
    end
  else
    # see: https://github.com/amzn/selling-partner-api-models/blob/main/schemas/feeds/listings-feed-processing-report-schema-v2.json
    # see: https://github.com/amzn/selling-partner-api-models/blob/main/schemas/feeds/listings-feed-processing-report-schema-v2.example.json
    hsh = JSON.parse(upload&.data).with_indifferent_access
    # Hash#dig takes the full key path; the original's chained
    # `hsh&.dig(:summary)&.dig(:messagesProcessed)` was redundant
    # (`hsh` cannot be nil after with_indifferent_access).
    num_processed = hsh.dig(:summary, :messagesProcessed).to_i
    num_errors = hsh.dig(:summary, :messagesInvalid).to_i
    if num_processed > 0 && num_errors > 0
      # Guard: a report can omit :issues entirely; the original
      # `hsh&.dig(:issues).select { ... }` raised NoMethodError on nil.
      errs += (hsh.dig(:issues) || [])
              .select { |issue| issue.dig(:severity)&.upcase == 'ERROR' }
              .map { |issue| "Message: #{issue.dig(:messageId)}, error code: #{issue.dig(:code)}: #{issue.dig(:message)}" }
    end
  end
  errs
end
#instantiate_transporter(transporter, transporter_profile = nil) ⇒ Object
72 73 74 75 76 77 78 79 |
# File 'app/services/edi/amazon/feed_submission_result_processor.rb', line 72

# Builds the transport adapter used to talk to the Amazon Seller API.
#
# @param transporter [Symbol] transport identifier; only
#   :http_seller_api is recognized
# @param transporter_profile [Object, nil] profile handed through to the
#   connection for credential/endpoint selection
# @return [Transport::HttpSellerApiConnection] a fresh connection
# @raise [RuntimeError] when the transporter symbol is not recognized
def instantiate_transporter(transporter, transporter_profile = nil)
  raise "Unknown transporter: #{transporter}" unless transporter == :http_seller_api

  Transport::HttpSellerApiConnection.new({ profile: transporter_profile })
end
#process(edi_communication_logs = nil) ⇒ Object
5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 |
# File 'app/services/edi/amazon/feed_submission_result_processor.rb', line 5

# Polls Amazon for feed-submission results on EdiCommunicationLogs still
# in the 'processing' state, downloads the result document (decompressing
# GZIP payloads), stores it as an Upload, and transitions each log to
# complete!/error based on the reported outcome.
#
# NOTE(review): this listing was recovered from rendered documentation.
# Two tokens are extraction artifacts and are preserved as found —
# `"#{orchestrator.}/feeds/..."` / `"#{orchestrator.}/documents/..."`
# (a method name is missing after the dot, presumably an endpoint/base
# path accessor) and `gzip_upload.&.data` (presumably `gzip_upload&.data`).
# The `begin` block also has no visible `rescue`; confirm against the
# original file before relying on this text.
#
# @param edi_communication_logs [EdiCommunicationLog, Array<EdiCommunicationLog>, nil]
#   log(s) to process; when nil, defaults to every 'processing' log for
#   this orchestrator's partner that has a transaction_id
# @return [Object] the result of the .each iteration
def process(edi_communication_logs = nil)
  edi_communication_logs ||= EdiCommunicationLog
                             .where(state: 'processing')
                             .where(partner: orchestrator.partner)
                             .where.not(transaction_id: nil)
                             .order(:created_at)
  # Array(...) equivalent: accepts a single log or a collection.
  [edi_communication_logs].flatten.each do |ecl|
    logger.info "EdiCommunicationLog:#{ecl.id} - Retrieving API transaction result from #{orchestrator.partner} for transaction id: #{ecl.transaction_id}"
    begin
      transport = instantiate_transporter(orchestrator.transporter, orchestrator.transporter_profile)
      res = transport.send_data('', "#{orchestrator.}/feeds/#{ecl.transaction_id}", 'GET')
      # Record the raw HTTP exchange on the log for troubleshooting.
      ecl.update(notes: "HTTP CODE: #{res[:http_result]&.code}, HTTP BODY: #{res[:http_result]&.body}, Timestamp: #{Time.current.to_datetime.to_fs(:crm_default)}")
      logger.info "Result: HTTP CODE: #{res[:http_result]&.code}, HTTP BODY: #{res[:http_result]&.body}"
      if res[:success] && (data = res[:http_result]&.body.to_s).present?
        json_hash = JSON.parse(data).with_indifferent_access
        status = json_hash.dig('processingStatus')
        # Deadline for giving up, anchored to when the feed was transmitted.
        timeout_time = (ecl.transmit_datetime || Time.current) + orchestrator.failure_timeout_in_minutes.minutes
        if status == 'DONE'
          result_feed_document_id = json_hash.dig('resultFeedDocumentId')
          # Second round-trip: fetch the result-document descriptor.
          doc_res = transport.send_data('', "#{orchestrator.}/documents/#{result_feed_document_id}", 'GET')
          if doc_res[:success] && (data = doc_res[:http_result]&.body.to_s).present?
            json_hash = JSON.parse(data).with_indifferent_access
            # Example descriptor payload:
            # {"compressionAlgorithm"=>"GZIP",
            #  "feedDocumentId"=>"amzn1.tortuga.4.na.4e5353ab-eab8-421f-887c-6fa58cee4e41.TO8T33D96XVRM",
            #  "url"=>
            #   "https://tortuga-prod-na.s3-external-1.amazonaws.com/2a374b13-3293-4698-9271-cff909891acf.amzn1.tortuga.4.na.TO8T33D96XVRM?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20241106T001459Z&X-Amz-SignedHeaders=host&X-Amz-Expires=300&X-Amz-Credential=AKIA5U6MO6RACOWQRBEF%2F20241106%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Signature=a98b0c5fb811bc7a4dfe7925ef722fa187a1c8fd9157ef0cc435cab25bbdc642"}
            upload = nil
            unless json_hash[:compressionAlgorithm] == 'GZIP'
              # Uncompressed: store the document straight from the signed URL.
              if ecl.data_type == 'xml'
                upload = Upload.uploadify_from_url(file_name: "#{result_feed_document_id.split('.').last}.xml", url: json_hash[:url], category: 'feed_document_result_xml')
              else
                upload = Upload.uploadify_from_url(file_name: "#{result_feed_document_id.split('.').last}.json", url: json_hash[:url], category: 'feed_document_result_json')
              end
            else
              # TODO: decompression currently goes through
              # ActiveSupport::Gzip.decompress
              # (https://api.rubyonrails.org/classes/ActiveSupport/Gzip.html#method-c-decompress);
              # investigate whether the Down download config could handle
              # gzip directly instead of storing an intermediate upload.
              gzip_upload = Upload.uploadify_from_url(file_name: "#{result_feed_document_id.split('.').last}.gzip", url: json_hash[:url], category: 'feed_document_result_gzip')
              res_data = ActiveSupport::Gzip.decompress(gzip_upload.&.data)
              if ecl.data_type == 'xml'
                upload = Upload.uploadify_from_data(file_name: "#{result_feed_document_id.split('.').last}.xml", data: res_data, category: 'feed_document_result_xml')
              else
                upload = Upload.uploadify_from_data(file_name: "#{result_feed_document_id.split('.').last}.json", data: res_data, category: 'feed_document_result_json')
              end
            end
            if upload
              ecl.uploads << upload
              # The stored document is inspected for per-message errors;
              # any error flips the log to the error state.
              if (errors = get_errors_from_feed_result(upload)).empty?
                ecl.complete!
              else
                ecl.update(notes: [ecl.notes, "There were #{errors.size} errors reported in feed result document: #{errors.join(', ')}"].compact.join(' | '))
                ecl.error
              end
            end
          else
            ecl.update(notes: [ecl.notes, "Could not get feed result document #{result_feed_document_id}"].compact.join(' | '))
            ecl.error
          end
        elsif %w[CANCELLED FATAL].include?(status) || Time.current > timeout_time
          # Notes are only amended on timeout; CANCELLED/FATAL keep the
          # HTTP exchange notes written above.
          ecl.update(notes: [ecl.notes, "Timed out after #{orchestrator.failure_timeout_in_minutes} minutes"].compact.join(' | ')) if Time.current > timeout_time
          ecl.error!
        end
      else
        # NOTE(review): bare `ecl.error` here and above, vs `ecl.error!`
        # in the timeout branch — possibly a dropped `!` in extraction;
        # confirm intent against the original file.
        ecl.error
      end
    end
  end
end