Class: DataImport

Inherits:
ApplicationRecord show all
Defined in:
app/models/data_import.rb

Overview

== Schema Information

Table name: data_imports
Database name: primary

id :integer not null, primary key
activity_description :string(255)
activity_notes :text
activity_schedule_offset_days :integer
complete :boolean
default_affiliation :string
default_country_iso3 :string(3)
disable_address_correction :boolean default(FALSE)
do_not_auto_assign :boolean default(FALSE)
file_name :string
file_uid :string
first_row_is_header :boolean default(TRUE), not null
initial_state :string(255)
model_version :integer default(2), not null
name :string(255)
note :text
reception_type :string(255)
state :string
created_at :datetime
updated_at :datetime
activity_assigned_resource_id :integer
activity_type_id :integer
creator_id :integer
default_buying_group_id :integer
default_catalog_id :integer
default_primary_sales_rep_id :integer
default_profile_id :integer
reuse_data_import_id :integer
source_id :integer
tier2_program_pricing_id :integer
updater_id :integer

Indexes

data_imports_reuse_data_import_id_idx (reuse_data_import_id)

Foreign Keys

fk_rails_... (reuse_data_import_id => data_imports.id) ON DELETE => nullify

Defined Under Namespace

Classes: BaseParser, CsvParser, XlsxParser

Has many collapse

Belongs to collapse

Class Method Summary collapse

Instance Method Summary collapse

Methods inherited from ApplicationRecord

ransackable_associations, ransackable_attributes, ransackable_scopes, ransortable_attributes, #to_relation

Methods included from Models::EventPublishable

#publish_event

Class Method Details

.options_for_selectObject



83
84
85
# File 'app/models/data_import.rb', line 83

# Option pairs for a select box: up to 50 [name, id] pairs, most recently
# touched imports (updated_at, falling back to created_at) first.
def self.options_for_select
  recency = Arel.sql('COALESCE(updated_at,created_at) DESC')
  all.order(recency).limit(50).pluck(:name, :id)
end

Instance Method Details

#activity_resourceEmployee

Returns:

See Also:



60
# File 'app/models/data_import.rb', line 60

# Optional Employee association used as the activity resource.
belongs_to :activity_resource, class_name: 'Employee', optional: true

#activity_typeActivityType



59
# File 'app/models/data_import.rb', line 59

# Optional ActivityType association.
belongs_to :activity_type, optional: true

#can_delete?Boolean

An import can only be deleted while none of its rows have been processed.

Returns:

  • (Boolean)


166
167
168
# File 'app/models/data_import.rb', line 166

# An import may be deleted only while none of its rows have moved past
# import_state 0 (i.e. nothing has been processed yet).
def can_delete?
  data_import_rows.where.not(import_state: 0).none?
end

#data_import_cellsActiveRecord::Relation<DataImportCell>

Returns:

See Also:



51
# File 'app/models/data_import.rb', line 51

# Cells reached through this import's columns.
has_many :data_import_cells, through: :data_import_columns

#data_import_columnsActiveRecord::Relation<DataImportColumn>

Returns:

See Also:



50
# File 'app/models/data_import.rb', line 50

# Column mappings for this import; removed in bulk (delete_all skips callbacks).
has_many :data_import_columns, inverse_of: :data_import, dependent: :delete_all

#data_import_rowsActiveRecord::Relation<DataImportRow>

Returns:

See Also:



52
# File 'app/models/data_import.rb', line 52

# Imported rows for this import; removed in bulk (delete_all skips callbacks).
has_many :data_import_rows, inverse_of: :data_import, dependent: :delete_all

#deep_dupObject



70
71
72
73
74
75
# File 'app/models/data_import.rb', line 70

# Clones this import together with its column definitions, excluding the
# attributes that must not carry over to a copy (the uploaded file
# reference, the reuse link and the workflow state).
def deep_dup
  skipped_attributes = %i[file_uid file_name reuse_data_import_id state]
  deep_clone(include: :data_import_columns, except: skipped_attributes)
end

#default_buying_groupBuyingGroup



58
# File 'app/models/data_import.rb', line 58

# Optional default BuyingGroup for this import.
belongs_to :default_buying_group, class_name: 'BuyingGroup', optional: true

#default_catalogCatalog

Returns:

See Also:



57
# File 'app/models/data_import.rb', line 57

# Optional default Catalog for this import.
belongs_to :default_catalog, class_name: 'Catalog', optional: true

#default_profileProfile

Returns:

See Also:



56
# File 'app/models/data_import.rb', line 56

# Optional default Profile for this import.
belongs_to :default_profile, class_name: 'Profile', optional: true

#notes_data_setObject



125
126
127
# File 'app/models/data_import.rb', line 125

# The DataDictionarySet whose destination is 'notes', or nil when absent.
def notes_data_set
  DataDictionarySet.where(destination: 'notes').take
end

#parserObject



129
130
131
# File 'app/models/data_import.rb', line 129

# Memoized file parser. BaseParser.instantiate picks the concrete parser
# (CSV vs XLSX) from the stored file's path and name.
def parser
  unless @parser
    @parser = DataImport::BaseParser.instantiate(file.path, file.name,
                                                 first_row_is_header: first_row_is_header)
  end
  @parser
end

#populate_cellsObject



133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
# File 'app/models/data_import.rb', line 133

# Splits the uploaded file into DataImportRow/DataImportCell records and
# bulk-inserts the cells. Yields progress hashes
# ({ position:, total:, message: }) when a block is given.
#
# Fixes: removed the dead `columns = {}` assignment that was immediately
# overwritten by setup_header_row, and moved the assignment out of the
# `if` condition for clarity.
#
# @return [Integer] the number of data rows processed
def populate_cells
  cells = []
  # Map of column index => data import column id (columns are created on demand)
  columns = setup_header_row(parser.column_names)
  rows_processed = 0
  total_ops = parser.size + 1 # +1 accounts for the final bulk-insert step
  # Transaction globbing, makes inserts faster and all in or all fail
  DataImportRow.transaction do
    parser.each_with_index do |row, row_index|
      # The header row was already consumed by setup_header_row
      next if row_index.zero? && first_row_is_header

      Rails.logger.info "Processing row #{row_index}"
      new_row = setup_value_row(row, row_index, columns)
      if new_row
        cells += new_row
        rows_processed += 1
      end
      if block_given?
        yield({ position: rows_processed, total: total_ops, message: "Splitting row #{rows_processed} into cells" })
      end
    end
    Rails.logger.info " ** Starting Inserts of #{cells.size} cells"
    if block_given?
      yield({ position: total_ops, total: total_ops, message: "Inserting #{cells.size} cells in bulk (might take a while)" })
    end
    # Deliberate lazy require: activerecord-import is only needed for this bulk insert
    require 'activerecord-import/base'
    require 'activerecord-import/active_record/adapters/postgresql_adapter'
    DataImportCell.import cells
    Rails.logger.info "Populate cells completed, total cells #{cells.size}, total rows: #{rows_processed}"
  end
  rows_processed
end

#process_import(row_ids = []) ⇒ Object



170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
# File 'app/models/data_import.rb', line 170

# Processes queued import rows, optionally restricted to +row_ids+.
# Rows already matched to a customer are simply marked complete. Batches
# larger than 10 rows are handed to DataImportWorker asynchronously;
# smaller batches are imported inline.
#
# Fixes: dropped the unused local `result` that captured dir.import.
#
# @param row_ids [Array<Integer>] optional subset of row ids to process
# @return [Hash] { requested:, queued:, processed:, mode: }
def process_import(row_ids = [])
  res = { requested: 0, queued: 0, processed: 0, mode: nil }
  rows = data_import_rows.queued.order(:row_index)
  rows = rows.where(id: row_ids) if row_ids.present?
  res[:requested] = rows.size
  # Inline processing only for small batches; otherwise queue background jobs
  res[:mode] = res[:requested] > 10 ? :queued : :direct
  rows.find_each do |dir|
    if dir.customer
      # Already resolved to a customer; nothing left to import
      dir.complete!
    elsif res[:mode] == :queued
      res[:queued] += 1
      DataImportWorker.perform_async(dir.id)
    elsif res[:mode] == :direct
      res[:processed] += 1
      dir.import
    end
  end
  res
end

#resetObject



87
88
89
90
91
92
# File 'app/models/data_import.rb', line 87

# Wipes all rows and column mappings for this import so the file can be
# re-parsed from scratch. Uses delete_all, so no per-record callbacks run.
def reset
  Rails.logger.info ' ** Purging and deleting rows and columns'
  [data_import_rows, data_import_columns].each(&:delete_all)
  Rails.logger.info ' ** Purge complete'
end

#setup_header_row(row) ⇒ Object



94
95
96
97
98
99
100
101
102
103
104
105
106
# File 'app/models/data_import.rb', line 94

# Ensures a DataImportColumn exists for every header cell and returns a
# lookup of column index => data import column id. New columns are created
# with the 'notes' data dictionary set as their default mapping.
def setup_header_row(row)
  dictionary_set_id = notes_data_set.id
  row.each_with_index.with_object({}) do |(column_name, index), mapping|
    column = data_import_columns
             .where(column_index: index)
             .first_or_create(name: column_name,
                              data_dictionary_set_id: dictionary_set_id)
    Rails.logger.info " ** Mapping column #{index} #{column.name} to data import column #{column.id}"
    mapping[index] = column.id
  end
end

#setup_value_row(row, row_index, columns) ⇒ Object



108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
# File 'app/models/data_import.rb', line 108

# Persists a DataImportRow for +row_index+ and builds (without saving) a
# DataImportCell for every non-blank value in +row+, mapped through the
# +columns+ index => column-id lookup.
#
# @return [Array<DataImportCell>] unsaved cells for later bulk insert
def setup_value_row(row, row_index, columns)
  import_row = data_import_rows.create! row_index: row_index
  Rails.logger.info " ** Row created id #{import_row.id} for row index #{row_index}"
  row.each_with_index.with_object([]) do |(cell_value, column_index), cells|
    next if cell_value.blank?

    cells << DataImportCell.new(data_import_column_id: columns[column_index],
                                data_import_row_id: import_row.id,
                                content: cell_value)
  end
end

#tier2_program_pricingCoupon::Tier2ProgramPricing



54
# File 'app/models/data_import.rb', line 54

# Optional Coupon::Tier2ProgramPricing association.
belongs_to :tier2_program_pricing, class_name: 'Coupon::Tier2ProgramPricing', optional: true

#validate_fileObject



190
191
192
193
194
# File 'app/models/data_import.rb', line 190

# Validation hook: runs the parser's own checks against a newly attached,
# fully stored file and records each resulting message on the model.
#
# Fixes: `self.errors += parser.validate` relied on an `errors=` writer
# that ActiveModel does not define (and ActiveModel::Errors has no `+`),
# so that line raised NoMethodError when validation messages existed.
# Messages are now recorded via errors.add.
def validate_file
  return unless file_changed? && file && file.stored?

  # parser.validate is assumed to return an array of message strings —
  # TODO confirm against DataImport::BaseParser
  parser.validate.each { |message| errors.add(:base, message) }
end