Class: ProductLine::ContentOptimizer

Inherits:
BaseService show all
Defined in:
app/services/product_line/content_optimizer.rb

Overview

Builds optimized content for a ProductLine using an LLM provider (Anthropic, OpenAI, or Gemini, inferred from the model name) via RubyLLM.
Returns a hash of proposed attributes for fields that are currently missing.

Instance Attribute Summary collapse

Instance Method Summary collapse

Methods inherited from BaseService

#log_debug, #log_error, #log_info, #log_warning, #logger, #options, #tagged_logger

Constructor Details

#initialize(options = {}) ⇒ ContentOptimizer

Returns a new instance of ContentOptimizer.



8
9
10
11
12
# File 'app/services/product_line/content_optimizer.rb', line 8

# Build a new optimizer.
#
# @param options [Hash] service options; supports :model to override the
#   LLM model identifier. All options are also forwarded to BaseService.
def initialize(options = {})
  super
  requested_model = options[:model]
  # Fall back to the app-wide default so the chosen model exists in the
  # LlmModel registry.
  @model = requested_model.presence || RubyLLM.config.default_model
end

Instance Attribute Details

#modelObject (readonly)

Returns the value of attribute model.



6
7
8
# File 'app/services/product_line/content_optimizer.rb', line 6

# @return [Object] the LLM model identifier resolved at construction time
def model = @model

Instance Method Details

#process(product_line) ⇒ Object

Generate suggestions for missing fields for a given product line.
Only generates content if description_html is present.
Returns a Hash with keys among:
:tag_line, :short_description, :seo_title, :seo_keywords, :seo_description, :features (Array), :public_name



18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
# File 'app/services/product_line/content_optimizer.rb', line 18

# Generate suggestions for a product line's missing content fields.
# Skips generation entirely unless description_html is present.
#
# @param product_line [ProductLine, nil] the record to optimize
# @return [Hash] proposed attributes, with keys among
#   :tag_line, :short_description, :seo_title, :seo_keywords,
#   :seo_description, :features (Array), :public_name; empty on
#   non-fatal failure or when nothing is missing
# @raise [RubyLLM::RateLimitError, RubyLLM::UnauthorizedError] re-raised so
#   callers can back off / surface auth problems
def process(product_line)
  return {} unless product_line&.description_html.present?

  pending_fields = missing_fields(product_line)
  return {} if pending_fields.empty?

  prompt_messages = build_messages(product_line, pending_fields)

  # Route to a provider based on the model-name prefix; Anthropic is the
  # fallback for unrecognized names.
  provider =
    case model.to_s
    when /^claude-/ then :anthropic
    when /^gpt-/    then :openai
    when /^gemini-/ then :gemini
    else :anthropic
    end

  chat = RubyLLM.chat(model: model, provider: provider, assume_model_exists: true)
  chat.with_temperature(0.7)

  # System messages become instructions; user messages are concatenated
  # into a single prompt.
  system_messages, remaining = prompt_messages.partition { |m| m[:role] == 'system' }
  system_messages.each { |msg| chat.with_instructions(msg[:content]) }
  combined_prompt = remaining.select { |m| m[:role] == 'user' }
                             .map { |m| m[:content] }
                             .join("\n")

  reply = chat.ask(combined_prompt)
  parse_response(reply&.content)
rescue RubyLLM::RateLimitError => e
  log_error("Rate limited for ProductLine ##{product_line&.id}: #{e.message}")
  raise
rescue RubyLLM::UnauthorizedError => e
  log_error("Auth failure for ProductLine ##{product_line&.id}: #{e.message}")
  raise
rescue RubyLLM::Error => e
  log_error("RubyLLM error (#{e.class.name}) for ProductLine ##{product_line&.id}: #{e.message}")
  {}
rescue StandardError => e
  log_error("Content optimization failed for ProductLine ##{product_line&.id}: #{e.message}")
  {}
end