Merge pull request #1333 from internetee/fix-ai-sorting
Fixed open_ai auction sorting
vohmar authored Oct 18, 2024
2 parents c2b51d2 + a490eba commit 3b82e36
Showing 9 changed files with 712 additions and 48 deletions.
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion .github/workflows/ruby.yml
@@ -16,7 +16,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-22.04]
ruby: [ '3.2', '3.2.2' ]
ruby: [ '3.2.2' ]
runs-on: ${{ matrix.os }}
continue-on-error: ${{ endsWith(matrix.ruby, 'head') || matrix.ruby == 'debug' }}
steps:
20 changes: 10 additions & 10 deletions Gemfile
@@ -38,7 +38,7 @@ gem 'rails', '>= 6.0.3.5'
gem 'rails-i18n'
gem 'recaptcha'
gem 'redis', '~> 5.0'
gem 'ruby-openai'
gem 'ruby-openai', '~> 7.3'
gem 'scenic'
gem 'simpleidn'
gem 'turbo-rails'
@@ -50,15 +50,6 @@ group :development, :test do
gem 'bullet'
gem 'byebug', platforms: %i[mri mingw x64_mingw]
gem 'pry'

# https://github.com/rubocop/rubocop-performance
gem 'rubocop'
gem 'rubocop-packaging'
gem 'rubocop-performance', require: false
gem 'rubocop-rspec'
gem 'rubocop-shopify'
gem 'ruby-lsp'
# gem "rubocop-thread_safety"
end

group :development do
@@ -72,6 +63,15 @@ group :development do
gem 'listen', '>= 3.0.5', '< 3.10'
gem 'ruby-lsp-rails'
gem 'web-console', '>= 3.3.0'

# https://github.com/rubocop/rubocop-performance
gem 'rubocop'
gem 'rubocop-packaging'
gem 'rubocop-performance', require: false
gem 'rubocop-rspec'
gem 'rubocop-shopify'
gem 'ruby-lsp'
# gem "rubocop-thread_safety"
end

group :test do
4 changes: 2 additions & 2 deletions Gemfile.lock
@@ -441,7 +441,7 @@ GEM
rails (>= 6.0)
ruby-lsp (>= 0.6.2, < 0.8.0)
sorbet-runtime (>= 0.5.9897)
ruby-openai (7.0.1)
ruby-openai (7.3.1)
event_stream_parser (>= 0.3.0, < 2.0.0)
faraday (>= 1)
faraday-multipart (>= 1)
@@ -590,7 +590,7 @@ DEPENDENCIES
rubocop-shopify
ruby-lsp
ruby-lsp-rails
ruby-openai
ruby-openai (~> 7.3)
scenic
simplecov
simpleidn
100 changes: 69 additions & 31 deletions app/jobs/active_auctions_ai_sorting_job.rb
@@ -1,11 +1,15 @@
# frozen_string_literal: true

class ActiveAuctionsAiSortingJob < ApplicationJob
retry_on StandardError, wait: 5.seconds, attempts: 3

def perform
DEFAULT_TEMPERATURE = 0.7

def perform(temperature = DEFAULT_TEMPERATURE)
return unless self.class.needs_to_run?

auctions_list = Auction.active_with_offers_count
ai_response = fetch_ai_response(auctions_list)
ai_response = fetch_ai_response(auctions_list, temperature)
process_ai_response(ai_response)
rescue StandardError, OpenAI::Error => e
handle_openai_error(e)
@@ -17,55 +21,81 @@ def self.needs_to_run?

private

def system_message
Setting.find_by(code: 'openai_domains_evaluation_prompt').retrieve
end

def model
Setting.find_by(code: 'openai_model').retrieve
end

def handle_openai_error(error)
Rails.logger.info "OpenAI API error: #{error.message}"
end

def process_ai_response(response)
Rails.logger.info "AI response received: #{response}"
parsed_response = JSON.parse(response)
ai_scores = parsed_response.map { |item| { id: item['id'], ai_score: item['ai_score'] } }
ai_scores = parsed_response['scores'].map { |item| { id: item['id'], ai_score: item['ai_score'] } }

update_auctions_with_ai_scores(ai_scores)
end

def fetch_ai_response(auctions_list)
def fetch_ai_response(auctions_list, temperature)
ai_client = OpenAI::Client.new
response = ai_client.chat(parameters: chat_parameters(auctions_list))
response = ai_client.chat(parameters: chat_parameters(auctions_list, temperature))

finish_reason = response.dig('choices', 0, 'finish_reason')
raise StandardError, 'Incomplete response' if finish_reason && finish_reason == 'length'

refusal = response.dig('choices', 0, 'message', 'refusal')
raise StandardError, refusal if refusal

content = response.dig('choices', 0, 'message', 'content')
raise StandardError, response.dig('error', 'message') || 'No response content' if content.nil?

ai_response = response.dig('choices', 0, 'message', 'content')
raise StandardError, response.dig('error', 'message') if ai_response.nil?
content
end

ai_response
def chat_parameters(auctions_list, temp)
{
model: openai_model,
response_format: {
type: 'json_schema',
json_schema: schema
},
messages: messages(auctions_list),
temperature: temp
}
end

def chat_parameters(auctions_list)
# rubocop:disable Metrics/MethodLength
def schema
{
model: model,
messages: [
{ role: 'system', content: system_message },
{ role: 'user', content: format(auctions_list) },
{ role: 'user', content: 'Please provide a detailed response in JSON format without any text and only the result.
Here is an example of how I expect the JSON output:
[
{
id:,
domain_name:,
ai_score:
name: 'ai_response',
schema: {
type: 'object',
properties: {
scores: {
type: 'array',
items: {
type: 'object',
properties: {
id: { type: 'number' },
domain_name: { type: 'string' },
ai_score: { type: 'number' }
},
required: %w[id domain_name ai_score],
additionalProperties: false
}
]' }
],
temperature: 0.7
}
},
required: ['scores'],
additionalProperties: false
},
strict: true
}
end
# rubocop:enable Metrics/MethodLength

def messages(auctions_list)
[
{ role: 'system', content: system_message },
{ role: 'user', content: format(auctions_list) }
]
end

def format(auctions_list)
auctions_list.map do |a|
@@ -93,4 +123,12 @@ def update_auctions_with_ai_scores(ai_scores)

ActiveRecord::Base.connection.execute(sql_query)
end

def system_message
Setting.find_by(code: 'openai_domains_evaluation_prompt').retrieve
end

def openai_model
Setting.find_by(code: 'openai_model').retrieve
end
end
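
For context, a minimal usage sketch of the reworked job; the console calls, the example id/domain values and the sample payload below are illustrative assumptions, not part of the diff:

ActiveAuctionsAiSortingJob.perform_later        # uses DEFAULT_TEMPERATURE (0.7)
ActiveAuctionsAiSortingJob.perform_later(0.2)   # explicit temperature for a more deterministic ranking

# With response_format type 'json_schema' and strict: true, the reply content is
# expected to be an object with a top-level "scores" array, e.g. (hypothetical):
#   {"scores":[{"id":42,"domain_name":"example.test","ai_score":87}]}
# which process_ai_response now reads via parsed_response['scores'] instead of
# parsing a bare JSON array as before.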
2 changes: 1 addition & 1 deletion config/initializers/openai.rb
@@ -3,5 +3,5 @@
config.organization_id = AuctionCenter::Application.config.customization.dig(:openai, :organization_id) # Optional
# config.uri_base = "https://oai.hconeai.com/" # Optional
config.request_timeout = 240 # Optional
config.log_errors = true
config.log_errors = Rails.env.production? ? false : true
end
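
The effect of the change is simply to keep OpenAI client error logging on outside production. A small illustrative sketch; the OpenAI.configuration reader is an assumption about ruby-openai's config object, not something shown in this diff:

# After this change the configured value resolves to:
#   development / test -> true
#   production         -> false
OpenAI.configuration.log_errors
# An equivalent, terser way to express the same setting (illustrative only):
# config.log_errors = !Rails.env.production?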