diff --git a/app/jobs/openai_request_job.rb b/app/jobs/openai_request_job.rb index 95bbb5261..3c5f5bbc1 100644 --- a/app/jobs/openai_request_job.rb +++ b/app/jobs/openai_request_job.rb @@ -10,7 +10,7 @@ def perform openai_request_id return unless instance = openai_request.instance - MatchingServices::Connect.new(instance: instance).perform do |on| + OpenaiServices::MatchingPerformer.new(instance: instance).perform do |on| on.success do |response| openai_request.update_columns( error: nil, diff --git a/app/models/openai_request.rb b/app/models/openai_request.rb index 27eda7567..a75f0c433 100644 --- a/app/models/openai_request.rb +++ b/app/models/openai_request.rb @@ -32,9 +32,9 @@ def formatted_response end def matching_response - @matching_response ||= MatchingServices::Response.new(response: JSON.parse(response)) + @matching_response ||= OpenaiServices::MatchingResponse.new(response: JSON.parse(response)) rescue - @matching_response ||= MatchingServices::Response.new(response: Hash.new) + @matching_response ||= OpenaiServices::MatchingResponse.new(response: Hash.new) end attr_accessor :forced_matching diff --git a/app/services/openai_services/basic_performer.rb b/app/services/openai_services/basic_performer.rb new file mode 100644 index 000000000..b4beb339d --- /dev/null +++ b/app/services/openai_services/basic_performer.rb @@ -0,0 +1,89 @@ +module OpenaiServices + class BasicPerformer + attr_reader :configuration, :client, :callback, :assistant_id, :instance + + class BasicPerformerCallback < Callback + end + + def initialize instance: + @callback = BasicPerformerCallback.new + + @configuration = get_configuration + + @client = OpenAI::Client.new(access_token: @configuration.api_key) + @assistant_id = @configuration.assistant_id + + @instance = instance + end + + def perform + yield callback if block_given? + + # create new thread + thread = client.threads.create + + # create instance message + message = client.messages.create(thread_id: thread['id'], parameters: user_message) + + # run the thread + run = client.runs.create(thread_id: thread['id'], parameters: { + assistant_id: assistant_id, + max_prompt_tokens: configuration.max_prompt_tokens, + max_completion_tokens: configuration.max_completion_tokens + }) + + # wait for completion + status = status_loop(thread['id'], run['id']) + + return callback.on_failure.try(:call, "Failure status #{status}") unless ['completed', 'requires_action'].include?(status) + + response = get_response_class.new(response: find_run_message(thread['id'], run['id'])) + + return callback.on_failure.try(:call, "Response not valid", response) unless response.valid? + + callback.on_success.try(:call, response) + rescue => e + callback.on_failure.try(:call, e.message, nil) + end + + def status_loop thread_id, run_id + status = nil + + while true do + response = client.runs.retrieve(id: run_id, thread_id: thread_id) + status = response['status'] + + break if ['completed'].include?(status) # success + break if ['requires_action'].include?(status) # success + break if ['cancelled', 'failed', 'expired'].include?(status) # error + break if ['incomplete'].include?(status) # ??? + + sleep 1 if ['queued', 'in_progress', 'cancelling'].include?(status) + end + + status + end + + def find_run_message thread_id, run_id + messages = client.messages.list(thread_id: thread_id) + messages['data'].find { |message| message['run_id'] == run_id && message['role'] == 'assistant' } + end + + private + + # OpenaiAssistant.find_by_version(?) 
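+ # must return the OpenaiAssistant record carrying api_key, assistant_id, prompt and the token limits
+ # (MatchingPerformer returns OpenaiAssistant.find_by_module_type(:matching))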
+ def get_configuration + raise NotImplementedError, "this method get_configuration has to be defined in your class" + end + + # format: { role: string, content: { type: "text", text: string }} + def user_message + raise NotImplementedError, "this method user_message has to be defined in your class" + end + + # example: MatchingResponse + def get_response_class + raise NotImplementedError, "this method get_response_class has to be defined in your class" + end + end +end diff --git a/app/services/openai_services/matching_performer.rb b/app/services/openai_services/matching_performer.rb new file mode 100644 index 000000000..a8dc2fca7 --- /dev/null +++ b/app/services/openai_services/matching_performer.rb @@ -0,0 +1,97 @@ +module OpenaiServices + class MatchingPerformer < BasicPerformer + attr_reader :user + + class MatcherCallback < Callback + end + + def initialize instance: + super(instance: instance) + + @user = instance.user + end + + def get_configuration + OpenaiAssistant.find_by_module_type(:matching) + end + + def user_message + { + role: "user", + content: [ + { type: "text", text: get_formatted_prompt }, + { type: "text", text: get_recommandations.to_json } + ] + } + end + + def get_response_class + MatchingResponse + end + + private + + def get_formatted_prompt + action_type = opposite_action_type = instance.class.name.camelize.downcase + + if instance.respond_to?(:action) && instance.action? + action_type = instance.contribution? ? 'contribution' : 'solicitation' + opposite_action_type = instance.contribution? ? 'solicitation' : 'contribution' + end + + @configuration.prompt + .gsub("{{action_type}}", action_type) + .gsub("{{opposite_action_type}}", opposite_action_type) + .gsub("{{name}}", instance.name) + .gsub("{{description}}", instance.description) + end + + def get_recommandations + { + recommandations: + get_contributions.map { |contribution| Openai::ContributionSerializer.new(contribution).as_json } + + get_solicitations.map { |solicitation| Openai::SolicitationSerializer.new(solicitation).as_json } + + get_outings.map { |outing| Openai::OutingSerializer.new(outing).as_json } + + get_pois.map { |poi| Openai::PoiSerializer.new(poi).as_json } + + get_resources.map { |resource| Openai::ResourceSerializer.new(resource).as_json } + } + end + + def get_contributions + return [] if instance.is_a?(Entourage) && instance.contribution? + + ContributionServices::Finder.new(user, Hash.new) + .find_all + .where("created_at > ?", @configuration.days_for_actions.days.ago) + .limit(100) + end + + def get_solicitations + return [] if instance.is_a?(Entourage) && instance.solicitation? 
+
+ SolicitationServices::Finder.new(user, Hash.new)
+ .find_all
+ .where("created_at > ?", @configuration.days_for_actions.days.ago)
+ .limit(100)
+ end
+
+ def get_outings
+ OutingsServices::Finder.new(user, Hash.new)
+ .find_all
+ .between(Time.zone.now, @configuration.days_for_outings.days.from_now)
+ .limit(100)
+ end
+
+ def get_pois
+ return [] if @configuration.poi_from_file
+
+ Poi.validated.around(instance.latitude, instance.longitude, user.travel_distance).limit(300)
+ end
+
+ def get_resources
+ return [] if @configuration.resource_from_file
+
+ Resource.where(status: :active)
+ end
+ end
+end
diff --git a/app/services/openai_services/matching_response.rb b/app/services/openai_services/matching_response.rb
new file mode 100644
index 000000000..052b512a9
--- /dev/null
+++ b/app/services/openai_services/matching_response.rb
@@ -0,0 +1,76 @@
+module OpenaiServices
+ # response example
+ # {"recommandations"=>
+ # [{
+ # "type"=>"resource",
+ # "id"=>"e8bWJqPHAcxY",
+ # "name"=>"Sophie : les portraits des bénévoles",
+ # "score"=>"0.96",
+ # "explanation"=>"Ce ressource présente des histoires de bénévoles et peut vous inspirer pour obtenir de l'aide."
+ # }]
+ # }
+
+ MatchingResponse = Struct.new(:response) do
+ TYPES = %w{contribution solicitation outing resource poi}
+
+ def initialize(response: nil)
+ @response = response
+ @parsed_response = parsed_response
+ end
+
+ def valid?
+ recommandations.any?
+ end
+
+ def parsed_response
+ return unless @response
+ return unless content = @response["content"]
+ return unless content.any? && first_content = content[0]
+ return unless first_content["type"] == "text"
+ return unless value = first_content["text"]["value"]&.gsub("\n", "")
+ return unless json = value[/\{.*\}/m]
+
+ JSON.parse(json)
+ end
+
+ def to_json
+ @response.to_json
+ end
+
+ def recommandations
+ return [] unless @parsed_response
+
+ @parsed_response["recommandations"] || []
+ end
+
+ def metadata
+ {
+ message_id: @response["id"],
+ assistant_id: @response["assistant_id"],
+ thread_id: @response["thread_id"],
+ run_id: @response["run_id"]
+ }
+ end
+
+ def best_recommandation
+ each_recommandation do |instance, score, explanation, index|
+ return {
+ instance: instance,
+ score: score,
+ explanation: explanation,
+ index: index,
+ }
+ end
+ end
+
+ def each_recommandation &block
+ recommandations.each_with_index do |recommandation, index|
+ next unless recommandation["id"]
+ next unless TYPES.include?(recommandation["type"])
+ next unless instance = recommandation["type"].classify.constantize.find_by_id(recommandation["id"])
+
+ yield(instance, recommandation["score"], recommandation["explanation"], index)
+ end
+ end
+ end
+end
diff --git a/app/views/admin/actions/index.html.erb b/app/views/admin/actions/index.html.erb
index 3fed18e38..df8955507 100644
--- a/app/views/admin/actions/index.html.erb
+++ b/app/views/admin/actions/index.html.erb
@@ -49,7 +49,8 @@
- <%= "#{matching.match&.name} : score #{matching.score} (#{matching.match_type} : #{matching.explanation})" %>
+ <%= matching.score %> <%= matching.match&.name %> (<%= matching.match_type %>)
+ <%= matching.explanation %>
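
For reference, a minimal sketch of the subclass contract that BasicPerformer#perform relies on, based on the template methods above. SummaryPerformer and the :summary module type are hypothetical placeholders, and the on.success/on.failure registration is assumed to follow the same Callback pattern used in OpenaiRequestJob; this is an illustration of the extension point, not part of the change.

module OpenaiServices
  # Hypothetical subclass for illustration: the three template methods below are
  # the only ones a new performer has to provide to BasicPerformer#perform.
  class SummaryPerformer < BasicPerformer
    def get_configuration
      # assumed to exist alongside the :matching record
      OpenaiAssistant.find_by_module_type(:summary)
    end

    def user_message
      {
        role: "user",
        content: [
          { type: "text", text: configuration.prompt },
          { type: "text", text: { name: instance.name, description: instance.description }.to_json }
        ]
      }
    end

    def get_response_class
      MatchingResponse # or a dedicated response class exposing valid? and the fields the caller reads
    end
  end
end

# Invocation mirrors OpenaiRequestJob:
#   OpenaiServices::SummaryPerformer.new(instance: entourage).perform do |on|
#     on.success { |response| ... }
#     on.failure { |error, response| ... }
#   end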