From 7f072d051590ddd2303e3339e35a61069cb17511 Mon Sep 17 00:00:00 2001
From: Miguel Michelson Martinez
Date: Fri, 3 Mar 2023 22:56:56 -0300
Subject: [PATCH] conversations & bots added (#913)

* conversations & bots added

* cache strategy
---
 app/services/message_apis/open_ai/api.rb       | 102 +++++++---------
 .../message_apis/open_ai/presenter.rb          | 111 ++++++++++++++++--
 ...214128_add_data_to_conversation_channel.rb  |   5 +
 db/schema.rb                                   |   3 +-
 lib/app_packages_catalog.rb                    |  11 +-
 spec/controllers/api/v1/hooks/openai_spec.rb   |  15 ++-
 spec/rails_helper.rb                           |   2 +
 7 files changed, 176 insertions(+), 73 deletions(-)
 create mode 100644 db/migrate/20230303214128_add_data_to_conversation_channel.rb

diff --git a/app/services/message_apis/open_ai/api.rb b/app/services/message_apis/open_ai/api.rb
index 0b94cc6ea..489468e43 100644
--- a/app/services/message_apis/open_ai/api.rb
+++ b/app/services/message_apis/open_ai/api.rb
@@ -4,7 +4,7 @@ module MessageApis::OpenAi
   class Api < MessageApis::BasePackage
     include MessageApis::Helpers
 
-    BASE_URL = "https://api.openai.com/v1"
+    BASE_URL = "https://api.openai.com"
     PROVIDER = "openai"
 
     attr_accessor :url, :api_secret, :conn
@@ -12,7 +12,7 @@ class Api < MessageApis::BasePackage
     def initialize(config:)
       @api_secret = config["api_secret"]
 
-      @url = "#{BASE_URL}/engines/davinci/completions"
+      @url = "#{BASE_URL}/v1/chat/completions"
 
       @conn = Faraday.new(
         request: {
@@ -71,52 +71,39 @@ def locked_for_channel?(conversation, part)
     end
 
     def notify_message(conversation:, part:, channel:)
-      return if conversation.conversation_channels.blank?
+      gpt_channel = conversation.conversation_channels.find_by(provider: "open_ai")
+      return if gpt_channel.blank?
       return unless part.messageable.is_a?(ConversationPartContent)
       return true if locked_for_channel?(conversation, part)
       return if part.conversation_part_channel_sources.where(provider: "open_ai").any?
 
-      Rails.logger.info "ENTRA #{part.id}"
+      Rails.logger.info "NOTIFY MESSAGE OPEN AI #{part.id}"
 
      unless part.authorable.is_a?(Agent)
-        #####
-        ## cache this
-        messages = conversation.messages.where(
-          messageable_type: "ConversationPartContent"
-        ).where.not(id: part.id).order("id")
-        #####
-        # conversation.conversation_channels.find_by(provider_channel_id: channel)
-        # cache this thing:
-        previous = messages.map do |m|
-          {
-            text: m.message.text_from_serialized,
-            from: m.authorable_type
-          }
-        end
-        previous = previous.map do |item|
-          "#{item[:from] == 'Agent' ? "\nAI:" : "\nHuman:"}#{item[:text]}"
-        end.join("\n")
-
-        start_log = "'''The following is a conversation with an AI assistant. The assistant is helpful, creative, clever, and very friendly.
-Human: Hello, who are you?
-AI: I am an AI created by OpenAI. How can I help you today?
-#{previous}
-Human:'''"
+        previous = previous_messages(conversation, part)
+
        parsed_content = part&.message&.parsed_content
        human_input = parsed_content["blocks"]
        human_input = human_input&.map do |o|
          o["text"]
        end&.join(" ")
 
-        prompt = "#{start_log}\nHuman: #{human_input}\nAI:"
+        messages = previous << { role: "user", content: human_input }
+
+        Rails.cache.write("/conversation/#{conversation.key}/openai", messages)
 
-        Rails.logger.info "PROMPT: #{prompt}"
-        data = prompt_settings(prompt)
+        Rails.logger.info "PROMPT: #{messages}"
 
-        gpt_result = get_gpt_response(data)
+        gpt_result = get_gpt_response(gpt_channel.data["prompt"], messages, part.authorable.id.to_s)
+
+        Rails.logger.info(gpt_result)
+        text = begin
+          gpt_result["choices"].first["message"]["content"]
+        rescue StandardError
+          nil
+        end
 
-        text = gpt_result[:text]
        return if text.nil?
 
        blocks = {
@@ -135,22 +122,20 @@ def notify_message(conversation:, part:, channel:)
      end
    end
 
-    def prompt_settings(prompt)
-      {
-        prompt: prompt,
-        stop: ["\n", "\nHuman:", "\nAI:"],
-        temperature: 0.9,
-        top_p: 1,
-        frequency_penalty: 0,
-        presence_penalty: 0.6,
-        best_of: 1,
-        max_tokens: 150
-        # frequency_penalty: 0
-        # length: 150
-        # presence_penalty: 0.6
-        # temperature: 0.9
-        # top_p: 1
-      }
+    def previous_messages(conversation, part)
+      Rails.cache.fetch("/conversation/#{conversation.key}/openai", expires_in: 1.hour) do
+        messages = conversation.messages.where(
+          messageable_type: "ConversationPartContent"
+        ).where.not(id: part.id)
+                .order("id")
+
+        messages.map do |m|
+          {
+            "content" => m.message.text_from_serialized,
+            "role" => m.authorable_type == "Agent" ? "assistant" : "user"
+          }
+        end
+      end
    end
 
    def add_message(conversation:, from:, text:, blocks:, message_id:)
@@ -178,18 +163,21 @@ def post_data(url, data)
      end
    end
 
-    def get_gpt_response(data)
-      response = post_data(@url, data)
+    def get_gpt_response(prompt, data, user_key)
+      system_prompt = { role: "system", content: prompt }
+      messages = []
+      messages << system_prompt
+      messages << data
 
-      return nil unless response.success?
+      message_data = {
+        model: "gpt-3.5-turbo",
+        messages: messages.flatten,
+        user: user_key
+      }
 
-      if (json_body = JSON.parse(response.body)) && json_body
-        json_body
-        Rails.logger.info "GOT RESPONSE FROM GPT-3: #{json_body}"
-      end
+      Rails.logger.debug message_data
 
-      text = json_body["choices"].map { |o| o["text"] }.join(" ")
-      { text: text, id: json_body["id"] }
+      JSON.parse(post_data(@url, message_data).body)
    end
 
    def process_event(params, package)
diff --git a/app/services/message_apis/open_ai/presenter.rb b/app/services/message_apis/open_ai/presenter.rb
index deb5fdf68..390dbb5f8 100644
--- a/app/services/message_apis/open_ai/presenter.rb
+++ b/app/services/message_apis/open_ai/presenter.rb
@@ -4,7 +4,7 @@ class Presenter
    # Sent when an app has been inserted into a conversation, message or
    # the home screen, so that you can render the app.
    def self.initialize_hook(kind:, ctx:)
-      record = PromptRecord.new(prompt: ctx.dig(:values, :prompt))
+      record = MessageApis::OpenAi::PromptRecord.new(prompt: ctx.dig(:values, :prompt))
      {
        kind: kind,
        # ctx: ctx,
@@ -21,9 +21,13 @@ def self.submit_hook(kind:, ctx:)
      message = ConversationPart.find_by(key: ctx["message_key"])
      conversation = message.conversation
+
+      prompt_field = message.message.blocks["schema"].find { |o| o["id"] == "prompt-value" }
+
      conversation.conversation_channels.create({
                                                  provider: "open_ai",
-                                                  provider_channel_id: conversation.id
+                                                  provider_channel_id: conversation.id,
+                                                  data: { prompt: prompt_field["value"] }
                                                })
 
      return {
@@ -57,16 +61,12 @@ def self.configure_hook(kind:, ctx:)
      label = "epa"
      app = ctx[:package].app
 
-      default_prompt = <<~HEREDOC
-        The following is a conversation with an AI assistant. The assistant is helpful, creative, clever, and very friendly.
-        Human: Hello, who are you?
-        AI: I am an AI created by OpenAI. How can I help you today?
-      HEREDOC
+      default_prompt = ctx[:package].settings["main_prompt"]
 
      value = ctx.dig(:values, :prompt)
      value = default_prompt if ctx.dig(:field, :action, :type) != "submit"
 
-      record = PromptRecord.new(prompt: value)
+      record = MessageApis::OpenAi::PromptRecord.new(prompt: value)
      schema = record.default_schema if ctx.dig(:field, :action, :type) != "submit"
@@ -104,4 +104,99 @@ def self.sheet_hook(params)
      []
    end
  end
+
+  class PromptRecord
+    include ActiveModel::Model
+    include ActiveModel::Validations
+    attr_accessor :prompt
+
+    def initialize(prompt:)
+      self.prompt = prompt
+    end
+
+    def default_schema
+      [
+        { type: "text", text: "Open AI ChatGPT", style: "header" },
+        { type: "text", text: "Configure your bot", style: "muted" },
+        { type: "textarea",
+          id: "prompt",
+          name: "prompt",
+          label: "System prompt",
+          placeholder: "Enter prompt here...",
+          value: send(:prompt),
+          errors: errors[:prompt]&.uniq&.join(", ") },
+        {
+          type: "button",
+          id: "add-prompt",
+          variant: "outlined",
+          size: "small",
+          label: "save prompt",
+          action: {
+            type: "submit"
+          }
+        }
+      ]
+    end
+
+    def error_schema
+      [
+        { type: "text", text: "This is a header", style: "header" },
+        { type: "text", text: "This is a header", style: "muted" },
+        { type: "textarea",
+          id: "textarea-3",
+          name: "textarea-3",
+          label: "Error",
+          placeholder: "Enter text here...",
+          value: send(:prompt),
+          errors: errors[:prompt]&.uniq&.join(", ") },
+        {
+          type: "button",
+          id: "add-prompt",
+          variant: "outlined",
+          size: "small",
+          label: "save prompt",
+          action: {
+            type: "submit"
+          }
+        }
+      ]
+    end
+
+    def schema
+      [
+        { type: "text", text: "This is a header", style: "header" },
+        { type: "text", text: "This is a header", style: "muted" }
+      ]
+    end
+
+    def success_schema
+      [
+        { type: "text", text: "Open AI conversation", style: "header" },
+        { type: "text", text: "you are going to start a conversation with GPT-3 bot", style: "muted" },
+        { type: "hidden", value: send(:prompt), id: "prompt-value" },
+        {
+          type: "button",
+          id: "prompt-ok",
+          variant: "success",
+          align: "center",
+          size: "medium",
+          label: "Start chat",
+          action: {
+            type: "submit"
+          }
+        },
+        {
+          type: "button",
+          id: "prompt-no",
+          variant: "link",
+          size: "medium",
+          align: "center",
+          label: "Cancel",
+          action: {
+            type: "submit"
+          }
+        }
+      ]
+    end
+  end
 end
diff --git a/db/migrate/20230303214128_add_data_to_conversation_channel.rb b/db/migrate/20230303214128_add_data_to_conversation_channel.rb
new file mode 100644
index 000000000..cc829b315
--- /dev/null
+++ b/db/migrate/20230303214128_add_data_to_conversation_channel.rb
@@ -0,0 +1,5 @@
+class AddDataToConversationChannel < ActiveRecord::Migration[7.0]
+  def change
+    add_column :conversation_channels, :data, :jsonb
+  end
+end
diff --git a/db/schema.rb b/db/schema.rb
index d596d8c27..0fc6fe7db 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -10,7 +10,7 @@
 #
 # It's strongly recommended that you check this file into your version control system.
 
-ActiveRecord::Schema[7.0].define(version: 2022_10_05_144614) do
+ActiveRecord::Schema[7.0].define(version: 2023_03_03_214128) do
   # These are extensions that must be enabled in order to support this database
   enable_extension "plpgsql"
 
@@ -457,6 +457,7 @@
     t.bigint "conversation_id", null: false
     t.datetime "created_at", null: false
     t.datetime "updated_at", null: false
+    t.jsonb "data"
     t.index ["conversation_id"], name: "index_conversation_channels_on_conversation_id"
     t.index ["provider"], name: "index_conversation_channels_on_provider"
     t.index ["provider_channel_id"], name: "index_conversation_channels_on_provider_channel_id"
diff --git a/lib/app_packages_catalog.rb b/lib/app_packages_catalog.rb
index b2b05a076..1a2b07e45 100644
--- a/lib/app_packages_catalog.rb
+++ b/lib/app_packages_catalog.rb
@@ -177,7 +177,7 @@ def self.packages(dev_packages: false)
        description: "Open AI GPT-3 tasks",
        icon: "https://logo.clearbit.com/openai.com",
        state: "enabled",
-        capability_list: ["conversations"],
+        capability_list: %w[conversations bots],
        definitions: [
          {
            name: "api_secret",
@@ -185,6 +185,15 @@ def self.packages(dev_packages: false)
            type: "string",
            required: true,
            grid: { xs: "w-full", sm: "w-full" }
+          },
+          {
+            name: "main_prompt",
+            label: "Main prompt",
+            type: "textarea",
+            hint: "You can change this later, on demand",
+            placeholder: "You are the Chaskiq chatbot, you are friendly and playful.",
+            required: true,
+            grid: { xs: "w-full", sm: "w-full" }
          }
        ]
      },
diff --git a/spec/controllers/api/v1/hooks/openai_spec.rb b/spec/controllers/api/v1/hooks/openai_spec.rb
index aea0eb569..fc8d961da 100644
--- a/spec/controllers/api/v1/hooks/openai_spec.rb
+++ b/spec/controllers/api/v1/hooks/openai_spec.rb
@@ -84,8 +84,11 @@
    # )
 
    @pkg = app.app_package_integrations.create(
-      api_secret: "sk-xxx",
-      app_package: app_package
+      app_package: app_package,
+      settings: {
+        main_prompt: "system prompt",
+        api_secret: "sk-xxx"
+      }
    )
  end
 
@@ -93,7 +96,8 @@
  it "receive message" do
    conversation.conversation_channels.create({
                                                provider: "open_ai",
-                                                provider_channel_id: conversation.id
+                                                provider_channel_id: conversation.id,
+                                                data: { prompt: "foofof" }
                                              })
 
    channel = conversation.conversation_channels.find_by(provider: "open_ai")
@@ -111,7 +115,7 @@
    ).to receive(
      :get_gpt_response
    ).and_return(
-      { text: "yay", id: 1 }
+      { "id" => "xx", "choices" => [{ "message" => { "content" => "bla bla bla" } }] }
    )
 
    perform_enqueued_jobs do
@@ -125,8 +129,7 @@
    end
 
    expect(conversation.messages.last.authorable).to be_a(Agent)
-
-    expect(conversation.messages.last.messageable.html_content).to be == "yay"
+    expect(conversation.messages.last.messageable.html_content).to be == "bla bla bla"
  end
 end
diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb
index 6b3a295d1..76ef57ca4 100644
--- a/spec/rails_helper.rb
+++ b/spec/rails_helper.rb
@@ -40,6 +40,8 @@
 
 # DatabaseCleaner.strategy = :truncation
 
+# include ActiveSupport::Testing::TaggedLogging
+
 RSpec.configure do |config|
   # https://github.com/rspec/rspec-rails/issues/2410
   config.include ActiveSupport::Testing::Assertions
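
Note (illustrative sketch, not part of the patch): with this change, MessageApis::OpenAi::Api#get_gpt_response posts a standard Chat Completions payload to "#{BASE_URL}/v1/chat/completions" — a system message built from the prompt stored in conversation_channels.data, then the cached history from previous_messages, then the newest user turn. The following standalone Ruby mirrors that payload shape; the prompt, history and human_input values are invented for the example and are not taken from the codebase.

require "json"

# Hypothetical inputs: the per-channel system prompt, the cached history,
# and the text of the incoming visitor message.
prompt      = "You are the Chaskiq chatbot, you are friendly and playful."
history     = [
  { role: "user", content: "Hello" },
  { role: "assistant", content: "Hi! How can I help you today?" }
]
human_input = "What are your opening hours?"

# Mirrors the message_data hash assembled in get_gpt_response.
message_data = {
  model: "gpt-3.5-turbo",
  messages: [{ role: "system", content: prompt }, history, { role: "user", content: human_input }].flatten,
  user: "agent-1" # opaque end-user identifier forwarded to OpenAI
}

puts JSON.pretty_generate(message_data)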
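
Note (also an illustrative sketch): the "cache strategy" mentioned in the commit message is the pair of calls keyed on "/conversation/#{conversation.key}/openai" — Rails.cache.fetch seeds the history from the database once, and Rails.cache.write appends each new turn so later messages skip the query. Below is a rough standalone version of that flow using an in-memory cache store instead of Rails.cache; the conversation key and message literals are made up for the example.

require "active_support"
require "active_support/cache"
require "active_support/core_ext/integer/time"

cache = ActiveSupport::Cache::MemoryStore.new
conversation_key = "abc123" # hypothetical conversation key
cache_key = "/conversation/#{conversation_key}/openai"

# First call: nothing cached yet, so the block (the messages query in api.rb) runs once.
history = cache.fetch(cache_key, expires_in: 1.hour) do
  [] # in the real code this is built from conversation.messages
end

# Each new turn is appended and written back, so the next notify_message call
# reads the history straight from the cache instead of re-querying the database.
history << { role: "user", content: "Hello" }
cache.write(cache_key, history)

puts cache.read(cache_key).inspect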