add localization for discussion summary prototype

refs ADV-49
flag = discussion_summary

Test plan:
1. Enable the discussion_summary feature flag.
2. Summarize a course discussion and verify that the summary follows your language preference.
3. Change your language preference.
4. Reopen the discussion and confirm that a new summary is generated in the newly selected language.

Change-Id: Ie727d1848570e80b817beb94b4b3051301cf7ef3
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/349148
Tested-by: Service Cloud Jenkins <svc.cloudjenkins@instructure.com>
Reviewed-by: Omar Soto-Fortuño <omar.soto@instructure.com>
QA-Review: Richard Zana <rzana@instructure.com>
Product-Review: Richard Zana <rzana@instructure.com>
This commit is contained in:

parent e07bcf1184
commit e4e654c154
@@ -25,6 +25,7 @@ class DiscussionTopicsApiController < ApplicationController
   include Api::V1::DiscussionTopics
   include Api::V1::User
   include SubmittableHelper
+  include LocaleSelection

   before_action :require_context_and_read_access
   before_action :require_topic
@@ -108,8 +109,11 @@ class DiscussionTopicsApiController < ApplicationController
       return render(json: { error: t("Sorry, we are unable to summarize this discussion at this time. Please try again later.") }, status: :unprocessable_entity)
     end

-    dynamic_content = DiscussionTopic::PromptPresenter.new(@topic).dynamic_content_for_summary
-    dynamic_content_hash = Digest::SHA256.hexdigest(dynamic_content)
+    dynamic_content = {
+      content: DiscussionTopic::PromptPresenter.new(@topic).content_for_summary,
+      locale: available_locales[@current_user.locale || I18n.default_locale.to_s]
+    }
+    dynamic_content_hash = Digest::SHA256.hexdigest(dynamic_content.to_json)

     forced = params[:force] == "true"
     unless @topic.summary_enabled
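Note on the hunk above: hashing { content, locale } rather than the content alone is what invalidates cached summaries when a user switches languages, since a summary generated under one language preference never matches a request made under another. A minimal standalone sketch of the idea (the helper name is illustrative, not part of this change):

require "digest"
require "json"

# Build the cache key the way the controller now does: digest the JSON
# of the prompt content together with the resolved locale label.
def summary_cache_key(content, locale)
  Digest::SHA256.hexdigest({ content: content, locale: locale }.to_json)
end

en_key = summary_cache_key("students discussed the midterm", "English (United States)")
es_key = summary_cache_key("students discussed the midterm", "Español")
puts en_key == es_key # => false; changing only the locale forces a fresh summary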
@@ -129,10 +133,17 @@ class DiscussionTopicsApiController < ApplicationController

     response = nil
     begin
+      prompt, options = llm_config.generate_prompt_and_options(
+        substitutions: {
+          CONTENT: dynamic_content[:content],
+          LOCALE: dynamic_content[:locale]
+        }
+      )
+
       time = Benchmark.measure do
         response = InstLLMHelper.client(llm_config.model_id).chat(
-          [{ role: "user", content: llm_config.generate_prompt(dynamic_content:) }],
-          **llm_config.options.symbolize_keys
+          [{ role: "user", content: prompt }],
+          **options.symbolize_keys
         )
       end
     rescue => e
@@ -49,7 +49,7 @@ class DiscussionTopic
     # '''
     # I'm sorry to hear that. Could you please provide more details?
     # '''
-    def dynamic_content_for_summary
+    def content_for_summary
       anonymized_user_ids = {}
       instructor_count = 0
       student_count = 0
@@ -28,10 +28,32 @@ class LLMConfig
     validate!
   end

-  def generate_prompt(dynamic_content:)
-    return dynamic_content if template.nil?
+  def generate_prompt_and_options(substitutions:)
+    new_template = template.dup

-    template.gsub("<PLACEHOLDER>", dynamic_content)
+    substitutions.each do |placeholder_prefix, sub_value|
+      new_template.gsub!("<#{placeholder_prefix}_PLACEHOLDER>", sub_value.to_s)
+    end
+
+    if (remaining_placeholder = new_template.match(/<\w+_PLACEHOLDER>/))
+      raise ArgumentError, "Template still contains placeholder: #{remaining_placeholder[0]}"
+    end
+
+    new_options = options.deep_dup
+
+    new_options.each do |key, value|
+      substitutions.each do |placeholder_prefix, sub_value|
+        new_options[key] = value.gsub("<#{placeholder_prefix}_PLACEHOLDER>", sub_value.to_s) if value.is_a?(String)
+      end
+    end
+
+    new_options.each_value do |value|
+      if value.is_a?(String) && (remaining_placeholder = value.match(/<\w+_PLACEHOLDER>/))
+        raise ArgumentError, "Options still contain placeholder: #{remaining_placeholder[0]}"
+      end
+    end
+
+    [new_template, new_options]
   end

   private
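The new method fails fast if any "<NAME_PLACEHOLDER>" token survives substitution, so a missing key raises instead of silently sending a literal placeholder to the model. A miniature, runnable re-implementation of the template path, for illustration only (the real method additionally deep-dups and scans the options hash):

# Mirrors the template half of generate_prompt_and_options above.
def fill_template(template, substitutions)
  result = template.dup
  substitutions.each do |prefix, value|
    result.gsub!("<#{prefix}_PLACEHOLDER>", value.to_s)
  end
  if (leftover = result.match(/<\w+_PLACEHOLDER>/))
    raise ArgumentError, "Template still contains placeholder: #{leftover[0]}"
  end
  result
end

puts fill_template('Respond in language "<LOCALE_PLACEHOLDER>".', { LOCALE: "Español" })
# => Respond in language "Español".

fill_template("<CONTENT_PLACEHOLDER>", {})
# raises ArgumentError: Template still contains placeholder: <CONTENT_PLACEHOLDER>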
@@ -39,7 +61,7 @@ class LLMConfig
   def validate!
     raise ArgumentError, "Name must be a string" unless @name.is_a?(String)
     raise ArgumentError, "Model ID must be a string" unless @model_id.is_a?(String)
-    raise ArgumentError, "Template must be a string or nil" unless @template.nil? || @template.is_a?(String)
+    raise ArgumentError, "Template must be a string" unless @template.is_a?(String)
     raise ArgumentError, "Options must be a hash" unless @options.is_a?(Hash)
   end
 end
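One consequence of the tightened validation: a config can no longer omit its template, so every prompt now flows through placeholder substitution. A quick usage illustration (assumes the LLMConfig class above is loaded; the spec changes below confirm that template previously defaulted to an accepted nil):

LLMConfig.new(name: "V2_A", model_id: "model")
# raises ArgumentError, "Template must be a string"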
@@ -15,9 +15,9 @@
 # You should have received a copy of the GNU Affero General Public License along
 # with this program. If not, see <http://www.gnu.org/licenses/>.

-name: "V1_A"
+name: "V2_A"
 model_id: "anthropic.claude-3-haiku-20240307-v1:0"
-template: null
+template: <CONTENT_PLACEHOLDER>
 options:
   max_tokens: 500
   system: |
@@ -37,6 +37,7 @@ options:
     - Student Questions and Inquiries: "Summarizes technical or course-related issues." - Quickly resolving common student hurdles.
     - Other Needs: "Covers miscellaneous but important student inputs." - Ensuring no student feedback is overlooked, no matter how unique.
     5. Skip the preamble and provide only the summary in one paragraph. The response should be normal flowing text, e.g. no list of points.
-    6. Compile the responses from the students' perspective.
-    7. Include all specifics from the student input, that are relevant to the output.
-    8. Do not mention anything about the instructions in the response.
+    6. Respond in language "<LOCALE_PLACEHOLDER>".
+    7. Compile the responses from the students' perspective.
+    8. Include all specifics from the student input, that are relevant to the output.
+    9. Do not mention anything about the instructions in the response.
@@ -172,6 +172,7 @@ describe DiscussionTopicsApiController do
   context "summary" do
     before do
       course_with_teacher(active_course: true)
+      @teacher.update!(locale: "en")
       @topic = @course.discussion_topics.create!(title: "discussion", summary_enabled: true)
       user_session(@teacher)
@@ -183,8 +184,9 @@ describe DiscussionTopicsApiController do
       before do
         expect(LLMConfigs).to receive(:config_for).and_return(
           LLMConfig.new(
-            name: "V0_A",
-            model_id: "model"
+            name: "V1_A",
+            model_id: "model",
+            template: "<CONTENT_PLACEHOLDER>"
           )
         )
       end
@@ -193,8 +195,11 @@ describe DiscussionTopicsApiController do
        before do
          @summary = @topic.summaries.create!(
            summary: "summary",
-            dynamic_content_hash: "fcfa768511b6a7a5881d017dae0d5aaa8dd8615937c1b89ea680c4dbb9af2a51",
-            llm_config_version: "V0_A"
+            dynamic_content_hash: Digest::SHA256.hexdigest({
+              content: DiscussionTopic::PromptPresenter.new(@topic).content_for_summary,
+              locale: "English (United States)"
+            }.to_json),
+            llm_config_version: "V1_A"
          )
        end
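A subtlety worth noting in this stub: Ruby's Hash#to_json serializes keys in insertion order, so the spec must build the payload in the same key order the controller uses (content first, then locale) or the digests will differ even for identical data. A runnable illustration:

require "digest"
require "json"

# Same data, different insertion order, different JSON, different digest.
a = Digest::SHA256.hexdigest({ content: "c", locale: "en" }.to_json)
b = Digest::SHA256.hexdigest({ locale: "en", content: "c" }.to_json)
puts a == b # => false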
@@ -230,6 +235,29 @@ describe DiscussionTopicsApiController do
          expect(@topic.reload.summary_enabled).to be_truthy
          expect(response.parsed_body["id"]).not_to eq(@summary.id)
        end
+
+        it "returns a new summary if locale has changed" do
+          expect_any_instance_of(DiscussionTopic).to receive(:user_can_summarize?).and_return(true)
+          @teacher.update!(locale: "es")
+
+          expect(@inst_llm).to receive(:chat).and_return(
+            InstLLM::Response::ChatResponse.new(
+              model: "model",
+              message: { role: :assistant, content: "summary_1" },
+              stop_reason: "stop_reason",
+              usage: {
+                input_tokens: 10,
+                output_tokens: 20,
+              }
+            )
+          )
+
+          get "summary", params: { topic_id: @topic.id, course_id: @course.id, user_id: @teacher.id }, format: "json"
+
+          expect(response).to be_successful
+          expect(@topic.reload.summary_enabled).to be_truthy
+          expect(response.parsed_body["id"]).not_to eq(@summary.id)
+        end
      end

      it "returns a new summary" do
@@ -300,7 +328,7 @@ describe DiscussionTopicsApiController do
        @summary = @topic.summaries.create!(
          summary: "summary",
          dynamic_content_hash: "fcfa768511b6a7a5881d017dae0d5aaa8dd8615937c1b89ea680c4dbb9af2a51",
-          llm_config_version: "V0_A"
+          llm_config_version: "V1_A"
        )
        user_session(@teacher)
      end
@@ -47,7 +47,7 @@ describe DiscussionTopic::PromptPresenter do
     end
   end

-  describe "#dynamic_content_for_summary" do
+  describe "#content_for_summary" do
     it "generates correct discussion summary" do
       expected_output = <<~TEXT
         DISCUSSION BY instructor_1 WITH TITLE:
@@ -76,7 +76,7 @@ describe DiscussionTopic::PromptPresenter do
         '''
       TEXT

-      expect(@presenter.dynamic_content_for_summary.strip).to eq(expected_output.strip)
+      expect(@presenter.content_for_summary.strip).to eq(expected_output.strip)
     end
   end
 end
@@ -22,48 +22,55 @@ describe LLMConfig do
   describe "#initialize" do
     context "with valid attributes" do
       it "initializes successfully" do
-        config = LLMConfig.new(name: "TestConfig", model_id: "model123")
+        config = LLMConfig.new(name: "TestConfig", model_id: "model123", template: "template")
         expect(config.name).to eq("TestConfig")
         expect(config.model_id).to eq("model123")
-        expect(config.template).to be_nil
+        expect(config.template).to eq("template")
         expect(config.options).to eq({})
       end
     end

     context "with invalid attributes" do
       it "raises an error if name is not a string" do
-        expect { LLMConfig.new(name: nil, model_id: "model123") }.to raise_error(ArgumentError, "Name must be a string")
+        expect { LLMConfig.new(name: nil, model_id: "model123", template: "template") }.to raise_error(ArgumentError, "Name must be a string")
       end

       it "raises an error if model_id is not a string" do
-        expect { LLMConfig.new(name: "TestConfig", model_id: nil) }.to raise_error(ArgumentError, "Model ID must be a string")
+        expect { LLMConfig.new(name: "TestConfig", model_id: nil, template: "template") }.to raise_error(ArgumentError, "Model ID must be a string")
       end

       it "raises an error if template is neither string nor nil" do
-        expect { LLMConfig.new(name: "TestConfig", model_id: "model123", template: 123) }.to raise_error(ArgumentError, "Template must be a string or nil")
+        expect { LLMConfig.new(name: "TestConfig", model_id: "model123", template: 123) }.to raise_error(ArgumentError, "Template must be a string")
       end

       it "raises an error if options is not a hash" do
-        expect { LLMConfig.new(name: "TestConfig", model_id: "model123", options: "invalid") }.to raise_error(ArgumentError, "Options must be a hash")
+        expect { LLMConfig.new(name: "TestConfig", model_id: "model123", template: "template", options: "invalid") }.to raise_error(ArgumentError, "Options must be a hash")
       end
     end
   end

-  describe "#generate_prompt" do
-    let(:config) { LLMConfig.new(name: "TestConfig", model_id: "model123", template: "Hello <PLACEHOLDER>") }
+  describe "#generate_prompt_and_options" do
+    let(:config) { LLMConfig.new(name: "TestConfig", model_id: "model123", template: "Hello <TEMPLATE_PLACEHOLDER>", options: { max_tokens: 100, system: "Hello <OPTIONS_PLACEHOLDER>" }) }

-    context "when template is not nil" do
-      it "replaces the placeholder with dynamic content" do
-        expect(config.generate_prompt(dynamic_content: "World")).to eq("Hello World")
-      end
+    it "replaces the placeholders with content in both prompt and options" do
+      prompt, options = config.generate_prompt_and_options(substitutions: { TEMPLATE: "Template", OPTIONS: "Options" })
+      expect(prompt).to eq("Hello Template")
+      expect(options).to eq({ max_tokens: 100, system: "Hello Options" })
+    end

-    context "when template is nil" do
-      let(:config) { LLMConfig.new(name: "TestConfig", model_id: "model123") }
+    it "raises an error if prompt still contains placeholders" do
+      expect { config.generate_prompt_and_options(substitutions: {}) }.to raise_error(ArgumentError, "Template still contains placeholder: <TEMPLATE_PLACEHOLDER>")
+    end

-      it "returns the dynamic content" do
-        expect(config.generate_prompt(dynamic_content: "Hello")).to eq("Hello")
-      end
+    it "raises an error if options still contain placeholders" do
+      expect { config.generate_prompt_and_options(substitutions: { TEMPLATE: "Template" }) }.to raise_error(ArgumentError, "Options still contain placeholder: <OPTIONS_PLACEHOLDER>")
+    end
+
+    it "returns the prompt and options when no placeholders are present" do
+      config = LLMConfig.new(name: "TestConfig", model_id: "model123", template: "Hello", options: { max_tokens: 100, system: "Hello" })
+      prompt, options = config.generate_prompt_and_options(substitutions: {})
+      expect(prompt).to eq("Hello")
+      expect(options).to eq({ max_tokens: 100, system: "Hello" })
+    end
   end
 end