Merge pull request #64 from alphagov/duplicate-openai-directories
Tidy up pipeline namespaces
jackbot authored Feb 19, 2025
2 parents e5e1dea + 63f3afc commit 35eb933
Showing 7 changed files with 185 additions and 187 deletions.
2 changes: 1 addition & 1 deletion lib/answer_composition/composer.rb
@@ -46,7 +46,7 @@ def compose_answer
     PipelineRunner.call(question:, pipeline: [
       Pipeline::JailbreakGuardrails,
       Pipeline::QuestionRephraser.new(llm_provider: :openai),
-      Pipeline::QuestionRouter,
+      Pipeline::OpenAI::QuestionRouter,
       Pipeline::QuestionRoutingGuardrails,
       Pipeline::SearchResultFetcher,
       Pipeline::OpenAI::StructuredAnswerComposer,
179 changes: 179 additions & 0 deletions lib/answer_composition/pipeline/openai/question_router.rb
@@ -0,0 +1,179 @@
module AnswerComposition::Pipeline::OpenAI
  class QuestionRouter
    OPENAI_MODEL = "gpt-4o-mini".freeze
    MAX_COMPLETION_TOKENS = 160

    def self.call(...) = new(...).call

    def initialize(context)
      @context = context
    end

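    # Classifies the question via a forced tool call, then records the
    # routing label and confidence score on the answer. Questions that are
    # not genuine RAG requests are answered immediately, either with the
    # LLM's own answer or a canned response for the label, and the pipeline
    # is aborted unless the LLM answer is used.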
    def call
      start_time = Clock.monotonic_time

      answer = context.answer
      answer.assign_llm_response("question_routing", openai_response_choice)

      if genuine_rag?
        answer.assign_attributes(
          question_routing_label:,
          question_routing_confidence_score: llm_classification_data["confidence"],
        )
      else
        answer.assign_attributes(
          message: use_llm_answer? ? llm_answer : Answer::CannedResponses.response_for_question_routing_label(question_routing_label),
          status: answer_status,
          question_routing_label:,
          question_routing_confidence_score: llm_classification_data["confidence"],
        )

        context.abort_pipeline unless use_llm_answer?
      end

      answer.assign_metrics(
        "question_routing", build_metrics(start_time)
      )
    end

    private

    attr_reader :context

    def label_config
      Rails.configuration.question_routing_labels[question_routing_label]
    end

    def use_llm_answer?
      return false if openai_token_limit_reached?

      label_config[:use_answer]
    end

    def answer_status
      label_config[:answer_status]
    end

    def openai_token_limit_reached?
      openai_response_choice["finish_reason"] == "length"
    end

    def llm_answer
      llm_classification_data["answer"]
    end

    def genuine_rag?
      question_routing_label == "genuine_rag"
    end

    def question_routing_label
      llm_classification_function["name"]
    end

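    # The model is forced to call exactly one classification function; that
    # function's name is the routing label, and its JSON-encoded arguments
    # carry the confidence score (plus an answer for some labels).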
    def llm_classification_function
      @llm_classification_function ||= openai_response_choice.dig(
        "message", "tool_calls", 0, "function"
      )
    end

    def openai_response_choice
      @openai_response_choice ||= openai_response.dig("choices", 0)
    end

    def raw_llm_classification_data
      llm_classification_function["arguments"]
    end

    def llm_classification_data
      JSON.parse(raw_llm_classification_data)
    end

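    # A single chat completion per question: temperature 0, one forced tool
    # call, and a completion-token cap keep the classification cheap and
    # deterministic.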
    def openai_response
      @openai_response ||= openai_client.chat(
        parameters: {
          model: OPENAI_MODEL,
          messages:,
          temperature: 0.0,
          tools:,
          tool_choice: "required",
          parallel_tool_calls: false,
          max_completion_tokens: MAX_COMPLETION_TOKENS,
        },
      )
    end

    def messages
      [
        { role: "system", content: config[:system_prompt] },
        { role: "user", content: context.question_message },
      ]
    end

    def config
      Rails.configuration.govuk_chat_private.llm_prompts.openai.question_routing
    end

    def openai_client
      @openai_client ||= OpenAIClient.build
    end

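    # Builds one OpenAI function definition per classification label from the
    # prompt config. Every function requires a confidence property; strict
    # mode and additionalProperties: false stop the model returning anything
    # beyond the declared arguments.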
    def tools
      config[:classifications].map do |classification|
        parameters = {
          type: "object",
          additionalProperties: false,
          properties: {
            confidence: config[:confidence_property],
          },
          required: %w[confidence],
        }

        if classification[:properties].present?
          parameters[:required].concat(classification[:required])
          parameters[:properties].merge!(classification[:properties])
        end

        {
          type: "function",
          function: {
            name: classification[:name],
            description: build_description(classification),
            strict: true,
            parameters:,
          },
        }
      end
    end

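    # Combines a label's description with its positive and negative examples,
    # rendered through the description templates in the prompt config.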
    def build_description(classification)
      description = [classification[:description].strip]

      examples = {
        positive_examples: Array(classification[:examples]),
        negative_examples: Array(classification[:negative_examples]),
      }

      examples.each do |key, value|
        next unless value.any?

        example_string = value.map { |str| "'#{str}'" }.join(", ")
        description << config["description_#{key}_template"].sub("{examples}", example_string).strip
      end

      description.compact.join(" ")
    end

    def error_message(error)
      "class: #{error.class} message: #{error.message}"
    end

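    # Duration and token usage (including cached prompt tokens) recorded
    # against the question_routing step.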
    def build_metrics(start_time)
      {
        duration: Clock.monotonic_time - start_time,
        llm_prompt_tokens: openai_response.dig("usage", "prompt_tokens"),
        llm_completion_tokens: openai_response.dig("usage", "completion_tokens"),
        llm_cached_tokens: openai_response.dig("usage", "prompt_tokens_details", "cached_tokens"),
      }
    end
  end
end
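
For illustration, this is the shape of a single entry that tools would emit. It is a sketch, not output from the real prompt config: the "greetings" label, its answer property, the example phrases, and the integer confidence scale are all invented here (the real classifications live in govuk_chat_private).

# Hypothetical config[:classifications] entry assumed:
#   name: "greetings", description: "The user is greeting the chatbot",
#   examples: ["hello", "good morning"],
#   properties: { answer: { type: "string" } }, required: ["answer"]
{
  type: "function",
  function: {
    name: "greetings",
    description: "The user is greeting the chatbot. Examples: 'hello', 'good morning'",
    strict: true,
    parameters: {
      type: "object",
      additionalProperties: false,
      properties: {
        confidence: { type: "integer", minimum: 1, maximum: 10 }, # assumed confidence_property
        answer: { type: "string" }, # merged in from the label's own properties
      },
      required: %w[confidence answer],
    },
  },
}
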
181 changes: 0 additions & 181 deletions lib/answer_composition/pipeline/question_router.rb

This file was deleted.

2 changes: 1 addition & 1 deletion spec/lib/answer_composition/composer_spec.rb
@@ -43,7 +43,7 @@
     expected_pipeline = [
       AnswerComposition::Pipeline::JailbreakGuardrails,
       AnswerComposition::Pipeline::QuestionRephraser.new(llm_provider: :openai),
-      AnswerComposition::Pipeline::QuestionRouter,
+      AnswerComposition::Pipeline::OpenAI::QuestionRouter,
       AnswerComposition::Pipeline::QuestionRoutingGuardrails,
       AnswerComposition::Pipeline::SearchResultFetcher,
       AnswerComposition::Pipeline::OpenAI::StructuredAnswerComposer,
@@ -1,4 +1,4 @@
-RSpec.describe AnswerComposition::Pipeline::OpenAI::QuestionRephraser do
+RSpec.describe AnswerComposition::Pipeline::OpenAI::QuestionRephraser do # rubocop:disable RSpec/SpecFilePathFormat
   let(:conversation) { create :conversation, :with_history }
   let(:question) { conversation.questions.strict_loading(false).last }
   let(:question_records) { conversation.questions.joins(:answer) }
@@ -1,4 +1,4 @@
-RSpec.describe AnswerComposition::Pipeline::QuestionRouter do
+RSpec.describe AnswerComposition::Pipeline::OpenAI::QuestionRouter do # rubocop:disable RSpec/SpecFilePathFormat
   let(:classification_attributes) do
     {
       name: "greetings",