From 8ff78026a015eba78f8dcda56fc00a07a60846f2 Mon Sep 17 00:00:00 2001 From: Benjamin Piouffle Date: Wed, 26 Jun 2024 16:21:49 +0200 Subject: [PATCH] feat: Use LLM to extract statements --- apps/cf/config/config.exs | 4 + apps/cf/lib/llms/statements_creator.ex | 219 ++++++++++++++++++ .../statements_extractor_system_prompt.eex | 52 +++++ .../statements_extractor_user_prompt.eex | 11 + apps/cf/mix.exs | 1 + apps/cf_graphql/lib/resolvers/videos.ex | 6 + apps/cf_graphql/lib/schema/schema.ex | 11 + apps/cf_jobs/config/config.exs | 4 +- config/releases.exs | 4 + 9 files changed, 310 insertions(+), 2 deletions(-) create mode 100644 apps/cf/lib/llms/statements_creator.ex create mode 100644 apps/cf/lib/llms/templates/statements_extractor_system_prompt.eex create mode 100644 apps/cf/lib/llms/templates/statements_extractor_user_prompt.eex diff --git a/apps/cf/config/config.exs b/apps/cf/config/config.exs index bed36274..35bfddea 100644 --- a/apps/cf/config/config.exs +++ b/apps/cf/config/config.exs @@ -42,3 +42,7 @@ config :algoliax, # Import environment specific config import_config "#{Mix.env()}.exs" + +config :openai, + http_options: [recv_timeout: 30_000], + api_url: "https://api.perplexity.ai" diff --git a/apps/cf/lib/llms/statements_creator.ex b/apps/cf/lib/llms/statements_creator.ex new file mode 100644 index 00000000..539001cb --- /dev/null +++ b/apps/cf/lib/llms/statements_creator.ex @@ -0,0 +1,219 @@ +defmodule CF.LLMs.StatementsCreator do + @moduledoc """ + Functions to create statements from a video that has captions using LLMs + """ + + import Ecto.Query + require EEx + require Logger + + @max_caption_length 1000 + + @model_lama_3_small %{ + name: "llama-3-sonar-small-32k-chat", + parameter_count: "8B", + context_length: 32768 + } + + @model_lama_3_large %{ + name: "llama-3-sonar-large-32k-chat", + parameter_count: "70B", + context_length: 32768 + } + + @model_mistral_7b %{ + name: "mistral-7b-instruct", + parameter_count: "8x7B", + context_length: 16384 + } + + 
# Load prompt messages templates + EEx.function_from_file( + :defp, + :generate_system_prompt, + Path.join(__DIR__, "templates/statements_extractor_system_prompt.eex") + ) + + EEx.function_from_file( + :defp, + :generate_user_prompt, + Path.join(__DIR__, "templates/statements_extractor_user_prompt.eex"), + [ + :video, + :captions + ] + ) + + @doc """ + Create statements from a video that has captions using LLMs + """ + def process_video!(video_id) do + DB.Schema.Video + |> join(:inner, [v], vc in DB.Schema.VideoCaption, on: v.id == vc.video_id) + |> where([v, vc], v.id == ^video_id) + |> order_by([v, vc], desc: vc.inserted_at) + |> limit(1) + |> select([v, vc], {v, vc}) + |> DB.Repo.one() + |> case do + nil -> + raise "Video or captions not found" + + {video, video_caption} -> + video_caption.parsed + |> chunk_captions() + |> Enum.map(fn captions -> + video + |> get_llm_suggested_statements(captions) + |> filter_known_statements(video) + |> create_statements_from_inputs(video) + |> broadcast_statements(video) + + Process.sleep(500) + end) + end + end + + @doc """ + Chunk captions everytime we reach the max caption length + """ + defp chunk_captions(captions) do + # TODO: Base on strings lengths + @max_caption_length + Enum.chunk_every(captions, 50) + end + + defp get_llm_suggested_statements(video, captions, retries \\ 0) do + unless Application.get_env(:openai, :api_key) do + raise "OpenAI API key not found" + end + + try do + headers = [ + {"Authorization", "Bearer #{Application.get_env(:openai, :api_key)}"}, + {"Content-Type", "application/json"}, + {"Accept", "application/json"} + ] + + system_prompt = generate_system_prompt() + user_prompt = generate_user_prompt(video, captions) + + body = + %{ + "model" => @model_lama_3_large[:name], + "max_tokens" => + @model_lama_3_large[:context_length] - + String.length(system_prompt) - String.length(user_prompt) - 500, + "stream" => false, + "messages" => [ + %{ + "role" => "system", + "content" => system_prompt + }, + 
%{ + "role" => "user", + "content" => user_prompt + } + ] + } + |> Jason.encode!() + + case HTTPoison.post("https://api.perplexity.ai/chat/completions", body, headers, + timeout: 30_000, + recv_timeout: 30_000 + ) do + {:ok, %HTTPoison.Response{status_code: 200, body: body}} -> + body + |> Jason.decode!() + |> Map.get("choices") + |> List.first() + |> get_in(["message", "content"]) + |> get_json_str_from_content!() + |> Jason.decode!() + |> Map.get("statements") + |> check_statements_input_format!() + + {:ok, %HTTPoison.Response{status_code: status_code, body: body}} -> + raise "Network error: #{status_code} - #{inspect(body)}" + + {:error, %HTTPoison.Error{reason: reason}} -> + raise inspect(reason) + end + rescue + error -> + if retries > 0 do + Logger.warn("Failed to get LLM suggested statements: #{inspect(error)}. Retrying...") + Process.sleep(1000) + get_llm_suggested_statements(video, captions, retries - 1) + else + Logger.error(inspect(error)) + reraise error, __STACKTRACE__ + end + end + end + + defp check_statements_input_format!(statements_inputs) do + for %{"text" => text, "time" => time} <- statements_inputs do + unless is_binary(text) and is_integer(time) do + raise "Invalid statement input format" + end + end + + statements_inputs + end + + # Remove statements when we already have a similar one at time/text + defp filter_known_statements(statements, video) do + existing_statements = + DB.Schema.Statement + |> where([s], s.video_id == ^video.id) + |> DB.Repo.all() + + Enum.reject(statements, fn %{"text" => text, "time" => time} -> + Enum.any?(existing_statements, fn s -> + s.time >= time - 5 and s.time <= time + 5 and String.jaro_distance(s.text, text) > 0.80 + end) + end) + end + + defp create_statements_from_inputs(statements_inputs, video) do + # TODO: Check if the statement doesn't already exist + # TODO: Record a reference to the caption that generated the statement + inserted_at = NaiveDateTime.utc_now() |> NaiveDateTime.truncate(:second) + + 
{_nb_statements, statements} =
+ +# Comment choisir les extraits à vérifier + +Pour être pertinente, une citation doit : +- être vérifiable grâce à l'exposition de faits +- faire mention d'une source ou d'un contenu que l'on peut vérifier +Et remplir au moins un des critères suivants : +- présenter des éléments incomplets ou approximatifs +- présenter un argument fallacieux, trompeur ou mensonger +- présenter des informations intéressantes à vérifier + +Ne méritent pas travail de vérification : +- les évidences comme "Le ciel est bleu !" +- les figures de style et l'humour (comme les hyperboles, les métaphores, etc) +- les erreurs mineures +- les opinions personnelles ("j'aime ça") + +# Exemple + +## Input + +```json +{ + "video": { + "title": "Thinkerview - La diagonale du vide en France" + }, + "captions": [ + { "start": 10, "text": "Cette mesure sociale a été un désastre de la pensée ça ne m'évoque que du dégoût elle n'a fait que créer une augmentation du chômage, c'est pour moi une pure folie" }, + { "start": 85, "text": "il y a d'autres zones en France qui sont très peuplées elle s'affiche ici et juste là et oui je sais effectivement je pense que je peux tenter une" }, + { "start": 89, "text": "reconversion à devenir présentateur météo" }, + { "start": 94, "text": "dans les zones que vous voyez ici on compte seulement 6,5% de la population française métropolitaine pourtant et bien ces espaces" }, + { "start": 102, "text": "représentent 42% du territoire national mais alors pourquoi la diagonale du vide comme" } + ] +} +``` + +## Output + +```json +{ + "statements": [ + { "time": 10, "text": "Cette mesure sociale [...] 
n'a fait que créer une augmentation du chômage" }, + { "time": 94, "text": "ici on compte seulement 6,5% de la population française métropolitaine" }, + { "time": 94, "text": "ces espaces représentent 42% du territoire national" } + ], +} +``` diff --git a/apps/cf/lib/llms/templates/statements_extractor_user_prompt.eex b/apps/cf/lib/llms/templates/statements_extractor_user_prompt.eex new file mode 100644 index 00000000..466c9848 --- /dev/null +++ b/apps/cf/lib/llms/templates/statements_extractor_user_prompt.eex @@ -0,0 +1,11 @@ +```json +{ + "video": { + "title": "<%= video.id %>" + }, + "captions": <%= captions |> Enum.map(fn caption -> %{ + "start": floor(caption["start"]), + "text": String.trim(caption["text"]) + } end) |> Jason.encode! %> +} +``` diff --git a/apps/cf/mix.exs b/apps/cf/mix.exs index 6407e5dc..9dc7b54a 100644 --- a/apps/cf/mix.exs +++ b/apps/cf/mix.exs @@ -58,6 +58,7 @@ defmodule CF.Mixfile do {:sweet_xml, "~> 0.6"}, {:burnex, "~> 3.1"}, {:yaml_elixir, "~> 2.9.0"}, + {:jason, "~> 1.4"}, # ---- Internal ---- {:db, in_umbrella: true}, diff --git a/apps/cf_graphql/lib/resolvers/videos.ex b/apps/cf_graphql/lib/resolvers/videos.ex index 907ab982..79888753 100644 --- a/apps/cf_graphql/lib/resolvers/videos.ex +++ b/apps/cf_graphql/lib/resolvers/videos.ex @@ -99,4 +99,10 @@ defmodule CF.Graphql.Resolvers.Videos do |> Repo.all() |> Enum.group_by(& &1.video_id) end + + def start_automatic_statements_extraction(_root, %{video_id: video_id}, _info) do + video = DB.Repo.get!(DB.Schema.Video, video_id) + CF.LLMs.StatementsCreator.process_video!(video.id) + {:ok, video} + end end diff --git a/apps/cf_graphql/lib/schema/schema.ex b/apps/cf_graphql/lib/schema/schema.ex index 5f48db40..2fc83478 100644 --- a/apps/cf_graphql/lib/schema/schema.ex +++ b/apps/cf_graphql/lib/schema/schema.ex @@ -89,5 +89,16 @@ defmodule CF.Graphql.Schema do resolve(&Resolvers.Notifications.update_subscription/3) end + + # startAutomaticStatementsExtraction + @desc "Use this to start the 
automatic statements extraction job. Requires elevated permissions." + field :start_automatic_statements_extraction, :video do + middleware(Middleware.RequireAuthentication) + # TODO: Require reputation + + arg(:video_id, non_null(:id)) + + resolve(&Resolvers.Videos.start_automatic_statements_extraction/3) + end end end diff --git a/apps/cf_jobs/config/config.exs b/apps/cf_jobs/config/config.exs index 76aada30..cb522d10 100644 --- a/apps/cf_jobs/config/config.exs +++ b/apps/cf_jobs/config/config.exs @@ -41,8 +41,8 @@ config :cf_jobs, CF.Jobs.Scheduler, ], # Captions download_captions: [ - # every 10 minutes - schedule: "*/10 * * * *", + # every minute + schedule: "*/1 * * * *", task: {CF.Jobs.DownloadCaptions, :update, []}, overlap: false ] diff --git a/config/releases.exs b/config/releases.exs index 933059e3..243f5e29 100644 --- a/config/releases.exs +++ b/config/releases.exs @@ -101,6 +101,10 @@ config :cf, CF.Mailer, domain: load_secret.("mailgun_domain"), api_key: load_secret.("mailgun_api_key") +config :openai, + api_key: load_secret.("openai_api_key"), + api_url: load_secret.("openai_api_url", "https://api.perplexity.ai") + # ---- [APP CONFIG] :cf_rest_api ---- cors_allow_all? = load_bool.({"cors_allow_all", "false"})