From 6091e6628a1e83c057e70356348150234b06963d Mon Sep 17 00:00:00 2001
From: Andrew Heard
Date: Fri, 22 Mar 2024 15:19:00 -0400
Subject: [PATCH] Update test JSON payloads with Vertex AI output

---
 FirebaseVertexAI/Tests/Unit/ChatTests.swift   |  2 +-
 .../streaming-success-basic-reply-long.txt    | 20 +++----
 .../streaming-success-basic-reply-short.txt   |  3 +-
 .../unary-success-basic-reply-long.json       | 56 +++++++++----------
 .../unary-success-basic-reply-short.json      | 54 +++++++++---------
 .../Tests/Unit/GenerativeModelTests.swift     | 26 +++++----
 6 files changed, 75 insertions(+), 86 deletions(-)

diff --git a/FirebaseVertexAI/Tests/Unit/ChatTests.swift b/FirebaseVertexAI/Tests/Unit/ChatTests.swift
index 046cce0e8bd..5a7868fbe7a 100644
--- a/FirebaseVertexAI/Tests/Unit/ChatTests.swift
+++ b/FirebaseVertexAI/Tests/Unit/ChatTests.swift
@@ -66,7 +66,7 @@ final class ChatTests: XCTestCase {
     XCTAssertEqual(chat.history.count, 2)
     XCTAssertEqual(chat.history[0].parts[0].text, input)
 
-    let finalText = "1 2 3 4 5 6 7 8 9 10"
+    let finalText = "1 2 3 4 5 6 7 8"
     let assembledExpectation = ModelContent(role: "model", parts: finalText)
     XCTAssertEqual(chat.history[0].parts[0].text, input)
     XCTAssertEqual(chat.history[1], assembledExpectation)
diff --git a/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/streaming-success-basic-reply-long.txt b/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/streaming-success-basic-reply-long.txt
index bca95140490..218cae0b985 100644
--- a/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/streaming-success-basic-reply-long.txt
+++ b/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/streaming-success-basic-reply-long.txt
@@ -1,19 +1,15 @@
-data: {"candidates": [{"content": {"parts": [{"text": "1 "}]},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}],"promptFeedback": {"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}}
+data: {"candidates": [{"content": {"role": "model","parts": [{"text": "1 "}]}}]}
 
-data: {"candidates": [{"content": {"parts": [{"text": "2 "}]},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}]}
+data: {"candidates": [{"content": {"role": "model","parts": [{"text": "2 "}]},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.0394904,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.04468087},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.034553625,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.03890198},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.09401018,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.025809621},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.036562506,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.047691282}]}]}
 
-data: {"candidates": [{"content": {"parts": [{"text": "3 "}]},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}]}
+data: {"candidates": [{"content": {"role": "model","parts": [{"text": "3 "}]},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.03507868,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.045183755},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.027742893,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.043528143},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.08803312,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.026105914},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.06681233,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.053899158}]}]}
 
-data: {"candidates": [{"content": {"parts": [{"text": "4 "}]},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}]}
+data: {"candidates": [{"content": {"role": "model","parts": [{"text": "4 "}]},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.037750278,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.05089372},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.040087357,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.05888469},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.071202725,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.034100424},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.07613248,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.051749535}]}]}
 
-data: {"candidates": [{"content": {"parts": [{"text": "5 "}]},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}]}
+data: {"candidates": [{"content": {"role": "model","parts": [{"text": "5 "}]},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.04672496,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.059210256},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.04977345,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.05623635},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.083890386,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.03825006},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.08359067,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.05975658}]}]}
 
-data: {"candidates": [{"content": {"parts": [{"text": "6 "}]},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}]}
+data: {"candidates": [{"content": {"role": "model","parts": [{"text": "6 "}]},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.07779744,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.06052939},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.041930523,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.056756895},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.12787028,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.05350215},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.09203286,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.048676573}]}]}
 
-data: {"candidates": [{"content": {"parts": [{"text": "7 "}]},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}]}
+data: {"candidates": [{"content": {"role": "model","parts": [{"text": "7 "}]},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.071202725,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.05291181},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.031439852,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.04509957},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.11417085,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.04922211},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.09451043,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.052716404}]}]}
 
-data: {"candidates": [{"content": {"parts": [{"text": "8 "}]},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}]}
-
-data: {"candidates": [{"content": {"parts": [{"text": "9 "}]},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}]}
-
-data: {"candidates": [{"content": {"parts": [{"text": "10"}]},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}]}
+data: {"candidates": [{"content": {"role": "model","parts": [{"text": "8"}]},"finishReason": "STOP","safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.06221698,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.045777276},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.03085051,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.04560694},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.0992954,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.040769264},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.100701615,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.061424047}]}],"usageMetadata": {"promptTokenCount": 6,"candidatesTokenCount": 326,"totalTokenCount": 332}}
diff --git a/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/streaming-success-basic-reply-short.txt b/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/streaming-success-basic-reply-short.txt
index a7f5476954e..78f569ddecf 100644
--- a/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/streaming-success-basic-reply-short.txt
+++ b/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/streaming-success-basic-reply-short.txt
@@ -1,2 +1 @@
-data: {"candidates": [{"content": {"parts": [{"text": "Cheyenne"}]},"finishReason": "STOP","index": 0,"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}],"promptFeedback": {"safetyRatings": [{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"}]}}
-
+data: {"candidates": [{"content": {"role": "model","parts": [{"text": "Mountain View, California"}]},"finishReason": "STOP","safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.02854415,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.052424565},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.24926445,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.0996453},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.087096825,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.043123372},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.14402841,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.086169556}]}],"usageMetadata": {"promptTokenCount": 6,"candidatesTokenCount": 4,"totalTokenCount": 10}}
diff --git a/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/unary-success-basic-reply-long.json b/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/unary-success-basic-reply-long.json
index 59b84de92fe..325d172b894 100644
--- a/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/unary-success-basic-reply-long.json
+++ b/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/unary-success-basic-reply-long.json
@@ -2,53 +2,49 @@
   "candidates": [
     {
       "content": {
+        "role": "model",
         "parts": [
           {
-            "text": "You can ask me a wide range of questions on various topics. Here are some examples:\n\n1. **General Knowledge:**\n - What is the capital of France?\n - Who painted the Mona Lisa?\n - What is the largest ocean in the world?\n\n2. **Science and Technology:**\n - How does a computer work?\n - What is the difference between a virus and a bacteria?\n - What are the latest advancements in artificial intelligence?\n\n3. **History and Culture:**\n - Who was the first president of the United States?\n - What is the significance of the Great Wall of China?\n - What are some of the most famous works of Shakespeare?\n\n4. **Current Events:**\n - What is the latest news about the COVID-19 pandemic?\n - Who is the current president of Ukraine?\n - What are the major issues being discussed in the upcoming election?\n\n5. **Personal Questions:**\n - What are your hobbies?\n - What is your favorite book or movie?\n - What are your thoughts on the future of technology?\n\n6. **Fun and Games:**\n - Can you tell me a joke?\n - What is the answer to this riddle: \"I have keys but no locks. I have space but no room. You can enter, but can't go outside.\" (Answer: a keyboard)\n - Let's play a game of 20 questions.\n\n7. **Hypothetical Questions:**\n - What would you do if you won the lottery?\n - What would happen if time travel were possible?\n - What is the meaning of life?\n\n8. **Philosophical Questions:**\n - What is the nature of reality?\n - Does free will exist?\n - What is the difference between right and wrong?\n\n9. **Creative Questions:**\n - Write a poem about a sunset.\n - Design a logo for a new company.\n - Compose a song about your favorite season.\n\n10. **Technical Questions:**\n - How can I improve the performance of my computer?\n - What is the best way to troubleshoot a network issue?\n - How do I create a website using HTML and CSS?\n\nRemember, I am still under development and may not be able to answer all questions perfectly. However, I will do my best to provide you with accurate and informative responses."
+            "text": "You can ask me a wide range of questions, including:\n\n* **General knowledge questions:** What is the capital of France? Who painted the Mona Lisa?\n* **Science questions:** What is the chemical formula for water? How does photosynthesis work?\n* **History questions:** When did World War II start? Who was the first president of the United States?\n* **Math questions:** What is the square root of 16? How do you solve for x in the equation x + 5 = 10?\n* **Current events questions:** What is happening in Ukraine? Who is the current president of the United States?\n* **Personal questions:** What are your hobbies? What are your favorite books?\n* **Hypothetical questions:** What would happen if all the ice caps melted? What if humans could live on Mars?\n* **Questions about me:** What is your name? How old are you? What are your capabilities?\n\n**Tips for asking questions:**\n\n* Be specific and clear in your questions.\n* Use correct grammar and spelling.\n* Try to ask open-ended questions that allow for multiple answers.\n* Be patient and wait for my response. I am still under development, so I may take a few seconds to process your question.\n\nPlease note that I am still under development and may not be able to answer all questions accurately. However, I will do my best to provide you with the most relevant information I have available."
           }
-        ],
-        "role": "model"
+        ]
       },
       "finishReason": "STOP",
-      "index": 0,
       "safetyRatings": [
         {
-          "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
-          "probability": "NEGLIGIBLE"
+          "category": "HARM_CATEGORY_HATE_SPEECH",
+          "probability": "NEGLIGIBLE",
+          "probabilityScore": 0.047869004,
+          "severity": "HARM_SEVERITY_NEGLIGIBLE",
+          "severityScore": 0.050705366
         },
         {
-          "category": "HARM_CATEGORY_HATE_SPEECH",
-          "probability": "NEGLIGIBLE"
+          "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
+          "probability": "NEGLIGIBLE",
+          "probabilityScore": 0.052134257,
+          "severity": "HARM_SEVERITY_NEGLIGIBLE",
+          "severityScore": 0.036288295
         },
         {
           "category": "HARM_CATEGORY_HARASSMENT",
-          "probability": "NEGLIGIBLE"
+          "probability": "NEGLIGIBLE",
+          "probabilityScore": 0.08464396,
+          "severity": "HARM_SEVERITY_NEGLIGIBLE",
+          "severityScore": 0.033907957
         },
         {
-          "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
-          "probability": "NEGLIGIBLE"
+          "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
+          "probability": "NEGLIGIBLE",
+          "probabilityScore": 0.06290424,
+          "severity": "HARM_SEVERITY_NEGLIGIBLE",
+          "severityScore": 0.050611436
         }
       ]
     }
   ],
-  "promptFeedback": {
-    "safetyRatings": [
-      {
-        "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
-        "probability": "NEGLIGIBLE"
-      },
-      {
-        "category": "HARM_CATEGORY_HATE_SPEECH",
-        "probability": "NEGLIGIBLE"
-      },
-      {
-        "category": "HARM_CATEGORY_HARASSMENT",
-        "probability": "NEGLIGIBLE"
-      },
-      {
-        "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
-        "probability": "NEGLIGIBLE"
-      }
-    ]
+  "usageMetadata": {
+    "promptTokenCount": 6,
+    "candidatesTokenCount": 303,
+    "totalTokenCount": 309
   }
 }
diff --git a/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/unary-success-basic-reply-short.json b/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/unary-success-basic-reply-short.json
index 40a9a6da58e..3e3ddb80fd6 100644
--- a/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/unary-success-basic-reply-short.json
+++ b/FirebaseVertexAI/Tests/Unit/GenerateContentResponses/unary-success-basic-reply-short.json
@@ -2,53 +2,49 @@
   "candidates": [
     {
       "content": {
+        "role": "model",
         "parts": [
           {
             "text": "Mountain View, California, United States"
           }
-        ],
-        "role": "model"
+        ]
       },
       "finishReason": "STOP",
-      "index": 0,
      "safetyRatings": [
         {
-          "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
-          "probability": "NEGLIGIBLE"
+          "category": "HARM_CATEGORY_HATE_SPEECH",
+          "probability": "NEGLIGIBLE",
+          "probabilityScore": 0.029035643,
+          "severity": "HARM_SEVERITY_NEGLIGIBLE",
+          "severityScore": 0.05613278
         },
         {
-          "category": "HARM_CATEGORY_HATE_SPEECH",
-          "probability": "NEGLIGIBLE"
+          "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
+          "probability": "NEGLIGIBLE",
+          "probabilityScore": 0.2641685,
+          "severity": "HARM_SEVERITY_NEGLIGIBLE",
+          "severityScore": 0.082253955
         },
         {
           "category": "HARM_CATEGORY_HARASSMENT",
-          "probability": "NEGLIGIBLE"
+          "probability": "NEGLIGIBLE",
+          "probabilityScore": 0.087252244,
+          "severity": "HARM_SEVERITY_NEGLIGIBLE",
+          "severityScore": 0.04509957
        },
         {
-          "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
-          "probability": "NEGLIGIBLE"
+          "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
+          "probability": "NEGLIGIBLE",
+          "probabilityScore": 0.1431877,
+          "severity": "HARM_SEVERITY_NEGLIGIBLE",
+          "severityScore": 0.11027937
         }
       ]
     }
   ],
-  "promptFeedback": {
-    "safetyRatings": [
-      {
-        "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
-        "probability": "NEGLIGIBLE"
-      },
-      {
-        "category": "HARM_CATEGORY_HATE_SPEECH",
-        "probability": "NEGLIGIBLE"
-      },
-      {
-        "category": "HARM_CATEGORY_HARASSMENT",
-        "probability": "NEGLIGIBLE"
-      },
-      {
-        "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
-        "probability": "NEGLIGIBLE"
-      }
-    ]
+  "usageMetadata": {
+    "promptTokenCount": 6,
+    "candidatesTokenCount": 7,
+    "totalTokenCount": 13
   }
 }
diff --git a/FirebaseVertexAI/Tests/Unit/GenerativeModelTests.swift b/FirebaseVertexAI/Tests/Unit/GenerativeModelTests.swift
index 13c625036cc..c249613f9e9 100644
--- a/FirebaseVertexAI/Tests/Unit/GenerativeModelTests.swift
+++ b/FirebaseVertexAI/Tests/Unit/GenerativeModelTests.swift
@@ -25,7 +25,7 @@ final class GenerativeModelTests: XCTestCase {
     .init(category: .hateSpeech, probability: .negligible),
     .init(category: .harassment, probability: .negligible),
     .init(category: .dangerousContent, probability: .negligible),
-  ]
+  ].sorted()
 
   var urlSession: URLSession!
   var model: GenerativeModel!
@@ -62,15 +62,12 @@ final class GenerativeModelTests: XCTestCase {
     let candidate = try XCTUnwrap(response.candidates.first)
     let finishReason = try XCTUnwrap(candidate.finishReason)
     XCTAssertEqual(finishReason, .stop)
-    XCTAssertEqual(candidate.safetyRatings, safetyRatingsNegligible)
+    XCTAssertEqual(candidate.safetyRatings.sorted(), safetyRatingsNegligible)
     XCTAssertEqual(candidate.content.parts.count, 1)
     let part = try XCTUnwrap(candidate.content.parts.first)
     let partText = try XCTUnwrap(part.text)
     XCTAssertTrue(partText.hasPrefix("You can ask me a wide range of questions"))
     XCTAssertEqual(response.text, partText)
-    let promptFeedback = try XCTUnwrap(response.promptFeedback)
-    XCTAssertNil(promptFeedback.blockReason)
-    XCTAssertEqual(promptFeedback.safetyRatings, safetyRatingsNegligible)
   }
 
   func testGenerateContent_success_basicReplyShort() async throws {
@@ -86,14 +83,11 @@ final class GenerativeModelTests: XCTestCase {
     let candidate = try XCTUnwrap(response.candidates.first)
     let finishReason = try XCTUnwrap(candidate.finishReason)
     XCTAssertEqual(finishReason, .stop)
-    XCTAssertEqual(candidate.safetyRatings, safetyRatingsNegligible)
+    XCTAssertEqual(candidate.safetyRatings.sorted(), safetyRatingsNegligible)
     XCTAssertEqual(candidate.content.parts.count, 1)
     let part = try XCTUnwrap(candidate.content.parts.first)
     XCTAssertEqual(part.text, "Mountain View, California, United States")
     XCTAssertEqual(response.text, part.text)
-    let promptFeedback = try XCTUnwrap(response.promptFeedback)
-    XCTAssertNil(promptFeedback.blockReason)
-    XCTAssertEqual(promptFeedback.safetyRatings, safetyRatingsNegligible)
   }
 
   func testGenerateContent_success_citations() async throws {
@@ -131,7 +125,7 @@ final class GenerativeModelTests: XCTestCase {
     let candidate = try XCTUnwrap(response.candidates.first)
     let finishReason = try XCTUnwrap(candidate.finishReason)
     XCTAssertEqual(finishReason, .stop)
-    XCTAssertEqual(candidate.safetyRatings, safetyRatingsNegligible)
+    XCTAssertEqual(candidate.safetyRatings.sorted(), safetyRatingsNegligible)
     XCTAssertEqual(candidate.content.parts.count, 1)
     let part = try XCTUnwrap(candidate.content.parts.first)
     let partText = try XCTUnwrap(part.text)
@@ -139,7 +133,7 @@ final class GenerativeModelTests: XCTestCase {
     XCTAssertEqual(response.text, part.text)
     let promptFeedback = try XCTUnwrap(response.promptFeedback)
     XCTAssertNil(promptFeedback.blockReason)
-    XCTAssertEqual(promptFeedback.safetyRatings, safetyRatingsNegligible)
+    XCTAssertEqual(promptFeedback.safetyRatings.sorted(), safetyRatingsNegligible)
   }
 
   func testGenerateContent_success_unknownEnum_safetyRatings() async throws {
@@ -627,7 +621,7 @@ final class GenerativeModelTests: XCTestCase {
       responses += 1
     }
 
-    XCTAssertEqual(responses, 10)
+    XCTAssertEqual(responses, 8)
   }
 
   func testGenerateContentStream_successBasicReplyShort() async throws {
@@ -1049,3 +1043,11 @@ class AppCheckInteropFake: NSObject, AppCheckInterop {
 }
 
 struct AppCheckErrorFake: Error {}
+
+@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, *)
+extension SafetyRating: Comparable {
+  public static func < (lhs: FirebaseVertexAI.SafetyRating,
+                        rhs: FirebaseVertexAI.SafetyRating) -> Bool {
+    return lhs.category.rawValue < rhs.category.rawValue
+  }
+}