diff --git a/Testcontainers.sln b/Testcontainers.sln
index 94af5e2c3..01618da1e 100644
--- a/Testcontainers.sln
+++ b/Testcontainers.sln
@@ -191,6 +191,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Testcontainers.Tests", "tes
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Testcontainers.WebDriver.Tests", "tests\Testcontainers.WebDriver.Tests\Testcontainers.WebDriver.Tests.csproj", "{EBA72C3B-57D5-43FF-A5B4-3D55B3B6D4C2}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Testcontainers.Ollama", "src\Testcontainers.Ollama\Testcontainers.Ollama.csproj", "{FDC88529-64F5-4F0A-95BE-8FBF653201C6}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Testcontainers.Ollama.Tests", "tests\Testcontainers.Ollama.Tests\Testcontainers.Ollama.Tests.csproj", "{0997BFAD-919D-482F-83E6-5DF9C4A1D313}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -556,6 +560,14 @@ Global
{EBA72C3B-57D5-43FF-A5B4-3D55B3B6D4C2}.Debug|Any CPU.Build.0 = Debug|Any CPU
{EBA72C3B-57D5-43FF-A5B4-3D55B3B6D4C2}.Release|Any CPU.ActiveCfg = Release|Any CPU
{EBA72C3B-57D5-43FF-A5B4-3D55B3B6D4C2}.Release|Any CPU.Build.0 = Release|Any CPU
+ {FDC88529-64F5-4F0A-95BE-8FBF653201C6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {FDC88529-64F5-4F0A-95BE-8FBF653201C6}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {FDC88529-64F5-4F0A-95BE-8FBF653201C6}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {FDC88529-64F5-4F0A-95BE-8FBF653201C6}.Release|Any CPU.Build.0 = Release|Any CPU
+ {0997BFAD-919D-482F-83E6-5DF9C4A1D313}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {0997BFAD-919D-482F-83E6-5DF9C4A1D313}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {0997BFAD-919D-482F-83E6-5DF9C4A1D313}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {0997BFAD-919D-482F-83E6-5DF9C4A1D313}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{5365F780-0E6C-41F0-B1B9-7DC34368F80C} = {673F23AE-7694-4BB9-ABD4-136D6C13634E}
@@ -647,5 +659,7 @@ Global
{1A1983E6-5297-435F-B467-E8E1F11277D6} = {7164F1FB-7F24-444A-ACD2-2C329C2B3CCF}
{27CDB869-A150-4593-958F-6F26E5391E7C} = {7164F1FB-7F24-444A-ACD2-2C329C2B3CCF}
{EBA72C3B-57D5-43FF-A5B4-3D55B3B6D4C2} = {7164F1FB-7F24-444A-ACD2-2C329C2B3CCF}
+ {FDC88529-64F5-4F0A-95BE-8FBF653201C6} = {673F23AE-7694-4BB9-ABD4-136D6C13634E}
+ {0997BFAD-919D-482F-83E6-5DF9C4A1D313} = {7164F1FB-7F24-444A-ACD2-2C329C2B3CCF}
EndGlobalSection
EndGlobal
diff --git a/docs/modules/index.md b/docs/modules/index.md
index e533faecf..6505388d4 100644
--- a/docs/modules/index.md
+++ b/docs/modules/index.md
@@ -52,6 +52,7 @@ await moduleNameContainer.StartAsync();
| MySQL | `mysql:8.0` | [NuGet](https://www.nuget.org/packages/Testcontainers.MySql) | [Source](https://github.com/testcontainers/testcontainers-dotnet/tree/develop/src/Testcontainers.MySql) |
| NATS | `nats:2.9` | [NuGet](https://www.nuget.org/packages/Testcontainers.Nats) | [Source](https://github.com/testcontainers/testcontainers-dotnet/tree/develop/src/Testcontainers.Nats) |
| Neo4j | `neo4j:5.4` | [NuGet](https://www.nuget.org/packages/Testcontainers.Neo4j) | [Source](https://github.com/testcontainers/testcontainers-dotnet/tree/develop/src/Testcontainers.Neo4j) |
+| Ollama | `ollama/ollama:latest` | [NuGet](https://www.nuget.org/packages/Testcontainers.Ollama) | [Source](https://github.com/testcontainers/testcontainers-dotnet/tree/develop/src/Testcontainers.Ollama) |
| Oracle | `gvenzl/oracle-xe:21.3.0-slim-faststart` | [NuGet](https://www.nuget.org/packages/Testcontainers.Oracle) | [Source](https://github.com/testcontainers/testcontainers-dotnet/tree/develop/src/Testcontainers.Oracle) |
| Papercut | `jijiechen/papercut:latest` | [NuGet](https://www.nuget.org/packages/Testcontainers.Papercut) | [Source](https://github.com/testcontainers/testcontainers-dotnet/tree/develop/src/Testcontainers.Papercut) |
| PostgreSQL | `postgres:15.1` | [NuGet](https://www.nuget.org/packages/Testcontainers.PostgreSql) | [Source](https://github.com/testcontainers/testcontainers-dotnet/tree/develop/src/Testcontainers.PostgreSql) |
diff --git a/src/Testcontainers.Ollama/.editorconfig b/src/Testcontainers.Ollama/.editorconfig
new file mode 100644
index 000000000..6f066619d
--- /dev/null
+++ b/src/Testcontainers.Ollama/.editorconfig
@@ -0,0 +1 @@
+root = true
\ No newline at end of file
diff --git a/src/Testcontainers.Ollama/OllamaBuilder.cs b/src/Testcontainers.Ollama/OllamaBuilder.cs
new file mode 100644
index 000000000..3a0323ee0
--- /dev/null
+++ b/src/Testcontainers.Ollama/OllamaBuilder.cs
@@ -0,0 +1,121 @@
+namespace Testcontainers.Ollama
+{
+    /// <inheritdoc cref="ContainerBuilder{TBuilderEntity, TContainerEntity, TConfigurationEntity}" />
+    [PublicAPI]
+    public sealed class OllamaBuilder : ContainerBuilder<OllamaBuilder, OllamaContainer, OllamaConfiguration>
+    {
+        /// <summary>
+        /// Gets the default port of the Ollama API.
+        /// </summary>
+        public const int DefaultPort = 11434;
+
+        /// <summary>
+        /// Default image name and version tag.
+        /// </summary>
+        public const string OllamaImage = "ollama/ollama:0.1.22";
+
+        /// <summary>
+        /// Default volume path.
+        /// </summary>
+        public const string DefaultVolumePath = "/root/.ollama";
+
+        /// <summary>
+        /// Default volume name.
+        /// </summary>
+        public const string DefaultVolumeName = "ollama-volume";
+
+        /// <summary>
+        /// The default model name for the OllamaBuilder.
+        /// </summary>
+        public const string DefaultModelName = OllamaModels.Llama2;
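+
+        // Usage sketch (assumes a running Docker engine):
+        //
+        //   var ollama = new OllamaBuilder()
+        //       .WithModelName(OllamaModels.Llama2)
+        //       .Build();
+        //   await ollama.StartAsync();
+        //   await ollama.Run();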
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="OllamaBuilder" /> class.
+        /// </summary>
+        public OllamaBuilder()
+            : this(new OllamaConfiguration())
+        {
+            DockerResourceConfiguration = Init().DockerResourceConfiguration;
+        }
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="OllamaBuilder" /> class.
+        /// </summary>
+        /// <param name="resourceConfiguration">The Docker resource configuration.</param>
+        private OllamaBuilder(OllamaConfiguration resourceConfiguration)
+            : base(resourceConfiguration)
+        {
+            DockerResourceConfiguration = resourceConfiguration;
+        }
+
+        /// <inheritdoc />
+        protected override OllamaConfiguration DockerResourceConfiguration { get; }
+
+        /// <summary>
+        /// Sets the Testcontainers.Ollama config.
+        /// </summary>
+        /// <param name="config">The Testcontainers.Ollama config.</param>
+        /// <returns>A configured instance of <see cref="OllamaBuilder" />.</returns>
+        public OllamaBuilder OllamaConfig(OllamaConfiguration config)
+        {
+            return Merge(DockerResourceConfiguration, config);
+        }
+
+        /// <inheritdoc />
+        public override OllamaContainer Build()
+        {
+            Validate();
+            return new OllamaContainer(DockerResourceConfiguration, TestcontainersSettings.Logger);
+        }
+
+        /// <inheritdoc />
+        protected override void Validate()
+        {
+            Guard.Argument(DockerResourceConfiguration.ModelName, nameof(DockerResourceConfiguration.ModelName)).NotNull().NotEmpty();
+            base.Validate();
+        }
+
+        /// <inheritdoc />
+        protected override OllamaBuilder Init()
+        {
+            return base.Init()
+                .WithImage(new DockerImage(OllamaImage))
+                .WithPortBinding(DefaultPort, true)
+                .WithVolumeMount(DefaultVolumeName, DefaultVolumePath);
+        }
+
+        /// <inheritdoc />
+        protected override OllamaBuilder Clone(IResourceConfiguration<CreateContainerParameters> resourceConfiguration)
+        {
+            return Merge(DockerResourceConfiguration, new OllamaConfiguration(resourceConfiguration));
+        }
+
+        /// <inheritdoc />
+        protected override OllamaBuilder Clone(IContainerConfiguration resourceConfiguration)
+        {
+            return Merge(DockerResourceConfiguration, new OllamaConfiguration(resourceConfiguration));
+        }
+
+        /// <inheritdoc />
+        protected override OllamaBuilder Merge(OllamaConfiguration oldValue, OllamaConfiguration newValue)
+        {
+            return new OllamaBuilder(new OllamaConfiguration(oldValue, newValue));
+        }
+
+        /// <summary>
+        /// Sets the name of the model to run.
+        /// </summary>
+        /// <param name="name">The name of the model to run.</param>
+        /// <returns>A configured instance of <see cref="OllamaBuilder" />.</returns>
+        /// <exception cref="ArgumentNullException">The name of the model to run is <c>null</c>.</exception>
+        /// <exception cref="ArgumentException">The name of the model to run is empty.</exception>
+        /// <remarks>
+        /// The name of the model to run is required.
+        /// </remarks>
+        public OllamaBuilder WithModelName(string name)
+        {
+            return Merge(DockerResourceConfiguration, new OllamaConfiguration(modelName: name));
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/Testcontainers.Ollama/OllamaConfiguration.cs b/src/Testcontainers.Ollama/OllamaConfiguration.cs
new file mode 100644
index 000000000..52abab44f
--- /dev/null
+++ b/src/Testcontainers.Ollama/OllamaConfiguration.cs
@@ -0,0 +1,82 @@
+namespace Testcontainers.Ollama
+{
+    /// <inheritdoc cref="ContainerConfiguration" />
+    [PublicAPI]
+    public sealed class OllamaConfiguration : ContainerConfiguration
+    {
+        /// <summary>
+        /// Initializes a new instance of the <see cref="OllamaConfiguration" /> class.
+        /// </summary>
+        /// <param name="modelName">The name of the model to run.</param>
+        /// <param name="volumePath">The volume path inside the container.</param>
+        /// <param name="volumeName">The name of the volume.</param>
+        /// <param name="port">The Ollama API port.</param>
+        public OllamaConfiguration(string modelName = null, string volumePath = null, string volumeName = null, int? port = null)
+        {
+            ModelName = modelName ?? string.Empty;
+            VolumePath = volumePath ?? OllamaBuilder.DefaultVolumePath;
+            VolumeName = volumeName ?? OllamaBuilder.DefaultVolumeName;
+            Port = port ?? OllamaBuilder.DefaultPort;
+        }
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="OllamaConfiguration" /> class.
+        /// </summary>
+        /// <param name="resourceConfiguration">The Docker resource configuration.</param>
+        public OllamaConfiguration(IResourceConfiguration<CreateContainerParameters> resourceConfiguration)
+            : base(resourceConfiguration)
+        {
+            // Passes the configuration upwards to the base implementations to create an updated immutable copy.
+        }
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="OllamaConfiguration" /> class.
+        /// </summary>
+        /// <param name="resourceConfiguration">The Docker resource configuration.</param>
+        public OllamaConfiguration(IContainerConfiguration resourceConfiguration)
+            : base(resourceConfiguration)
+        {
+            // Passes the configuration upwards to the base implementations to create an updated immutable copy.
+        }
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="OllamaConfiguration" /> class.
+        /// </summary>
+        /// <param name="resourceConfiguration">The Docker resource configuration.</param>
+        public OllamaConfiguration(OllamaConfiguration resourceConfiguration)
+            : this(new OllamaConfiguration(), resourceConfiguration)
+        {
+            // Passes the configuration upwards to the base implementations to create an updated immutable copy.
+        }
+
+        /// <summary>
+        /// Initializes a new instance of the <see cref="OllamaConfiguration" /> class.
+        /// </summary>
+        /// <param name="oldValue">The old Docker resource configuration.</param>
+        /// <param name="newValue">The new Docker resource configuration.</param>
+        public OllamaConfiguration(OllamaConfiguration oldValue, OllamaConfiguration newValue)
+            : base(oldValue, newValue)
+        {
+            ModelName = BuildConfiguration.Combine(oldValue.ModelName, newValue.ModelName);
+            VolumePath = BuildConfiguration.Combine(oldValue.VolumePath, newValue.VolumePath);
+            VolumeName = BuildConfiguration.Combine(oldValue.VolumeName, newValue.VolumeName);
+            Port = BuildConfiguration.Combine(oldValue.Port, newValue.Port);
+        }
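+
+        // Merge sketch: Combine keeps the value from the newer configuration when it is
+        // set and falls back to the older one otherwise (assumed Testcontainers semantics).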
+
+        /// <summary>
+        /// Gets or sets the name of the model to run.
+        /// </summary>
+        public string ModelName { get; set; }
+
+        /// <summary>
+        /// Gets or sets the volume path inside the Ollama container.
+        /// </summary>
+        public string VolumePath { get; set; }
+
+        /// <summary>
+        /// Gets or sets the name of the volume associated with the Ollama container.
+        /// </summary>
+        public string VolumeName { get; set; }
+
+        /// <summary>
+        /// Gets or sets the Ollama API container port.
+        /// </summary>
+        public int Port { get; set; }
+    }
+}
\ No newline at end of file
diff --git a/src/Testcontainers.Ollama/OllamaContainer.cs b/src/Testcontainers.Ollama/OllamaContainer.cs
new file mode 100644
index 000000000..53113a052
--- /dev/null
+++ b/src/Testcontainers.Ollama/OllamaContainer.cs
@@ -0,0 +1,59 @@
+using System.Threading;
+
+namespace Testcontainers.Ollama
+{
+    /// <inheritdoc cref="DockerContainer" />
+    [PublicAPI]
+    public sealed class OllamaContainer : DockerContainer
+    {
+        /// <summary>
+        /// Initializes a new instance of the <see cref="OllamaContainer" /> class.
+        /// </summary>
+        /// <param name="configuration">The container configuration.</param>
+        /// <param name="logger">The logger.</param>
+        public OllamaContainer(OllamaConfiguration configuration, ILogger logger)
+            : base(configuration, logger)
+        {
+            Configuration = configuration;
+            // Capture the resolved image name so the property below is populated.
+            ImageName = configuration.Image.FullName;
+        }
+
+        /// <summary>
+        /// Gets the container configuration.
+        /// </summary>
+        public OllamaConfiguration Configuration { get; }
+
+        /// <summary>
+        /// Runs the model configured in <see cref="OllamaConfiguration.ModelName" />.
+        /// </summary>
+        /// <param name="ct">Cancellation token.</param>
+        public Task Run(CancellationToken ct = default)
+        {
+            return Run(Configuration.ModelName, ct);
+        }
+
+        /// <summary>
+        /// Runs the given model inside the started Ollama container.
+        /// </summary>
+        /// <param name="modelName">The name of the model to run.</param>
+        /// <param name="ct">Cancellation token.</param>
+        /// <remarks>
+        /// The first invocation pulls the model into the mounted volume, which may download several gigabytes.
+        /// </remarks>
+        public Task Run(string modelName, CancellationToken ct = default)
+        {
+            ModelName = modelName;
+            if (State != TestcontainersStates.Created && State != TestcontainersStates.Running)
+            {
+                ThrowIfResourceNotFound();
+            }
+
+            return ExecAsync(new List<string> { "ollama", "run", ModelName }, ct);
+        }
+
+        /// <summary>
+        /// Gets the base URL of the Ollama API.
+        /// </summary>
+        /// <returns>The base URL of the Ollama API, using the mapped public port, e.g. http://localhost:32768/api.</returns>
+        public string GetBaseUrl() => $"http://{Hostname}:{GetMappedPublicPort(OllamaBuilder.DefaultPort)}/api";
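+
+        // Client sketch (OllamaSharp, as used in the tests):
+        //
+        //   var client = new OllamaApiClient(container.GetBaseUrl(), container.ModelName);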
+
+        /// <summary>
+        /// Gets the full name of the Docker image the container was created from.
+        /// </summary>
+        public string ImageName { get; }
+
+        /// <summary>
+        /// Gets the name of the model to run.
+        /// </summary>
+        public string ModelName { get; private set; }
+    }
+}
\ No newline at end of file
diff --git a/src/Testcontainers.Ollama/OllamaModels.cs b/src/Testcontainers.Ollama/OllamaModels.cs
new file mode 100644
index 000000000..315696f1e
--- /dev/null
+++ b/src/Testcontainers.Ollama/OllamaModels.cs
@@ -0,0 +1,76 @@
+namespace Testcontainers.Ollama
+{
+    /// <summary>
+    /// A selection of Ollama models from the readme.
+    /// </summary>
+    /// <remarks>
+    /// See: https://github.com/ollama/ollama?tab=readme-ov-file#model-library
+    /// </remarks>
+    public static class OllamaModels
+    {
+        /// <summary>
+        /// Llama 2: 7B parameters, Size: 3.8GB, Command: ollama run llama2
+        /// </summary>
+        public const string Llama2 = "llama2";
+
+        /// <summary>
+        /// Mistral: 7B parameters, Size: 4.1GB, Command: ollama run mistral
+        /// </summary>
+        public const string Mistral = "mistral";
+
+        /// <summary>
+        /// Dolphin Phi: 2.7B parameters, Size: 1.6GB, Command: ollama run dolphin-phi
+        /// </summary>
+        public const string DolphinPhi = "dolphin-phi";
+
+        /// <summary>
+        /// Phi-2: 2.7B parameters, Size: 1.7GB, Command: ollama run phi
+        /// </summary>
+        public const string Phi2 = "phi";
+
+        /// <summary>
+        /// Neural Chat: 7B parameters, Size: 4.1GB, Command: ollama run neural-chat
+        /// </summary>
+        public const string NeuralChat = "neural-chat";
+
+        /// <summary>
+        /// Starling: 7B parameters, Size: 4.1GB, Command: ollama run starling-lm
+        /// </summary>
+        public const string Starling = "starling-lm";
+
+        /// <summary>
+        /// Code Llama: 7B parameters, Size: 3.8GB, Command: ollama run codellama
+        /// </summary>
+        public const string CodeLlama = "codellama";
+
+        /// <summary>
+        /// Llama 2 Uncensored: 7B parameters, Size: 3.8GB, Command: ollama run llama2-uncensored
+        /// </summary>
+        public const string Llama2Uncensored = "llama2-uncensored";
+
+        /// <summary>
+        /// Llama 2 13B: 13B parameters, Size: 7.3GB, Command: ollama run llama2:13b
+        /// </summary>
+        public const string Llama213B = "llama2:13b";
+
+        /// <summary>
+        /// Llama 2 70B: 70B parameters, Size: 39GB, Command: ollama run llama2:70b
+        /// </summary>
+        public const string Llama270B = "llama2:70b";
+
+        /// <summary>
+        /// Orca Mini: 3B parameters, Size: 1.9GB, Command: ollama run orca-mini
+        /// </summary>
+        public const string OrcaMini = "orca-mini";
+
+        /// <summary>
+        /// Vicuna: 7B parameters, Size: 3.8GB, Command: ollama run vicuna
+        /// </summary>
+        public const string Vicuna = "vicuna";
+
+        /// <summary>
+        /// LLaVA: 7B parameters, Size: 4.5GB, Command: ollama run llava
+        /// </summary>
+        public const string LLaVA = "llava";
+    }
+}
\ No newline at end of file
diff --git a/src/Testcontainers.Ollama/Testcontainers.Ollama.csproj b/src/Testcontainers.Ollama/Testcontainers.Ollama.csproj
new file mode 100644
index 000000000..7048179d1
--- /dev/null
+++ b/src/Testcontainers.Ollama/Testcontainers.Ollama.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFrameworks>netstandard2.0;netstandard2.1</TargetFrameworks>
+    <LangVersion>latest</LangVersion>
+  </PropertyGroup>
+  <ItemGroup>
+    <PackageReference Include="JetBrains.Annotations" PrivateAssets="All" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="$(SolutionDir)src/Testcontainers/Testcontainers.csproj" />
+  </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/src/Testcontainers.Ollama/Usings.cs b/src/Testcontainers.Ollama/Usings.cs
new file mode 100644
index 000000000..13053db61
--- /dev/null
+++ b/src/Testcontainers.Ollama/Usings.cs
@@ -0,0 +1,11 @@
+global using Docker.DotNet.Models;
+global using DotNet.Testcontainers.Builders;
+global using DotNet.Testcontainers.Configurations;
+global using DotNet.Testcontainers.Containers;
+global using DotNet.Testcontainers;
+global using JetBrains.Annotations;
+global using Microsoft.Extensions.Logging;
+global using DotNet.Testcontainers.Images;
+global using System;
+global using System.Collections.Generic;
+global using System.Threading.Tasks;
\ No newline at end of file
diff --git a/tests/Testcontainers.Ollama.Tests/OllamaContainerTests.cs b/tests/Testcontainers.Ollama.Tests/OllamaContainerTests.cs
new file mode 100644
index 000000000..e9a9f7b39
--- /dev/null
+++ b/tests/Testcontainers.Ollama.Tests/OllamaContainerTests.cs
@@ -0,0 +1,42 @@
+namespace Testcontainers.Ollama.Tests
+{
+    public sealed class OllamaContainerTests : IAsyncLifetime
+    {
+        private OllamaContainer _ollamaContainer;
+
+        public async Task InitializeAsync()
+        {
+            _ollamaContainer = new OllamaBuilder()
+                .OllamaConfig(new OllamaConfiguration(OllamaModels.Llama2))
+                .Build();
+            await _ollamaContainer.StartAsync();
+            await _ollamaContainer.Run();
+        }
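+
+        // Note: Run() pulls the llama2 model on first use (about 3.8 GB according to the
+        // Ollama model library), so this test needs network access and can take a while.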
+
+        public async Task DisposeAsync()
+        {
+            await _ollamaContainer.DisposeAsync().AsTask();
+        }
+
+        [Fact]
+        [Trait(nameof(DockerCli.DockerPlatform), nameof(DockerCli.DockerPlatform.Linux))]
+        public async Task OllamaContainerReturnsSuccessful()
+        {
+            var client = new OllamaApiClient(_ollamaContainer.GetBaseUrl(), _ollamaContainer.ModelName);
+
+            var chatRequest = new ChatRequest
+            {
+                Model = _ollamaContainer.ModelName,
+                Stream = false,
+                Messages = new List<Message>
+                {
+                    new Message { Content = "What is a name", Role = ChatRole.User },
+                },
+            };
+
+            var response = await client.SendChat(chatRequest, stream => { });
+            var messages = response.ToList();
+
+            Assert.True(messages.Any());
+        }
+    }
+}
diff --git a/tests/Testcontainers.Ollama.Tests/Testcontainers.Ollama.Tests.csproj b/tests/Testcontainers.Ollama.Tests/Testcontainers.Ollama.Tests.csproj
new file mode 100644
index 000000000..3da16aee4
--- /dev/null
+++ b/tests/Testcontainers.Ollama.Tests/Testcontainers.Ollama.Tests.csproj
@@ -0,0 +1,29 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net8.0</TargetFramework>
+    <IsPackable>false</IsPackable>
+    <IsPublishable>false</IsPublishable>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="Microsoft.NET.Test.Sdk" />
+    <PackageReference Include="OllamaSharp" />
+    <PackageReference Include="coverlet.collector">
+      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+      <PrivateAssets>all</PrivateAssets>
+    </PackageReference>
+    <PackageReference Include="xunit.runner.visualstudio">
+      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+      <PrivateAssets>all</PrivateAssets>
+    </PackageReference>
+    <PackageReference Include="xunit" />
+  </ItemGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="$(SolutionDir)src/Testcontainers.Ollama/Testcontainers.Ollama.csproj" />
+    <ProjectReference Include="$(SolutionDir)tests/Testcontainers.Commons/Testcontainers.Commons.csproj" />
+  </ItemGroup>
+
+</Project>
diff --git a/tests/Testcontainers.Ollama.Tests/Usings.cs b/tests/Testcontainers.Ollama.Tests/Usings.cs
new file mode 100644
index 000000000..e610c15c9
--- /dev/null
+++ b/tests/Testcontainers.Ollama.Tests/Usings.cs
@@ -0,0 +1,8 @@
+global using System.Threading.Tasks;
+global using DotNet.Testcontainers.Commons;
+global using Xunit;
+global using System.Collections.Generic;
+global using System.Linq;
+global using DotNet.Testcontainers.Configurations;
+global using OllamaSharp;
+global using OllamaSharp.Models.Chat;