
Commit

Merge pull request #167 from elbruno/main
Update C# samples and devcontainer to use Phi3.5
leestott authored Aug 27, 2024
2 parents 1f4252f + 33839f5 commit 6d0cd20
Showing 19 changed files with 25 additions and 33 deletions.
@@ -1,5 +1,5 @@
{
"name": "Ollama with Phi-3 for C#",
"name": "Ollama with Phi-3.5 for C#",
"image": "mcr.microsoft.com/dotnet/sdk:8.0",
"features": {
"ghcr.io/devcontainers/features/docker-in-docker:2": {},
@@ -10,7 +10,6 @@
"dotnetRuntimeVersions": "8.0",
"aspNetCoreRuntimeVersions": "8.0"
},
"ghcr.io/prulloac/devcontainer-features/ollama:1": {},
"sshd": "latest"
},
"customizations": {
@@ -29,8 +28,8 @@
32000,
32001
],
"postCreateCommand": "",
"postStartCommand": "ollama pull phi3",
"postCreateCommand": "sudo dotnet workload update & docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama",
"postStartCommand": "docker exec -it ollama ollama pull phi3.5",
"remoteUser": "vscode",
"hostRequirements": {
"memory": "8gb",
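Note: with this change the devcontainer no longer uses the Ollama devcontainer feature; it starts Ollama in its own Docker container and pulls phi3.5 when the container starts. As an optional sanity check (not part of this commit; the endpoint and response handling are assumptions based on Ollama's public REST API), a few lines of C# can confirm the model is available locally:

    // Ask the local Ollama server which models it has pulled (GET /api/tags).
    // Assumes Ollama is listening on http://localhost:11434, as configured above.
    using System;
    using System.Net.Http;

    using var http = new HttpClient { BaseAddress = new Uri("http://localhost:11434") };
    string tags = await http.GetStringAsync("/api/tags");
    Console.WriteLine(tags.Contains("phi3.5")
        ? "phi3.5 is available locally."
        : "phi3.5 not found - run: docker exec -it ollama ollama pull phi3.5");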
2 changes: 1 addition & 1 deletion md/02.QuickStart/Ollama_QuickStart.md
@@ -168,7 +168,7 @@ using Microsoft.SemanticKernel.ChatCompletion;
// add chat completion service using the local ollama server endpoint
#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri("http://localhost:11434/"),
apiKey: "non required");

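Note: the snippet above registers the local Ollama endpoint as an OpenAI-compatible chat completion service in Semantic Kernel. A minimal follow-on usage sketch, continuing from the builder shown above (not part of this diff; the prompt text is only illustrative):

    // Build the kernel and send a single prompt to the local phi3.5 model.
    var kernel = builder.Build();
    var response = await kernel.InvokePromptAsync("Give a one-sentence summary of what Phi-3.5 is.");
    Console.WriteLine(response.GetValue<string>());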
2 changes: 1 addition & 1 deletion md/02.QuickStart/translations/zh-cn/Ollama_QuickStart.md
@@ -175,7 +175,7 @@ using Microsoft.SemanticKernel.ChatCompletion;
// add chat completion service using the local ollama server endpoint
#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri("http://localhost:11434/"),
apiKey: "non required");

2 changes: 1 addition & 1 deletion md/02.QuickStart/translations/zh-tw/Ollama_QuickStart.md
@@ -164,7 +164,7 @@ using Microsoft.SemanticKernel.ChatCompletion;
// 使用本地 ollama 伺服器端點添加聊天完成服務
#pragma warning disable SKEXP0001, SKEXP0003, SKEXP0010, SKEXP0011, SKEXP0050, SKEXP0052
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri("http://localhost:11434/"),
apiKey: "non required");

2 changes: 1 addition & 1 deletion md/07.Labs/CsharpOllamaCodeSpaces/src/Sample01/Program.cs
@@ -29,7 +29,7 @@
// Create kernel with a custom http address
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri("http://localhost:11434"),
apiKey: "apikey");
var kernel = builder.Build();
@@ -8,7 +8,7 @@
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Microsoft.SemanticKernel" Version="1.15.0" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.17.2" />
</ItemGroup>

</Project>
2 changes: 1 addition & 1 deletion md/07.Labs/CsharpOllamaCodeSpaces/src/Sample02/Program.cs
@@ -29,7 +29,7 @@
// Create kernel with a custom http address
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri("http://localhost:11434"),
apiKey: "apikey");
var kernel = builder.Build();
@@ -9,7 +9,7 @@
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Microsoft.SemanticKernel" Version="1.15.0" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.17.2" />
</ItemGroup>

</Project>
5 changes: 1 addition & 4 deletions md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Program.cs
@@ -30,11 +30,8 @@
#pragma warning disable SKEXP0050
#pragma warning disable SKEXP0052

using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.KernelMemory;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Embeddings;
using Microsoft.SemanticKernel.Memory;
@@ -49,7 +46,7 @@
// Create a chat completion service
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri("http://localhost:11434"),
apiKey: "apikey");
builder.AddLocalTextEmbeddingGeneration();
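Note: Sample03 pairs the phi3.5 chat model with local text embeddings via AddLocalTextEmbeddingGeneration. A rough follow-on sketch of the embedding side, continuing from the builder above and assuming the experimental ITextEmbeddingGenerationService interface from Microsoft.SemanticKernel.Embeddings (not part of this commit):

    // Resolve the locally registered embedding service and embed a sample sentence.
    #pragma warning disable SKEXP0001
    var kernel = builder.Build();
    var embedder = kernel.GetRequiredService<ITextEmbeddingGenerationService>();
    ReadOnlyMemory<float> vector = await embedder.GenerateEmbeddingAsync("Phi-3.5 runs locally behind Ollama.");
    Console.WriteLine($"Embedding dimensions: {vector.Length}");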
10 changes: 3 additions & 7 deletions md/07.Labs/CsharpOllamaCodeSpaces/src/Sample03/Sample03.csproj
@@ -3,19 +3,15 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<RootNamespace>sk_tutorial_16</RootNamespace>
<RootNamespace>sample03</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<UserSecretsId>7a6ff44c-6967-4a98-af29-b08cb9f913ec</UserSecretsId>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.UserSecrets" Version="9.0.0-preview.5.24306.7" />
<PackageReference Include="Microsoft.KernelMemory.SemanticKernelPlugin" Version="0.65.240620.1" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.15.0" />
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Memory" Version="1.15.0-alpha" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.18.0-rc" />
<PackageReference Include="Microsoft.SemanticKernel.Plugins.Memory" Version="1.18.0-alpha" />
<PackageReference Include="SmartComponents.LocalEmbeddings.SemanticKernel" Version="0.1.0-preview10148" />
<PackageReference Include="System.Linq.Async" Version="6.0.1" />
</ItemGroup>

</Project>
6 changes: 3 additions & 3 deletions md/07.Labs/CsharpOllamaCodeSpaces/src/Sample04/Program.cs
@@ -38,13 +38,13 @@
using System.Text;

// Define endpoints for telemetry and Phi-3
var otlpEndPoint = "http://cpc-bruno-83lkq-docker-desktop:4317/"; // "http://localhost:4317";
var phi3EndPoint = "http://cpc-bruno-83lkq-docker-desktop:11434/"; // "http://localhost:11434";
var otlpEndPoint = "http://localhost:4317";
var phi3EndPoint = "http://localhost:11434";

// Create kernel with a custom http address
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri(phi3EndPoint),
apiKey: "apikey");
ConfigureOpenTelemetry(builder, otlpEndPoint);
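Note: Sample04 routes telemetry through a ConfigureOpenTelemetry helper whose body is not shown in this diff. A hedged sketch of what exporting kernel logs over OTLP to the endpoint above could look like, using the OpenTelemetry packages the project references (the sample's actual implementation may differ):

    // Illustrative only: forward Semantic Kernel logging to an OTLP collector at otlpEndPoint.
    static void ConfigureOpenTelemetry(IKernelBuilder builder, string otlpEndPoint)
    {
        builder.Services.AddLogging(logging =>
            logging.AddOpenTelemetry(options =>
                options.AddOtlpExporter(exporter => exporter.Endpoint = new Uri(otlpEndPoint))));
    }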
@@ -14,7 +14,7 @@
<PackageReference Include="Microsoft.Extensions.Configuration.UserSecrets" Version="9.0.0-preview.5.24306.7" />
<PackageReference Include="Microsoft.Extensions.Logging" Version="9.0.0-preview.5.24306.7" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="9.0.0-preview.5.24306.7" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.15.0" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.17.2" />
<PackageReference Include="OpenTelemetry.Exporter.OpenTelemetryProtocol" Version="1.9.0" />
<PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.9.0" />
<PackageReference Include="OpenTelemetry.Instrumentation.Http" Version="1.9.0" />
@@ -29,7 +29,7 @@
// Create kernel with a custom http address
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri("http://localhost:11434"),
apiKey: "apikey");
var kernel = builder.Build();
@@ -29,7 +29,7 @@
// Create kernel with a custom http address
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri("http://localhost:11434"),
apiKey: "apikey");
var kernel = builder.Build();
@@ -49,7 +49,7 @@
// Create a chat completion service
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri("http://localhost:11434"),
apiKey: "apikey");
builder.AddLocalTextEmbeddingGeneration();
@@ -44,7 +44,7 @@
// Create kernel with a custom http address
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri(phi3EndPoint),
apiKey: "apikey");
ConfigureOpenTelemetry(builder, otlpEndPoint);
@@ -29,7 +29,7 @@
// Create kernel with a custom http address
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri("http://localhost:11434"),
apiKey: "apikey");
var kernel = builder.Build();
@@ -29,7 +29,7 @@
// Create kernel with a custom http address
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri("http://localhost:11434"),
apiKey: "apikey");
var kernel = builder.Build();
@@ -49,7 +49,7 @@
// Create a chat completion service
var builder = Kernel.CreateBuilder();
builder.AddOpenAIChatCompletion(
modelId: "phi3",
modelId: "phi3.5",
endpoint: new Uri("http://localhost:11434"),
apiKey: "apikey");
builder.AddLocalTextEmbeddingGeneration();
