-
Notifications
You must be signed in to change notification settings - Fork 5
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
test: use default model in ollama and reduce code redundancy
Signed-off-by: Adrian Cole <[email protected]>
- Loading branch information
1 parent 5b34bc5 · commit 667296c
Showing 6 changed files with 32 additions and 51 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
File renamed without changes.
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,33 +1,21 @@ | ||
from typing import Tuple | ||
|
||
import os | ||
import pytest | ||
|
||
from exchange import Text | ||
from exchange.message import Message | ||
from exchange.providers.base import Usage | ||
from exchange.providers.ollama import OllamaProvider, OLLAMA_MODEL | ||
from exchange.providers.ollama import OllamaProvider | ||
from .conftest import complete, OLLAMA_MODEL | ||
|
||
|
||
@pytest.mark.vcr() | ||
def test_ollama_completion(default_openai_api_key): | ||
reply_message, reply_usage = ollama_complete() | ||
def test_ollama_complete(default_openai_api_key): | ||
reply_message, reply_usage = complete(OllamaProvider, OLLAMA_MODEL) | ||
|
||
assert reply_message.content == [Text(text="Hello! I'm here to help. How can I assist you today? Let's chat. 😊")] | ||
assert reply_usage.total_tokens == 33 | ||
|
||
|
||
@pytest.mark.integration | ||
def test_ollama_completion_integration(): | ||
reply = ollama_complete() | ||
def test_ollama_complete_integration(): | ||
reply = complete(OllamaProvider, OLLAMA_MODEL) | ||
|
||
assert reply[0].content is not None | ||
print("Completion content from OpenAI:", reply[0].content) | ||
|
||
|
||
def ollama_complete() -> Tuple[Message, Usage]: | ||
provider = OllamaProvider.from_env() | ||
model = os.getenv("OLLAMA_MODEL", OLLAMA_MODEL) | ||
system = "You are a helpful assistant." | ||
messages = [Message.user("Hello")] | ||
return provider.complete(model=model, system=system, messages=messages, tools=None) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,39 +1,21 @@ | ||
from typing import Tuple | ||
|
||
import os | ||
import pytest | ||
|
||
from exchange import Text | ||
from exchange.message import Message | ||
from exchange.providers.base import Usage | ||
from exchange.providers.openai import OpenAiProvider | ||
from .conftest import OPENAI_MODEL, OPENAI_API_KEY | ||
from .conftest import complete, OPENAI_MODEL | ||
|
||
|
||
@pytest.mark.vcr() | ||
def test_openai_completion(monkeypatch): | ||
# When running VCR tests the first time, it needs OPENAI_API_KEY to call | ||
# the real service. Afterward, it is not needed as VCR mocks the service. | ||
if "OPENAI_API_KEY" not in os.environ: | ||
monkeypatch.setenv("OPENAI_API_KEY", OPENAI_API_KEY) | ||
|
||
reply_message, reply_usage = openai_complete() | ||
def test_openai_complete(default_openai_api_key): | ||
reply_message, reply_usage = complete(OpenAiProvider, OPENAI_MODEL) | ||
|
||
assert reply_message.content == [Text(text="Hello! How can I assist you today?")] | ||
assert reply_usage.total_tokens == 27 | ||
|
||
|
||
@pytest.mark.integration | ||
def test_openai_completion_integration(): | ||
reply = openai_complete() | ||
def test_openai_complete_integration(): | ||
reply = complete(OpenAiProvider, OPENAI_MODEL) | ||
|
||
assert reply[0].content is not None | ||
print("Completion content from OpenAI:", reply[0].content) | ||
|
||
|
||
def openai_complete() -> Tuple[Message, Usage]: | ||
provider = OpenAiProvider.from_env() | ||
model = OPENAI_MODEL | ||
system = "You are a helpful assistant." | ||
messages = [Message.user("Hello")] | ||
return provider.complete(model=model, system=system, messages=messages, tools=None) | ||
print("Complete content from OpenAI:", reply[0].content) |