Use a smaller LLM
loverdos committed Oct 1, 2024
1 parent e5a4963 commit 256370b
Showing 7 changed files with 10 additions and 10 deletions.
2 changes: 1 addition & 1 deletion examples/README.md
@@ -76,7 +76,7 @@ executables there.

### Ollama

- For the LLM component we install and use [Ollama][ollama] with the `llama3.1` model. You can
+ For the LLM component we install and use [Ollama][ollama] with the `llama3.2:1b` model. You can
install both with `just ollama-setup`.

## Example: Instagram Post Planner
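The README hunk above switches the model that `just ollama-setup` installs to `llama3.2:1b`. A minimal sketch for checking that the model is actually available locally, assuming Ollama is already running on its default port (only the standard `/api/tags` endpoint is used; this check is not part of the repo):

```python
import json
import urllib.request

OLLAMA_URL = "http://localhost:11434"  # default Ollama endpoint, matching examples/main.py

# Ask the local Ollama server which models it has pulled.
with urllib.request.urlopen(f"{OLLAMA_URL}/api/tags") as resp:
    local_models = [m["name"] for m in json.load(resp).get("models", [])]

if "llama3.2:1b" in local_models:
    print("llama3.2:1b is installed and ready")
else:
    print("llama3.2:1b not found; run `just ollama-setup` (or `ollama pull llama3.2:1b`)")
```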
2 changes: 1 addition & 1 deletion examples/cli_cluster.py
@@ -181,7 +181,7 @@ def run(self, user_input):
return "Cluster execution failed"

print(f"Cluster execution started with ID: {execution_id}")
- return get_cluster_execution_response(self.client, execution_id)
+ return get_cluster_execution_response(self.client, execution_id, 600)


# Runs the CLI agent example using the provided Nexus package ID.
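The only substantive change in this hunk (and in the identical hunks in `examples/ig_post_planner.py` and `examples/trip_planner.py` below) is the new third argument, `600`, passed to `get_cluster_execution_response`. Assuming that argument is a timeout in seconds, a hypothetical sketch of what such a polling helper with a deadline could look like (`client.get_execution_status` and its attributes are illustrative, not the repo's actual API):

```python
import time

def get_cluster_execution_response(client, execution_id, timeout_s=600):
    """Hypothetical sketch: poll an execution until it finishes or the deadline passes."""
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        # Illustrative call; the real helper's client interface may differ.
        status = client.get_execution_status(execution_id)
        if status.done:
            return status.response
        time.sleep(2)  # back off between polls
    return "Cluster execution timed out"
```

A smaller model such as `llama3.2:1b` can still be slow on CPU-only machines, which would explain allowing the examples to wait up to ten minutes.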
2 changes: 1 addition & 1 deletion examples/ig_post_planner.py
@@ -181,7 +181,7 @@ def run(self):
return "Cluster execution failed"

print(f"Cluster execution started with ID: {execution_id}")
- return get_cluster_execution_response(self.client, execution_id)
+ return get_cluster_execution_response(self.client, execution_id, 600)


# Runs the Instagram Post Planner example using the provided Nexus package ID.
8 changes: 4 additions & 4 deletions examples/main.py
@@ -200,16 +200,16 @@ def create_llama_model(client, package_id, node_id):
client=client,
package_id=package_id,
node_id=node_id,
name="llama3.1",
model_hash=b"llama3.1_8B_hash",
name="llama3.2:1b",
model_hash=b"llama3.2_1b_hash",
url="http://localhost:11434",
token_price=1000,
capacity=1000000,
num_params=1000000000,
description="Llama3.1 8B",
description="llama3.2 1b",
max_context_length=8192,
is_fine_tuned=False,
family="Llama3.1",
family="Llama3.2",
vendor="Meta",
is_open_source=True,
datasets=["test"],
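The registration above points the `llama3.2:1b` entry at a local Ollama server (`url="http://localhost:11434"`). A hedged sketch for confirming that this endpoint really serves the model being registered, assuming the standard Ollama `/api/generate` endpoint (the prompt text is arbitrary):

```python
import json
import urllib.request

payload = json.dumps({
    "model": "llama3.2:1b",
    "prompt": "Reply with a single short sentence.",
    "stream": False,  # ask Ollama for one JSON object instead of a stream
}).encode()

req = urllib.request.Request(
    "http://localhost:11434/api/generate",
    data=payload,
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp)["response"])
```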
2 changes: 1 addition & 1 deletion examples/trip_planner.py
@@ -167,7 +167,7 @@ def run(self):
return "Cluster execution failed"

print(f"Cluster execution started with ID: {execution_id}")
- return get_cluster_execution_response(self.client, execution_id)
+ return get_cluster_execution_response(self.client, execution_id, 600)


# Runs the Trip Planner example using the provided Nexus package ID.
2 changes: 1 addition & 1 deletion justfile
@@ -1,5 +1,5 @@
python_version := "3.10"
- llama_version := "llama3.1"
+ llama_version := "llama3.2:1b"
sui_tag := "testnet-v1.28.3"

[private]
2 changes: 1 addition & 1 deletion offchain/tools/src/nexus_tools/server/models/prompt.py
@@ -20,7 +20,7 @@ class Prompt(BaseModel):
"""

prompt: str = Field(alias="prompt")
- model: str = Field(alias="model", default="llama3:70b")
+ model: str = Field(alias="model", default="llama3.2:1b")
max_tokens: int = Field(alias="max_tokens", default=1000)
temperature: float = Field(alias="temperature", default=1.0)
tools: Optional[List[str]] = Field(
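The hunk above only changes the default value of the `model` field. A small usage sketch of the `Prompt` model with that new default; the class is reproduced here in trimmed form (the `tools` field and the rest of the class are cut off in the diff and omitted):

```python
from pydantic import BaseModel, Field

class Prompt(BaseModel):
    """Trimmed copy of the model shown in the diff, for illustration only."""
    prompt: str = Field(alias="prompt")
    model: str = Field(alias="model", default="llama3.2:1b")
    max_tokens: int = Field(alias="max_tokens", default=1000)
    temperature: float = Field(alias="temperature", default=1.0)

p = Prompt(prompt="Plan a three-day trip to Lisbon.")
print(p.model)       # "llama3.2:1b" -- the new default
print(p.max_tokens)  # 1000
```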
