Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Prompt mgmt #231

Merged
merged 11 commits into from
Dec 3, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion docs/metadata/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ You should be able to add a single tag to your workflow:

```python
tag = MetadataTag("wftag", "val")
metadata_client.addWorkflowTag(tag, 'python_workflow_example_from_code')
metadata_client.add_workflow_tag(tag, 'python_workflow_example_from_code')
```

### Fetch tags added to your workflow
Expand Down
2 changes: 1 addition & 1 deletion docs/workflow/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ workflow_client.restart_workflow(workflow_id, use_latest_def=True)
When called, the task in the failed state is scheduled again, and the workflow moves to RUNNING status. If resume_subworkflow_tasks is set and the last failed task was a sub-workflow, the server restarts the sub-workflow from the failed task. If set to false, the sub-workflow is re-executed.

```python
workflow_client.retry_workflow(workflow_id, resumeSubworkflowTasks=True)
workflow_client.retry_workflow(workflow_id, resume_subworkflow_tasks=True)
```

### Skip task from workflow
Expand Down
Empty file.
17 changes: 17 additions & 0 deletions src/conductor/client/ai/configuration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
from typing import List

from typing_extensions import Self


class AIConfiguration:
    """Default provider, model, and vector-DB settings for AI workflows.

    A plain value object: it only stores the names used by the AI helpers
    when building tasks; it performs no validation or I/O.
    """

    # NOTE: `__init__` must be annotated `-> None` (PEP 484); the previous
    # `-> Self` annotation was incorrect for a constructor.
    def __init__(self, llm_provider: str, text_complete_model: str, chat_complete_model: str, embedding_model: str,
                 vector_db: str) -> None:
        # Name of the registered LLM integration (e.g. "openai").
        self.llm_provider = llm_provider
        # Model used for text-completion tasks.
        self.text_complete_model = text_complete_model
        # Model used for chat-completion tasks.
        self.chat_complete_model = chat_complete_model
        # Model used to generate embeddings.
        self.embedding_model = embedding_model
        # Name of the registered vector-database integration.
        self.vector_db = vector_db


64 changes: 64 additions & 0 deletions src/conductor/client/ai/integrations.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
from __future__ import annotations

from abc import ABC, abstractmethod


class IntegrationConfig(ABC):
    """Abstract base for provider-specific integration configurations.

    Concrete subclasses hold the credentials/endpoint fields for one
    provider and serialize them for the Conductor API via ``to_dict``.
    """

    def __init__(self):
        # No shared state; each concrete config owns its own fields.
        pass

    @abstractmethod
    def to_dict(self) -> dict:
        """Return this configuration as a plain ``dict`` payload."""
        pass


class WeviateConfig(IntegrationConfig):
    """Configuration for a Weaviate vector-database integration."""

    def __init__(self, api_key: str, endpoint: str, classname: str) -> None:
        self.api_key = api_key
        self.endpoint = endpoint
        # NOTE(review): classname is stored but never included in to_dict();
        # confirm whether the server payload should carry it.
        self.classname = classname

    def to_dict(self) -> dict:
        """Serialize the API key and endpoint for the integration API."""
        return dict(api_key=self.api_key, endpoint=self.endpoint)


class OpenAIConfig(IntegrationConfig):
    """Configuration for an OpenAI integration (API key only)."""

    def __init__(self, api_key: str) -> None:
        self.api_key = api_key

    def to_dict(self) -> dict:
        """Serialize the API key for the integration API."""
        return dict(api_key=self.api_key)


class AzureOpenAIConfig(IntegrationConfig):
    """Configuration for an Azure-hosted OpenAI integration."""

    def __init__(self, api_key: str, endpoint: str) -> None:
        self.api_key = api_key
        self.endpoint = endpoint

    def to_dict(self) -> dict:
        """Serialize the API key and endpoint for the integration API."""
        return dict(api_key=self.api_key, endpoint=self.endpoint)


class PineconeConfig(IntegrationConfig):
    """Configuration for a Pinecone vector-database integration."""

    def __init__(self, api_key: str, endpoint: str, classname: str) -> None:
        self.api_key = api_key
        self.endpoint = endpoint
        # NOTE(review): classname is stored but never included in to_dict();
        # confirm whether the server payload should carry it.
        self.classname = classname

    def to_dict(self) -> dict:
        """Serialize the API key and endpoint for the integration API."""
        return dict(api_key=self.api_key, endpoint=self.endpoint)
85 changes: 85 additions & 0 deletions src/conductor/client/ai/orchestrator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
from __future__ import annotations

import time
from typing import Optional, List
from uuid import uuid4

from typing_extensions import Self

from conductor.client.ai.integrations import IntegrationConfig
from conductor.client.configuration.configuration import Configuration
from conductor.client.http.models.integration_api_update import IntegrationApiUpdate
from conductor.client.http.models.integration_update import IntegrationUpdate
from conductor.client.orkes_clients import OrkesClients
from conductor.client.ai.configuration import AIConfiguration


class AIOrchestrator:
    """High-level helper for managing prompts, AI-model integrations and
    vector stores in Conductor.

    Wraps the Orkes integration, workflow and prompt clients behind a
    small fluent API.
    """

    # `__init__` must be annotated `-> None` (PEP 484), not `-> Self`.
    def __init__(self, api_configuration: Configuration, ai_configuration: AIConfiguration,
                 prompt_test_workflow_name: str = '') -> None:
        self.ai_configuration = ai_configuration
        orkes_clients = OrkesClients(api_configuration)

        self.integration_client = orkes_clients.get_integration_client()
        # BUG FIX: this previously re-assigned the *integration* client
        # (get_integration_client was called twice); fetch the workflow client.
        self.workflow_client = orkes_clients.get_workflow_client()
        self.workflow_executor = orkes_clients.get_workflow_executor()
        self.prompt_client = orkes_clients.get_prompt_client()

        # Generate a unique workflow name when none is supplied so that
        # concurrent test runs do not collide.
        self.prompt_test_workflow_name = prompt_test_workflow_name
        if self.prompt_test_workflow_name == '':
            self.prompt_test_workflow_name = 'prompt_test_' + str(uuid4())

    def add_prompt_template(self, name: str, prompt_template: str, description: str):
        """Create or update a prompt template; returns self for chaining."""
        self.prompt_client.save_prompt(name, description, prompt_template)
        return self

    def associate_prompt_template(self, name: str, ai_integration: str, ai_models: List[str]):
        """Allow the named prompt to be used with each of *ai_models* under *ai_integration*."""
        for ai_model in ai_models:
            self.integration_client.associate_prompt_with_integration(ai_integration, ai_model, name)

    def test_prompt_template(self, text: str, variables: dict,
                             ai_integration: str,
                             text_complete_model: str,
                             stop_words: Optional[List[str]] = None, max_tokens: Optional[int] = 100,
                             temperature: int = 0,
                             top_p: int = 1):
        """Run the prompt text against the model and return the completion.

        NOTE(review): *max_tokens* is accepted but not forwarded to
        ``test_prompt`` — confirm whether the prompt-client API supports it.
        The parameter is kept for interface compatibility.
        """
        # Avoid a shared mutable default argument; preserve the original
        # behavior of passing an empty list when no stop words are given.
        if stop_words is None:
            stop_words = []
        return self.prompt_client.test_prompt(text, variables, ai_integration, text_complete_model, temperature, top_p,
                                              stop_words)

    def add_ai_integration(self, name: str, provider: str, models: List[str], description: str,
                           config: IntegrationConfig):
        """Register an AI-model integration and enable each listed model on it."""
        details = IntegrationUpdate()
        details.configuration = config.to_dict()
        details.type = provider
        details.category = 'AI_MODEL'
        details.enabled = True
        details.description = description
        self.integration_client.save_integration(name, details)
        for model in models:
            api_details = IntegrationApiUpdate()
            api_details.enabled = True
            api_details.description = description
            self.integration_client.save_integration_api(name, model, api_details)

    def add_vector_store(self, name: str, provider: str, indices: List[str], description: str,
                         config: IntegrationConfig):
        """Register a vector-database integration and enable each listed index on it."""
        vector_db = IntegrationUpdate()
        vector_db.configuration = config.to_dict()
        vector_db.type = provider
        vector_db.category = 'VECTOR_DB'
        vector_db.enabled = True
        vector_db.description = description
        self.integration_client.save_integration(name, vector_db)
        for index in indices:
            api_details = IntegrationApiUpdate()
            api_details.enabled = True
            api_details.description = description
            self.integration_client.save_integration_api(name, index, api_details)

    def get_token_used(self, ai_integration: str) -> dict:
        """Return token usage aggregated across all models of *ai_integration*."""
        return self.integration_client.get_token_usage_for_integration_provider(ai_integration)

    def get_token_used_by_model(self, ai_integration: str, model: str) -> int:
        """Return token usage for one *model* of *ai_integration*."""
        return self.integration_client.get_token_usage_for_integration(ai_integration, model)
Loading
Loading