Skip to content

Commit

Permalink
v0.3.74
Browse files Browse the repository at this point in the history
  • Loading branch information
github-actions committed Oct 31, 2024
1 parent 1dfab04 commit 1e0afd3
Show file tree
Hide file tree
Showing 4 changed files with 98 additions and 1 deletion.
2 changes: 2 additions & 0 deletions premai/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from .catch_all_error import CatchAllError
from .catch_all_error_code_enum import CatchAllErrorCodeEnum
from .chat_completion_input import ChatCompletionInput, ChatCompletionInputDict
from .chat_completion_input_response_format_type_0 import ChatCompletionInputResponseFormatType0
from .chat_completion_response import ChatCompletionResponse
from .conflict_error import ConflictError
from .conflict_error_code_enum import ConflictErrorCodeEnum
Expand Down Expand Up @@ -81,6 +82,7 @@
"CatchAllError",
"CatchAllErrorCodeEnum",
"ChatCompletionInput",
"ChatCompletionInputResponseFormatType0",
"ChatCompletionResponse",
"ConflictError",
"ConflictErrorCodeEnum",
Expand Down
46 changes: 46 additions & 0 deletions premai/models/chat_completion_input.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from attrs import field as _attrs_field
from typing_extensions import Any, NotRequired, TypedDict, TypeVar

from ..models.chat_completion_input_response_format_type_0 import ChatCompletionInputResponseFormatType0
from ..models.enhancement import Enhancement
from ..models.message import Message
from ..models.tool import Tool
Expand All @@ -24,6 +25,7 @@ class ChatCompletionInputDict(TypedDict):
stream: NotRequired[Union[Unset, bool]]
temperature: Union[Unset, float] = 1.0
tools: NotRequired[Union[List["Tool"], None, Unset]]
response_format: NotRequired[Union["ChatCompletionInputResponseFormatType0", None, Unset]]
pass


Expand All @@ -41,6 +43,8 @@ class ChatCompletionInput:
stream (Union[Unset, bool]): If set, partial message deltas will be sent, like in ChatGPT.
temperature (Union[Unset, float]): What sampling temperature to use, between 0 and 2. Default: 1.0.
tools (Union[List['Tool'], None, Unset]): The tools to use in the completion.
response_format (Union['ChatCompletionInputResponseFormatType0', None, Unset]): The format of the response. Can
be a JSON schema or a simple `json_object` type
"""

project_id: int
Expand All @@ -53,10 +57,13 @@ class ChatCompletionInput:
stream: Union[Unset, bool] = UNSET
temperature: Union[Unset, float] = 1.0
tools: Union[List["Tool"], None, Unset] = UNSET
response_format: Union["ChatCompletionInputResponseFormatType0", None, Unset] = UNSET

additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

def to_dict(self) -> Dict[str, Any]:
from ..models.chat_completion_input_response_format_type_0 import ChatCompletionInputResponseFormatType0

project_id = self.project_id

messages = []
Expand Down Expand Up @@ -96,6 +103,14 @@ def to_dict(self) -> Dict[str, Any]:
else:
tools = self.tools

response_format: Union[Dict[str, Any], None, Unset]
if isinstance(self.response_format, Unset):
response_format = UNSET
elif isinstance(self.response_format, ChatCompletionInputResponseFormatType0):
response_format = self.response_format.to_dict()
else:
response_format = self.response_format

field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update(
Expand All @@ -120,6 +135,8 @@ def to_dict(self) -> Dict[str, Any]:
field_dict["temperature"] = temperature
if tools is not UNSET:
field_dict["tools"] = tools
if response_format is not UNSET:
field_dict["response_format"] = response_format

return field_dict

Expand Down Expand Up @@ -181,6 +198,14 @@ def to_multipart(self) -> Dict[str, Any]:
else:
tools = self.tools

response_format: Union[None, Tuple[None, bytes, str], Unset]
if isinstance(self.response_format, Unset):
response_format = UNSET
elif isinstance(self.response_format, ChatCompletionInputResponseFormatType0):
response_format = (None, json.dumps(self.response_format.to_dict()).encode(), "application/json")
else:
response_format = self.response_format

field_dict: Dict[str, Any] = {}
field_dict.update(
{key: (None, str(value).encode(), "text/plain") for key, value in self.additional_properties.items()}
Expand All @@ -207,11 +232,14 @@ def to_multipart(self) -> Dict[str, Any]:
field_dict["temperature"] = temperature
if tools is not UNSET:
field_dict["tools"] = tools
if response_format is not UNSET:
field_dict["response_format"] = response_format

return field_dict

@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
from ..models.chat_completion_input_response_format_type_0 import ChatCompletionInputResponseFormatType0
from ..models.enhancement import Enhancement
from ..models.message import Message
from ..models.tool import Tool
Expand Down Expand Up @@ -274,6 +302,23 @@ def _parse_tools(data: object) -> Union[List["Tool"], None, Unset]:

tools = _parse_tools(d.pop("tools", UNSET))

def _parse_response_format(data: object) -> Union["ChatCompletionInputResponseFormatType0", None, Unset]:
    """Parse the raw `response_format` value from the wire.

    Accepts three shapes: ``None`` and ``Unset`` are passed through as-is;
    a dict is converted into a ``ChatCompletionInputResponseFormatType0``.
    Anything else (or a dict that fails model parsing) is returned
    unchanged, cast to the union type, so callers never see an exception.
    """
    if data is None:
        return data
    if isinstance(data, Unset):
        return data
    try:
        if not isinstance(data, dict):
            raise TypeError()
        response_format_type_0 = ChatCompletionInputResponseFormatType0.from_dict(data)

        return response_format_type_0
    except Exception:
        # Parsing is best-effort: fall through to the raw-value return below.
        # (Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.)
        pass
    return cast(Union["ChatCompletionInputResponseFormatType0", None, Unset], data)

response_format = _parse_response_format(d.pop("response_format", UNSET))

chat_completion_input = cls(
project_id=project_id,
messages=messages,
Expand All @@ -285,6 +330,7 @@ def _parse_tools(data: object) -> Union[List["Tool"], None, Unset]:
stream=stream,
temperature=temperature,
tools=tools,
response_format=response_format,
)

chat_completion_input.additional_properties = d
Expand Down
49 changes: 49 additions & 0 deletions premai/models/chat_completion_input_response_format_type_0.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
from typing import Dict, List, Type

from attrs import define as _attrs_define
from attrs import field as _attrs_field
from typing_extensions import Any, TypedDict, TypeVar

T = TypeVar("T", bound="ChatCompletionInputResponseFormatType0")


class ChatCompletionInputResponseFormatType0Dict(TypedDict):
    """TypedDict form of ChatCompletionInputResponseFormatType0.

    Declares no required keys: the schema is free-form, so all content is
    carried as additional properties on the attrs model counterpart.
    """

    pass


@_attrs_define
class ChatCompletionInputResponseFormatType0:
    """The format of the response. Can be a JSON schema or a simple `json_object` type.

    The schema declares no fixed fields, so every key/value pair is stored
    in ``additional_properties`` and the class exposes a mapping-style
    interface (``[]`` access, ``in`` checks) over that dict.
    """

    # Free-form payload: holds every property supplied by the caller/server.
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; all data lives in additional_properties."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from ``src_dict``; every key becomes an additional property."""
        instance = cls()
        instance.additional_properties = dict(src_dict) if src_dict else {}
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of all stored additional properties."""
        return [name for name in self.additional_properties]

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "premai"
version = "0.3.73"
version = "0.3.74"
description = "A client library for accessing Prem APIs"
authors = []
readme = "README.md"
Expand Down

0 comments on commit 1e0afd3

Please sign in to comment.