feat: update model
Dttbd authored and jameszyao committed May 13, 2024
1 parent e9d49a0 commit 1d5a746
Showing 11 changed files with 92 additions and 8 deletions.
3 changes: 3 additions & 0 deletions taskingai/client/models/entities/__init__.py
@@ -30,9 +30,11 @@
 from .chat_completion_function_message import *
 from .chat_completion_function_parameters import *
 from .chat_completion_function_parameters_property import *
+from .chat_completion_function_parameters_property_items import *
 from .chat_completion_message import *
 from .chat_completion_role import *
 from .chat_completion_system_message import *
+from .chat_completion_usage import *
 from .chat_completion_user_message import *
 from .chat_memory import *
 from .chat_memory_message import *
@@ -54,6 +56,7 @@
 from .status import *
 from .text_embedding_input_type import *
 from .text_embedding_output import *
+from .text_embedding_usage import *
 from .text_splitter import *
 from .text_splitter_type import *
 from .tool_ref import *
2 changes: 2 additions & 0 deletions taskingai/client/models/entities/chat_completion.py
@@ -14,6 +14,7 @@
 from pydantic import BaseModel, Field
 from .chat_completion_finish_reason import ChatCompletionFinishReason
 from .chat_completion_assistant_message import ChatCompletionAssistantMessage
+from .chat_completion_usage import ChatCompletionUsage
 
 __all__ = ["ChatCompletion"]
 
@@ -22,3 +23,4 @@ class ChatCompletion(BaseModel):
     finish_reason: ChatCompletionFinishReason = Field(...)
     message: ChatCompletionAssistantMessage = Field(...)
     created_timestamp: int = Field(...)
+    usage: ChatCompletionUsage = Field(...)
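The new usage field attaches per-call token accounting to a completed chat. A minimal sketch of reading it, assuming completion is a ChatCompletion returned by the chat-completion API (the rest of the model is not shown in this diff):

from taskingai.client.models.entities.chat_completion import ChatCompletion

def describe_usage(completion: ChatCompletion) -> str:
    # ChatCompletionUsage (added later in this commit) exposes input_tokens and output_tokens.
    u = completion.usage
    return f"{u.input_tokens} input + {u.output_tokens} output tokens"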
7 changes: 4 additions & 3 deletions taskingai/client/models/entities/chat_completion_function_parameters_property.py
@@ -13,12 +13,13 @@
 
 from pydantic import BaseModel, Field
 from typing import Optional, List
 
+from .chat_completion_function_parameters_property_items import ChatCompletionFunctionParametersPropertyItems
 
 __all__ = ["ChatCompletionFunctionParametersProperty"]
 
 
 class ChatCompletionFunctionParametersProperty(BaseModel):
-    type: str = Field(..., pattern="^(string|number|integer|boolean)$")
-    description: str = Field("", max_length=256)
+    type: str = Field(..., pattern="^(string|number|integer|boolean|array)$")
+    description: str = Field("", max_length=512)
     enum: Optional[List[str]] = Field(None)
+    items: Optional[ChatCompletionFunctionParametersPropertyItems] = Field(None)
21 changes: 21 additions & 0 deletions taskingai/client/models/entities/chat_completion_function_parameters_property_items.py
@@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-

# chat_completion_function_parameters_property_items.py

"""
This script is automatically generated for TaskingAI python client
Do not modify the file manually
Author: James Yao
Organization: TaskingAI
License: Apache 2.0
"""

from pydantic import BaseModel, Field


__all__ = ["ChatCompletionFunctionParametersPropertyItems"]


class ChatCompletionFunctionParametersPropertyItems(BaseModel):
    type: str = Field(..., pattern="^(string|number|integer|boolean)$")
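Together with the widened type pattern above, this new items model lets a function parameter be declared as an array of primitives. A hedged sketch using only field names visible in this diff:

from taskingai.client.models.entities.chat_completion_function_parameters_property import (
    ChatCompletionFunctionParametersProperty,
)
from taskingai.client.models.entities.chat_completion_function_parameters_property_items import (
    ChatCompletionFunctionParametersPropertyItems,
)

# An "array of strings" parameter; the element type lives in the items model.
tags_property = ChatCompletionFunctionParametersProperty(
    type="array",
    description="Tags to attach to the record.",
    items=ChatCompletionFunctionParametersPropertyItems(type="string"),
)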
22 changes: 22 additions & 0 deletions taskingai/client/models/entities/chat_completion_usage.py
@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-

# chat_completion_usage.py

"""
This script is automatically generated for TaskingAI python client
Do not modify the file manually
Author: James Yao
Organization: TaskingAI
License: Apache 2.0
"""

from pydantic import BaseModel, Field


__all__ = ["ChatCompletionUsage"]


class ChatCompletionUsage(BaseModel):
    input_tokens: int = Field(...)
    output_tokens: int = Field(...)
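Both counters are required integers, so the entity can be constructed and summed directly. A minimal sketch:

from taskingai.client.models.entities.chat_completion_usage import ChatCompletionUsage

usage = ChatCompletionUsage(input_tokens=120, output_tokens=48)
total_tokens = usage.input_tokens + usage.output_tokens  # 168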
21 changes: 21 additions & 0 deletions taskingai/client/models/entities/text_embedding_usage.py
@@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-

# text_embedding_usage.py

"""
This script is automatically generated for TaskingAI python client
Do not modify the file manually
Author: James Yao
Organization: TaskingAI
License: Apache 2.0
"""

from pydantic import BaseModel, Field


__all__ = ["TextEmbeddingUsage"]


class TextEmbeddingUsage(BaseModel):
    input_tokens: int = Field(...)
2 changes: 1 addition & 1 deletion taskingai/client/models/entities/text_splitter.py
@@ -19,7 +19,7 @@
 
 
 class TextSplitter(BaseModel):
-    type: TextSplitterType = Field(...)
+    type: TextSplitterType = Field("token")
     chunk_size: Optional[int] = Field(None, ge=50, le=1000)
     chunk_overlap: Optional[int] = Field(None, ge=0, le=200)
     separators: Optional[List[str]] = Field(None, min_length=1, max_length=16)
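With the new default, a splitter no longer has to name its type explicitly; a token splitter can be configured from the chunk settings alone. A hedged sketch, with bounds taken from the fields above:

from taskingai.client.models.entities.text_splitter import TextSplitter

splitter = TextSplitter(chunk_size=200, chunk_overlap=20)  # type falls back to the "token" default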
7 changes: 4 additions & 3 deletions taskingai/client/models/schemas/chat_completion_request.py
@@ -14,16 +14,16 @@
 from pydantic import BaseModel, Field
 from typing import Optional, List, Dict, Union
 from ..entities.chat_completion_function_message import ChatCompletionFunctionMessage
-from ..entities.chat_completion_assistant_message import ChatCompletionAssistantMessage
-from ..entities.chat_completion_user_message import ChatCompletionUserMessage
 from ..entities.chat_completion_system_message import ChatCompletionSystemMessage
+from ..entities.chat_completion_user_message import ChatCompletionUserMessage
+from ..entities.chat_completion_assistant_message import ChatCompletionAssistantMessage
 from ..entities.chat_completion_function import ChatCompletionFunction
 
 __all__ = ["ChatCompletionRequest"]
 
 
 class ChatCompletionRequest(BaseModel):
-    model_id: str = Field(..., min_length=8, max_length=8)
+    model_id: str = Field(..., min_length=1, max_length=255)
     configs: Optional[Dict] = Field(None)
     stream: bool = Field(False)
     messages: List[
@@ -36,3 +36,4 @@ class ChatCompletionRequest(BaseModel):
     ] = Field(...)
     function_call: Optional[str] = Field(None)
     functions: Optional[List[ChatCompletionFunction]] = Field(None)
+    save_logs: bool = Field(False)
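A hedged sketch of a request that exercises the relaxed model_id constraint and the new save_logs flag. The constructor of ChatCompletionUserMessage is assumed here (a single content argument); it is not part of this diff:

from taskingai.client.models.entities.chat_completion_user_message import ChatCompletionUserMessage
from taskingai.client.models.schemas.chat_completion_request import ChatCompletionRequest

request = ChatCompletionRequest(
    model_id="my-custom-model-123",  # any 1-255 character id now validates (was exactly 8)
    messages=[ChatCompletionUserMessage(content="Hello!")],  # assumed signature
    save_logs=True,  # new flag, defaults to False
)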
2 changes: 1 addition & 1 deletion taskingai/client/models/schemas/text_embedding_request.py
@@ -19,6 +19,6 @@
 
 
 class TextEmbeddingRequest(BaseModel):
-    model_id: str = Field(..., min_length=8, max_length=8)
+    model_id: str = Field(..., min_length=1, max_length=255)
     input: Union[str, List[str]] = Field(...)
     input_type: Optional[TextEmbeddingInputType] = Field(None)
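The same relaxation applies to embedding requests; every field used in this sketch is visible in the file above:

from taskingai.client.models.schemas.text_embedding_request import TextEmbeddingRequest

request = TextEmbeddingRequest(
    model_id="my-embedding-model",  # previously restricted to exactly 8 characters
    input=["first chunk", "second chunk"],
)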
2 changes: 2 additions & 0 deletions taskingai/client/models/schemas/text_embedding_response.py
@@ -14,10 +14,12 @@
 from pydantic import BaseModel, Field
 from typing import List
 from ..entities.text_embedding_output import TextEmbeddingOutput
+from ..entities.text_embedding_usage import TextEmbeddingUsage
 
 __all__ = ["TextEmbeddingResponse"]
 
 
 class TextEmbeddingResponse(BaseModel):
     status: str = Field("success")
     data: List[TextEmbeddingOutput] = Field(...)
+    usage: TextEmbeddingUsage = Field(...)
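With usage now required on the response, callers can meter embedding traffic directly. A small sketch, assuming response is a TextEmbeddingResponse returned by the client:

from taskingai.client.models.schemas.text_embedding_response import TextEmbeddingResponse

def embedding_tokens_used(response: TextEmbeddingResponse) -> int:
    # TextEmbeddingUsage (added earlier in this commit) carries a single input_tokens counter.
    return response.usage.input_tokens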
11 changes: 11 additions & 0 deletions taskingai/retrieval/text_splitter.py
@@ -4,6 +4,7 @@
     "TextSplitter",
     "TextSplitterType",
     "TokenTextSplitter",
+    "SeparatorTextSplitter",
 ]
 
 
@@ -14,3 +15,13 @@ def __init__(self, chunk_size: int, chunk_overlap: int):
             chunk_size=chunk_size,
             chunk_overlap=chunk_overlap,
         )
+
+
+class SeparatorTextSplitter(TextSplitter):
+    def __init__(self, chunk_size: int, chunk_overlap: int, separators: list[str]):
+        super().__init__(
+            type=TextSplitterType.SEPARATOR,
+            chunk_size=chunk_size,
+            chunk_overlap=chunk_overlap,
+            separators=separators,
+        )
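A usage sketch for the new splitter; the size and overlap bounds come from the TextSplitter entity earlier in this commit:

from taskingai.retrieval.text_splitter import SeparatorTextSplitter

splitter = SeparatorTextSplitter(
    chunk_size=200,  # 50-1000
    chunk_overlap=20,  # 0-200
    separators=["\n\n", "\n", ". "],  # 1 to 16 separators
)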
