Commit 3c946c3

Update OpenAI ChatGPT 3.5 turbo token limit
1 parent 37bf2f3 commit 3c946c3

9 files changed: +408, -1944 lines changed

.pre-commit-config.yaml (-12)

@@ -31,15 +31,3 @@ repos:
     hooks:
       - id: ruff
         args: [ --fix, --exit-non-zero-on-fix ]
-  - repo: https://github.com/psf/black
-    rev: 23.3.0
-    hooks:
-      - id: black
-  - repo: local
-    hooks:
-      - id: mypy
-        name: mypy
-        entry: 'mypy'
-        language: system
-        files: elia_chat
-        types: [python, file]

.python-version (+1)

@@ -0,0 +1 @@
+3.11

elia_chat/database/models.py (+7, -9)

@@ -1,20 +1,18 @@
-from __future__ import annotations
-
 import pathlib
 from datetime import datetime
-from typing import Any
+from typing import Any, Optional
 
 from sqlalchemy import Column, DateTime, func, JSON, desc
 from sqlalchemy.orm import selectinload
-from sqlmodel import SQLModel, Field, create_engine, Session, select, Relationship
+from sqlmodel import Field, Relationship, Session, SQLModel, create_engine, select
 
 
 class MessageDao(SQLModel, table=True):
     __tablename__ = "message"
 
     id: int = Field(default=None, primary_key=True)
-    chat_id: int | None = Field(foreign_key="chat.id")
-    chat: ChatDao = Relationship(back_populates="messages")
+    chat_id: Optional[int] = Field(foreign_key="chat.id")
+    chat: "ChatDao" = Relationship(back_populates="messages")
     role: str
     content: str
     timestamp: datetime | None = Field(
@@ -23,7 +21,7 @@ class MessageDao(SQLModel, table=True):
     status: str | None
     end_turn: bool | None
     weight: float | None
-    meta: dict = Field(sa_column=Column(JSON), default={})
+    meta: dict[Any, Any] = Field(sa_column=Column(JSON), default={})
     recipient: str | None
 
 
@@ -39,7 +37,7 @@ class ChatDao(SQLModel, table=True):
     messages: list[MessageDao] = Relationship(back_populates="chat")
 
     @staticmethod
-    def all() -> list[ChatDao]:
+    def all() -> list["ChatDao"]:
         with Session(engine) as session:
             # Create a subquery that finds the maximum
             # (most recent) timestamp for each chat.
@@ -60,7 +58,7 @@ def all() -> list[ChatDao]:
             return list(results)
 
     @staticmethod
-    def from_id(chat_id: str) -> ChatDao:
+    def from_id(chat_id: str) -> "ChatDao":
         with Session(engine) as session:
             statement = (
                 select(ChatDao)
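A note on the models.py hunks: with "from __future__ import annotations" removed, annotations in this module are evaluated at import time again, so a class referenced before it is defined (ChatDao inside MessageDao) has to be written as a quoted forward reference, and chat_id is switched to Optional[int]. Below is a minimal sketch of the resulting pattern, using a trimmed-down, hypothetical field set rather than the full models in this repository:

    # Minimal sketch (hypothetical, trimmed field set) of the forward-reference
    # pattern used after this change. Without postponed annotation evaluation,
    # a class defined later in the module must be referenced by its string name.
    from typing import Optional

    from sqlmodel import Field, Relationship, SQLModel


    class MessageDao(SQLModel, table=True):
        __tablename__ = "message"

        id: int = Field(default=None, primary_key=True)
        chat_id: Optional[int] = Field(foreign_key="chat.id")
        # Quoted because ChatDao is defined further down the module.
        chat: "ChatDao" = Relationship(back_populates="messages")


    class ChatDao(SQLModel, table=True):
        __tablename__ = "chat"

        id: int = Field(default=None, primary_key=True)
        messages: list["MessageDao"] = Relationship(back_populates="chat")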

elia_chat/widgets/chat.py (+2, -2)

@@ -27,9 +27,9 @@
 from elia_chat.widgets.chat_header import ChatHeader
 from elia_chat.widgets.chat_options import (
     DEFAULT_MODEL,
-    MODEL_MAPPING,
     ChatOptions,
     GPTModel,
+    get_model_by_name,
 )
 from elia_chat.widgets.chatbox import Chatbox
 
@@ -182,7 +182,7 @@ async def stream_agent_response(self) -> None:
         log.debug(
             f"Creating streaming response with model {self.chat_data.model_name!r}"
         )
-        selected_model: GPTModel = MODEL_MAPPING[self.chat_data.model_name]
+        selected_model: GPTModel = get_model_by_name(self.chat_data.model_name)
         llm: BaseChatModel = selected_model.model
         trimmed_messages = self.trim_messages(
             model=llm,
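The chat.py change is a pure call-site swap: the MODEL_MAPPING dictionary lookup becomes a call to the new get_model_by_name helper. Behaviour is unchanged for known model names (an unknown name still raises KeyError, since the helper indexes the same dict), but the helper gives one place to hang validation or a fallback later. A sketch of what such a fallback could look like; the resolve_model wrapper and its KeyError handling are illustrative additions, not part of this commit:

    # Illustrative only: a possible wrapper around the new helper. Nothing in
    # this commit adds fallback handling; this just shows where it could live.
    from elia_chat.widgets.chat_options import DEFAULT_MODEL, GPTModel, get_model_by_name


    def resolve_model(model_name: str) -> GPTModel:
        try:
            return get_model_by_name(model_name)
        except KeyError:
            # Unknown names fall back to the default model instead of crashing.
            return DEFAULT_MODEL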

elia_chat/widgets/chat_options.py (+10, -5)

@@ -4,7 +4,7 @@
 from typing import Dict
 
 from langchain.callbacks import AsyncIteratorCallbackHandler
-from langchain.chat_models import ChatOpenAI
+from langchain_openai import ChatOpenAI
 from langchain.chat_models.base import BaseChatModel
 from langchain.llms.base import LLM
 from rich.console import RenderableType
@@ -42,11 +42,11 @@ class GPTModel:
     description="The fastest ChatGPT model, great for most everyday tasks.",
     css_class="gpt35",
     model=ChatOpenAI(
-        model_name="gpt-3.5-turbo",
+        model="gpt-3.5-turbo",
         streaming=True,
         callbacks=[callback],
     ),
-    token_limit=4096,
+    token_limit=16385,
 )
 AVAILABLE_MODELS = [
     DEFAULT_MODEL,
@@ -59,7 +59,7 @@ class GPTModel:
         "complex tasks which require advanced reasoning.",
         css_class="gpt4",
         model=ChatOpenAI(
-            model_name="gpt-4-1106-preview",
+            model="gpt-4-turbo",
             streaming=True,
             callbacks=[callback],
         ),
@@ -69,6 +69,11 @@ class GPTModel:
 MODEL_MAPPING: Dict[str, GPTModel] = {model.name: model for model in AVAILABLE_MODELS}
 
 
+def get_model_by_name(model_name: str) -> GPTModel:
+    """Given the name of a model as a string, return the GPTModel."""
+    return MODEL_MAPPING[model_name]
+
+
 class ModelPanel(Static):
     class Selected(Message):
         def __init__(self, model: GPTModel):
@@ -172,7 +177,7 @@ def compose(self) -> ComposeResult:
         model_set.focus()
         for index, model in enumerate(AVAILABLE_MODELS):
             model_panel = ModelPanel(
-                model, id=model.name, classes=model.css_class
+                model, id=model.css_class, classes=model.css_class
             )
             if index == 0:
                 model_panel.selected = True
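Two things change for the GPT-3.5 Turbo entry: the ChatOpenAI client now comes from the separate langchain_openai package (whose constructor takes model= rather than the older model_name= keyword), and token_limit is raised from 4096 to 16385, matching the 16,385-token context window of newer gpt-3.5-turbo snapshots. A minimal sketch of the updated client construction, assuming the langchain-openai package is installed and an OpenAI API key is configured in the environment:

    # Minimal sketch of the updated client construction. Assumes the
    # langchain-openai package is installed and OPENAI_API_KEY is set.
    from langchain.callbacks import AsyncIteratorCallbackHandler
    from langchain_openai import ChatOpenAI

    callback = AsyncIteratorCallbackHandler()
    llm = ChatOpenAI(
        model="gpt-3.5-turbo",  # model= replaces the older model_name= keyword
        streaming=True,
        callbacks=[callback],
    )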
