Release/1.1.300 (Chainlit#1028)
* Add starters

* Debug mode

* Rework CoT

* Rework Avatars

* Remove PP (Prompt Playground)
willydouhard authored May 27, 2024
1 parent 4f7fb2d commit 4e0baab
Showing 175 changed files with 2,207 additions and 5,668 deletions.
27 changes: 27 additions & 0 deletions CHANGELOG.md
@@ -8,6 +8,33 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

Nothing unreleased!

## [1.1.300rc0] - 2024-05-27

### Added

- Debug mode when starting with `-d`. Only available if the data layer supports it. This replaces the Prompt Playground.
- `@cl.set_starters` and `cl.Starter` to suggest conversation starters to the user (see the sketch after this list)
- `default` theme config in `config.toml`
- If only one OAuth provider is set, automatically redirect the user to it
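
For illustration, a minimal sketch of the new starters API — the `Starter` fields used below (`label`, `message`, `icon`) and the async, no-argument form are assumptions based on this release, not documented guarantees. Debug mode itself is a CLI switch, e.g. `chainlit run app.py -d`.

```python
from typing import List

import chainlit as cl


@cl.set_starters
async def starters() -> List[cl.Starter]:
    # Shown on the empty-chat screen; clicking one sends `message` as the first user message.
    return [
        cl.Starter(
            label="Morning routine",
            message="Help me design a simple morning routine.",
            icon="/public/idea.svg",  # hypothetical asset path
        ),
        cl.Starter(
            label="Explain superconductors",
            message="Explain superconductors like I'm five.",
        ),
    ]
```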

### Changed

- **[BREAKING]** Avatars have been reworked. `cl.Avatar` has been removed, instead place your avatars by name in `/public/avatars/*`
- **[BREAKING]** The `running`, `took_one` and `took_other` translations have been replaced by `used`.
- **[BREAKING]** `root` attribute of `cl.Step` has been removed. Use `cl.Message` to send root level messages.
- Chain of Thought has been reworked. Only steps of type `tool` are displayed when `hide_cot` is false (see the sketch after this list)
- The `show_readme_as_default` config has been removed
- No longer collapse root level messages
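
To make the breaking changes above concrete, here is a hedged sketch of app code after this release: avatar images are picked up by file name from `/public/avatars/` (the exact name-matching rule is an assumption), root-level output goes through `cl.Message`, and a `tool`-type step is the kind that remains visible in the reworked chain of thought.

```python
import chainlit as cl

# Avatars: no cl.Avatar element anymore. Place an image such as
# public/avatars/my_assistant.png and it is matched to the author name
# (the exact normalization of the name is an assumption here).


@cl.step(type="tool")  # "tool" steps are the ones surfaced by the reworked CoT
async def search_docs(query: str) -> str:
    # Placeholder for a retriever or external tool call.
    return f"Results for {query!r}"


@cl.on_message
async def on_message(message: cl.Message):
    results = await search_docs(message.content)
    # Root-level messages: cl.Step's root attribute is gone, use cl.Message instead.
    await cl.Message(content=results, author="my_assistant").send()
```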

### Fixed

- The Chat Profile description should now disappear when not hovered.
- Error handling of steps has been improved
- No longer stream the first token twice
- Copilot should now work as expected even if the user is closing/reopening it
- Copilot CSS should no longer leak/be impacted by the host website CSS
- Fix various `cl.Context` errors

## [1.1.202] - 2024-05-22

### Added
22 changes: 19 additions & 3 deletions backend/chainlit/__init__.py
@@ -28,7 +28,6 @@
from chainlit.context import context
from chainlit.element import (
Audio,
Avatar,
File,
Image,
Pdf,
@@ -52,7 +51,7 @@
from chainlit.step import Step, step
from chainlit.sync import make_async, run_sync
from chainlit.telemetry import trace
from chainlit.types import AudioChunk, ChatProfile, ThreadDict
from chainlit.types import AudioChunk, ChatProfile, Starter, ThreadDict
from chainlit.user import PersistedUser, User
from chainlit.user_session import user_session
from chainlit.utils import make_module_getattr, wrap_user_function
@@ -208,6 +207,22 @@ def set_chat_profiles(
return func


@trace
def set_starters(func: Callable[[Optional["User"]], List["Starter"]]) -> Callable:
"""
Programmatic declaration of the available starters (can depend on the User from the session if authentication is set up).
Args:
func (Callable[[Optional["User"]], List["Starter"]]): The function declaring the starters.
Returns:
Callable[[Optional["User"]], List["Starter"]]: The decorated function.
"""

config.code.set_starters = wrap_user_function(func)
return func


@trace
def on_chat_end(func: Callable) -> Callable:
"""
@@ -348,6 +363,8 @@ def acall(self):
)

__all__ = [
"ChatProfile",
"Starter",
"user_session",
"CopilotFunction",
"AudioChunk",
@@ -359,7 +376,6 @@ def acall(self):
"Plotly",
"Image",
"Text",
"Avatar",
"Pyplot",
"File",
"Task",
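
Because the `set_starters` hook added above receives the session `User` when an auth callback is configured, starter lists can be tailored per user. A small sketch under that assumption — the `role` metadata key is purely hypothetical:

```python
from typing import List, Optional

import chainlit as cl


@cl.set_starters
async def starters(user: Optional[cl.User] = None) -> List[cl.Starter]:
    suggestions = [cl.Starter(label="Help", message="What can you do?")]
    if user and user.metadata.get("role") == "admin":  # hypothetical metadata key
        suggestions.append(
            cl.Starter(label="Usage report", message="Summarize this week's usage.")
        )
    return suggestions
```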
27 changes: 10 additions & 17 deletions backend/chainlit/config.py
@@ -4,7 +4,7 @@
import sys
from importlib import util
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union, Literal
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Literal, Optional, Union

import tomli
from chainlit.logger import logger
@@ -18,7 +18,7 @@
from chainlit.action import Action
from chainlit.element import ElementBased
from chainlit.message import Message
from chainlit.types import AudioChunk, ChatProfile, ThreadDict
from chainlit.types import AudioChunk, ChatProfile, Starter, ThreadDict
from chainlit.user import User
from fastapi import Request, Response

@@ -61,9 +61,6 @@
# follow_symlink = false
[features]
# Show the prompt playground
prompt_playground = true
# Process and display HTML in messages. This can be a security risk (see https://stackoverflow.com/questions/19603097/why-is-it-dangerous-to-render-user-generated-html-or-javascript)
unsafe_allow_html = false
@@ -95,21 +92,15 @@
sample_rate = 44100
[UI]
# Name of the app and chatbot.
name = "Chatbot"
# Show the readme while the thread is empty.
show_readme_as_default = true
# Name of the assistant.
name = "Assistant"
# Description of the app and chatbot. This is used for HTML tags.
# Description of the assistant. This is used for HTML tags.
# description = ""
# Large size content are by default collapsed for a cleaner ui
default_collapse_content = true
# The default value for the expand messages settings.
default_expand_messages = false
# Hide the chain of thought details from the user in the UI.
hide_cot = false
@@ -136,6 +127,7 @@
# custom_build = "./public/build"
[UI.theme]
default = "dark"
#layout = "wide"
#font_family = "Inter, sans-serif"
# Override default MUI light theme. (Check theme.ts)
@@ -192,11 +184,13 @@ class PaletteOptions(DataClassJsonMixin):
light: Optional[str] = ""
dark: Optional[str] = ""


@dataclass()
class TextOptions(DataClassJsonMixin):
primary: Optional[str] = ""
secondary: Optional[str] = ""


@dataclass()
class Palette(DataClassJsonMixin):
primary: Optional[PaletteOptions] = None
@@ -208,6 +202,7 @@ class Palette(DataClassJsonMixin):
@dataclass()
class Theme(DataClassJsonMixin):
font_family: Optional[str] = None
default: Optional[Literal["light", "dark"]] = "dark"
layout: Optional[Literal["default", "wide"]] = "default"
light: Optional[Palette] = None
dark: Optional[Palette] = None
@@ -234,7 +229,6 @@ class AudioFeature(DataClassJsonMixin):

@dataclass()
class FeaturesSettings(DataClassJsonMixin):
prompt_playground: bool = True
spontaneous_file_upload: Optional[SpontaneousFileUploadFeature] = None
audio: Optional[AudioFeature] = Field(default_factory=AudioFeature)
latex: bool = False
@@ -245,12 +239,10 @@
@dataclass()
class UISettings(DataClassJsonMixin):
name: str
show_readme_as_default: bool = True
description: str = ""
hide_cot: bool = False
# Large size content are by default collapsed for a cleaner ui
default_collapse_content: bool = True
default_expand_messages: bool = False
github: Optional[str] = None
theme: Optional[Theme] = None
# Optional custom CSS file that allows you to customize the UI
@@ -289,6 +281,7 @@ class CodeSettings:
set_chat_profiles: Optional[Callable[[Optional["User"]], List["ChatProfile"]]] = (
None
)
set_starters: Optional[Callable[[Optional["User"]], List["Starter"]]] = None


@dataclass()
17 changes: 14 additions & 3 deletions backend/chainlit/data/__init__.py
@@ -139,6 +139,9 @@ async def update_thread(
async def delete_user_session(self, id: str) -> bool:
return True

async def build_debug_url(self) -> str:
return ""


_data_layer: Optional[BaseDataLayer] = None

@@ -225,6 +228,14 @@ def step_to_step_dict(self, step: LiteralStep) -> "StepDict":
"waitForAnswer": metadata.get("waitForAnswer", False),
}

async def build_debug_url(self) -> str:
try:
project_id = await self.client.api.get_my_project_id()
return f"{self.client.api.url}/projects/{project_id}/threads?threadId=[thread_id]&currentStepId=[step_id]"
except Exception as e:
logger.error(f"Error building debug url: {e}")
return ""

async def get_user(self, identifier: str) -> Optional[PersistedUser]:
user = await self.client.api.get_user(identifier=identifier)
if not user:
@@ -456,12 +467,12 @@ async def get_thread(self, thread_id: str) -> "Optional[ThreadDict]":
steps = [] # List[StepDict]
if thread.steps:
for step in thread.steps:
if config.ui.hide_cot and step.parent_id:
if config.ui.hide_cot and (
step.parent_id or "message" not in step.type
):
continue
for attachment in step.attachments:
elements.append(self.attachment_to_element_dict(attachment))
if not config.features.prompt_playground and step.generation:
step.generation = None
steps.append(self.step_to_step_dict(step))

return {
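
The debug mode introduced in this release gets its link from the data layer's new `build_debug_url` hook: the built-in Literal AI layer returns a URL with `[thread_id]`/`[step_id]` placeholders, and the base class defaults to an empty string. A custom data layer could opt in along these lines — the subclass name, host, and the assumption that the placeholders are filled in client-side are all illustrative:

```python
from chainlit.data import BaseDataLayer


class MyDataLayer(BaseDataLayer):
    # ...the usual persistence overrides (create_step, update_thread, ...) go here...

    async def build_debug_url(self) -> str:
        # Returning "" (the base-class default) leaves debug mode without a target URL.
        # The bracketed placeholders follow the pattern of the Literal AI layer above.
        return (
            "https://observability.example.com/threads"
            "?threadId=[thread_id]&currentStepId=[step_id]"
        )
```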
7 changes: 4 additions & 3 deletions backend/chainlit/data/sql_alchemy.py
@@ -9,7 +9,7 @@
import aiohttp
from chainlit.context import context
from chainlit.data import BaseDataLayer, BaseStorageClient, queue_until_user_message
from chainlit.element import Avatar, ElementDict
from chainlit.element import ElementDict
from chainlit.logger import logger
from chainlit.step import StepDict
from chainlit.types import (
@@ -65,6 +65,9 @@ def __init__(
"SQLAlchemyDataLayer storage client is not initialized and elements will not be persisted!"
)

async def build_debug_url(self) -> str:
return ""

###### SQL Helpers ######
async def execute_sql(
self, query: str, parameters: dict
@@ -373,8 +376,6 @@ async def create_element(self, element: "Element"):
logger.info(f"SQLAlchemy: create_element, element_id = {element.id}")
if not getattr(context.session.user, "id", None):
raise ValueError("No authenticated user in context")
if isinstance(element, Avatar): # Skip creating elements of type avatar
return
if not self.storage_provider:
logger.warn(
f"SQLAlchemy: create_element error. No blob_storage_client is configured!"
1 change: 0 additions & 1 deletion backend/chainlit/discord/app.py
@@ -104,7 +104,6 @@ async def send_step(self, step_dict: StepDict):
is_message = step_type in [
"user_message",
"assistant_message",
"system_message",
]
is_chain_of_thought = bool(step_dict.get("parentId"))
is_empty_output = not step_dict.get("output")
10 changes: 1 addition & 9 deletions backend/chainlit/element.py
@@ -21,7 +21,7 @@
}

ElementType = Literal[
"image", "avatar", "text", "pdf", "tasklist", "audio", "video", "file", "plotly"
"image", "text", "pdf", "tasklist", "audio", "video", "file", "plotly"
]
ElementDisplay = Literal["inline", "side", "page"]
ElementSize = Literal["small", "medium", "large"]
@@ -190,14 +190,6 @@ class Image(Element):
size: ElementSize = "medium"


@dataclass
class Avatar(Element):
type: ClassVar[ElementType] = "avatar"

async def send(self):
await super().send(for_id="")


@dataclass
class Text(Element):
"""Useful to send a text (not a message) to the UI."""
6 changes: 4 additions & 2 deletions backend/chainlit/langchain/callbacks.py
@@ -456,7 +456,7 @@ def _start_trace(self, run: Run) -> None:
elif run.run_type == "llm":
step_type = "llm"
elif run.run_type == "retriever":
step_type = "retrieval"
step_type = "tool"
elif run.run_type == "tool":
step_type = "tool"
elif run.run_type == "embedding":
Expand Down Expand Up @@ -533,7 +533,9 @@ def _on_run_update(self, run: Run) -> None:
break

current_step.language = "json"
current_step.output = json.dumps(message_completion, indent=4, ensure_ascii=False)
current_step.output = json.dumps(
message_completion, indent=4, ensure_ascii=False
)
else:
completion_start = self.completion_generations[str(run.id)]
completion = generation.get("text", "")
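
With retriever runs now mapped to the `tool` step type, retrieval done through the LangChain callback handler stays visible in the reworked chain of thought. A rough sketch of the usual integration pattern — the chain object, its input/output keys, and its storage in the user session are assumptions, not part of this diff:

```python
import chainlit as cl


@cl.on_message
async def on_message(message: cl.Message):
    # Assumed: a LangChain runnable with a retriever step was built in
    # @cl.on_chat_start and stored in the user session under "chain".
    chain = cl.user_session.get("chain")
    result = await chain.ainvoke(
        {"question": message.content},
        # Retriever runs triggered by the chain are now recorded as "tool" steps.
        config={"callbacks": [cl.AsyncLangchainCallbackHandler()]},
    )
    await cl.Message(content=result["answer"]).send()
```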
4 changes: 2 additions & 2 deletions backend/chainlit/llama_index/callbacks.py
@@ -73,9 +73,9 @@ def on_event_start(

step_type: StepType = "undefined"
if event_type == CBEventType.RETRIEVE:
step_type = "retrieval"
step_type = "tool"
elif event_type == CBEventType.QUERY:
step_type = "retrieval"
step_type = "tool"
elif event_type == CBEventType.LLM:
step_type = "llm"
else: