From ebbe1d048c39b0dd1d648185f3cfe33bf04f1896 Mon Sep 17 00:00:00 2001
From: Ajinkya Indulkar <26824103+ajndkr@users.noreply.github.com>
Date: Sat, 2 Dec 2023 13:07:44 +0530
Subject: [PATCH] fix: add support for pydantic v1 users (#163)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* :sparkles: add pydantic util functions
* :zap: update openai adapter modules
* :zap: update langchain adapter modules
* bump(ver): 0.8.0 → 0.8.1
* :memo: add pypi friendly readme
* :wrench: update pyproject.toml
---
 README.md                                  |  4 +-
 README.pypi.md                             | 53 ++++++++++++++++++++++
 lanarky/adapters/langchain/callbacks.py    | 19 ++++----
 lanarky/adapters/langchain/dependencies.py |  3 +-
 lanarky/adapters/langchain/utils.py        |  5 +-
 lanarky/adapters/openai/dependencies.py    |  4 +-
 lanarky/adapters/openai/resources.py       |  4 +-
 lanarky/adapters/openai/utils.py           |  7 +--
 lanarky/utils.py                           | 42 +++++++++++++++++
 pyproject.toml                             |  7 +--
 10 files changed, 126 insertions(+), 22 deletions(-)
 create mode 100644 README.pypi.md
 create mode 100644 lanarky/utils.py

diff --git a/README.md b/README.md
index a81b80d..cd8ed50 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
-lanarky-logo-light-mode
-lanarky-logo-dark-mode
+lanarky-logo-light-mode
+lanarky-logo-dark-mode

The web framework for building LLM microservices.

diff --git a/README.pypi.md b/README.pypi.md
new file mode 100644
index 0000000..bdb5c82
--- /dev/null
+++ b/README.pypi.md
@@ -0,0 +1,53 @@
+
+lanarky-logo-light-mode
+
+The web framework for building LLM microservices.
+
+[![Stars](https://img.shields.io/github/stars/ajndkr/lanarky)](https://github.com/ajndkr/lanarky/stargazers)
+[![License](https://img.shields.io/badge/License-MIT-yellow.svg)](https://github.com/ajndkr/lanarky/blob/main/LICENSE)
+[![Twitter](https://img.shields.io/twitter/follow/LanarkyAPI?style=social)](https://twitter.com/intent/follow?screen_name=LanarkyAPI)
+[![Discord](https://img.shields.io/badge/join-Discord-7289da.svg)](https://discord.gg/6qUfrQAEeE)
+
+[![Python](https://img.shields.io/pypi/pyversions/lanarky.svg)](https://pypi.org/project/lanarky/)
+[![Coverage](https://coveralls.io/repos/github/ajndkr/lanarky/badge.svg?branch=main)](https://coveralls.io/github/ajndkr/lanarky?branch=main)
+[![Version](https://badge.fury.io/py/lanarky.svg)](https://pypi.org/project/lanarky/)
+[![Stats](https://img.shields.io/pypi/dm/lanarky.svg)](https://pypistats.org/packages/lanarky)
+
+Lanarky is a **Python (3.9+)** web framework for developers who want to build microservices using LLMs.
+Here are some of its key features:
+
+- **LLM-first**: Unlike other web frameworks, lanarky is built specifically for LLM developers.
+  It's unopinionated about how you build your microservices and guarantees zero vendor lock-in
+  with any LLM tooling framework or cloud provider.
+- **Fast & Modern**: Built on top of FastAPI, lanarky offers all the FastAPI features you know and love.
+  If you are new to FastAPI, visit [fastapi.tiangolo.com](https://fastapi.tiangolo.com) to learn more.
+- **Streaming**: Streaming is essential for many real-time LLM applications, like chatbots. Lanarky has
+  got you covered with built-in streaming support over **HTTP** and **WebSockets**.
+- **Open-source**: Lanarky is open-source and free to use. Forever.
+
+To learn more about lanarky and get started, you can find the full documentation at [lanarky.ajndkr.com](https://lanarky.ajndkr.com).
+
+## Installation
+
+The library is available on PyPI and can be installed via `pip`:
+
+```bash
+pip install lanarky
+```
+
+## Contributing
+
+[![Code check](https://github.com/ajndkr/lanarky/actions/workflows/code-check.yaml/badge.svg)](https://github.com/ajndkr/lanarky/actions/workflows/code-check.yaml)
+[![Publish](https://github.com/ajndkr/lanarky/actions/workflows/publish.yaml/badge.svg)](https://github.com/ajndkr/lanarky/actions/workflows/publish.yaml)
+
+Contributions are more than welcome! If you have an idea for a new feature or want to help improve lanarky,
+please create an issue or submit a pull request on [GitHub](https://github.com/ajndkr/lanarky).
+
+See [CONTRIBUTING.md](https://github.com/ajndkr/lanarky/blob/main/CONTRIBUTING.md) for more information.
+
+## License
+
+The library is released under the [MIT License](https://github.com/ajndkr/lanarky/blob/main/LICENSE).
diff --git a/lanarky/adapters/langchain/callbacks.py b/lanarky/adapters/langchain/callbacks.py
index aed1cf8..441d511 100644
--- a/lanarky/adapters/langchain/callbacks.py
+++ b/lanarky/adapters/langchain/callbacks.py
@@ -1,5 +1,5 @@
 from enum import Enum
-from typing import Any, Optional, Union
+from typing import Any, Optional
 
 from fastapi.websockets import WebSocket
 from langchain.callbacks.base import AsyncCallbackHandler
@@ -12,6 +12,7 @@
 from starlette.types import Message, Send
 
 from lanarky.events import Events, ServerSentEvent, ensure_bytes
+from lanarky.utils import model_dump_json
 
 
 class LangchainEvents(str, Enum):
@@ -88,7 +89,7 @@ class TokenEventData(BaseModel):
     token: str = ""
 
 
-def get_token_data(token: str, mode: TokenStreamMode) -> Union[str, dict[str, Any]]:
+def get_token_data(token: str, mode: TokenStreamMode) -> str:
     """Get token data based on mode.
 
     Args:
@@ -101,7 +102,7 @@ def get_token_data(token: str, mode: TokenStreamMode) -> Union[str, dict[str, Any]]:
     if mode == TokenStreamMode.TEXT:
         return token
     else:
-        return TokenEventData(token=token).model_dump_json()
+        return model_dump_json(TokenEventData(token=token))
 
 
 class TokenStreamingCallbackHandler(StreamingCallbackHandler):
@@ -188,9 +189,9 @@ async def on_chain_end(
                 document.dict() for document in outputs["source_documents"]
             ]
             message = self._construct_message(
-                data=SourceDocumentsEventData(
-                    source_documents=source_documents
-                ).model_dump_json(),
+                data=model_dump_json(
+                    SourceDocumentsEventData(source_documents=source_documents)
+                ),
                 event=LangchainEvents.SOURCE_DOCUMENTS,
             )
             await self.send(message)
@@ -374,9 +375,9 @@ async def on_chain_end(
                 document.dict() for document in outputs["source_documents"]
             ]
             message = self._construct_message(
-                data=SourceDocumentsEventData(
-                    source_documents=source_documents
-                ).model_dump_json(),
+                data=model_dump_json(
+                    SourceDocumentsEventData(source_documents=source_documents)
+                ),
                 event=LangchainEvents.SOURCE_DOCUMENTS,
             )
             await self.websocket.send_json(message)
diff --git a/lanarky/adapters/langchain/dependencies.py b/lanarky/adapters/langchain/dependencies.py
index 24818b4..e401afe 100644
--- a/lanarky/adapters/langchain/dependencies.py
+++ b/lanarky/adapters/langchain/dependencies.py
@@ -4,6 +4,7 @@
 from langchain.chains.base import Chain
 
 from lanarky.adapters.langchain.utils import create_request_model, create_response_model
+from lanarky.utils import model_dump
 
 
 def Depends(
@@ -34,6 +35,6 @@ async def chain_dependency(
         request: request_model,
         chain: Chain = params.Depends(dependency, use_cache=use_cache),
     ) -> response_model:
-        return await chain.acall(inputs=request.model_dump(), **dependency_kwargs)
+        return await chain.acall(inputs=model_dump(request), **dependency_kwargs)
 
     return params.Depends(chain_dependency, use_cache=use_cache)
diff --git a/lanarky/adapters/langchain/utils.py b/lanarky/adapters/langchain/utils.py
index 52ca150..b20967d 100644
--- a/lanarky/adapters/langchain/utils.py
+++ b/lanarky/adapters/langchain/utils.py
@@ -19,6 +19,7 @@
 from lanarky.adapters.langchain.responses import HTTPStatusDetail, StreamingResponse
 from lanarky.events import Events
 from lanarky.logging import logger
+from lanarky.utils import model_dump
 from lanarky.websockets import WebSocket, WebsocketSession
 
 
@@ -43,7 +44,7 @@ async def factory_endpoint(
         request: request_model, chain: Chain = Depends(endpoint)
     ):
         return StreamingResponse(
-            chain=chain, config={"inputs": request.model_dump(), "callbacks": callbacks}
+            chain=chain, config={"inputs": model_dump(request), "callbacks": callbacks}
         )
 
     return factory_endpoint
@@ -70,7 +71,7 @@ async def factory_endpoint(websocket: WebSocket, chain: Chain = Depends(endpoint)):
         async for data in session:
             try:
                 await chain.acall(
-                    inputs=request_model(**data).model_dump(),
+                    inputs=model_dump(request_model(**data)),
                     callbacks=callbacks,
                 )
             except Exception as e:
diff --git a/lanarky/adapters/openai/dependencies.py b/lanarky/adapters/openai/dependencies.py
index 7663272..769e20a 100644
--- a/lanarky/adapters/openai/dependencies.py
+++ b/lanarky/adapters/openai/dependencies.py
@@ -2,6 +2,8 @@
 
 from fastapi import params
 
+from lanarky.utils import model_dump
+
 from .resources import OpenAIResource
 from .utils import create_request_model, create_response_model
 
@@ -34,7 +36,7 @@ async def resource_dependency(
         request: request_model,
         resource: OpenAIResource = params.Depends(dependency, use_cache=use_cache),
     ) -> response_model:
-        resource_kwargs = {**request.model_dump(), **dependency_kwargs}
+        resource_kwargs = {**model_dump(request), **dependency_kwargs}
         return await resource(**resource_kwargs)
 
diff --git a/lanarky/adapters/openai/resources.py b/lanarky/adapters/openai/resources.py
index 5e322d1..1fcfa4a 100644
--- a/lanarky/adapters/openai/resources.py
+++ b/lanarky/adapters/openai/resources.py
@@ -5,6 +5,8 @@
 from openai.types.chat import ChatCompletion, ChatCompletionChunk
 from pydantic import BaseModel, Field
 
+from lanarky.utils import model_dump
+
 
 class Message(BaseModel):
     role: str = Field(pattern=r"^(user|assistant)$")
@@ -105,5 +107,5 @@ async def __call__(self, messages: list[dict]) -> ChatCompletion:
 
     def _prepare_messages(self, messages: list[dict]) -> list[dict]:
         if self.system is not None:
-            messages = [self.system.model_dump()] + messages
+            messages = [model_dump(self.system)] + messages
         return messages
diff --git a/lanarky/adapters/openai/utils.py b/lanarky/adapters/openai/utils.py
index 8d2c906..b56f0ca 100644
--- a/lanarky/adapters/openai/utils.py
+++ b/lanarky/adapters/openai/utils.py
@@ -7,6 +7,7 @@
 
 from lanarky.events import Events
 from lanarky.logging import logger
+from lanarky.utils import model_dump, model_fields
 from lanarky.websockets import WebSocket, WebsocketSession
 
 from .resources import ChatCompletion, ChatCompletionResource, Message, OpenAIResource
@@ -31,7 +32,7 @@ def build_factory_api_endpoint(
     async def factory_endpoint(
         request: request_model, resource: OpenAIResource = Depends(endpoint)
     ):
-        return StreamingResponse(resource=resource, **request.model_dump())
+        return StreamingResponse(resource=resource, **model_dump(request))
 
     return factory_endpoint
@@ -58,7 +59,7 @@ async def factory_endpoint(
         async for data in session:
             try:
                 async for chunk in resource.stream_response(
-                    **request_model(**data).model_dump()
+                    **model_dump(request_model(**data))
                 ):
                     await websocket.send_json(
                         dict(
@@ -153,7 +154,7 @@ def create_response_model(
         raise TypeError("resource must be a ChatCompletion instance")
 
     response_fields = {
-        k: (v.annotation, ...) for k, v in ChatCompletion.model_fields.items()
+        k: (v.annotation, ...) for k, v in model_fields(ChatCompletion).items()
     }
 
     prefix = prefix or resource.__class__.__name__
diff --git a/lanarky/utils.py b/lanarky/utils.py
new file mode 100644
index 0000000..284788c
--- /dev/null
+++ b/lanarky/utils.py
@@ -0,0 +1,42 @@
+from typing import Any
+
+import pydantic
+from pydantic.fields import FieldInfo
+
+PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
+
+
+def model_dump(model: pydantic.BaseModel, **kwargs) -> dict[str, Any]:
+    """Dump a pydantic model to a dictionary.
+
+    Args:
+        model: A pydantic model.
+    """
+    if PYDANTIC_V2:
+        return model.model_dump(**kwargs)
+    else:
+        return model.dict(**kwargs)
+
+
+def model_dump_json(model: pydantic.BaseModel, **kwargs) -> str:
+    """Dump a pydantic model to a JSON string.
+
+    Args:
+        model: A pydantic model.
+    """
+    if PYDANTIC_V2:
+        return model.model_dump_json(**kwargs)
+    else:
+        return model.json(**kwargs)
+
+
+def model_fields(model: pydantic.BaseModel) -> dict[str, FieldInfo]:
+    """Get the fields of a pydantic model.
+
+    Args:
+        model: A pydantic model.
+    """
+    if PYDANTIC_V2:
+        return model.model_fields
+    else:
+        return model.__fields__
diff --git a/pyproject.toml b/pyproject.toml
index 9eebdd3..28b0bbd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,11 +1,12 @@
 [tool.poetry]
 name = "lanarky"
-version = "0.8.0"
+version = "0.8.1"
 description = "The web framework for building LLM microservices"
 authors = ["Ajinkya Indulkar <26824103+ajndkr@users.noreply.github.com>"]
-readme = "README.md"
-homepage = "https://github.com/ajndkr/lanarky"
+readme = "README.pypi.md"
+homepage = "https://lanarky.ajndkr.com/"
 repository = "https://github.com/ajndkr/lanarky"
+documentation = "https://lanarky.ajndkr.com/"
 license = "MIT"
 packages = [{include = "lanarky"}]
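
For context, a minimal sketch of how the new `lanarky.utils` helpers behave under either pydantic major version. This is illustrative, not part of the patch: the `TokenEventData` model mirrors the one in `lanarky/adapters/langchain/callbacks.py`, and the field value and printed output are assumptions based on default pydantic serialization.

```python
import pydantic
from pydantic import BaseModel

from lanarky.utils import PYDANTIC_V2, model_dump, model_dump_json, model_fields


class TokenEventData(BaseModel):
    # Same shape as the model in lanarky/adapters/langchain/callbacks.py.
    token: str = ""


data = TokenEventData(token="hello")

# Dispatches to .model_dump() on pydantic v2, .dict() on v1.
assert model_dump(data) == {"token": "hello"}

# Dispatches to .model_dump_json() on v2, .json() on v1.
# v2 prints {"token":"hello"}; v1 prints {"token": "hello"}.
print(model_dump_json(data))

# Dispatches to .model_fields on v2, .__fields__ on v1.
assert list(model_fields(TokenEventData)) == ["token"]

print(f"pydantic {pydantic.VERSION}, v2 detected: {PYDANTIC_V2}")
```

Because the adapters now route every dump through these helpers instead of calling the v2-only `model_*` methods directly, the same codebase works whether the application pins pydantic v1 or v2.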