diff --git a/Readme.md b/Readme.md
index 0b269aa..4b02dcf 100644
--- a/Readme.md
+++ b/Readme.md
@@ -136,6 +136,22 @@ uvicorn main:app --host 0.0.0.0 --port 8000
 
 Now, your LangChain models and pipelines are accessible via the LangCorn API server.
 
+With a router:
+```python
+app = FastAPI()
+router = APIRouter()
+
+
+@app.get("/")
+def read_main():
+    return {"message": "Hello World from main app"}
+
+router: APIRouter = create_service(
+    "api.ex1:chain", app=router
+)
+app.include_router(router)
+```
+
 ## Docs
 
 Automatically served FastAPI doc
diff --git a/langcorn/server/api.py b/langcorn/server/api.py
index 883a85d..b131292 100644
--- a/langcorn/server/api.py
+++ b/langcorn/server/api.py
@@ -2,7 +2,7 @@
 import sys
 from typing import Any, Union
 
-from fastapi import Depends, FastAPI, Header, HTTPException, Request
+from fastapi import APIRouter, Depends, FastAPI, Header, HTTPException, Request
 from fastapi.security.utils import get_authorization_scheme_param
 from langchain.schema import messages_from_dict, messages_to_dict
 from loguru import logger
@@ -122,7 +122,7 @@ async def handler(request: request_cls, http_request: Request):
     return handler
 
 
-def create_service(*lc_apps, auth_token: str = "", app: FastAPI = None):
+def create_service(*lc_apps, auth_token: str = "", app: Union[FastAPI, APIRouter] = None):
     # Make local modules discoverable
     sys.path.append(os.path.dirname("."))
     logger.info("Creating service")
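
For reference, a self-contained version of the router example from the Readme hunk above. This is a minimal sketch, not the committed example verbatim: it assumes `create_service` is importable from the top-level `langcorn` package, that an example chain is exposed as `api.ex1:chain`, and that the router is passed through the keyword-only `app` parameter per the updated `create_service` signature.

```python
from fastapi import APIRouter, FastAPI

from langcorn import create_service  # assumed top-level export of create_service

app = FastAPI()
router = APIRouter()


@app.get("/")
def read_main():
    # Regular FastAPI route served alongside the LangCorn endpoints
    return {"message": "Hello World from main app"}


# create_service registers the chain endpoints on the router passed via `app=`
# and returns it, so the router can then be mounted on the main application.
router = create_service("api.ex1:chain", app=router)
app.include_router(router)
```

Started with `uvicorn main:app --host 0.0.0.0 --port 8000` as shown earlier in the Readme, the application then serves both the plain route and the chain endpoints contributed by the router.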