diff --git a/apis/chat_api.py b/apis/chat_api.py
index 2c0a20c..de160b0 100644
--- a/apis/chat_api.py
+++ b/apis/chat_api.py
@@ -1,7 +1,11 @@
 import argparse
+import markdown2
 import sys
 import uvicorn
+from pathlib import Path
 from fastapi import FastAPI
+from fastapi.responses import HTMLResponse
+from fastapi.staticfiles import StaticFiles
 from pydantic import BaseModel, Field
 from sse_starlette.sse import EventSourceResponse
 from conversations import (
@@ -21,6 +25,7 @@ def __init__(self):
             version="1.0",
         )
         self.setup_routes()
+        self.app.mount("/docs", StaticFiles(directory="docs", html=True), name="docs")

     def get_available_models(self):
         self.available_models = {
@@ -124,6 +129,15 @@ def chat_completions(self, item: ChatCompletionsPostItem):
             media_type="text/event-stream",
         )

+    def get_readme(self):
+        readme_path = Path(__file__).parents[1] / "README.md"
+        with open(readme_path, "r", encoding="utf-8") as rf:
+            readme_str = rf.read()
+        readme_html = markdown2.markdown(
+            readme_str, extras=["table", "fenced-code-blocks", "highlightjs-lang"]
+        )
+        return readme_html
+
     def setup_routes(self):
         for prefix in ["", "/v1", "/api", "/api/v1"]:
             include_in_schema = True if prefix == "" else False
@@ -145,6 +159,13 @@ def setup_routes(self):
                 include_in_schema=include_in_schema,
             )(self.chat_completions)

+        self.app.get(
+            "/readme",
+            summary="README of Bing Chat API",
+            response_class=HTMLResponse,
+            include_in_schema=False,
+        )(self.get_readme)
+

 class ArgParser(argparse.ArgumentParser):
     def __init__(self, *args, **kwargs):
diff --git a/requirements.txt b/requirements.txt
index c0e0fd8..ed02aeb 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,6 +2,7 @@ aiohttp
 fastapi
 httpx
 openai
+markdown2[all]
 pydantic
 requests
 sse_starlette
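
For reference, a minimal sketch of how the new `/readme` route could be exercised once the server is running. The base URL (`http://127.0.0.1:22222`) is a placeholder assumption, not a value taken from this diff; `requests` is already listed in requirements.txt.

```python
# Sketch: fetch the README rendered to HTML by the new /readme endpoint.
# Assumes the API server is already running; adjust BASE_URL to your deployment.
import requests

BASE_URL = "http://127.0.0.1:22222"  # hypothetical host/port, not from the diff

resp = requests.get(f"{BASE_URL}/readme")
resp.raise_for_status()

# The route responds with HTML produced by markdown2 from README.md.
print(resp.headers.get("content-type"))  # e.g. "text/html; charset=utf-8"
print(resp.text[:200])                   # first part of the rendered README
```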