Merge pull request #77 from OVINC-CN/feat_json_mode
feat: json mode
OrenZhang authored Oct 3, 2024
2 parents 9a51493 + 91eafcb commit 4a329c0
Showing 2 changed files with 34 additions and 0 deletions.
16 changes: 16 additions & 0 deletions apps/chat/consumers_async.py
@@ -145,3 +145,19 @@ def get_model_client(self, model: AIModel) -> Type[BaseClient]:
                return MidjourneyClient
            case _:
                raise UnexpectedProvider()


class JSONModeConsumer(AsyncConsumer):
    """
    JSON Mode Consumer
    """

    def __init__(self, key: str):
        super().__init__("", key)
        self.message = ""

    async def send(self, text_data: str):
        self.message += json.loads(text_data).get("data", "")

    async def close(self):
        return
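Note: the new consumer overrides send() to buffer each streamed frame's "data" field into self.message instead of pushing it over a websocket, and close() becomes a no-op. Below is a minimal standalone sketch of that aggregation behaviour; the {"data": ...} frame shape is taken from the code above, while the class name and driver are illustrative only and not part of this commit.

import asyncio
import json


class BufferingConsumer:
    """Illustrative stand-in mirroring JSONModeConsumer's send/close behaviour."""

    def __init__(self):
        self.message = ""

    async def send(self, text_data: str):
        # each streamed frame carries a partial string under "data"
        self.message += json.loads(text_data).get("data", "")

    async def close(self):
        return


async def demo():
    consumer = BufferingConsumer()
    # two hypothetical frames whose "data" fields concatenate into one JSON document
    for frame in ('{"data": "{\\"answer\\": 42"}', '{"data": "}"}'):
        await consumer.send(frame)
    await consumer.close()
    print(consumer.message)  # {"answer": 42}


asyncio.run(demo())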
18 changes: 18 additions & 0 deletions apps/chat/views.py
@@ -15,6 +15,7 @@
from rest_framework.response import Response

from apps.chat.constants import MESSAGE_CACHE_KEY
from apps.chat.consumers_async import JSONModeConsumer
from apps.chat.models import AIModel, ChatLog, SystemPreset
from apps.chat.permissions import AIModelPermission
from apps.chat.serializers import (
@@ -92,6 +93,23 @@ def load_model_map(self) -> dict:
        models = AIModel.objects.all()
        return {model.model: model.name for model in models}

    @action(methods=["POST"], detail=False, permission_classes=[AIModelPermission])
    async def json(self, request, *args, **kwargs):
        """
        JSON Mode
        """

        # pre check
        pre_response = await self.pre_check(request, *args, **kwargs)
        data = pre_response.data

        # chat
        consumer = JSONModeConsumer(data["key"])
        await consumer.chat()

        # response
        return Response(data={"data": consumer.message})


# pylint: disable=R0901
class AIModelViewSet(ListMixin, MainViewSet):
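For reference, a hypothetical client call against the new action follows; the route prefix, request body, and auth header are assumptions rather than details taken from this commit, and pre_check is expected to validate the request and resolve the message key.

import requests  # assumed to be available in the client environment

resp = requests.post(
    "https://chat.example.com/api/chat/json/",  # hypothetical URL for the @action route
    json={"key": "<message-cache-key>"},        # hypothetical body consumed by pre_check
    headers={"Authorization": "Bearer <token>"},
    timeout=60,
)
resp.raise_for_status()
print(resp.json()["data"])  # the full text accumulated by JSONModeConsumer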
