openai_api.py

  1. """
  2. coding : utf-8
  3. @Date : 2024/7/11
  4. @Author : Shaobo
  5. @Describe:
  6. """
  7. import time
  8. from typing import Literal
  9. import shortuuid
  10. from pydantic import BaseModel
  11. completion_id = f"chatcmpl-{shortuuid.random()}"
  12. class ChatMessage(BaseModel):
  13. role: str
  14. content: str
  15. class ChatCompletionRequest(BaseModel):
  16. model: str = "codegeex4"
  17. messages: list[ChatMessage]
  18. temperature: float = 0.2
  19. top_p: float = 1.0
  20. max_tokens: int = 1024
  21. stop: list[str] = ['<|user|>', '<|assistant|>', '<|observation|>', '<|endoftext|>']
  22. stream: bool = True
  23. presence_penalty: float = None
  24. class DeltaMessage(BaseModel):
  25. role: str
  26. content: str
  27. class ChatCompletionResponseStreamChoice(BaseModel):
  28. index: int = 0
  29. delta: DeltaMessage = DeltaMessage(role='assistant', content='')
  30. finish_reason: Literal["stop", "length"] = None
  31. class ChatCompletionStreamResponse(BaseModel):
  32. id: str = completion_id
  33. object: str = "chat.completion.chunk"
  34. created: int = int(time.time())
  35. model: str = "codegeex4"
  36. choices: list[ChatCompletionResponseStreamChoice] = [ChatCompletionResponseStreamChoice()]
  37. class ChatCompletionResponseChoice(BaseModel):
  38. index: int = 0
  39. message: ChatMessage = ChatMessage(role="assistant", content="")
  40. finish_reason: Literal["stop", "length"] = None
  41. class ChatCompletionResponse(BaseModel):
  42. id: str = f"chatcmpl-{shortuuid.random()}"
  43. object: str = "chat.completion"
  44. created: int = int(time.time())
  45. model: str = "codegeex4"
  46. choices: list[ChatCompletionResponseChoice] = [ChatCompletionResponseChoice()]
  47. # usage: UsageInfo
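

if __name__ == "__main__":
    # Illustrative usage sketch (not part of the original module): shows how a
    # server built on these models might validate a request and emit
    # OpenAI-style SSE chunks. Assumes Pydantic v2 (`model_dump_json`); on
    # Pydantic v1, use `.json()` instead.
    request = ChatCompletionRequest(
        messages=[ChatMessage(role="user", content="Write a hello world in Python.")]
    )
    print(request.model_dump_json())

    # One streamed content chunk, then a terminating chunk carrying
    # finish_reason="stop", then the conventional "data: [DONE]" sentinel.
    chunk = ChatCompletionStreamResponse(
        choices=[ChatCompletionResponseStreamChoice(
            delta=DeltaMessage(role="assistant", content="print('hello world')")
        )]
    )
    final = ChatCompletionStreamResponse(
        choices=[ChatCompletionResponseStreamChoice(
            delta=DeltaMessage(role="assistant", content=""),
            finish_reason="stop",
        )]
    )
    for item in (chunk, final):
        print(f"data: {item.model_dump_json()}\n")
    print("data: [DONE]")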