// fh.ai.js
  1. import EncodeUtils from '../en-decode/endecode-lib.js';
  2. /**
  3. * 用零一万物大模型来进行流式问答输出
  4. */
  5. let AI = (() => {
  6. /**
  7. * 用 SiliconFlow 大模型(CoderVM)进行流式问答输出,支持多轮上下文对话
  8. * @param {Array} messages 聊天历史数组,每项格式: {role: 'user'|'assistant', content: string}
  9. * @param {function} receivingCallback 每次收到新内容时的回调,参数为 message 对象
  10. * @param {string} apiKey 可选,API Key
  11. * @example
  12. * const messages = [
  13. * { role: 'user', content: '你好' },
  14. * { role: 'assistant', content: '你好,有什么可以帮您?' },
  15. * { role: 'user', content: '帮我写个排序算法' }
  16. * ];
  17. * AI.askCoderLLM(messages, callback);
  18. */
  19. async function askCoderLLM(messages, receivingCallback, apiKey) {
  20. // 默认插入system prompt
  21. const systemPrompt = {
  22. role: 'system',
  23. content: '你是由FeHelper提供的,一个专为开发者服务的AI助手。' +
  24. '你的目标是精准理解开发者的技术需求,并以最简洁、直接、专业的方式输出高质量代码,并且保证代码的完整性。' +
  25. '请避免无关的解释和冗余描述,只输出开发者真正需要的代码和必要的技术要点说明。' +
  26. '遇到不明确的需求时,优先追问关键细节,绝不输出与开发无关的内容。' +
  27. '如果生成的是代码,一定要用```的markdown代码块包裹,并使用markdown语法渲染。'
  28. };
  29. let msgs;
  30. if (typeof messages === 'string') {
  31. // 单轮对话,自动组装为数组
  32. msgs = [systemPrompt, { role: 'user', content: messages }];
  33. } else if (Array.isArray(messages)) {
  34. // 多轮对话,插入system prompt(如未包含)
  35. const hasSystemPrompt = messages.some(m => m.role === 'system' && m.content === systemPrompt.content);
  36. msgs = hasSystemPrompt ? messages : [systemPrompt, ...messages];
  37. } else {
  38. // 其他类型,降级为空对话
  39. msgs = [systemPrompt];
  40. }
  41. const defaultKey = 'c2stamJ5eGlldmVmdmhnbnBnbGF3cmxlZ25uam9rY25kc3BpYndjZmh1d2Ntbm9jbmxp';
  42. const url = 'https://api.siliconflow.cn/v1/chat/completions';
  43. const options = {
  44. method: 'POST',
  45. headers: {
  46. 'Content-Type': 'application/json',
  47. 'Authorization': `Bearer ${apiKey || EncodeUtils.base64Decode(defaultKey)}`
  48. },
  49. body: JSON.stringify({
  50. "model": "Qwen/Qwen2.5-Coder-7B-Instruct",
  51. "messages": msgs, // 直接传递多轮历史
  52. "stream": true, // 开启流式输出
  53. "max_tokens": 4096,
  54. "enable_thinking": true,
  55. "thinking_budget": 4096,
  56. "min_p": 0.05,
  57. "stop": [],
  58. "temperature": 0.7,
  59. "top_p": 0.7,
  60. "top_k": 50,
  61. "frequency_penalty": 0.5,
  62. "n": 1,
  63. "response_format": {
  64. "type": "text"
  65. }
  66. })
  67. };
  68. try {
  69. const response = await fetch(url, options);
  70. if (!response.ok) {
  71. throw new Error(`HTTP error! status: ${response.status}`);
  72. }
  73. // 处理流式返回(text/event-stream)
  74. const reader = response.body.getReader();
  75. const decoder = new TextDecoder('utf-8');
  76. let buffer = '';
  77. let done = false;
  78. const msg = {id:'',content:''};
  79. while (!done) {
  80. const { value, done: readerDone } = await reader.read();
  81. done = readerDone;
  82. if (value) {
  83. buffer += decoder.decode(value, { stream: true });
  84. // 以换行分割,逐条处理
  85. let lines = buffer.split('\n');
  86. // 最后一行可能不完整,留到下次
  87. buffer = lines.pop();
  88. for (let line of lines) {
  89. line = line.trim();
  90. if (!line || !line.startsWith('data:')) continue;
  91. let jsonStr = line.replace(/^data:/, '').trim();
  92. if (jsonStr === '[DONE]') continue;
  93. try {
  94. let obj = JSON.parse(jsonStr);
  95. if (obj.choices && obj.choices[0] && obj.choices[0].delta) {
  96. msg.id = obj.id;
  97. msg.created = obj.created;
  98. msg.content += obj.choices[0].delta.content;
  99. receivingCallback && receivingCallback(msg);
  100. }
  101. } catch (e) {
  102. // 忽略解析失败的片段
  103. }
  104. }
  105. }
  106. }
  107. receivingCallback && receivingCallback(null,true);
  108. } catch (error) {
  109. console.error('Error fetching coderVM stream:', error);
  110. }
  111. }
  112. return {askCoderLLM};
  113. })();
  114. export default AI;