// stream.test.ts
  1. import { ApiStreamChunk } from '../stream';
  2. describe('API Stream Types', () => {
  3. describe('ApiStreamChunk', () => {
  4. it('should correctly handle text chunks', () => {
  5. const textChunk: ApiStreamChunk = {
  6. type: 'text',
  7. text: 'Hello world'
  8. };
  9. expect(textChunk.type).toBe('text');
  10. expect(textChunk.text).toBe('Hello world');
  11. });
  12. it('should correctly handle usage chunks with cache information', () => {
  13. const usageChunk: ApiStreamChunk = {
  14. type: 'usage',
  15. inputTokens: 100,
  16. outputTokens: 50,
  17. cacheWriteTokens: 20,
  18. cacheReadTokens: 10
  19. };
  20. expect(usageChunk.type).toBe('usage');
  21. expect(usageChunk.inputTokens).toBe(100);
  22. expect(usageChunk.outputTokens).toBe(50);
  23. expect(usageChunk.cacheWriteTokens).toBe(20);
  24. expect(usageChunk.cacheReadTokens).toBe(10);
  25. });
  26. it('should handle usage chunks without cache tokens', () => {
  27. const usageChunk: ApiStreamChunk = {
  28. type: 'usage',
  29. inputTokens: 100,
  30. outputTokens: 50
  31. };
  32. expect(usageChunk.type).toBe('usage');
  33. expect(usageChunk.inputTokens).toBe(100);
  34. expect(usageChunk.outputTokens).toBe(50);
  35. expect(usageChunk.cacheWriteTokens).toBeUndefined();
  36. expect(usageChunk.cacheReadTokens).toBeUndefined();
  37. });
  38. it('should handle text chunks with empty strings', () => {
  39. const emptyTextChunk: ApiStreamChunk = {
  40. type: 'text',
  41. text: ''
  42. };
  43. expect(emptyTextChunk.type).toBe('text');
  44. expect(emptyTextChunk.text).toBe('');
  45. });
  46. it('should handle usage chunks with zero tokens', () => {
  47. const zeroUsageChunk: ApiStreamChunk = {
  48. type: 'usage',
  49. inputTokens: 0,
  50. outputTokens: 0
  51. };
  52. expect(zeroUsageChunk.type).toBe('usage');
  53. expect(zeroUsageChunk.inputTokens).toBe(0);
  54. expect(zeroUsageChunk.outputTokens).toBe(0);
  55. });
  56. it('should handle usage chunks with large token counts', () => {
  57. const largeUsageChunk: ApiStreamChunk = {
  58. type: 'usage',
  59. inputTokens: 1000000,
  60. outputTokens: 500000,
  61. cacheWriteTokens: 200000,
  62. cacheReadTokens: 100000
  63. };
  64. expect(largeUsageChunk.type).toBe('usage');
  65. expect(largeUsageChunk.inputTokens).toBe(1000000);
  66. expect(largeUsageChunk.outputTokens).toBe(500000);
  67. expect(largeUsageChunk.cacheWriteTokens).toBe(200000);
  68. expect(largeUsageChunk.cacheReadTokens).toBe(100000);
  69. });
  70. it('should handle text chunks with special characters', () => {
  71. const specialCharsChunk: ApiStreamChunk = {
  72. type: 'text',
  73. text: '!@#$%^&*()_+-=[]{}|;:,.<>?`~'
  74. };
  75. expect(specialCharsChunk.type).toBe('text');
  76. expect(specialCharsChunk.text).toBe('!@#$%^&*()_+-=[]{}|;:,.<>?`~');
  77. });
  78. it('should handle text chunks with unicode characters', () => {
  79. const unicodeChunk: ApiStreamChunk = {
  80. type: 'text',
  81. text: '你好世界👋🌍'
  82. };
  83. expect(unicodeChunk.type).toBe('text');
  84. expect(unicodeChunk.text).toBe('你好世界👋🌍');
  85. });
  86. it('should handle text chunks with multiline content', () => {
  87. const multilineChunk: ApiStreamChunk = {
  88. type: 'text',
  89. text: 'Line 1\nLine 2\nLine 3'
  90. };
  91. expect(multilineChunk.type).toBe('text');
  92. expect(multilineChunk.text).toBe('Line 1\nLine 2\nLine 3');
  93. expect(multilineChunk.text.split('\n')).toHaveLength(3);
  94. });
  95. });
  96. });