# ai-release-notes.py
  1. """
  2. AI-powered release notes generator that creates concise and informative release notes from git changes.
  3. This script uses OpenAI's API to analyze git changes (summary, diff, and commit log) and generate
  4. well-formatted release notes in markdown. It focuses on important changes and their impact,
  5. particularly highlighting new types and schemas while avoiding repetitive information.
  6. Environment Variables Required:
  7. OPENAI_API_KEY: OpenAI API key for authentication
  8. CHANGE_SUMMARY: Summary of changes made (optional if CUSTOM_PROMPT provided)
  9. CHANGE_DIFF: Git diff of changes (optional if CUSTOM_PROMPT provided)
  10. CHANGE_LOG: Git commit log (optional if CUSTOM_PROMPT provided)
  11. GITHUB_OUTPUT: Path to GitHub output file
  12. CUSTOM_PROMPT: Custom prompt to override default (optional)
  13. """
import json
import os
import uuid

import requests  # type: ignore
import tiktoken  # type: ignore
# Required: fails fast with KeyError at import time if the API key is absent.
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
# Optional git-change inputs; each may be empty when CUSTOM_PROMPT is set.
CHANGE_SUMMARY = os.environ.get('CHANGE_SUMMARY', '')
CHANGE_DIFF = os.environ.get('CHANGE_DIFF', '')
CHANGE_LOG = os.environ.get('CHANGE_LOG', '')
# Path to the GitHub Actions step-output file; None when run outside Actions.
GITHUB_OUTPUT = os.getenv("GITHUB_OUTPUT")
OPEN_AI_BASE_URL = "https://api.openai.com/v1"
OPEN_API_HEADERS = {"Authorization": f"Bearer {OPENAI_API_KEY}", "Content-Type": "application/json"}
# When non-empty, CUSTOM_PROMPT replaces the default prompt built from the git inputs.
CUSTOM_PROMPT = os.environ.get('CUSTOM_PROMPT', '')
MODEL_NAME = os.environ.get('MODEL_NAME', 'gpt-3.5-turbo-16k')
  27. def num_tokens_from_string(string: str, model_name: str) -> int:
  28. """
  29. Calculate the number of tokens in a text string for a specific model.
  30. Args:
  31. string: The input text to count tokens for
  32. model_name: Name of the OpenAI model to use for token counting
  33. Returns:
  34. int: Number of tokens in the input string
  35. """
  36. encoding = tiktoken.encoding_for_model(model_name)
  37. num_tokens = len(encoding.encode(string))
  38. return num_tokens
  39. def truncate_to_token_limit(text, max_tokens, model_name):
  40. """
  41. Truncate text to fit within a maximum token limit for a specific model.
  42. Args:
  43. text: The input text to truncate
  44. max_tokens: Maximum number of tokens allowed
  45. model_name: Name of the OpenAI model to use for tokenization
  46. Returns:
  47. str: Truncated text that fits within the token limit
  48. """
  49. encoding = tiktoken.encoding_for_model(model_name)
  50. encoded = encoding.encode(text)
  51. truncated = encoded[:max_tokens]
  52. return encoding.decode(truncated)
  53. def generate_release_notes(model_name):
  54. """
  55. Generate release notes using OpenAI's API based on git changes.
  56. Uses the GPT-3.5-turbo model to analyze change summary, commit log, and code diff
  57. to generate concise and informative release notes in markdown format. The notes
  58. focus on important changes and their impact, with sections for new types/schemas
  59. and other updates.
  60. Returns:
  61. str: Generated release notes in markdown format
  62. Raises:
  63. requests.exceptions.RequestException: If the OpenAI API request fails
  64. """
  65. max_tokens = 14000 # Reserve some tokens for the response
  66. # Truncate inputs if necessary to fit within token limits
  67. change_summary = '' if CUSTOM_PROMPT else truncate_to_token_limit(CHANGE_SUMMARY, 1000, model_name)
  68. change_log = '' if CUSTOM_PROMPT else truncate_to_token_limit(CHANGE_LOG, 2000, model_name)
  69. change_diff = '' if CUSTOM_PROMPT else truncate_to_token_limit(CHANGE_DIFF, max_tokens - num_tokens_from_string(change_summary, model_name) - num_tokens_from_string(change_log, model_name) - 1000, model_name)
  70. url = f"{OPEN_AI_BASE_URL}/chat/completions"
  71. # Construct prompt for OpenAI API
  72. openai_prompt = CUSTOM_PROMPT if CUSTOM_PROMPT else f"""Based on the following summary of changes, commit log and code diff, please generate concise and informative release notes:
  73. Summary of changes:
  74. {change_summary}
  75. Commit log:
  76. {change_log}
  77. Code Diff:
  78. {json.dumps(change_diff)}
  79. """
  80. data = {
  81. "model": model_name,
  82. "messages": [{"role": "user", "content": openai_prompt}],
  83. "temperature": 0.7,
  84. "max_tokens": 1000,
  85. }
  86. print("----------------------------------------------------------------------------------------------------------")
  87. print("POST request to OpenAI")
  88. print("----------------------------------------------------------------------------------------------------------")
  89. ai_response = requests.post(url, headers=OPEN_API_HEADERS, json=data)
  90. print(f"Status Code: {str(ai_response.status_code)}")
  91. print(f"Response: {ai_response.text}")
  92. ai_response.raise_for_status()
  93. return ai_response.json()["choices"][0]["message"]["content"]
  94. release_notes = generate_release_notes(MODEL_NAME)
  95. print("----------------------------------------------------------------------------------------------------------")
  96. print("OpenAI generated release notes")
  97. print("----------------------------------------------------------------------------------------------------------")
  98. print(release_notes)
  99. # Write the release notes to GITHUB_OUTPUT
  100. with open(GITHUB_OUTPUT, "a") as outputs_file:
  101. outputs_file.write(f"RELEASE_NOTES<<EOF\n{release_notes}\nEOF")