@@ -0,0 +1,81 @@
+import os
+import openai
+import time
+
+def chat_gpt_runner(input_text):
+    try:
+        # 1. Read the API key from the environment variable
+        api_key = os.getenv("OPENAI_API_KEY")
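+        # NOTE: os.getenv returns None if OPENAI_API_KEY is not set, in which case the API call below will fail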
+
+        # Print the first line of input_text as a progress indicator
+        print("Input:\n" + "\n".join(input_text.splitlines()[:1]) + "...\n")
+
+        # 2. Set up the OpenAI client
+        openai.api_key = api_key
+
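+        # Note: openai.ChatCompletion.create is the pre-1.0 openai SDK interface; openai>=1.0 moved this to client.chat.completions.create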
+        response = openai.ChatCompletion.create(
+            model="gpt-3.5-turbo",
+            messages=[{"role": "user", "content": input_text}],
+            temperature=1.0,
+        )
+
+        output_text = response.choices[0].message.content
+
+        # Print the first three lines of output_text
+        print("Output:\n" + "\n".join(output_text.splitlines()[:3]) + "...\n")
+
+        return output_text
+    except Exception as e:
+        print(f"Error: {e}")
+        return None
+
+def read_prompt(prompt_path):
+    with open(prompt_path, 'r', encoding="utf-8") as prompt_file:
+        return prompt_file.read()
+
+def read_write_md_files(input_path, output_path, prompt):
+    start_time = time.time()
+    for file in os.listdir(input_path):
+        if file.endswith(".md"):
+            with open(os.path.join(input_path, file), 'r', encoding="utf-8") as input_file:
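+                # Skip the first two lines of each Markdown file (assumed to be front matter)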
+                lines = input_file.readlines()[2:]
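+            # str.replace leaves the filename unchanged if it does not contain "-output"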
+            output_filename = file.replace("-output", "-gpt-output")
+            output_filepath = os.path.join(output_path, output_filename)
+
+            with open(output_filepath, 'w', encoding="utf-8") as output_file:
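+                # Process the file in 50-line chunks (one API request per chunk)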
+                for i in range(0, len(lines), 50):
+                    chunk = lines[i:i+50]
+                    single_start_time = time.time()
+                    result = chat_gpt_runner(prompt + "".join(chunk))
+                    # chat_gpt_runner returns None on failure; skip writing that chunk
+                    if result is not None:
+                        output_file.write(result)
+                        output_file.write("\n\n")
+                    single_end_time = time.time()
+                    elapsed_single_time = single_end_time - single_start_time
+                    elapsed_total_time = single_end_time - start_time
+
+                    print(f"Time for this chunk: {elapsed_single_time:.4f} seconds")
+                    print(f"Total elapsed so far: {elapsed_total_time:.4f} seconds")
+
+if __name__ == "__main__":
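+    # Expected layout: input .md files in ./md-outputs, results in ./gpt-outputs, prompt in tools/prompt.md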
+    root_path = os.getcwd()
+    input_folder = "md-outputs"
+    output_folder = "gpt-outputs"
+    prompt_path = os.path.join("tools", "prompt.md")
+
+    input_path = os.path.join(root_path, input_folder)
+    output_path = os.path.join(root_path, output_folder)
+
+    if not os.path.exists(output_path):
+        os.makedirs(output_path)
+
+    prompt_content = read_prompt(os.path.join(root_path, prompt_path))
+
+    read_write_md_files(input_path, output_path, prompt_content)