Browse Source

feat: add .env config for OpenAI key and model settings

Your Name (aider) 1 năm trước
mục cha
commit
e0458fc32c
3 tập tin đã thay đổi với 21 bổ sung và 10 xóa
  1. 2 3
      .env
  2. 11 2
      ai_trans.py
  3. 8 5
      pdfzh_translator.py

+ 2 - 3
.env

@@ -1,3 +1,2 @@
-OPENAI_API_KEY='<redacted-leaked-api-key>'  # NOTE(review): real key was committed here and remains in git history — rotate/revoke it; removing it from .env alone is not enough
-OPENAI_API_BASE='https://aiapi.magong.site/v1'
-LLM_MODEL="openai/deepseek-chat"
+OPENAI_API_KEY=your-api-key-here
+LLM_MODEL=gpt-3.5-turbo

+ 11 - 2
ai_trans.py

@@ -1,8 +1,17 @@
 import os
+from dotenv import load_dotenv
 from pdfzh_translator import OpenAITranslator
 
-# Initialize translator
-translator = OpenAITranslator(service="openai", lang_out="zh-CN", lang_in="ja", model="gpt-3.5-turbo")
+# Load environment variables
+load_dotenv()
+
+# Initialize translator with environment variables
+translator = OpenAITranslator(
+    service="openai",
+    lang_out="zh-CN",
+    lang_in="ja",
+    model=os.getenv('LLM_MODEL', 'gpt-3.5-turbo')  # Use env var or fallback to default
+)
 
 def translate_sentences(sentences, target_language='zh'):
     """

+ 8 - 5
pdfzh_translator.py

@@ -4,10 +4,13 @@ import logging
 import os
 import re
 from json import dumps, loads
+from dotenv import load_dotenv
 
 import openai
 import requests
 
+# Load environment variables
+load_dotenv()
 
 class BaseTranslator:
     def __init__(self, service, lang_out, lang_in, model):
@@ -58,13 +61,14 @@ class OpenAITranslator(BaseTranslator):
         lang_in = "en" if lang_in == "auto" else lang_in
         super().__init__(service, lang_out, lang_in, model)
         self.options = {"temperature": 0}  # 随机采样可能会打断公式标记
-        # OPENAI_BASE_URL
-        # OPENAI_API_KEY
-        self.client = openai.OpenAI()
+        # Configure OpenAI client with environment variables
+        self.client = openai.OpenAI(
+            api_key=os.getenv('OPENAI_API_KEY')
+        )
 
     def translate(self, text) -> str:
         response = self.client.chat.completions.create(
-            model=self.model,
+            model=os.getenv('LLM_MODEL', self.model),  # Use env var or fallback to default
             **self.options,
             messages=[
                 {
@@ -78,4 +82,3 @@ class OpenAITranslator(BaseTranslator):
             ],
         )
         return response.choices[0].message.content.strip()
-