|
@@ -56,7 +56,7 @@ class GoogleTranslator(BaseTranslator):
|
|
|
|
|
|
|
|
|
|
|
|
|
class OpenAITranslator(BaseTranslator):
|
|
class OpenAITranslator(BaseTranslator):
|
|
|
- def __init__(self, service, lang_out, lang_in, model, max_tokens=2000):
|
|
|
|
|
|
|
+ def __init__(self, service='openai', lang_out='zh-CN', lang_in='auto', model=os.getenv('LLM_MODEL'), max_tokens=2000):
|
|
|
lang_out = "zh-CN" if lang_out == "auto" else lang_out
|
|
lang_out = "zh-CN" if lang_out == "auto" else lang_out
|
|
|
lang_in = "en" if lang_in == "auto" else lang_in
|
|
lang_in = "en" if lang_in == "auto" else lang_in
|
|
|
super().__init__(service, lang_out, lang_in, model)
|
|
super().__init__(service, lang_out, lang_in, model)
|
|
@@ -75,7 +75,7 @@ class OpenAITranslator(BaseTranslator):
|
|
|
|
|
|
|
|
def _single_translate(self, text) -> str:
|
|
def _single_translate(self, text) -> str:
|
|
|
response = self.client.chat.completions.create(
|
|
response = self.client.chat.completions.create(
|
|
|
- model=os.getenv('LLM_MODEL', self.model),
|
|
|
|
|
|
|
+ model=self.model,
|
|
|
**self.options,
|
|
**self.options,
|
|
|
messages=[
|
|
messages=[
|
|
|
{
|
|
{
|
|
@@ -155,3 +155,11 @@ if __name__ == "__main__":
|
|
|
print("\nBatch translation results:")
|
|
print("\nBatch translation results:")
|
|
|
for original, translated in zip(batch_texts, translated_batch):
|
|
for original, translated in zip(batch_texts, translated_batch):
|
|
|
print(f"{original} -> {translated}")
|
|
print(f"{original} -> {translated}")
|
|
|
|
|
+
|
|
|
|
|
+'''
|
|
|
|
|
+translator = OpenAITranslator("openai", "zh-CN", "en", "openai/deepseek-chat")
|
|
|
|
|
+ # 单个翻译
|
|
|
|
|
+ result = translator.translate("Hello world")
|
|
|
|
|
+ # 批量翻译
|
|
|
|
|
+ results = translator.translate(["apple", "banana", "orange"])
|
|
|
|
|
+'''
|