- import json
- from typing import Dict, List
- from jinja2 import BaseLoader, Environment
- from opendevin.controller.agent import Agent
- from opendevin.controller.state.state import State
- from opendevin.core.exceptions import LLMOutputError
- from opendevin.events.action import Action, action_from_dict
- from opendevin.llm.llm import LLM
- from .instructions import instructions
- from .registry import all_microagents
def parse_response(orig_response: str) -> Action:
    """Extract the first balanced top-level JSON object from an LLM response
    and deserialize it into an Action.

    Parameters:
    - orig_response: Raw text returned by the LLM, possibly with prose
      surrounding a JSON object.

    Returns:
    - Action: The action built from the first balanced JSON object found.

    Raises:
    - LLMOutputError: If no balanced JSON object exists in the response, or
      the first balanced candidate is not valid JSON.
    """
    depth = 0
    start = -1
    for i, char in enumerate(orig_response):
        if char == '{':
            if depth == 0:
                start = i
            depth += 1
        elif char == '}':
            # Ignore stray closing braces seen before any opener; letting
            # depth go negative would desynchronize brace matching and hide
            # a valid JSON object appearing later in the response.
            if depth == 0:
                continue
            depth -= 1
            if depth == 0 and start != -1:
                response = orig_response[start : i + 1]
                try:
                    action_dict = json.loads(response)
                    action = action_from_dict(action_dict)
                    return action
                except json.JSONDecodeError as e:
                    raise LLMOutputError(
                        'Invalid JSON in response. Please make sure the response is a valid JSON object.'
                    ) from e
    raise LLMOutputError('No valid JSON object found in response.')
def my_encoder(obj):
    """JSON fallback encoder for objects that expose ``to_dict``.

    Parameters:
    - obj: An object json.dumps could not serialize natively.

    Returns:
    - dict: The result of ``obj.to_dict()`` when the method exists;
      otherwise None (serialized by json.dumps as ``null``).
    """
    if not hasattr(obj, 'to_dict'):
        return None
    return obj.to_dict()
def to_json(obj, **kwargs):
    """Serialize *obj* to a JSON string.

    Objects json cannot handle natively are encoded through my_encoder;
    all keyword arguments are forwarded to json.dumps unchanged.
    """
    serialized = json.dumps(obj, default=my_encoder, **kwargs)
    return serialized
class MicroAgent(Agent):
    """Prompt-template-driven agent that can delegate to other micro agents.

    Subclasses supply ``prompt`` (a Jinja2 template string) and
    ``agent_definition`` (a dict that must contain a ``name`` key matching
    this agent's entry in the micro-agent registry).
    """

    # Jinja2 template source, rendered anew on every step.
    prompt = ''
    # Agent metadata; must contain at least a 'name' entry.
    agent_definition: Dict = {}

    def __init__(self, llm: LLM):
        """Compile the prompt template and collect the delegate registry.

        Parameters:
        - llm: Language-model wrapper used for completions.

        Raises:
        - ValueError: If the agent definition has no 'name'.
        - KeyError: If this agent's name is not present in all_microagents.
        """
        super().__init__(llm)
        if 'name' not in self.agent_definition:
            raise ValueError('Agent definition must contain a name')
        # jinja2 expects a loader *instance*; passing the BaseLoader class
        # only worked by accident because from_string never consults it.
        self.prompt_template = Environment(loader=BaseLoader()).from_string(
            self.prompt
        )
        # Every registered micro agent except this one is a delegate.
        self.delegates = all_microagents.copy()
        del self.delegates[self.agent_definition['name']]

    def step(self, state: State) -> Action:
        """Render the prompt, query the LLM, and parse its reply into an Action.

        Parameters:
        - state: Current task state, exposed to the template as ``state``.

        Returns:
        - Action: The action parsed from the model's JSON reply.
        """
        prompt = self.prompt_template.render(
            state=state,
            instructions=instructions,
            to_json=to_json,
            delegates=self.delegates,
        )
        messages = [{'content': prompt, 'role': 'user'}]
        resp = self.llm.completion(messages=messages)
        action_resp = resp['choices'][0]['message']['content']
        # Track character usage for accounting/limits on the shared state.
        state.num_of_chars += len(prompt) + len(action_resp)
        return parse_response(action_resp)

    def search_memory(self, query: str) -> List[str]:
        """Memory search is not implemented for micro agents; always empty."""
        return []