Source code for autogen_ext.agents.file_surfer._file_surfer
import json
import os
import traceback
from typing import List, Sequence, Tuple

from autogen_agentchat.agents import BaseChatAgent
from autogen_agentchat.base import Response
from autogen_agentchat.messages import (
    ChatMessage,
    MultiModalMessage,
    TextMessage,
)
from autogen_agentchat.utils import remove_images
from autogen_core import CancellationToken, Component, ComponentModel, FunctionCall
from autogen_core.models import (
    AssistantMessage,
    ChatCompletionClient,
    LLMMessage,
    SystemMessage,
    UserMessage,
)
from pydantic import BaseModel
from typing_extensions import Self

from ._markdown_file_browser import MarkdownFileBrowser

# from typing_extensions import Annotated
from ._tool_definitions import (
    TOOL_FIND_NEXT,
    TOOL_FIND_ON_PAGE_CTRL_F,
    TOOL_OPEN_PATH,
    TOOL_PAGE_DOWN,
    TOOL_PAGE_UP,
)


class FileSurferConfig(BaseModel):
    """Configuration for FileSurfer agent"""

    name: str
    model_client: ComponentModel
    description: str | None = None
class FileSurfer(BaseChatAgent, Component[FileSurferConfig]):
    """An agent, used by MagenticOne, that acts as a local file previewer. FileSurfer can open and read
    a variety of common file types, and can navigate the local file hierarchy.

    Installation:

    .. code-block:: bash

        pip install "autogen-ext[file-surfer]"

    Args:
        name (str): The agent's name
        model_client (ChatCompletionClient): The model to use (must be tool-use enabled)
        description (str): The agent's description used by the team. Defaults to DEFAULT_DESCRIPTION
        base_path (str): The base path to use for the file browser. Defaults to the current working directory.
    """

    component_config_schema = FileSurferConfig
    component_provider_override = "autogen_ext.agents.file_surfer.FileSurfer"

    DEFAULT_DESCRIPTION = "An agent that can handle local files."

    DEFAULT_SYSTEM_MESSAGES = [
        SystemMessage(
            content="""
        You are a helpful AI Assistant.
        When given a user query, use available functions to help the user with their request."""
        ),
    ]

    def __init__(
        self,
        name: str,
        model_client: ChatCompletionClient,
        description: str = DEFAULT_DESCRIPTION,
        base_path: str = os.getcwd(),
    ) -> None:
        super().__init__(name, description)
        self._model_client = model_client
        self._chat_history: List[LLMMessage] = []
        self._browser = MarkdownFileBrowser(viewport_size=1024 * 5, base_path=base_path)

    @property
    def produced_message_types(self) -> Sequence[type[ChatMessage]]:
        return (TextMessage,)
    async def on_messages(self, messages: Sequence[ChatMessage], cancellation_token: CancellationToken) -> Response:
        for chat_message in messages:
            if isinstance(chat_message, TextMessage | MultiModalMessage):
                self._chat_history.append(UserMessage(content=chat_message.content, source=chat_message.source))
            else:
                raise ValueError(f"Unexpected message in FileSurfer: {chat_message}")

        try:
            _, content = await self._generate_reply(cancellation_token=cancellation_token)
            self._chat_history.append(AssistantMessage(content=content, source=self.name))
            return Response(chat_message=TextMessage(content=content, source=self.name))
        except BaseException:
            content = f"File surfing error:\n\n{traceback.format_exc()}"
            self._chat_history.append(AssistantMessage(content=content, source=self.name))
            return Response(chat_message=TextMessage(content=content, source=self.name))
    def _get_browser_state(self) -> Tuple[str, str]:
        """
        Get the current state of the browser, including the header and content.
        """
        header = f"Path: {self._browser.path}\n"

        if self._browser.page_title is not None:
            header += f"Title: {self._browser.page_title}\n"

        current_page = self._browser.viewport_current_page
        total_pages = len(self._browser.viewport_pages)
        header += f"Viewport position: Showing page {current_page + 1} of {total_pages}.\n"

        return (header, self._browser.viewport)

    async def _generate_reply(self, cancellation_token: CancellationToken) -> Tuple[bool, str]:
        history = self._chat_history[0:-1]
        last_message = self._chat_history[-1]
        assert isinstance(last_message, UserMessage)

        task_content = last_message.content  # the last message from the sender is the task

        assert self._browser is not None

        context_message = UserMessage(
            source="user",
            content=f"Your file viewer is currently open to the file or directory '{self._browser.page_title}' with path '{self._browser.path}'.",
        )

        task_message = UserMessage(
            source="user",
            content=task_content,
        )

        create_result = await self._model_client.create(
            messages=self._get_compatible_context(history + [context_message, task_message]),
            tools=[
                TOOL_OPEN_PATH,
                TOOL_PAGE_DOWN,
                TOOL_PAGE_UP,
                TOOL_FIND_NEXT,
                TOOL_FIND_ON_PAGE_CTRL_F,
            ],
            cancellation_token=cancellation_token,
        )

        response = create_result.content

        if isinstance(response, str):
            # Answer directly.
            return False, response
        elif isinstance(response, list) and all(isinstance(item, FunctionCall) for item in response):
            function_calls = response
            for function_call in function_calls:
                tool_name = function_call.name

                try:
                    arguments = json.loads(function_call.arguments)
                except json.JSONDecodeError as e:
                    error_str = f"File surfer encountered an error decoding JSON arguments: {e}"
                    return False, error_str

                if tool_name == "open_path":
                    path = arguments["path"]
                    self._browser.open_path(path)
                elif tool_name == "page_up":
                    self._browser.page_up()
                elif tool_name == "page_down":
                    self._browser.page_down()
                elif tool_name == "find_on_page_ctrl_f":
                    search_string = arguments["search_string"]
                    self._browser.find_on_page(search_string)
                elif tool_name == "find_next":
                    self._browser.find_next()

            header, content = self._get_browser_state()
            final_response = header.strip() + "\n=======================\n" + content
            return False, final_response

        final_response = "TERMINATE"
        return False, final_response

    def _get_compatible_context(self, messages: List[LLMMessage]) -> List[LLMMessage]:
        """Ensure that the messages are compatible with the underlying client, by removing images if needed."""
        if self._model_client.model_info["vision"]:
            return messages
        else:
            return remove_images(messages)
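For reference, a minimal usage sketch of the class above (not part of this module). It assumes the optional OpenAIChatCompletionClient from autogen_ext.models.openai (any tool-use-capable ChatCompletionClient works) and the Console helper from autogen_agentchat.ui, and drives the agent through the run_stream method inherited from BaseChatAgent:

    import asyncio

    from autogen_agentchat.ui import Console
    from autogen_ext.agents.file_surfer import FileSurfer
    from autogen_ext.models.openai import OpenAIChatCompletionClient


    async def main() -> None:
        # Assumed model and client; substitute your own tool-use-enabled client.
        model_client = OpenAIChatCompletionClient(model="gpt-4o")
        file_surfer = FileSurfer(
            name="file_surfer",
            model_client=model_client,
            base_path=".",  # restrict browsing to the current directory
        )
        # Stream the agent's messages as it opens and pages through local files.
        await Console(file_surfer.run_stream(task="Summarize the contents of README.md"))


    asyncio.run(main())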