SYSTEM: Command list_files returned: ['..\..\..\..\AutoGPTLocal\Auto-GPT\autogpt\auto_gpt_workspace\source\my-document (2).pdf', '..\..\..\..\AutoGPTLocal\Auto-GPT\autogpt\auto_gpt_workspace\source\my-document (3).pdf']
Traceback (most recent call last):
File "", line 198, in _run_module_as_main
File "", line 88, in run_code
File "C:\auto-gpt\Auto-GPT\autogpt_main.py", line 5, in
autogpt.cli.main()
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\click\core.py", line 1130, in call
return self.main(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\click\core.py", line 1055, in main
rv = self.invoke(ctx)
^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\click\core.py", line 1635, in invoke
rv = super().invoke(ctx)
^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\click\core.py", line 1404, in invoke
return ctx.invoke(self.callback, **ctx.params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\click\core.py", line 760, in invoke
return callback(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\click\decorators.py", line 26, in new_func
return f(get_current_context(), *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\cli.py", line 96, in main
run_auto_gpt(
File "C:\auto-gpt\Auto-GPT\autogpt\main.py", line 197, in run_auto_gpt
agent.start_interaction_loop()
File "C:\auto-gpt\Auto-GPT\autogpt\agent\agent.py", line 130, in start_interaction_loop
assistant_reply = chat_with_ai(
^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\llm\chat.py", line 112, in chat_with_ai
new_summary_message, trimmed_messages = agent.history.trim_messages(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\memory\message_history.py", line 79, in trim_messages
new_summary_message = self.update_running_summary(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\memory\message_history.py", line 194, in update_running_summary
self.summary = create_chat_completion(prompt)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\llm\utils_init.py", line 53, in metered_func
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\llm\utils_init.py", line 87, in wrapped
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\llm\utils_init.py", line 235, in create_chat_completion
response = api_manager.create_chat_completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\auto-gpt\Auto-GPT\autogpt\llm\api_manager.py", line 61, in create_chat_completion
response = openai.ChatCompletion.create(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\openai\api_resources\chat_completion.py", line 25, in create
return super().create(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\openai\api_resources\abstract\engine_api_resource.py", line 153, in create
response, _, api_key = requestor.request(
^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\openai\api_requestor.py", line 226, in request
resp, got_stream = self._interpret_response(result, stream)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\openai\api_requestor.py", line 619, in _interpret_response
self._interpret_response_line(
File "C:\Users\alpha\AppData\Local\Programs\Python\Python311\Lib\site-packages\openai\api_requestor.py", line 682, in _interpret_response_line
raise self.handle_error_response(
openai.error.InvalidRequestError: This model's maximum context length is 4097 tokens. However, your messages resulted in 4310 tokens. Please reduce the length of the messages.
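
The failure occurs in `update_running_summary`: the prompt built from the running summary plus the new events comes to 4,310 tokens, which exceeds the model's 4,097-token context window. A minimal sketch of a workaround, assuming the model is gpt-3.5-turbo and using tiktoken for counting, is to measure the prompt and truncate it before calling the API (the function name, token budget, and model name below are illustrative, not Auto-GPT's actual code):

```python
# A minimal sketch of a workaround, not Auto-GPT's actual fix: count the
# prompt's tokens with tiktoken and truncate it so the request fits the
# model's context window. Model name and budgets below are assumptions.
import tiktoken

MODEL = "gpt-3.5-turbo"   # assumed model; match whatever LLM is configured
CONTEXT_LIMIT = 4097      # limit reported in the error message
REPLY_BUDGET = 1000       # tokens reserved for the model's answer (assumption)


def truncate_to_token_limit(text: str, max_tokens: int, model: str = MODEL) -> str:
    """Return `text` cut down to at most `max_tokens` tokens for `model`."""
    enc = tiktoken.encoding_for_model(model)
    tokens = enc.encode(text)
    if len(tokens) <= max_tokens:
        return text
    return enc.decode(tokens[:max_tokens])


# Usage: shrink the summarization prompt before handing it to the API.
prompt = "<running summary plus the latest events>"
safe_prompt = truncate_to_token_limit(prompt, CONTEXT_LIMIT - REPLY_BUDGET)
```

Depending on the Auto-GPT version, lowering the configured token limits in `.env` or switching to a model with a larger context window can also avoid the overflow.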