diff --git a/Dockerfile b/Dockerfile index 9f87b7b..2dc99b7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,10 @@ FROM python:3-slim +ENV SHELL_INTERACTION=false +ENV PRETTIFY_MARKDOWN=false +ENV OS_NAME=auto +ENV SHELL_NAME=auto + WORKDIR /app COPY . /app diff --git a/README.md b/README.md index 86e4b1c..59f45fe 100644 --- a/README.md +++ b/README.md @@ -461,18 +461,58 @@ docker run --rm \ ghcr.io/ther1d/shell_gpt --chat rainbow "what are the colors of a rainbow" ``` +When using a container, please note: +* The \[E\]xecute option for --shell with interaction will not work, since it would attempt to execute the command inside the Docker container. +=> Setting the `SHELL_INTERACTION` environment variable to `false` makes sense. +* Since the OS and shell of your container are most likely not identical to the environment you want help with, +set the environment variables `OS_NAME` and `SHELL_NAME` according to your setup. + + Example of a conversation, using an alias and the `OPENAI_API_KEY` environment variable: ```shell -alias sgpt="docker run --rm --env OPENAI_API_KEY --volume gpt-cache:/tmp/shell_gpt ghcr.io/ther1d/shell_gpt" +alias sgpt="docker run --rm --volume gpt-cache:/tmp/shell_gpt --env OPENAI_API_KEY --env OS_NAME=$(uname -s) --env SHELL_NAME=$(echo $SHELL) ghcr.io/ther1d/shell_gpt" export OPENAI_API_KEY="your OPENAI API key" sgpt --chat rainbow "what are the colors of a rainbow" sgpt --chat rainbow "inverse the list of your last answer" sgpt --chat rainbow "translate your last answer in french" ``` +Note: +Consider filling in a more specific OS_NAME instead of using $(uname -s). You also can use the provided `Dockerfile` to build your own image: ```shell docker build -t sgpt . ``` -Additional documentation: [Azure integration](https://github.com/TheR1D/shell_gpt/wiki/Azure), [Ollama integration](https://github.com/TheR1D/shell_gpt/wiki/Ollama). 
+### Docker + Ollama + +If you want to send your requests to an Ollama instance and run ShellGPT inside a Docker container, you need to adjust the Dockerfile and build the container yourself: the litellm package is needed and env variables need to be set correctly. + +Example Dockerfile: +``` +FROM python:3-slim + +ENV DEFAULT_MODEL=ollama/mistral:7b-instruct-v0.2-q4_K_M +ENV API_BASE_URL=http://10.10.10.10:11434 +ENV USE_LITELLM=true +ENV OPENAI_API_KEY=bad_key +ENV SHELL_INTERACTION=false +ENV PRETTIFY_MARKDOWN=false +ENV OS_NAME="Red Hat Enterprise Linux 8.6 (Ootpa)" +ENV SHELL_NAME=auto + +WORKDIR /app +COPY . /app + +RUN apt-get update && apt-get install -y gcc +RUN pip install --no-cache /app[litellm] && mkdir -p /tmp/shell_gpt + +VOLUME /tmp/shell_gpt + +ENTRYPOINT ["sgpt"] +``` + + +## Additional documentation +* [Azure integration](https://github.com/TheR1D/shell_gpt/wiki/Azure) +* [Ollama integration](https://github.com/TheR1D/shell_gpt/wiki/Ollama) diff --git a/sgpt/app.py b/sgpt/app.py index bd09216..733b7ce 100644 --- a/sgpt/app.py +++ b/sgpt/app.py @@ -57,7 +57,7 @@ def main( rich_help_panel="Assistance Options", ), interaction: bool = typer.Option( - True, + cfg.get("SHELL_INTERACTION") == "true", help="Interactive mode for --shell option.", rich_help_panel="Assistance Options", ), diff --git a/sgpt/config.py b/sgpt/config.py index 5655b03..82a078a 100644 --- a/sgpt/config.py +++ b/sgpt/config.py @@ -34,6 +34,9 @@ "API_BASE_URL": os.getenv("API_BASE_URL", "default"), "PRETTIFY_MARKDOWN": os.getenv("PRETTIFY_MARKDOWN", "true"), "USE_LITELLM": os.getenv("USE_LITELLM", "false"), + "SHELL_INTERACTION": os.getenv("SHELL_INTERACTION", "true"), + "OS_NAME": os.getenv("OS_NAME", "auto"), + "SHELL_NAME": os.getenv("SHELL_NAME", "auto"), # New features might add their own config variables here. 
} diff --git a/sgpt/role.py b/sgpt/role.py index 4671da9..2ae34e1 100644 --- a/sgpt/role.py +++ b/sgpt/role.py @@ -113,6 +113,8 @@ def get_role_name(cls, initial_message: str) -> Optional[str]: @classmethod def _os_name(cls) -> str: + if cfg.get("OS_NAME") != "auto": + return cfg.get("OS_NAME") current_platform = platform.system() if current_platform == "Linux": return "Linux/" + distro_name(pretty=True) @@ -124,6 +126,8 @@ def _os_name(cls) -> str: @classmethod def _shell_name(cls) -> str: + if cfg.get("SHELL_NAME") != "auto": + return cfg.get("SHELL_NAME") current_platform = platform.system() if current_platform in ("Windows", "nt"): is_powershell = len(getenv("PSModulePath", "").split(pathsep)) >= 3