docs(examples): add AutoGPT (#397)

parent: 217dbb448e
commit: 59f7953249
```diff
@@ -1025,7 +1025,7 @@ There is the availability of localai-webui and chatbot-ui in the examples section.

 <details>

-AutoGPT currently doesn't allow to set a different API URL, but there is a PR open for it, so this should be possible soon!
+Yes, see the [examples](https://github.com/go-skynet/LocalAI/tree/master/examples/)!

 </details>
```
```diff
@@ -4,6 +4,13 @@ Here is a list of projects that can easily be integrated with the LocalAI backend.

 ### Projects

+### AutoGPT
+
+_by [@mudler](https://github.com/mudler)_
+
+This example shows how to use AutoGPT with LocalAI.
+
+[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/autoGPT/)
+
 ### Chatbot-UI
```
`examples/autoGPT/.env` (new file, 5 lines):

```
OPENAI_API_KEY=sk---anystringhere
OPENAI_API_BASE=http://api:8080/v1
# Models to preload at start
# Here we configure gpt4all as gpt-3.5-turbo and bert as embeddings
PRELOAD_MODELS=[{"url": "github:go-skynet/model-gallery/gpt4all-j.yaml", "name": "gpt-3.5-turbo"}, { "url": "github:go-skynet/model-gallery/bert-embeddings.yaml", "name": "text-embedding-ada-002"}]
```
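For context on `PRELOAD_MODELS`: it maps two model-gallery entries onto the model names AutoGPT expects from an OpenAI-style API. Once the `api` service is up, one way to confirm both aliases are registered is to query the OpenAI-compatible models endpoint. A minimal check, assuming the host port mapping (`8080`) from the compose file below:

```bash
# List the models LocalAI currently serves; once the gallery downloads
# finish, both "gpt-3.5-turbo" and "text-embedding-ada-002" should appear.
curl http://localhost:8080/v1/models
```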
`examples/autoGPT/README.md` (new file, 32 lines):

# AutoGPT

An example of integration with [AutoGPT](https://github.com/Significant-Gravitas/Auto-GPT).

## Run

```bash
# Clone LocalAI
git clone https://github.com/go-skynet/LocalAI

cd LocalAI/examples/autoGPT

docker-compose run --rm auto-gpt
```

Note: the example automatically downloads the `gpt4all` model because it is released under a permissive license. GPT4All does not seem capable enough to drive AutoGPT, however; WizardLM-7b-uncensored appears to perform better (with `f16: true`).

To use a different model from the [model-gallery](https://github.com/go-skynet/model-gallery), edit `PRELOAD_MODELS` in the `.env` configuration file.

## Without Docker

Run AutoGPT with `OPENAI_API_BASE` pointing to the LocalAI endpoint. If LocalAI is running locally, for instance:

```
OPENAI_API_BASE=http://localhost:8080 python ...
```

Note: AutoGPT needs models named `gpt-3.5-turbo` and `text-embedding-ada-002`. You can preload them in LocalAI at start by setting the following in the environment:

```
PRELOAD_MODELS=[{"url": "github:go-skynet/model-gallery/gpt4all-j.yaml", "name": "gpt-3.5-turbo"}, { "url": "github:go-skynet/model-gallery/bert-embeddings.yaml", "name": "text-embedding-ada-002"}]
```
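Before pointing AutoGPT at the endpoint, it can help to exercise the aliased chat model directly. A small smoke test, assuming LocalAI is reachable on `localhost:8080` and the `gpt-3.5-turbo` alias from `PRELOAD_MODELS` has finished downloading:

```bash
# Send one chat completion to the locally aliased gpt-3.5-turbo model.
curl http://localhost:8080/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "Reply with a single word."}]
      }'
```

If this returns a completion, AutoGPT's `OPENAI_API_BASE` redirection should work against the same endpoint.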
`examples/autoGPT/docker-compose.yaml` (new file, 42 lines):

```yaml
version: "3.9"

services:
  api:
    image: quay.io/go-skynet/local-ai:latest
    ports:
      - 8080:8080
    env_file:
      - .env
    environment:
      - DEBUG=true
      - MODELS_PATH=/models
    volumes:
      - ./models:/models:cached
    command: ["/usr/bin/local-ai"]

  auto-gpt:
    image: significantgravitas/auto-gpt
    depends_on:
      api:
        condition: service_healthy
      redis:
        condition: service_started
    env_file:
      - .env
    environment:
      MEMORY_BACKEND: ${MEMORY_BACKEND:-redis}
      REDIS_HOST: ${REDIS_HOST:-redis}
    profiles: ["exclude-from-up"]
    volumes:
      - ./auto_gpt_workspace:/app/autogpt/auto_gpt_workspace
      - ./data:/app/data
      ## allow auto-gpt to write logs to disk
      - ./logs:/app/logs
      ## uncomment following lines if you want to make use of these files
      ## you must have them existing in the same folder as this docker-compose.yml
      #- type: bind
      #  source: ./azure.yaml
      #  target: /app/azure.yaml
      #- type: bind
      #  source: ./ai_settings.yaml
      #  target: /app/ai_settings.yaml

  redis:
    image: "redis/redis-stack-server:latest"
```
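One note on how this compose file is meant to be driven: because `auto-gpt` sits in the `exclude-from-up` profile, a plain `docker-compose up` starts only `api` and `redis`, and the agent itself is launched on demand with `run` (which is why the README above uses `docker-compose run --rm auto-gpt`). A sketch of that flow, using only standard Compose commands:

```bash
# Start the backing services in the background (auto-gpt is excluded by its profile).
docker-compose up -d api redis

# Watch the LocalAI logs while the preloaded gallery models download (DEBUG=true).
docker-compose logs -f api

# Launch the agent interactively; --rm removes its container on exit.
docker-compose run --rm auto-gpt
```

Compose resolves `condition: service_healthy` from a healthcheck defined either in this file or in the `local-ai` image itself; `run` also starts the declared dependencies if they are not already running.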