Skip to content

Commit d6e3b97

Browse files
committed
stuff
1 parent 0039161 commit d6e3b97

8 files changed

+289
-28
lines changed

.dockerignore

+2
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
models/
2+
db/

.env

+8
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
DEBUG=true
2+
MODELS_PATH=/models
3+
GALLERIES=[{"name":"model-gallery", "url":"github:go-skynet/model-gallery/index.yaml"}, {"url": "github:go-skynet/model-gallery/huggingface.yaml","name":"huggingface"}]
4+
PRELOAD_MODELS=[{"id":"model-gallery@stablediffusion"},{"id":"model-gallery@voice-en-us-kathleen-low"},{"url": "github:go-skynet/model-gallery/base.yaml", "name": "all-MiniLM-L6-v2", "overrides": {"embeddings": true, "backend":"huggingface-embeddings", "parameters": {"model": "all-MiniLM-L6-v2"}}}, {"id": "huggingface@thebloke/wizardlm-13b-v1.0-uncensored-ggml/wizardlm-13b-v1.0-uncensored.ggmlv3.q4_0.bin", "name": "functions", "overrides": { "context_size": 2048, "template": {"chat": "", "completion": "" }, "roles": { "assistant": "ASSISTANT:", "system": "SYSTEM:", "assistant_function_call": "FUNCTION_CALL:", "function": "FUNCTION CALL RESULT:" }, "parameters": { "temperature": 0.1, "top_k": 40, "top_p": 0.95, "rope_freq_base": 10000.0, "rope_freq_scale": 1.0 }, "function": { "disable_no_action": true }, "mmap": true, "f16": true }},{"id": "huggingface@thebloke/wizardlm-13b-v1.0-uncensored-ggml/wizardlm-13b-v1.0-uncensored.ggmlv3.q4_k_m.bin", "name":"gpt-4", "overrides": { "context_size": 2048, "mmap": true, "f16": true, "parameters": { "temperature": 0.1, "top_k": 40, "top_p": 0.95, "rope_freq_base": 10000.0, "rope_freq_scale": 1.0 }}}]
5+
OPENAI_API_KEY=sk---
6+
OPENAI_API_BASE=http://api:8080
7+
IMAGE_PATH=/tmp
8+
THREADS=14

.gitignore

+2
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
db/
2+
models/

Dockerfile

+18
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
FROM python:3.10-bullseye
2+
WORKDIR /app
3+
COPY ./requirements.txt /app/requirements.txt
4+
RUN pip install --no-cache-dir -r requirements.txt
5+
6+
7+
ENV DEBIAN_FRONTEND noninteractive
8+
9+
# Install package dependencies
10+
RUN apt-get update -y && \
11+
apt-get install -y --no-install-recommends \
12+
alsa-utils \
13+
libsndfile1-dev && \
14+
apt-get clean
15+
16+
COPY . /app
17+
18+
ENTRYPOINT [ "python", "./main.py" ];

README.md

+35-1
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,36 @@
11
# miniAGI
2-
100% Local, mini-AGI
2+
3+
From the [LocalAI](https://localai.io) author, miniAGI. 100% Local AI assistant.
4+
5+
Note: this is a fun project, not a serious one. It's a toy, not a tool. Be warned!
6+
7+
## What is miniAGI?
8+
9+
It is a dead simple experiment to show how to tie the various LocalAI functionalities to create a virtual assistant that can do tasks. It is simple on purpose, trying to be minimalistic and easy to understand and customize.
10+
11+
## Quick start
12+
13+
No frills, just run docker-compose and start chatting with your virtual assistant:
14+
15+
```bash
16+
docker-compose run --build -i --rm miniagi
17+
```
18+
19+
## Roadmap
20+
21+
- [x] 100% Local, with Local AI. NO API KEYS NEEDED!
22+
- [x] Create a simple virtual assistant
23+
- [x] Make the virtual assistant do functions like store long-term memory and autonomously search between them when needed
24+
- [ ] Create the assistant avatar with Stable Diffusion
25+
- [ ] Give it a voice (push-to-talk or wake word)
26+
- [ ] Get voice input
27+
- [ ] Make a REST API (OpenAI compliant?) so it can be used by e.g. a third-party service
28+
- [ ] Take a system prompt so it can act as a "character" (e.g. "answer in Rick and Morty style")
29+
30+
## Development
31+
32+
Run docker-compose with main.py checked-out:
33+
34+
```bash
35+
docker-compose run -v ./main.py:/app/main.py -i --rm miniagi
36+
```

docker-compose.yaml

+24
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
version: "3.9"
services:
  api:
    image: quay.io/go-skynet/local-ai:master
    ports:
      - 8090:8080
    env_file:
      - .env
    volumes:
      - ./models:/models:cached
    command: ["/usr/bin/local-ai"]
    # miniagi below waits on `condition: service_healthy`; without a
    # healthcheck defined here that condition can never be satisfied and
    # Compose refuses to start the dependent service.
    # NOTE(review): assumes the local-ai image ships curl and exposes the
    # /readyz endpoint on 8080 — confirm against the image; swap for
    # wget or the app's own probe if not.
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]
      interval: 10s
      timeout: 5s
      retries: 30
  miniagi:
    build:
      context: .
      dockerfile: Dockerfile
    # Pass the host sound device through for audio in/out.
    devices:
      - /dev/snd
    depends_on:
      api:
        condition: service_healthy
    # Persist the long-term memory database on the host.
    volumes:
      - ./db:/app/db
    env_file:
      - .env

0 commit comments

Comments
 (0)