Skip to content

Commit 5264a00

Browse files
committed
+ add info about pull minicpm-v
+ load proper env in makefile
1 parent 7ec5a73 commit 5264a00

File tree

3 files changed

+17
-0
lines changed

3 files changed

+17
-0
lines changed

.env.localhost.example

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
#APP_ENV=production # sets the app into prod mode, otherwise dev mode with auto-reload on code changes
22
REDIS_CACHE_URL=redis://localhost:6379/1
33
LLAMA_VISION_PROMPT="You are OCR. Convert image to markdown."
4+
DISABLE_LOCAL_OLLAMA=0
45

56
# CLI settings
67
OCR_URL=http://localhost:8000/ocr/upload

Makefile

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,13 @@ SHELL := /bin/bash
33
export DISABLE_VENV ?= 0
44
export DISABLE_LOCAL_OLLAMA ?= 0
55

6+
# load_env(file): expands to a shell snippet that, if $(1) exists, sources it
# with allexport enabled so every variable defined in the file is exported.
# Requires bash (`source`, `set -o allexport`) — SHELL := /bin/bash is set above.
# NOTE(review): GNU make runs each recipe line in its own shell, so variables
# loaded by this call are visible only on the same recipe line; subsequent
# recipe lines will NOT see them. Confirm call sites either chain the commands
# on one line (…; cmd) or that the invoked script re-reads the env file itself.
define load_env
7+
@if [ -f $(1) ]; then \
8+
echo "Loading environment from $(1)"; \
9+
set -o allexport; source $(1); set +o allexport; \
10+
fi
11+
endef
12+
613
.PHONY: help
714
help:
815
@echo "Available commands:"
@@ -81,6 +88,7 @@ install-requirements:
8188

8289
.PHONY: run
8390
# run: load .env.localhost, then start the local application server via run.sh,
# forwarding the DISABLE_VENV / DISABLE_LOCAL_OLLAMA knobs explicitly.
# NOTE(review): the load_env call below runs in its own shell, so the variables
# it sources are gone before the next recipe line executes — only the two
# explicitly forwarded variables reach run.sh. Verify run.sh loads
# .env.localhost itself, or merge these recipe lines into one shell command.
run:
91+
@$(call load_env,.env.localhost)
8492
@echo "Starting the local application server..."; \
8593
DISABLE_VENV=$(DISABLE_VENV) DISABLE_LOCAL_OLLAMA=$(DISABLE_LOCAL_OLLAMA) ./run.sh
8694

README.md

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -182,6 +182,14 @@ The models and weights of MiniCPM are completely free for academic research. aft
182182
183183
Enabled by default. Use the `strategy=minicpm_v` CLI and URL parameters to select this strategy.
184184
185+
| ⚠️ **Remember to pull the model in Ollama first** |
186+
|---------------------------------------------------------|
187+
| You need to pull the model in Ollama - use the command: |
188+
| `python client/cli.py llm_pull --model minicpm-v` |
189+
| Or, if you have Ollama locally: `ollama pull minicpm-v` |
190+
191+
192+
185193
### `llama_vision`
186194
187195
LLama 3.2 Vision Strategy is licensed on [Meta Community License Agreement](https://ollama.com/library/llama3.2-vision/blobs/0b4284c1f870). Works great for many languages, although due to the number of parameters (90b) this model is probably **the slowest** one.

0 commit comments

Comments
 (0)