When using `localaictl install --lxc`:
1. If podman/docker is available: extracts the rootfs from the Docker image — includes ALL backends (llama-cpp, whisper, etc.) — and creates an LXC container with full LocalAI capabilities.
2. If neither docker nor podman is available: falls back to the standalone binary, with limited backend support.
This gives the best of both worlds: LXC's lightweight container management plus the full Docker image backends.
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
77 lines · 2.0 KiB · Makefile
include $(TOPDIR)/rules.mk

# Package identity — conventional OpenWrt ordering: name, version, release.
PKG_NAME:=secubox-app-localai
PKG_VERSION:=0.1.0
PKG_RELEASE:=8

# NOTE(review): PKG_ARCH is not a variable the OpenWrt build system reads at
# this level; architecture independence is declared via PKGARCH:=all inside
# the Package define. Kept to avoid churn — confirm and drop in a follow-up.
PKG_ARCH:=all

PKG_MAINTAINER:=CyberMind Studio <contact@cybermind.fr>
PKG_LICENSE:=Apache-2.0

include $(INCLUDE_DIR)/package.mk
# Package metadata consumed by the OpenWrt buildroot: menuconfig placement
# (Utilities -> SecuBox Apps), title, and runtime dependencies.
# PKGARCH:=all marks the package architecture-independent (it installs only
# scripts and config — see the install recipe). Comments are kept outside
# the define because its body is parsed verbatim by the metadata scanner.
define Package/secubox-app-localai
SECTION:=utils
CATEGORY:=Utilities
PKGARCH:=all
SUBMENU:=SecuBox Apps
TITLE:=SecuBox LocalAI - Self-hosted LLM (Docker)
DEPENDS:=+uci +libuci +jsonfilter +wget-ssl
endef
# Long description shown by opkg / the package browser. The body is emitted
# as-is, so comments stay outside the define.
define Package/secubox-app-localai/description
LocalAI - Self-hosted, privacy-first AI/LLM for SecuBox-powered OpenWrt systems.

Features:
- OpenAI-compatible API (drop-in replacement)
- No cloud dependency - all processing on-device
- Support for various models (LLaMA, Mistral, Phi, etc.)
- All backends included (llama-cpp, whisper, etc.)
- Text generation, embeddings, transcription

Runs in Docker/Podman container with all backends.
Configure in /etc/config/localai.
endef
# Register /etc/config/localai as a conffile so opkg preserves user edits
# across package upgrades instead of overwriting them.
define Package/secubox-app-localai/conffiles
/etc/config/localai
endef
# Nothing to compile: the package ships only shell scripts and a UCI config
# (see the install recipe), so the build step is intentionally a no-op.
define Build/Compile
endef
# Install the runtime files into the staging root $(1):
#   /etc/config/localai   - UCI configuration (registered above as a conffile)
#   /etc/init.d/localai   - service init script
#   /usr/sbin/localaictl  - management CLI (image pull, model install, ...)
# $(INSTALL_CONF) vs $(INSTALL_BIN) sets the appropriate file modes.
define Package/secubox-app-localai/install
	$(INSTALL_DIR) $(1)/etc/config
	$(INSTALL_CONF) ./files/etc/config/localai $(1)/etc/config/localai

	$(INSTALL_DIR) $(1)/etc/init.d
	$(INSTALL_BIN) ./files/etc/init.d/localai $(1)/etc/init.d/localai

	$(INSTALL_DIR) $(1)/usr/sbin
	$(INSTALL_BIN) ./files/usr/sbin/localaictl $(1)/usr/sbin/localaictl
endef
# Post-install hook: prints quick-start instructions when installing on a
# live system. The whole banner is skipped during image builds, where
# IPKG_INSTROOT is set (installing into a staging root, not a device).
# $$ escapes are required so make passes a literal $ through to the shell.
define Package/secubox-app-localai/postinst
#!/bin/sh
[ -n "$${IPKG_INSTROOT}" ] || {
	echo ""
	echo "LocalAI installed (Docker/Podman version)."
	echo ""
	echo "Prerequisites: Install podman or docker first"
	echo " opkg install podman"
	echo ""
	echo "To install and start LocalAI:"
	echo " localaictl install # Pull Docker image (~2-4GB)"
	echo " /etc/init.d/localai start"
	echo ""
	echo "API endpoint: http://<router-ip>:8080/v1"
	echo "Web UI: http://<router-ip>:8080"
	echo ""
	echo "Download models with:"
	echo " localaictl model-install tinyllama"
	echo ""
}
exit 0
endef
# Instantiate all build/package rules from the definitions above.
$(eval $(call BuildPackage,secubox-app-localai))