author     Lucio Sauer <watermanpaint@posteo.net>  2024-07-11 14:05:44 +0200
committer  Lucio Sauer <watermanpaint@posteo.net>  2024-07-11 14:10:29 +0200
commit     fe3d951dfad11657718b3de5e7fbf8d2d83d1724 (patch)
tree       96e1e7151fc9add7c3fd21ac2c33d9627a2b3330 /app-misc
parent     dev-cpp/simdutf: drop 5.2.8 (diff)
app-misc/ollama: move from dev-ml to app-misc
Could be moved into a new category together with other end-user programs related to machine learning in the future.

Signed-off-by: Lucio Sauer <watermanpaint@posteo.net>
Diffstat (limited to 'app-misc')
-rw-r--r--  app-misc/ollama/files/ollama        30
-rw-r--r--  app-misc/ollama/metadata.xml        16
-rw-r--r--  app-misc/ollama/ollama-9999.ebuild  74
3 files changed, 120 insertions(+), 0 deletions(-)
diff --git a/app-misc/ollama/files/ollama b/app-misc/ollama/files/ollama
new file mode 100644
index 000000000..cbd787768
--- /dev/null
+++ b/app-misc/ollama/files/ollama
@@ -0,0 +1,30 @@
+#!/sbin/openrc-run
+
+description="Ollama Service"
+command="/usr/bin/ollama"
+command_args="serve"
+command_user="ollama"
+command_group="ollama"
+command_background="yes"
+pidfile="/run/ollama.pid"
+log="/var/log/ollama.log"
+
+# Ollama allows cross-origin requests from 127.0.0.1 and 0.0.0.0 by default.
+# Additional origins can be configured with OLLAMA_ORIGINS.
+# export OLLAMA_ORIGINS="<ip>"
+
+start() {
+ ebegin "Starting $description"
+ exec >> >(logger -t "$RC_SVCNAME Start daemon" -p daemon.info)
+	start-stop-daemon --start --background --user "$command_user" --group "$command_group" \
+		--pidfile "$pidfile" --make-pidfile --stdout "$log" --stderr "$log" --exec "$command" -- $command_args
+ eend $?
+}
+
+stop() {
+ ebegin "Stopping $description"
+ exec >> >(logger -t "$RC_SVCNAME Stop daemon" -p daemon.info)
+ start-stop-daemon --stop --pidfile "$pidfile"
+ eend $?
+}
+
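
The init script added above is plain OpenRC; a minimal usage sketch, assuming the standard
rc-update/rc-service tools and that environment overrides such as OLLAMA_ORIGINS go in
/etc/conf.d/ollama (a conventional OpenRC location, not installed by this package):

    # enable the service at boot and start it now
    rc-update add ollama default
    rc-service ollama start

    # optional: allow additional browser origins for the Ollama API; the conf.d file
    # and the example origin below are assumptions for illustration only
    echo 'export OLLAMA_ORIGINS="http://192.168.1.10:3000"' >> /etc/conf.d/ollama
    rc-service ollama restart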
diff --git a/app-misc/ollama/metadata.xml b/app-misc/ollama/metadata.xml
new file mode 100644
index 000000000..352043225
--- /dev/null
+++ b/app-misc/ollama/metadata.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE pkgmetadata SYSTEM "https://www.gentoo.org/dtd/metadata.dtd">
+<pkgmetadata>
+ <maintainer type="person" proxied="yes">
+ <email>zdanevich.vitaly@ya.ru</email>
+ <name>Vitaly Zdanevich</name>
+ <description>A copy from https://github.com/MrPenguin07/ebuilds/tree/master/dev-ml/ollama</description>
+ </maintainer>
+ <use>
+		<flag name="nvidia">Add support for NVIDIA GPU acceleration (CUDA)</flag>
+		<flag name="amd">Add support for AMD GPU acceleration (ROCm)</flag>
+ </use>
+ <upstream>
+ <remote-id type="github">ollama/ollama</remote-id>
+ </upstream>
+</pkgmetadata>
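
The nvidia and amd USE flags declared in the metadata above correspond to the optional CUDA
and ROCm dependencies in the ebuild below; a minimal sketch of enabling one of them, assuming
the usual /etc/portage/package.use layout (the file name is illustrative):

    # /etc/portage/package.use/ollama
    app-misc/ollama nvidia
    # or, for ROCm-capable AMD GPUs:
    #app-misc/ollama amd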
diff --git a/app-misc/ollama/ollama-9999.ebuild b/app-misc/ollama/ollama-9999.ebuild
new file mode 100644
index 000000000..3db009887
--- /dev/null
+++ b/app-misc/ollama/ollama-9999.ebuild
@@ -0,0 +1,74 @@
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+inherit git-r3 go-module
+
+DESCRIPTION="Get up and running with Llama 3, Mistral, Gemma, and other language models"
+HOMEPAGE="https://ollama.com"
+EGIT_REPO_URI="https://github.com/ollama/ollama.git"
+LICENSE="MIT"
+SLOT="0"
+
+IUSE="nvidia amd"
+
+RDEPEND="
+ acct-group/ollama
+ acct-user/ollama
+"
+IDEPEND="${RDEPEND}"
+BDEPEND="
+ >=dev-lang/go-1.21.0
+ >=dev-build/cmake-3.24
+ >=sys-devel/gcc-11.4.0
+ nvidia? ( dev-util/nvidia-cuda-toolkit )
+ amd? (
+ sci-libs/clblast
+ dev-libs/rocm-opencl-runtime
+ )
+"
+
+pkg_pretend() {
+ if use amd; then
+		ewarn "AMD and NVIDIA support in this ebuild is experimental"
+		einfo "If you run into issues, especially when compiling dev-libs/rocm-opencl-runtime,"
+		einfo "you may try the Docker image at https://github.com/ROCm/ROCm-docker"
+		einfo "and follow the instructions at"
+		einfo "https://rocm.docs.amd.com/projects/install-on-linux/en/latest/how-to/docker.html"
+ fi
+}
+
+src_unpack() {
+ git-r3_src_unpack
+ go-module_live_vendor
+}
+
+src_compile() {
+ VERSION=$(
+ git describe --tags --first-parent --abbrev=7 --long --dirty --always \
+ | sed -e "s/^v//g"
+ assert
+ )
+ export GOFLAGS="'-ldflags=-w -s \"-X=github.com/ollama/ollama/version.Version=${VERSION}\"'"
+
+ ego generate ./...
+ ego build .
+}
+
+src_install() {
+ dobin ollama
+ doinitd "${FILESDIR}"/ollama
+}
+
+pkg_preinst() {
+	mkdir -p "${ED}"/var/log && touch "${ED}"/var/log/ollama.log || die
+	fowners ollama:ollama /var/log/ollama.log
+}
+
+pkg_postinst() {
+ einfo "Quick guide:"
+ einfo "ollama serve"
+ einfo "ollama run llama3:70b"
+ einfo "See available models at https://ollama.com/library"
+}
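
Putting the pieces together, a sketch of installing the live ebuild and then following the
pkg_postinst quick guide, assuming the GURU overlay is managed with eselect-repository
(these commands are standard Portage/Ollama usage, not defined by the ebuild itself):

    eselect repository enable guru
    emerge --sync guru
    # live (9999) ebuilds carry no keywords and must be accepted explicitly
    echo 'app-misc/ollama **' >> /etc/portage/package.accept_keywords/ollama
    emerge --ask app-misc/ollama

    ollama serve &
    ollama run llama3:70b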