%global debug_package %{nil}

Name:           ollama
Version:        0.15.2
Release:        1%{?dist}
Summary:        Get up and running with large language models locally
License:        MIT
URL:            https://github.com/ollama/ollama
Source0:        https://github.com/ollama/ollama/releases/download/v%{version}/ollama-linux-amd64.tar.zst

BuildRequires:  systemd-rpm-macros
BuildRequires:  tar
BuildRequires:  zstd
Requires:       systemd
# useradd/groupadd in %%pre are provided by shadow-utils
Requires(pre):  shadow-utils

%description
Get up and running with large language models locally.

%prep
%setup -q -c -n %{name}-%{version}

%build
# No build needed

%install
# Install the binary
mkdir -p %{buildroot}%{_bindir}
install -m 755 bin/ollama %{buildroot}%{_bindir}/ollama

# Generate the service file (unit contents mirror the upstream install script)
mkdir -p %{buildroot}%{_unitdir}
cat > %{buildroot}%{_unitdir}/ollama.service <<EOF
[Unit]
Description=Ollama Service
After=network-online.target

[Service]
ExecStart=%{_bindir}/ollama serve
User=ollama
Group=ollama
Restart=always
RestartSec=3

[Install]
WantedBy=default.target
EOF

%pre
# Create the ollama system user and group on first install
getent group ollama >/dev/null || groupadd -r ollama
getent passwd ollama >/dev/null || \
    useradd -r -g ollama -d /usr/share/ollama -s /sbin/nologin \
    -c "Ollama Service User" ollama
exit 0

%post
%systemd_post ollama.service

%preun
%systemd_preun ollama.service

%postun
%systemd_postun_with_restart ollama.service

%files
%{_bindir}/ollama
%{_unitdir}/ollama.service

%changelog
* Mon Jan 26 2026 Robbware - 0.15.2
- Bump to latest version