# Generated by go2rpm 1.14.0
%bcond check 1

# Architectures where the vendored GGML backends are built.
%global ggml_arches x86_64 aarch64
%global rocm_arches x86_64
%global ggml_license ml/backend/ggml/ggml/LICENSE

%ifarch %{ggml_arches}
%bcond ggml 1
%else
%bcond ggml 0
%endif

%ifarch %{rocm_arches}
%if 0%{?fedora} >= 42
%bcond rocm 1
%else
# The ROCm BuildRequires below are only available on Fedora 42+;
# default the bcond off explicitly instead of leaving it undefined.
%bcond rocm 0
%endif
%else
%bcond rocm 0
%endif

# build problems on ppc64le and s390x
ExcludeArch:    ppc64le s390x

# Use a separate CMake build directory per GGML preset
# (ggml_preset is redefined before each %%cmake invocation below).
%define _vpath_builddir %{_vendor}-%{_target_os}-build_%{ggml_preset}

# The GGML libraries under %%{_libdir}/ollama are private to ollama:
# keep their sonames out of the distribution Provides/Requires sets.
%global ggml_privatelibs libggml-.*\\.so.*
%global __provides_exclude_from ^(%{_libdir}/ollama/%{ggml_privatelibs})$
%global __requires_exclude ^(%{ggml_privatelibs})$

# https://github.com/ollama/ollama
%global goipath         github.com/ollama/ollama
%global forgeurl        https://github.com/ollama/ollama
Version:                0.12.3

%gometa -L -f

%global common_description %{expand:
Get up and running with OpenAI gpt-oss, DeepSeek-R1, Gemma 3 and other models.}

%global golicenses      LICENSE
%global godocs          docs CONTRIBUTING.md README.md SECURITY.md

Name:           ollama
Release:        %autorelease
Summary:        Get up and running with OpenAI gpt-oss, DeepSeek-R1, Gemma 3 and other models

License:        Apache-2.0 AND BSD-2-Clause AND BSD-3-Clause AND BSL-1.0 AND CC-BY-3.0 AND CC-BY-4.0 AND CC0-1.0 AND ISC AND LicenseRef-Fedora-Public-Domain AND LicenseRef-scancode-protobuf AND MIT AND NCSA AND NTP AND OpenSSL AND ZPL-2.1 AND Zlib
URL:            %{gourl}
Source0:        %{gosource}
# Generated by go-vendor-tools
Source1:        https://github.com/Fachep/ollama-rpm/releases/download/v%{version}/vendor.tar.bz2
Source2:        go-vendor-tools.toml
Source3:        LICENSE.sentencepiece
Source4:        sysconfig-ollama
Source5:        ollama.service
Source6:        ollama-user.conf

# Fixup ollama to look at gtt vs vram mem and enable iGPU's
# https://github.com/Crandel/ollama-amd-igpu.git
# d70b7b91791181c0bca95dbbc223f9f3543f3747
# Never applied since cb027a828de742cf4ecee6261866c228f6acedb9
# Patch broken and upstream archived, so dropping for now
# Patch0: 0001-rewrite-for-ollama-4.0.patch

# Remove cuda and rocm runtime from cmake install targets
Patch0:         remove-runtime-for-cuda-and-rocm.patch
# Temporary patch to work with _libdir
Patch1:         replace-library-paths.patch
# https://github.com/pdevine/tensor/pull/2
Patch2:         vendor-pdevine-tensor-fix-cannonical-import-paths.patch

BuildRequires:  systemd-rpm-macros
%{?sysusers_requires_compat}
BuildRequires:  gcc-c++
Provides:       bundled(llama-cpp) = b6121

%if %{with ggml}
BuildRequires:  cmake
BuildRequires:  go-vendor-tools
Recommends:     ollama-ggml
%if %{with rocm}
BuildRequires:  hipblas-devel
BuildRequires:  rocblas-devel
BuildRequires:  rocm-comgr-devel
BuildRequires:  rocm-runtime-devel
BuildRequires:  rocm-hip-devel
BuildRequires:  rocminfo
%endif
%endif

%description %{common_description}

%prep
%goprep -A
%setup -q -T -D -a1 %{forgesetupargs}
%autopatch -p1
# LICENSE for convert/sentencepiece_model.proto
cp %{S:3} convert/sentencepiece/LICENSE

%generate_buildrequires
%go_vendor_license_buildrequires -c %{S:2}

%build
%global gomodulesmode GO111MODULE=on
# Point ollama at %%{_libdir} for its private GGML backends and mark the
# server as a release build.
export GO_LDFLAGS=" \
  -X github.com/ollama/ollama/ml/backend/ggml/ggml/src.libDir=%{_libdir} \
  -X github.com/ollama/ollama/discover.libDir=%{_libdir} \
  -X github.com/ollama/ollama/server.mode=release"
%gobuild -o %{gobuilddir}/bin/ollama %{goipath}

# build ggml backends
%if %{with ggml}
# ggml-cpu
%global ggml_preset ggml-cpu
%cmake --preset "CPU"
%cmake_build --target ggml-cpu
# ggml-rocm
%if %{with rocm}
%global ggml_preset ggml-rocm-6
%cmake --preset "ROCm 6"
%cmake_build --target ggml-hip
%endif
%endif

%install
%go_vendor_license_install -c %{S:2}
install -m 0755 -vd %{buildroot}%{_bindir}
install -m 0755 -vp %{gobuilddir}/bin/* %{buildroot}%{_bindir}/
install -m 0755 -vd %{buildroot}%{_sysconfdir}/sysconfig
install -m 0644 -vp %{S:4} %{buildroot}%{_sysconfdir}/sysconfig/ollama
install -m 0755 -vd %{buildroot}%{_unitdir}
install -m 0644 -vp %{S:5} %{buildroot}%{_unitdir}/ollama.service
install -m 0755 -vd %{buildroot}%{_sysusersdir}
install -m 0644 -vp %{S:6} %{buildroot}%{_sysusersdir}/ollama.conf
install -m 0755 -vd %{buildroot}%{_sharedstatedir}
install -m 0755 -vd %{buildroot}%{_sharedstatedir}/ollama
%if %{with ggml}
# ggml-cpu
%global ggml_preset ggml-cpu
%cmake_install --component "CPU"
# ggml-rocm
%if %{with rocm}
%global ggml_preset ggml-rocm-6
%cmake_install --component "HIP"
%endif
%endif

%pre
%sysusers_create_compat %{S:6}

%post
%systemd_post ollama.service

%preun
%systemd_preun ollama.service

%postun
%systemd_postun_with_restart ollama.service

%check
%go_vendor_license_check -c %{S:2}
%if %{with check}
%gocheck
%endif

%files -f %{go_vendor_license_filelist}
%license vendor/modules.txt
%doc %{godocs}
%{_bindir}/ollama
%config(noreplace) %{_sysconfdir}/sysconfig/ollama
%{_unitdir}/ollama.service
%{_sysusersdir}/ollama.conf
%attr(-,ollama,ollama) %dir %{_sharedstatedir}/ollama

%if %{with ggml}
%package ggml
Summary:        GGML base library vendored by ollama
License:        MIT
Requires:       ollama%{?_isa} = %{version}-%{release}
Provides:       bundled(llama-cpp) = b6121

%description ggml %{common_description}
This package contains the GGML base library vendored by ollama.

%files ggml
%license %{ggml_license}
%{_libdir}/ollama/libggml-base.so

%package ggml-cpu
Summary:        GGML CPU backends for ollama
License:        MIT
Requires:       ollama-ggml%{?_isa} = %{version}-%{release}
Provides:       bundled(llama-cpp) = b6121
Supplements:    ollama-ggml%{?_isa}

%description ggml-cpu %{common_description}
This package contains the GGML CPU backends for ollama.

%files ggml-cpu
%license %{ggml_license}
%{_libdir}/ollama/libggml-cpu*.so

%if %{with rocm}
%package ggml-rocm
Summary:        GGML ROCm backend for ollama
License:        MIT
Requires:       ollama-ggml%{?_isa} = %{version}-%{release}
Provides:       bundled(llama-cpp) = b6121
# Boolean (rich) dependencies must be parenthesized. Pull this backend in
# automatically when both the GGML base package and the ROCm HIP runtime
# are installed.
Supplements:    (ollama-ggml%{?_isa} and rocm-hip%{?_isa})

%description ggml-rocm %{common_description}
This package contains the GGML ROCm backend for ollama.

%files ggml-rocm
%license %{ggml_license}
%{_libdir}/ollama/libggml-hip.so
%endif
%endif

%changelog
%autochangelog