## START: Set by rpmautospec
## (rpmautospec version 0.7.3)
## RPMAUTOSPEC: autorelease, autochangelog
%define autorelease(e:s:pb:n) %{?-p:0.}%{lua:
    release_number = 7;
    base_release_number = tonumber(rpm.expand("%{?-b*}%{!?-b:1}"));
    print(release_number + base_release_number - 1);
}%{?-e:.%{-e*}}%{?-s:.%{-s*}}%{!?-n:%{?dist}}
## END: Set by rpmautospec

%global pypi_name ramalama
%global forgeurl https://github.com/containers/%{pypi_name}
# see ramalama/version.py
%global version0 0.2.0
%forgemeta

%global summary RamaLama is a command line tool for working with AI LLM models

%global _python_dist_allow_version_zero 1

Name:           python-%{pypi_name}
# DO NOT TOUCH the Version string!
# The TRUE source of this specfile is:
# https://github.com/containers/ramalama/blob/main/rpm/python-ramalama.spec
# If that's what you're reading, Version must be 0, and will be updated by Packit for
# copr and koji builds.
# If you're reading this on dist-git, the version is automatically filled in by Packit.
Version:        %{version0}
License:        MIT
Release:        %{autorelease}
Summary:        %{summary}
URL:            %{forgeurl}
# Tarball fetched from upstream
Source:         %{forgesource}

BuildArch:      noarch
BuildRequires:  git-core
BuildRequires:  golang
BuildRequires:  go-md2man
BuildRequires:  make
BuildRequires:  pyproject-rpm-macros
BuildRequires:  python%{python3_pkgversion}-argcomplete
BuildRequires:  python%{python3_pkgversion}-devel
BuildRequires:  python%{python3_pkgversion}-pip
BuildRequires:  python%{python3_pkgversion}-setuptools
BuildRequires:  python%{python3_pkgversion}-wheel
%if 0%{?fedora} >= 40
BuildRequires:  python%{python3_pkgversion}-tqdm
%endif

%description
%summary
On first run, RamaLama inspects your system for GPU support, falling back to
CPU support if no GPUs are present. It then uses container engines like Podman
to pull the appropriate OCI image with all of the software necessary to run an
AI Model for your system's setup. This eliminates the need for the user to
configure the system for AI themselves. After initialization, RamaLama will
run the AI Models within a container based on the OCI image.

%package -n python%{python3_pkgversion}-%{pypi_name}
Requires:       podman
%if 0%{?fedora} >= 40
Requires:       python%{python3_pkgversion}-tqdm
# Needed as seen by BZ: 2327515
Requires:       python%{python3_pkgversion}-omlmd
%else
Recommends:     python%{python3_pkgversion}-tqdm
Recommends:     python%{python3_pkgversion}-omlmd
%endif
Summary:        %{summary}
Provides:       %{pypi_name} = %{version}-%{release}

%description -n python%{python3_pkgversion}-%{pypi_name}
%summary
On first run, RamaLama inspects your system for GPU support, falling back to
CPU support if no GPUs are present. It then uses container engines like Podman
to pull the appropriate OCI image with all of the software necessary to run an
AI Model for your system's setup. This eliminates the need for the user to
configure the system for AI themselves. After initialization, RamaLama will
run the AI Models within a container based on the OCI image.
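# Build and install follow the pyproject-rpm-macros workflow:
# %%pyproject_buildrequires generates BuildRequires from the upstream Python
# project metadata (only used on Fedora 40+, matching the conditional below),
# %%pyproject_wheel builds the wheel, %%pyproject_install installs it into the
# buildroot, and %%pyproject_save_files records the installed files for %%files.
# Man pages, shortnames, and shell completions are installed via the upstream
# Makefile targets.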
%if 0%{?fedora} >= 40
%generate_buildrequires
%pyproject_buildrequires
%endif

%prep
%forgeautosetup -p1

%build
%pyproject_wheel

%install
%pyproject_install
%pyproject_save_files -l %{pypi_name}
%{__make} DESTDIR=%{buildroot} PREFIX=%{_prefix} install-docs install-shortnames
%{__make} DESTDIR=%{buildroot} PREFIX=%{_prefix} install-completions

%check
%pyproject_check_import

%files -n python%{python3_pkgversion}-%{pypi_name} -f %{pyproject_files}
%license LICENSE
%doc README.md
%{_bindir}/%{pypi_name}
%{bash_completions_dir}/%{pypi_name}
%{_datadir}/fish/vendor_completions.d/ramalama.fish
%{_datadir}/zsh/vendor-completions/_ramalama
%dir %{_datadir}/%{pypi_name}
%{_datadir}/%{pypi_name}/shortnames.conf
%{_datadir}/%{pypi_name}/ramalama.conf
%{_mandir}/man1/ramalama*.1*
%{_mandir}/man5/ramalama*.5*

%changelog
## START: Generated by rpmautospec
* Tue Nov 26 2024 Stephen Smoogen - 0.2.0-7
- Fix spec file to get CentOS Stream 10 building

* Tue Nov 26 2024 Stephen Smoogen - 0.2.0-6
- Needed to fix to match upstream

* Tue Nov 26 2024 Stephen Smoogen - 0.2.0-5
- Do manual addition of PR #4 items

* Tue Nov 26 2024 Stephen Smoogen - 0.2.0-4
- Fix changes to spec file description and other items found in review

* Tue Nov 26 2024 Stephen Smoogen - 0.2.0-3
- Incorporate upstream PR

* Tue Nov 26 2024 Mikel Olasagasti Uranga - 0.2.0-2
- Switch to go-md2man instead of full name

* Fri Nov 22 2024 Stephen Smoogen - 0.2.0-1
- Update to 0.2.0

* Wed Nov 20 2024 Stephen Smoogen - 0.1.2-1
- Update to 0.1.2 and fix bugzilla issues.

* Thu Nov 14 2024 Stephen Smoogen - 0.1.1-1
- Initial pull of package
## END: Generated by rpmautospec