Learn how to integrate Inferno with system package managers for easy distribution and installation.
This tutorial covers creating packages for Homebrew, Debian (.deb), RPM (.rpm), Windows winget, and Docker, plus a CI release workflow.
Create `inferno.rb`:
# Homebrew formula for Inferno. Fetches the tagged source tarball,
# builds the Rust binary, and installs it into Homebrew's prefix.
class Inferno < Formula
  desc "High-performance AI inference server"
  homepage "https://infernoai.cc"
  url "https://github.com/ringo380/inferno/archive/v0.7.0.tar.gz"
  # Replace with the real digest: `shasum -a 256 v0.7.0.tar.gz`
  sha256 "YOUR_SHA256_HERE"
  license "MIT"

  # The Rust toolchain is needed only at build time.
  depends_on "rust" => :build

  def install
    # std_cargo_args passes --locked, --root, and --path, which is the
    # Homebrew-recommended way to build and install Rust formulae
    # (equivalent to a release build installed into bin).
    system "cargo", "install", *std_cargo_args
  end

  test do
    # Smoke test: the installed binary reports the packaged version.
    assert_match version.to_s, shell_output("#{bin}/inferno --version")
  end
end
# Build and install the formula from the local file (not from a tap)
brew install --build-from-source ./inferno.rb
# Run the formula's `test do` block against the installed binary
brew test inferno
# Lint the formula against Homebrew's style and correctness rules
brew audit --strict inferno
# Debian package staging tree: DEBIAN/ holds the control metadata and
# maintainer scripts; the other directories mirror the paths that will
# be created on the target filesystem at install time.
mkdir -p inferno-deb/DEBIAN
mkdir -p inferno-deb/usr/local/bin
mkdir -p inferno-deb/etc/inferno
mkdir -p inferno-deb/lib/systemd/system
Create `inferno-deb/DEBIAN/control`:
Package: inferno
Version: 0.7.0
Section: utils
Priority: optional
Architecture: amd64
Depends: libc6 (>= 2.27)
Maintainer: Your Name <[email protected]>
Description: High-performance AI inference server
 Inferno is a production-ready GGUF model inference server
 with GPU acceleration and OpenAI-compatible API.
Create `inferno-deb/DEBIAN/postinst`:
#!/bin/bash
# Debian post-installation script: creates the service account and
# runtime directories, then registers the systemd unit. dpkg may re-run
# this script, so every step must be idempotent.
set -e

# Create the system user only if it is missing. Guarding with getent is
# safer than `useradd ... || true`, which would also mask real failures
# (e.g. a read-only /etc).
if ! getent passwd inferno >/dev/null; then
    useradd -r -s /bin/false inferno
fi

# Runtime directories, owned by the service account.
mkdir -p /var/lib/inferno/models
mkdir -p /var/log/inferno
chown -R inferno:inferno /var/lib/inferno /var/log/inferno

# Register the systemd unit only when systemd is actually running;
# under `set -e` an unguarded daemon-reload would abort installation
# inside chroots and containers.
if [ -d /run/systemd/system ]; then
    systemctl daemon-reload
    systemctl enable inferno || true
fi

exit 0
# Copy binary into the staged filesystem tree
cp target/release/inferno inferno-deb/usr/local/bin/
# Set permissions: maintainer scripts must be executable,
# or dpkg-deb refuses to build the package
chmod +x inferno-deb/DEBIAN/postinst
# NOTE(review): inferno-deb/lib/systemd/system is created earlier but
# never populated — copy inferno.service there before building, or the
# `systemctl enable inferno` in postinst has no unit to enable.
# Build .deb
dpkg-deb --build inferno-deb inferno_0.7.0_amd64.deb
# Install
sudo dpkg -i inferno_0.7.0_amd64.deb
Create `inferno.spec`:
Name:           inferno
Version:        0.7.0
Release:        1%{?dist}
Summary:        High-performance AI inference server

License:        MIT
URL:            https://infernoai.cc
Source0:        %{name}-%{version}.tar.gz

BuildRequires:  rust cargo
# Provides the %%{_unitdir} macro and the %%systemd_* scriptlet macros
# used below; without it the build fails on Fedora/RHEL.
BuildRequires:  systemd-rpm-macros
Requires:       systemd

%description
Inferno is a production-ready GGUF model inference server
with GPU acceleration and OpenAI-compatible API.

%prep
%setup -q

%build
cargo build --release

%install
mkdir -p %{buildroot}%{_bindir}
install -m 755 target/release/inferno %{buildroot}%{_bindir}/
mkdir -p %{buildroot}%{_unitdir}
install -m 644 inferno.service %{buildroot}%{_unitdir}/

%post
# %%systemd_post replaces raw systemctl calls: it performs the
# daemon-reload and preset-based enablement required by the Fedora
# packaging guidelines, and is a no-op inside chroots.
%systemd_post inferno.service

%preun
%systemd_preun inferno.service

%postun
%systemd_postun_with_restart inferno.service

%files
%{_bindir}/inferno
%{_unitdir}/inferno.service

%changelog
* Wed Oct 10 2024 Your Name <you@example.com> - 0.7.0-1
- Initial RPM release
# Create build directories
# (sets up the ~/rpmbuild/{SOURCES,SPECS,BUILD,RPMS,SRPMS} tree)
rpmdev-setuptree
# Copy files
cp inferno-0.7.0.tar.gz ~/rpmbuild/SOURCES/
cp inferno.spec ~/rpmbuild/SPECS/
# Build: -ba produces both the binary and the source RPM
rpmbuild -ba ~/rpmbuild/SPECS/inferno.spec
# Install
sudo rpm -i ~/rpmbuild/RPMS/x86_64/inferno-0.7.0-1.x86_64.rpm
Create `inferno.yaml`:
# winget singleton manifest for Inferno.
PackageIdentifier: Inferno.InfernoAI
# Quoted so every parser keeps the version as a string, never a number.
PackageVersion: "0.7.0"
PackageName: Inferno AI
Publisher: Inferno AI
License: MIT
LicenseUrl: https://github.com/ringo380/inferno/blob/main/LICENSE-MIT
ShortDescription: High-performance AI inference server
PackageUrl: https://infernoai.cc
# Installers is a sequence of mappings; the keys of each installer must
# be indented under its `- Architecture` entry to parse as one object.
Installers:
  - Architecture: x64
    InstallerType: msi
    InstallerUrl: https://github.com/ringo380/inferno/releases/download/v0.7.0/inferno-windows-x64.msi
    # Replace with the SHA-256 of the released MSI.
    InstallerSha256: YOUR_SHA256_HERE
ManifestType: singleton
ManifestVersion: "1.0.0"
# Install from local manifest
# (requires enabling LocalManifestFiles via `winget settings`)
winget install --manifest inferno.yaml
# Or from published package in the community repository
winget install Inferno.InfernoAI
# Stage 1: build the release binary with the full Rust toolchain.
# (AS in uppercase — lowercase `as` triggers a Docker lint warning.)
FROM rust:1.75-slim AS builder
WORKDIR /build
COPY . .
RUN cargo build --release

# Stage 2: minimal runtime image containing only the binary and the
# CA bundle needed for outbound TLS.
FROM debian:bookworm-slim
# --no-install-recommends keeps the runtime layer small.
RUN apt-get update && \
    apt-get install -y --no-install-recommends ca-certificates && \
    rm -rf /var/lib/apt/lists/*
COPY --from=builder /build/target/release/inferno /usr/local/bin/

# Run as an unprivileged service account that owns the data directories.
RUN useradd -r -s /bin/false inferno && \
    mkdir -p /data/models /data/cache && \
    chown -R inferno:inferno /data
USER inferno
WORKDIR /data

EXPOSE 8080
# ENTRYPOINT fixes the binary; CMD supplies overridable default args.
ENTRYPOINT ["inferno"]
CMD ["serve", "--host", "0.0.0.0", "--port", "8080"]
# Build multi-platform
# (requires a buildx builder with multi-arch support; --push uploads
# both tags directly to the registry, since multi-arch images cannot
# be loaded into the local daemon)
docker buildx build --platform linux/amd64,linux/arm64 \
-t ringo380/inferno:0.7.0 \
-t ringo380/inferno:latest \
--push .
# Test: map container port 8080 to the host and run the published image
docker run -p 8080:8080 ringo380/inferno:latest
Create `.github/workflows/release.yml`:
name: Release

# Run only when a version tag (v1.2.3, ...) is pushed.
on:
  push:
    tags:
      - 'v*'

jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    steps:
      - uses: actions/checkout@v4

      # actions-rs/toolchain is archived and unmaintained;
      # dtolnay/rust-toolchain is the maintained replacement.
      - name: Install Rust
        uses: dtolnay/rust-toolchain@stable

      - name: Build
        run: cargo build --release

      - name: Create Package
        run: |
          # Platform-specific packaging commands

      - name: Upload Release
        uses: softprops/action-gh-release@v1
        with:
          files: |
            *.deb
            *.rpm
            *.msi