#!/sbin/openrc-run
# OpenRC service script for llama-server (llama.cpp HTTP inference server).
# Runtime configuration (notably command_args) comes from /etc/conf.d/llama-server.

description="HTTP Server for LLM inference"

command=/usr/bin/llama-server

# Default to a dedicated unprivileged user:group unless conf.d overrides it.
# Quoted per ShellCheck SC2223 so the assigned value is never field-split.
: "${command_user:=llama-server:llama-server}"

# Drop the ability to gain new privileges (e.g. via setuid binaries).
no_new_privs="yes"

# Run under supervise-daemon so the service is restarted if it crashes.
supervisor="supervise-daemon"

start_pre() {
	# llama-server needs at least a model argument; refuse to start with an
	# empty command line rather than letting the daemon fail less legibly.
	if [ -z "${command_args}" ]; then
		eerror "command_args not specified in /etc/conf.d/llama-server"
		return 1
	fi
}