Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
56 changes: 56 additions & 0 deletions containers/ollama/build_container.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
#!/bin/bash -e
# Build an Apptainer (Singularity) container for Ollama by pulling the
# official Docker image from https://hub.docker.com/r/ollama/ollama,
# install it under /hpc/<cluster>/containers/ollama/, and generate the
# matching Lmod modulefile inside this repository.

# TODO: MP should pull a CUDA enabled version instead?
# specify version
VERSION="0.12.11"
TAG=${VERSION}
IMAGE_NAME="ollama"
MODULE_FOLDER="testing/ollama"

echo "Building tag: ${TAG}"

# build the container
module purge
module load apptainer

CONTAINER_NAME="${IMAGE_NAME}_${TAG}.sif"
apptainer build --fakeroot "${CONTAINER_NAME}" "docker://ollama/${IMAGE_NAME}:${TAG}"

# Determine the destination cluster from the Slurm configuration.
# The "nvidia" cluster shares the "mp" filesystem tree, so map it there.
# (With `bash -e`, a failing scontrol/grep pipeline aborts the script.)
CLUSTER=$(scontrol show config | grep ClusterName | grep -oP '= \K.+')
if [ "$CLUSTER" = "nvidia" ]; then
  CLUSTER="mp"
fi

# move container to /hpc/{sys}/containers/
mkdir -p "/hpc/${CLUSTER}/containers/ollama"
mv "${CONTAINER_NAME}" "/hpc/${CLUSTER}/containers/ollama/${CONTAINER_NAME}"

# Generate the Lmod modulefile. The EOL delimiter is unquoted on purpose so
# ${TAG}, ${CLUSTER}, ${IMAGE_NAME} and ${CONTAINER_NAME} expand here at build
# time; the sed strips a two-space indent from the heredoc body.
# NOTE(review): the source_sh path below hardcodes /hpc/m3 while the container
# path uses ${CLUSTER} — confirm whether mp/nvidia needs its own helper copy.
mkdir -p "../../modules/${CLUSTER}/applications/${MODULE_FOLDER}"
MODULE_FILE="../../modules/${CLUSTER}/applications/${MODULE_FOLDER}/${VERSION}.lua"
sed 's/^ \{2\}//' > "$MODULE_FILE" << EOL

help([[
Name: Ollama
Version: ${TAG}
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])
whatis("Name: Ollama")
whatis("Version: ${IMAGE_NAME}:${TAG}")
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/${IMAGE_NAME}")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
family("Ollama")

always_load('apptainer')
local sif_file = '/hpc/${CLUSTER}/containers/ollama/${CONTAINER_NAME}'

setenv('CONTAINER_IMAGE', sif_file)
source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")

EOL
22 changes: 22 additions & 0 deletions modules/m3/applications/testing/ollama/0.12.11.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@

-- Lmod modulefile: Ollama 0.12.11, provided through an Apptainer container.
-- NOTE(review): this file matches the heredoc emitted by
-- containers/ollama/build_container.sh — regenerate via that script rather
-- than editing by hand, so the two stay in sync.
help([[
Name: Ollama
Version: 0.12.11
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])
-- Metadata surfaced by `module whatis`.
whatis("Name: Ollama")
whatis("Version: ollama:0.12.11")
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
family("Ollama")

-- The image is executed through Apptainer, so pull that module in as well.
always_load('apptainer')
-- Path to the container image installed by build_container.sh.
local sif_file = '/hpc/m3/containers/ollama/ollama_0.12.11.sif'

-- Expose the image path to downstream tooling, then source the helper
-- script (presumably defines user-facing ollama wrappers — confirm).
setenv('CONTAINER_IMAGE', sif_file)
source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")