diff --git a/containers/ollama/build_container.sh b/containers/ollama/build_container.sh
new file mode 100755
index 00000000..3507827c
--- /dev/null
+++ b/containers/ollama/build_container.sh
@@ -0,0 +1,56 @@
+#!/bin/bash -e
+# build singularity container for Ollama
+# just pull docker image from https://hub.docker.com/r/ollama/ollama
+
+# TODO: MP should pull a CUDA enabled version instead?
+# specify version
+VERSION="0.12.11"
+TAG=${VERSION}
+IMAGE_NAME="ollama"
+MODULE_FOLDER="testing/ollama"
+
+echo "Building tag: ${TAG}"
+
+# build the container
+module purge
+module load apptainer
+
+CONTAINER_NAME=${IMAGE_NAME}_${TAG}.sif
+apptainer build --fakeroot ${CONTAINER_NAME} docker://ollama/${IMAGE_NAME}:${TAG}
+
+# move container to /hpc/{sys}/containers/
+CLUSTER=$(scontrol show config | grep ClusterName | grep -oP '= \K.+')
+if [ "$CLUSTER" = "nvidia" ]; then
+  CLUSTER="mp"
+fi
+mkdir -p /hpc/${CLUSTER}/containers/ollama
+mv ${CONTAINER_NAME} /hpc/${CLUSTER}/containers/ollama/${CONTAINER_NAME}
+
+mkdir -p ../../modules/${CLUSTER}/applications/${MODULE_FOLDER}
+MODULE_FILE=../../modules/${CLUSTER}/applications/${MODULE_FOLDER}/${VERSION}.lua
+(
+sed 's/^ \{2\}//' > "$MODULE_FILE" << EOL
+
+help([[
+Name: Ollama
+Version: ${TAG}
+Website: https://ollama.com/
+
+Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine.
+
+]])
+whatis("Name: Ollama")
+whatis("Version: ${IMAGE_NAME}:${TAG}")
+whatis("Category: Ollama")
+whatis("URL: https://hub.docker.com/r/ollama/${IMAGE_NAME}")
+whatis("Description: Provides access to Ollama through a container built with Apptainer")
+family("Ollama")
+
+always_load('apptainer')
+local sif_file = '/hpc/${CLUSTER}/containers/ollama/${CONTAINER_NAME}'
+
+setenv('CONTAINER_IMAGE', sif_file)
+source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")
+
+EOL
+)
diff --git a/modules/m3/applications/testing/ollama/0.12.11.lua b/modules/m3/applications/testing/ollama/0.12.11.lua
new file mode 100644
index 00000000..35412c91
--- /dev/null
+++ b/modules/m3/applications/testing/ollama/0.12.11.lua
@@ -0,0 +1,22 @@
+
+help([[
+Name: Ollama
+Version: 0.12.11
+Website: https://ollama.com/
+
+Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine.
+
+]])
+whatis("Name: Ollama")
+whatis("Version: ollama:0.12.11")
+whatis("Category: Ollama")
+whatis("URL: https://hub.docker.com/r/ollama/ollama")
+whatis("Description: Provides access to Ollama through a container built with Apptainer")
+family("Ollama")
+
+always_load('apptainer')
+local sif_file = '/hpc/m3/containers/ollama/ollama_0.12.11.sif'
+
+setenv('CONTAINER_IMAGE', sif_file)
+source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")
+
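
A minimal usage sketch, assuming the applications tree is on MODULEPATH so the generated module is named testing/ollama/0.12.11. The real run commands come from /hpc/m3/apps/ollama/helper_scripts/ollama.sh, which the module sources but which is not part of this change, so the apptainer invocations below are illustrative assumptions, not the helper's actual interface; the module itself only guarantees that CONTAINER_IMAGE points at the .sif file.

# load the generated module; this loads apptainer and sets CONTAINER_IMAGE
module load testing/ollama/0.12.11

# hypothetical direct use of the container (GPU passthrough via --nv)
apptainer exec --nv "$CONTAINER_IMAGE" ollama serve &
apptainer exec --nv "$CONTAINER_IMAGE" ollama run llama3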