From 7dd9624b02ee1dac8eeb5f849255af6e5ff7bd4d Mon Sep 17 00:00:00 2001
From: John LaGrone
Date: Mon, 24 Nov 2025 14:12:38 -0600
Subject: [PATCH] add ollama testing to superpod

---
 modules/mp/apps/testing/ollama/0.12.11.lua | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)
 create mode 100644 modules/mp/apps/testing/ollama/0.12.11.lua

diff --git a/modules/mp/apps/testing/ollama/0.12.11.lua b/modules/mp/apps/testing/ollama/0.12.11.lua
new file mode 100644
index 00000000..35412c91
--- /dev/null
+++ b/modules/mp/apps/testing/ollama/0.12.11.lua
@@ -0,0 +1,25 @@
+
+help([[
+Name: Ollama
+Version: 0.12.11
+Website: https://ollama.com/
+
+Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine
+
+]])
+whatis("Name: Ollama")
+whatis("Version: 0.12.11")
+whatis("Category: Ollama")
+whatis("URL: https://hub.docker.com/r/ollama/ollama")
+whatis("Description: Provides access to Ollama through a container built with Apptainer")
+family("Ollama")
+
+-- Ollama is delivered as a container image, so the Apptainer runtime must be loaded.
+always_load('apptainer')
+-- Versioned Apptainer image backing this module.
+local sif_file = '/hpc/m3/containers/ollama/ollama_0.12.11.sif'
+
+setenv('CONTAINER_IMAGE', sif_file)
+-- NOTE(review): helper script presumably defines shell wrappers that run ollama from $CONTAINER_IMAGE -- confirm against the script.
+source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")
+