diff --git a/files/usegalaxy.cz/tpv_rules_local.yml b/files/usegalaxy.cz/tpv_rules_local.yml deleted file mode 100644 index 525c9788..00000000 --- a/files/usegalaxy.cz/tpv_rules_local.yml +++ /dev/null @@ -1,2 +0,0 @@ -global: - default_inherits: default diff --git a/files/usegalaxy.cz/tpv_rules_local.yml.j2 b/files/usegalaxy.cz/tpv_rules_local.yml.j2 new file mode 100644 index 00000000..75a61662 --- /dev/null +++ b/files/usegalaxy.cz/tpv_rules_local.yml.j2 @@ -0,0 +1,70 @@ +global: + default_inherits: default +destinations: + tpv_pulsar_rosettafold: + inherits: tpv_pulsar + runner: pulsar_tpv_runner + params: + singularity_enabled: false + submit_native_specification: "-l select=1:ncpus={int(cores)}:mem={int(mem)}gb:scratch_local={int(scratch)}gb:ngpus={int(gpus)}:gpu_mem={int(gpu_mem)}gb -l walltime={int(walltime)}:00:00 -q {{ pulsar.pbs_gpu_queue }} -N {{ pulsar.nfs_prefix }}_j{job.id}__{tool.id if '/' not in tool.id else tool.id.split('/')[-2]+'_v'+tool.id.split('/')[-1]}__{user.username if user and hasattr(user, 'username') else 'anonymous'}" + scheduling: + require: + - rosettafold + tpv_pulsar_rosettafold_sing: + inherits: tpv_pulsar_rosettafold + container_resolvers: + - type: explicit_singularity + - cache_directory: /cvmfs/singularity.metacentrum.cz/RoseTTAFold2/ + type: cached_mulled_singularity + params: + singularity_enabled: true + singularity_run_extra_arguments: '--nv --env SCRATCHDIR="$SCRATCHDIR" --env SCRATCH=$SCRATCHDIR' + singularity_volumes: '$job_directory:ro,$tool_directory:ro,$job_directory/outputs:rw,$working_directory:rw,$SCRATCHDIR,/scratch.ssd/galaxyeu/permanent/rosettafold_data/bfd:/opt/RoseTTAFold2/bfd,/scratch.ssd/galaxyeu/permanent/rosettafold_data/pdb100_2021Mar03:/opt/RoseTTAFold2/pdb100_2021Mar03,/scratch.ssd/galaxyeu/permanent/rosettafold_data/UniRef30_2020_06:/opt/RoseTTAFold2/UniRef30_2020_06,/scratch.ssd/galaxyeu/permanent/rosettafold_data/weights:/opt/RoseTTAFold2/network/weights' + singularity_default_container_id: 
"/cvmfs/singularity.metacentrum.cz/RoseTTAFold2/rosettafold_image.sif" + scheduling: + require: + - singularity + tpv_pulsar_alphafold: + inherits: tpv_pulsar + max_accepted_gpus: 1 + max_gpus: 1 + runner: pulsar_tpv_runner + params: + singularity_run_extra_arguments: '--nv' + singularity_volumes: '$job_directory:ro,$tool_directory:ro,$job_directory/outputs:rw,$working_directory:rw,$SCRATCHDIR,$ALPHAFOLD_DB:/data/2.3:ro' + submit_native_specification: "-l select=1:ncpus={int(cores)}:mem={int(mem)}gb:scratch_local={int(scratch)}gb:ngpus={int(gpus)}:gpu_mem={int(gpu_mem)}gb -l walltime={int(walltime)}:00:00 -q {{ pulsar.pbs_gpu_queue }} -N {{ pulsar.nfs_prefix }}_j{job.id}__{tool.id if '/' not in tool.id else tool.id.split('/')[-2]+'_v'+tool.id.split('/')[-1]}__{user.username if user and hasattr(user, 'username') else 'anonymous'}" + scheduling: + require: + - alphafold +tools: + testing_rosettafold2: + cores: 8 + mem: 64 + gpus: 1 + context: + walltime: 24 + scratch: 25 + gpu_mem: 16 + scheduling: + require: + - rosettafold + rosettafold2: + inherits: testing_rosettafold2 + scheduling: + require: + - singularity + toolshed.g2.bx.psu.edu/repos/galaxy-australia/alphafold2/alphafold/.*: + cores: 8 + mem: 120 + gpus: 1 + context: + walltime: 24 + scratch: 100 + gpu_mem: 16 + env: + MPLCONFIGDIR: "$SCRATCHDIR" + ALPHAFOLD_DB: "/scratch.ssd/galaxyeu/permanent/alphafold.db" + ALPHAFOLD_USE_GPU: True + scheduling: + require: + - alphafold diff --git a/host_vars/usegalaxy.cz/vars.yml b/host_vars/usegalaxy.cz/vars.yml index 8316bc0f..0f758910 100644 --- a/host_vars/usegalaxy.cz/vars.yml +++ b/host_vars/usegalaxy.cz/vars.yml @@ -28,8 +28,6 @@ pulsar: galaxy_config_files: - src: files/galaxy/themes.yml dest: "{{ galaxy_config.galaxy.themes_config_file }}" - - src: "{{ lookup('first_found', ['files/'+inventory_hostname+'/tpv_rules_local.yml', 'files/galaxy/config/tpv_rules_local.yml']) }}" - dest: "{{ tpv_mutable_dir }}/tpv_rules_{{ inventory_hostname }}.yml" - src: "{{ 
lookup('first_found', ['files/'+inventory_hostname+'/config/tool_conf.xml', 'files/galaxy/config/tool_conf.xml']) }}" dest: "{{ galaxy_config_dir }}/tool_conf.xml" - src: files/galaxy/config/oidc_config.xml @@ -41,6 +39,22 @@ galaxy_config_files: - src: files/{{ inventory_hostname }}/user_preferences_extra_conf.yml dest: "{{ galaxy_config_dir }}/user_preferences_extra_conf.yml" +galaxy_config_templates: + - src: "templates/{{ inventory_hostname }}/config/object_store_conf.xml.j2" + dest: "{{ galaxy_config.galaxy.object_store_config_file }}" + - src: "templates/{{ inventory_hostname }}/config/local_tool_conf.xml.j2" + dest: "{{ galaxy_config_dir }}/local_tool_conf.xml" + - src: templates/galaxy/config/tpv_rules_meta.yml.j2 + dest: "{{ tpv_mutable_dir }}/tpv_rules_meta.yml" + - src: "templates/galaxy/config/vault_conf.yml.j2" + dest: "{{ galaxy_config_dir }}/vault_conf.yml" + - src: "{{ lookup('first_found', ['templates/'+inventory_hostname+'/config/auth_conf.xml.j2', 'templates/galaxy/config/auth_conf.xml.j2']) }}" + dest: "{{ galaxy_config.galaxy.auth_config_file }}" + - src: "{{ lookup('first_found', ['templates/'+inventory_hostname+'/config/file_source_templates.yml.j2', 'templates/galaxy/config/file_source_templates.yml.j2']) }}" + dest: "{{ galaxy_config_dir }}/file_source_templates.yml" + - src: "{{ lookup('first_found', ['files/'+inventory_hostname+'/tpv_rules_local.yml.j2', 'files/galaxy/config/tpv_rules_local.yml']) }}" + dest: "{{ tpv_mutable_dir }}/tpv_rules_{{ inventory_hostname }}.yml" + galaxy_local_tools: - testing.xml - testing_pbs.xml diff --git a/roles/usegalaxy_eu.tpv_auto_lint/.github/workflows/ci.yml b/roles/usegalaxy_eu.tpv_auto_lint/.github/workflows/ci.yml new file mode 100644 index 00000000..b4f1e997 --- /dev/null +++ b/roles/usegalaxy_eu.tpv_auto_lint/.github/workflows/ci.yml @@ -0,0 +1,31 @@ +--- +name: Ansible-lint + +'on': + push: + +defaults: + run: + working-directory: 'ansible-tpv-lint' + +jobs: + ansible-lint: + runs-on: 
ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + path: 'ansible-tpv-lint' + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.9' + cache: 'pip' + - name: Install dependencies + run: | + pip install --upgrade pip + pip install ansible ansible-lint + - name: Lint playbook + run: | + if ! ansible-lint; then + exit 1 + fi \ No newline at end of file diff --git a/roles/usegalaxy_eu.tpv_auto_lint/.github/workflows/import.yml b/roles/usegalaxy_eu.tpv_auto_lint/.github/workflows/import.yml new file mode 100644 index 00000000..92df7a79 --- /dev/null +++ b/roles/usegalaxy_eu.tpv_auto_lint/.github/workflows/import.yml @@ -0,0 +1,38 @@ +--- +# This workflow requires a GALAXY_API_KEY secret present in the GitHub +# repository or organization. +# +# See: https://github.com/marketplace/actions/publish-ansible-role-to-galaxy +# See: https://github.com/ansible/galaxy/issues/46 + +name: "Ansible-Galaxy import" + +on: + workflow_dispatch: + +jobs: + release: + name: Release + runs-on: ubuntu-latest + steps: + - name: Check out the codebase. + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Set up Python 3. + uses: actions/setup-python@v3 + with: + python-version: '3.x' + + - name: Install Ansible. + run: pip3 install ansible-base + + # We have to do this step as GHA prevents triggering its own actions, to + # prevent runaway loops. + - name: Trigger a new import on Galaxy. 
+ run: | + org=$(echo ${{ github.repository }} | cut -d/ -f1) + repo=$(echo ${{ github.repository }} | cut -d/ -f2) + key=${{ secrets.ANSIBLE_GALAXY_API_KEY }} + ansible-galaxy role import --api-key $key $org $repo --branch main diff --git a/roles/usegalaxy_eu.tpv_auto_lint/.github/workflows/release.yml b/roles/usegalaxy_eu.tpv_auto_lint/.github/workflows/release.yml new file mode 100644 index 00000000..a7a15da7 --- /dev/null +++ b/roles/usegalaxy_eu.tpv_auto_lint/.github/workflows/release.yml @@ -0,0 +1,32 @@ +--- +# This workflow requires a GALAXY_API_KEY secret present in the GitHub +# repository or organization. +# +# See: https://github.com/marketplace/actions/publish-ansible-role-to-galaxy +# See: https://github.com/ansible/galaxy/issues/46 + +name: Release + +'on': + push: + tags: + - '*' + +jobs: + release: + name: Release + runs-on: ubuntu-latest + steps: + - name: Check out the codebase. + uses: actions/checkout@v2 + + - name: Set up Python 3. + uses: actions/setup-python@v2 + with: + python-version: '3.x' + + - name: Install Ansible. + run: pip3 install ansible-base + + - name: Trigger a new import on Galaxy. + run: ansible-galaxy role import --api-key ${{ secrets.ANSIBLE_GALAXY_API_KEY }} $(echo ${{ github.repository }} | cut -d/ -f1) $(echo ${{ github.repository }} | cut -d/ -f2) --branch main --role-name tpv_auto_lint diff --git a/roles/usegalaxy_eu.tpv_auto_lint/.github/workflows/slugger.yml b/roles/usegalaxy_eu.tpv_auto_lint/.github/workflows/slugger.yml new file mode 100644 index 00000000..45bd5b33 --- /dev/null +++ b/roles/usegalaxy_eu.tpv_auto_lint/.github/workflows/slugger.yml @@ -0,0 +1,72 @@ +--- +# This workflow requires a GALAXY_API_KEY secret present in the GitHub +# repository or organization. 
+# +# See: https://github.com/marketplace/actions/publish-ansible-role-to-galaxy +# See: https://github.com/ansible/galaxy/issues/46 +# +# Note on the file name: +# Reminding me (@hexylena) to make a release of a role was the last thing +# @Slugger70 asked me in our group chat. I'd forgotten to do it and he was +# waiting on me for it, well, here's to you mate, none of us can forget to +# make a point release again. + +name: "Automatic Regular Releases" + +on: + workflow_dispatch: + schedule: + - cron: '0 0 * * 1' + +jobs: + release: + name: Release + runs-on: ubuntu-latest + steps: + - name: Check out the codebase. + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Set up Python 3. + uses: actions/setup-python@v3 + with: + python-version: '3.x' + + - name: Install Ansible. + run: pip3 install ansible-base + + - name: Check for changes + run: | + LATEST_TAG=$(git describe --tags --abbrev=0) + echo "The last released tag was ${LATEST_TAG}" + CHANGES=$(git diff ${LATEST_TAG} --name-only | wc -l) + echo "Found ${CHANGES} changed files" + git diff ${LATEST_TAG} --name-only + echo "changed_files=${CHANGES}" >> $GITHUB_ENV + + - name: Create a new git tag + run: | + LATEST_TAG=$(git describe --tags --abbrev=0) + major_minor=$(echo "$LATEST_TAG" | sed 's/\(.*\..*\.\)\(.*\)/\1/') + patch=$(echo "$LATEST_TAG" | sed 's/\(.*\..*\.\)\(.*\)/\2/') + newpatch=$(echo "$patch + 1" | bc) + NEW_TAG="${major_minor}${newpatch}" + echo "$LATEST_TAG -> $NEW_TAG" + + git config user.name github-actions + git config user.email github-actions@github.com + git tag "$NEW_TAG" + git push --tags + echo "Creating new tag $NEW_TAG" >> $GITHUB_STEP_SUMMARY + if: env.changed_files > 0 + + # We have to do this step as GHA prevents triggering its own actions, to + # prevent runaway loops. + - name: Trigger a new import on Galaxy. 
+ run: | + org=$(echo ${{ github.repository }} | cut -d/ -f1) + repo=$(echo ${{ github.repository }} | cut -d/ -f2) + key=${{ secrets.ANSIBLE_GALAXY_API_KEY }} + ansible-galaxy role import --api-key $key $org $repo --branch main + if: env.changed_files > 0 diff --git a/roles/usegalaxy_eu.tpv_auto_lint/README.md b/roles/usegalaxy_eu.tpv_auto_lint/README.md new file mode 100644 index 00000000..d72e2e07 --- /dev/null +++ b/roles/usegalaxy_eu.tpv_auto_lint/README.md @@ -0,0 +1,17 @@ +# TPV Auto Lint +Ansible role to create a script that automatically lints all YAML files in the `tpv_mutable_dir` and checks for their existence in Galaxy's `job_conf.yml`. If both apply, it copies the file to the TPV rules directory, updating existing files. +This leads to an automatic reload, if Galaxy watches this directory. +With this script, TPV in Galaxy's job handlers can't break anymore, which would otherwise lead to mostly undetected destination mapping failures. + +## Requirements + - Ansible >= 2.11 + - [galaxyproject.galaxy](https://galaxy.ansible.com/galaxyproject/galaxy) for TPV and the `galaxy_*` vars + +## Role Variables +See [defaults/main](./defaults/main.yml) or Galaxy's [defaults/main](https://github.com/galaxyproject/ansible-galaxy/blob/main/defaults/main.yml) +## Playbook Example +Include role in your Galaxyserver Playbook **after** the galaxyproject.galaxy role (the dirs have to exist already) +## License +GPLv3 +## Author Information +[Galaxy Europe](https://galaxyproject.org/eu/) \ No newline at end of file diff --git a/roles/usegalaxy_eu.tpv_auto_lint/defaults/main.yml b/roles/usegalaxy_eu.tpv_auto_lint/defaults/main.yml new file mode 100644 index 00000000..fe1413e0 --- /dev/null +++ b/roles/usegalaxy_eu.tpv_auto_lint/defaults/main.yml @@ -0,0 +1,6 @@ +tpv_mutable_dir: "{{ galaxy_mutable_data_dir }}/total_perspective_vortex" +tpv_config_dir_name: TPV_DO_NOT_TOUCH +tpv_config_dir: "{{ galaxy_config_dir }}/{{ tpv_config_dir_name }}" +galaxy_job_config_file: "{{ 
galaxy_config_dir }}/job_conf.yml" +tpv_privsep: false +tpv_env_name: tpv_dispatcher diff --git a/roles/usegalaxy_eu.tpv_auto_lint/meta/.galaxy_install_info b/roles/usegalaxy_eu.tpv_auto_lint/meta/.galaxy_install_info new file mode 100644 index 00000000..d4cab97a --- /dev/null +++ b/roles/usegalaxy_eu.tpv_auto_lint/meta/.galaxy_install_info @@ -0,0 +1,2 @@ +install_date: 'Fri 23 May 2025 09:39:42 PM ' +version: 0.4.4 diff --git a/roles/usegalaxy_eu.tpv_auto_lint/meta/main.yml b/roles/usegalaxy_eu.tpv_auto_lint/meta/main.yml new file mode 100644 index 00000000..5e5890f0 --- /dev/null +++ b/roles/usegalaxy_eu.tpv_auto_lint/meta/main.yml @@ -0,0 +1,16 @@ +galaxy_info: + author: Mira Kuntz + company: Galaxy Europe + namespace: usegalaxy_eu + role_name: tpv_auto_lint + description: Script that lints TPV rule files and checks if they exist in + Galaxy's job_conf.yml before copying them to the TPV rules folder + license: GPL-3.0 + min_ansible_version: "2.1" + galaxy_tags: + - galaxyproject + platforms: + - name: EL + versions: + - "8" + - "9" diff --git a/roles/usegalaxy_eu.tpv_auto_lint/requirements.txt b/roles/usegalaxy_eu.tpv_auto_lint/requirements.txt new file mode 100644 index 00000000..0e5c220a --- /dev/null +++ b/roles/usegalaxy_eu.tpv_auto_lint/requirements.txt @@ -0,0 +1,2 @@ +ansible-core +ansible-lint diff --git a/roles/usegalaxy_eu.tpv_auto_lint/tasks/main.yml b/roles/usegalaxy_eu.tpv_auto_lint/tasks/main.yml new file mode 100644 index 00000000..c76b6034 --- /dev/null +++ b/roles/usegalaxy_eu.tpv_auto_lint/tasks/main.yml @@ -0,0 +1,18 @@ +--- +- name: Create TPV mutable dir + ansible.builtin.file: + state: directory + path: "{{ tpv_mutable_dir }}" + mode: 0755 + +- name: Copy TPV lint-and-copy-script + ansible.builtin.template: + src: tpv-lint-and-copy.sh.j2 + dest: "{{ tpv_mutable_dir }}/tpv-lint-and-copy.sh" + owner: root + group: root + mode: 0750 + +- name: Execute TPV lint-and-copy-script + ansible.builtin.command: + cmd: "{{ tpv_mutable_dir 
}}/tpv-lint-and-copy.sh" diff --git a/roles/usegalaxy_eu.tpv_auto_lint/templates/tpv-lint-and-copy.sh.j2 b/roles/usegalaxy_eu.tpv_auto_lint/templates/tpv-lint-and-copy.sh.j2 new file mode 100644 index 00000000..8612c6b8 --- /dev/null +++ b/roles/usegalaxy_eu.tpv_auto_lint/templates/tpv-lint-and-copy.sh.j2 @@ -0,0 +1,37 @@ +#!/bin/bash +# Script that copies files to TPV config dir, only if linting is successful + +PYTHONPATH="{{ galaxy_root }}/server/lib" +VENV="{{ galaxy_venv_dir }}" +TPV_MUTABLE_DIR="{{ tpv_mutable_dir }}" +TPV_DIR="{{ tpv_config_dir }}" +TPV_DIR_NAME="{{ tpv_config_dir_name }}" +JOB_CONFIG_FILE="{{ galaxy_job_config_file }}" +TPV_ENV_NAME="{{ tpv_env_name }}" + +echo "Installing TPV version 3.1.3 and yq" +$VENV/bin/pip install total-perspective-vortex==3.1.3 yq +echo "Activating the Galaxy VENV" +. $VENV/bin/activate + +LOCAL_TPV_CONFIGS="$(yq -c '.galaxy.job_config.execution.environments.'${TPV_ENV_NAME}'.tpv_config_files[]' $JOB_CONFIG_FILE | grep -v '^\"http' | xargs basename -a | xargs -I {} echo \"${TPV_MUTABLE_DIR}/{}\" | xargs)" +echo "Found the following local TPV configs: $LOCAL_TPV_CONFIGS" + +if PYTHONPATH=$PYTHONPATH tpv lint $LOCAL_TPV_CONFIGS; then + echo "lint successful, checking job configuration for file(s): $LOCAL_TPV_CONFIGS ..." + for f in $LOCAL_TPV_CONFIGS; do + FNAME=$(basename "$f") + if grep -q "$TPV_DIR_NAME/$FNAME" "$JOB_CONFIG_FILE"; then + echo "$FNAME is present in job configuration, copying..." + [[ $(type -t cp) == "alias" ]] && unalias cp + cp -bu "$f" "$TPV_DIR/$FNAME" {% if tpv_privsep %}&& chown root:{{ __galaxy_user_group }} "$TPV_DIR/$FNAME" {% endif %} + + else + echo "$FNAME is not present in job configuration, exiting..." 
+ exit 3 && true + fi + done +else + echo "lint failed, file(s): $LOCAL_TPV_CONFIGS - not copied" + exit 3 && true +fi diff --git a/roles/usegalaxy_eu.tpv_auto_lint/tpv-lint-and-copy.sh b/roles/usegalaxy_eu.tpv_auto_lint/tpv-lint-and-copy.sh new file mode 100644 index 00000000..c16463b0 --- /dev/null +++ b/roles/usegalaxy_eu.tpv_auto_lint/tpv-lint-and-copy.sh @@ -0,0 +1,26 @@ +#!/bin/bash +# Script that copies files to TPV config dir, only if linting is successful + +PYTHONPATH="/opt/galaxy/server/lib" +VENV="/opt/galaxy/venv" +TPV_DIR="/opt/galaxy/config/total_perspective_vortex" +GALAXY_CONF_DIR="/opt/galaxy/config" + +. $VENV/bin/activate + +for f in *.yml; do + if PYTHONPATH=$PYTHONPATH tpv lint "$f"; then + echo "lint successful, checking job_conf..." + if grep -q "/total_perspective_vortex/$f" "$GALAXY_CONF_DIR/job_conf.yml"; then + echo "$f is present in job_conf, copying..." + [[ $(type -t cp) == "alias" ]] && unalias cp + cp -bu "$f" "$TPV_DIR/$f" + else + echo "$f is not present in job_conf, exiting..." 
+ exit 3 && true + fi + else + echo "lint failed, '$f' was not copied" + exit 3 && true + fi +done diff --git a/templates/galaxy/config/tpv_rules_meta.yml.j2 b/templates/galaxy/config/tpv_rules_meta.yml.j2 index bf34287b..84ac2f1c 100644 --- a/templates/galaxy/config/tpv_rules_meta.yml.j2 +++ b/templates/galaxy/config/tpv_rules_meta.yml.j2 @@ -117,37 +117,6 @@ tools: toolshed.g2.bx.psu.edu/repos/devteam/clustalw/clustalw/.*: context: walltime: 48 - testing_rosettafold2: - cores: 8 - mem: 64 - gpus: 1 - context: - walltime: 24 - scratch: 25 - gpu_mem: 16 - scheduling: - require: - - rosettafold - rosettafold2: - inherits: testing_rosettafold2 - scheduling: - require: - - singularity - toolshed.g2.bx.psu.edu/repos/galaxy-australia/alphafold2/alphafold/.*: - cores: 8 - mem: 120 - gpus: 1 - context: - walltime: 24 - scratch: 100 - gpu_mem: 16 - env: - MPLCONFIGDIR: "$SCRATCHDIR" - ALPHAFOLD_DB: "/scratch.ssd/galaxyeu/permanent/alphafold.db" - ALPHAFOLD_USE_GPU: True - scheduling: - require: - - alphafold roles: training.*: @@ -294,38 +263,3 @@ destinations: scheduling: require: - nasty-java - tpv_pulsar_rosettafold: - inherits: tpv_pulsar - runner: pulsar_tpv_runner - params: - singularity_enabled: false - submit_native_specification: "-l select=1:ncpus={int(cores)}:mem={int(mem)}gb:scratch_local={int(scratch)}gb:ngpus={int(gpus)}:gpu_mem={int(gpu_mem)}gb -l walltime={int(walltime)}:00:00 -q galaxy_gpu@pbs-m1.metacentrum.cz -N pulsar_cz_j{job.id}__{tool.id if '/' not in tool.id else tool.id.split('/')[-2]+'_v'+tool.id.split('/')[-1]}__{user.username if user and hasattr(user, 'username') else 'anonymous'}" - scheduling: - require: - - rosettafold - tpv_pulsar_rosettafold_sing: - inherits: tpv_pulsar_rosettafold - container_resolvers: - - type: explicit_singularity - - cache_directory: /cvmfs/singularity.metacentrum.cz/RoseTTAFold2/ - type: cached_mulled_singularity - params: - singularity_enabled: true - singularity_run_extra_arguments: '--nv --env 
SCRATCHDIR="$SCRATCHDIR" --env SCRATCH=$SCRATCHDIR' - singularity_volumes: '$job_directory:ro,$tool_directory:ro,$job_directory/outputs:rw,$working_directory:rw,$SCRATCHDIR,/scratch.ssd/galaxyeu/permanent/rosettafold_data/bfd:/opt/RoseTTAFold2/bfd,/scratch.ssd/galaxyeu/permanent/rosettafold_data/pdb100_2021Mar03:/opt/RoseTTAFold2/pdb100_2021Mar03,/scratch.ssd/galaxyeu/permanent/rosettafold_data/UniRef30_2020_06:/opt/RoseTTAFold2/UniRef30_2020_06,/scratch.ssd/galaxyeu/permanent/rosettafold_data/weights:/opt/RoseTTAFold2/network/weights' - singularity_default_container_id: "/cvmfs/singularity.metacentrum.cz/RoseTTAFold2/rosettafold_image.sif" - scheduling: - require: - - singularity - tpv_pulsar_alphafold: - inherits: tpv_pulsar - max_accepted_gpus: 1 - max_gpus: 1 - runner: pulsar_tpv_runner - params: - singularity_run_extra_arguments: '--nv' - singularity_volumes: '$job_directory:ro,$tool_directory:ro,$job_directory/outputs:rw,$working_directory:rw,$SCRATCHDIR,$ALPHAFOLD_DB:/data/2.3:ro' - submit_native_specification: "-l select=1:ncpus={int(cores)}:mem={int(mem)}gb:scratch_local={int(scratch)}gb:ngpus={int(gpus)}:gpu_mem={int(gpu_mem)}gb -l walltime={int(walltime)}:00:00 -q {{ pulsar.pbs_gpu_queue }} -N {{ pulsar.nfs_prefix }}_j{job.id}__{tool.id if '/' not in tool.id else tool.id.split('/')[-2]+'_v'+tool.id.split('/')[-1]}__{user.username if user and hasattr(user, 'username') else 'anonymous'}" - scheduling: - require: - - alphafold diff --git a/templates/nginx/galaxy.j2 b/templates/nginx/galaxy.j2 index 5e96335f..fbb7a65c 100644 --- a/templates/nginx/galaxy.j2 +++ b/templates/nginx/galaxy.j2 @@ -90,18 +90,6 @@ server { client_max_body_size 0; proxy_pass http://localhost:{{ galaxy_tusd_port }}/files; } -{% block tus_hook %} - {% if csnt_galaxy_url_prefix != '' %} - #DEMON: this piece of code shouldn't definitely be on instance without special galaxy_url_prefix! 
- #DEMON: hopefully just a temporary hack, need to find out how to configure TUSd hook via ansible role - #DEMON: Not sure if we still need this - location /api/upload/hooks { - rewrite ^/api/upload/hooks(.*)$ {{ csnt_galaxy_url_prefix }}/api/upload/hooks$1 break; - proxy_pass https://{{ inventory_hostname }}; - } - {% endif %} -{% endblock tus_hook %} - # Static files can be more efficiently served by Nginx. Why send the # request to Gunicorn which should be spending its time doing more useful