Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
142 changes: 142 additions & 0 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,142 @@
---
# CI pipeline: lint -> validate compute-block config -> test -> build & push image.
name: CI

on:
  push:
    branches:
      - "main"
  pull_request:

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  lint-python:
    name: Lint Python
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          cache: "pip"

      - name: Run flake8
        uses: py-actions/flake8@v2

  validate-compute-block:
    name: Validate Compute Block Config
    runs-on: ubuntu-latest
    needs: lint-python
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          # Cache pip downloads, consistent with the other jobs.
          cache: "pip"

      - name: Install dependencies
        run: |
          pip install -r requirements.txt

      # Verify that cbc.yaml in the repo root matches the compute-block
      # definition declared in the Python code (scystream SDK).
      - name: Check cbcs
        run: |
          python3 - <<'EOF'
          import main

          from scystream.sdk.config import load_config, get_compute_block
          from scystream.sdk.config.config_loader import _compare_configs
          from pathlib import Path

          CBC_PATH = Path("cbc.yaml")

          if not CBC_PATH.exists():
              raise FileNotFoundError("cbc.yaml not found in repo root.")

          block_from_code = get_compute_block()
          block_from_yaml = load_config(str(CBC_PATH))

          _compare_configs(block_from_code, block_from_yaml)

          print("cbc.yaml matches python code definition")
          EOF

  run-test:
    name: Run Tests
    runs-on: ubuntu-latest
    needs: validate-compute-block
    services:
      # NOTE(review): 'lazybit/minio' is an unofficial image — consider an
      # official/maintained MinIO image that supports a default entrypoint.
      minio:
        image: lazybit/minio
        ports:
          - "9000:9000"
        env:
          MINIO_ROOT_USER: minioadmin
          MINIO_ROOT_PASSWORD: minioadmin
        options: >-
          --health-cmd "curl -f http://localhost:9000/minio/health/live || exit 1"
          --health-interval 5s
          --health-retries 5
          --health-timeout 5s
      postgres:
        image: postgres:15
        ports:
          - "5432:5432"
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        options: >-
          --health-cmd="pg_isready -U postgres"
          --health-interval=5s
          --health-retries=10
          --health-timeout=5s
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          cache: "pip"

      - name: Install dependencies
        run: |
          pip install -r requirements.txt

      - name: Run Tests
        run: pytest -vv

  build:
    name: Build Docker Image
    runs-on: ubuntu-latest
    needs: run-test
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      # Registry is ghcr.io (see env.REGISTRY), not Docker Hub.
      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata for docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}/language-preprocessing
          # CSV syntax: no spaces between key=value pairs.
          tags: |
            type=ref,event=pr
            type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', 'main') }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          # Build from the checked-out workspace instead of re-fetching
          # the repo via Git context.
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

44 changes: 0 additions & 44 deletions .github/workflows/docker.yaml

This file was deleted.

76 changes: 24 additions & 52 deletions cbc.yaml
Original file line number Diff line number Diff line change
@@ -1,15 +1,16 @@
author: Paul Kalhorn
author: Paul Kalhorn
description: Language preprocessing for .txt or .bib files
docker_image: ghcr.io/rwth-time/language-preprocessing/language-preprocessing
entrypoints:
preprocess_bib_file:
description: Entrypoint for preprocessing a .bib file
description: Entrypoint for preprocessing an attribute of a .bib file
envs:
BIB_DOWNLOAD_PATH: /tmp/input.bib
FILTER_STOPWORDS: true
LANGUAGE: en
NGRAM_MAX: 3
NGRAM_MIN: 2
UNIGRAM_NORMALIZER: porter
UNIGRAM_NORMALIZER: lemma
USE_NGRAMS: true
inputs:
bib_input:
Expand All @@ -23,41 +24,27 @@ entrypoints:
bib_file_S3_PORT: null
bib_file_S3_SECRET_KEY: null
bib_file_SELECTED_ATTRIBUTE: Abstract
description: The bib file, as well as one attribute selected for preprocessing
description: The bib file, as well as one attribute selected for preprocessing
type: file
outputs:
dtm_output:
normalized_docs_output:
config:
dtm_output_BUCKET_NAME: null
dtm_output_FILE_EXT: pkl
dtm_output_FILE_NAME: null
dtm_output_FILE_PATH: null
dtm_output_S3_ACCESS_KEY: null
dtm_output_S3_HOST: null
dtm_output_S3_PORT: null
dtm_output_S3_SECRET_KEY: null
description: Numpy representation of document-term matrix as .pkl file
type: file
vocab_output:
config:
vocab_output_BUCKET_NAME: null
vocab_output_FILE_EXT: pkl
vocab_output_FILE_NAME: null
vocab_output_FILE_PATH: null
vocab_output_S3_ACCESS_KEY: null
vocab_output_S3_HOST: null
vocab_output_S3_PORT: null
vocab_output_S3_SECRET_KEY: null
description: Pkl file of a dictionary that maps all words to their index in the DTM
type: file
normalized_docs_DB_TABLE: null
normalized_docs_PG_HOST: null
normalized_docs_PG_PASS: null
normalized_docs_PG_PORT: null
normalized_docs_PG_USER: null
description: Database Output, containing bib_id as well as the normalized text
type: pg_table
preprocess_txt_file:
description: Entrypoint to preprocess a .txt file
envs:
FILTER_STOPWORDS: true
LANGUAGE: en
NGRAM_MAX: 3
NGRAM_MIN: 2
UNIGRAM_NORMALIZER: porter
TXT_DOWNLOAD_PATH: /tmp/input.txt
UNIGRAM_NORMALIZER: lemma
USE_NGRAMS: true
inputs:
txt_input:
Expand All @@ -70,31 +57,16 @@ entrypoints:
txt_file_S3_HOST: null
txt_file_S3_PORT: null
txt_file_S3_SECRET_KEY: null
description: A .txt file
description: A .txt file; each line will be treated as a document
type: file
outputs:
dtm_output:
normalized_docs_output:
config:
dtm_output_BUCKET_NAME: null
dtm_output_FILE_EXT: pkl
dtm_output_FILE_NAME: null
dtm_output_FILE_PATH: null
dtm_output_S3_ACCESS_KEY: null
dtm_output_S3_HOST: null
dtm_output_S3_PORT: null
dtm_output_S3_SECRET_KEY: null
description: Numpy representation of document-term matrix as .pkl file
type: file
vocab_output:
config:
vocab_output_BUCKET_NAME: null
vocab_output_FILE_EXT: pkl
vocab_output_FILE_NAME: null
vocab_output_FILE_PATH: null
vocab_output_S3_ACCESS_KEY: null
vocab_output_S3_HOST: null
vocab_output_S3_PORT: null
vocab_output_S3_SECRET_KEY: null
description: Pkl file of a dictionary that maps all words to their index in the DTM
type: file
normalized_docs_DB_TABLE: null
normalized_docs_PG_HOST: null
normalized_docs_PG_PASS: null
normalized_docs_PG_PORT: null
normalized_docs_PG_USER: null
description: Database Output, containing bib_id as well as the normalized text
type: pg_table
name: Language-Preprocessing
15 changes: 9 additions & 6 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,16 @@ services:
ports:
- "9000:9000"
- "9001:9001"
networks:
- scystream-net

networks:
scystream-net:
driver: bridge
postgres:
image: postgres:13
container_name: postgres
environment:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
- POSTGRES_DB=postgres
ports:
- "5432:5432"

volumes:
minio_data:
search_query:
Loading
Loading