From 4c9a4945fae2bb34f5aa775370cab273f92ead7c Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 17:34:12 +0100 Subject: [PATCH 01/13] Upgrade dependencies, add linters and tests --- .ansible-lint | 71 ------ .../container-image-build-validation.yaml | 8 +- .github/workflows/pre-commit.yaml | 2 +- .github/workflows/release-from-label.yaml | 2 +- .../workflows/release-label-validation.yaml | 2 +- .github/workflows/test.yaml | 35 +++ .linter-config/.checkov.yml | 15 -- .pre-commit-config.yaml | 42 +++- .tool-versions | 3 +- Dockerfile | 24 +- README.md | 46 +++- azure-pipelines/dsb2pushover.yml | 2 +- requirements.txt | 6 +- src/app/dsb2pushover.py | 237 ++++++++++-------- .../site-packages/dsbapi/__init__.py | 72 +++--- tests/__init__.py | 0 16 files changed, 309 insertions(+), 258 deletions(-) delete mode 100644 .ansible-lint create mode 100644 .github/workflows/test.yaml delete mode 100644 .linter-config/.checkov.yml rename src/usr/lib/{python3.11 => python3.12}/site-packages/dsbapi/__init__.py (78%) create mode 100644 tests/__init__.py diff --git a/.ansible-lint b/.ansible-lint deleted file mode 100644 index da760fc..0000000 --- a/.ansible-lint +++ /dev/null @@ -1,71 +0,0 @@ -# .ansible-lint -exclude_paths: - - .cache/ # implicit unless exclude_paths is defined in config - - .github/ -# parseable: true -# quiet: true -# verbosity: 1 - -# Mock modules or roles in order to pass ansible-playbook --syntax-check -mock_modules: - - zuul_return - # note the foo.bar is invalid as being neither a module or a collection - - fake_namespace.fake_collection.fake_module - - fake_namespace.fake_collection.fake_module.fake_submodule -mock_roles: - - mocked_role - - author.role_name # old standalone galaxy role - - fake_namespace.fake_collection.fake_role # role within a collection - -# Enable checking of loop variable prefixes in roles -loop_var_prefix: "{role}_" - -# Enforce variable names to follow pattern below, in addition to Ansible own -# requirements, 
like avoiding python identifiers. To disable add `var-naming` -# to skip_list. -# var_naming_pattern: "^[a-z_][a-z0-9_]*$" - -use_default_rules: true -# Load custom rules from this specific folder -# rulesdir: -# - ./rule/directory/ - -# This makes linter to fully ignore rules/tags listed below -skip_list: - - skip_this_tag - - git-latest - - command-instead-of-shell # Use shell only when shell functionality is required. - - no-changed-when - - no-handler - - ignore-errors - -# Any rule that has the 'opt-in' tag will not be loaded unless its 'id' is -# mentioned in the enable_list: -enable_list: - - fqcn-builtins # opt-in - - no-log-password # opt-in - - no-same-owner # opt-in - # add yaml here if you want to avoid ignoring yaml checks when yamllint - # library is missing. Normally its absence just skips using that rule. - - yaml -# Report only a subset of tags and fully ignore any others -# tags: -# - var-spacing - -# This makes the linter display but not fail for rules/tags listed below: -warn_list: - - skip_this_tag - - git-latest - - experimental # experimental is included in the implicit list - # - role-name - -# Offline mode disables installation of requirements.yml -offline: false - -# Define required Ansible's variables to satisfy syntax check -extra_vars: - foo: bar - multiline_string_variable: | - line1 - line2 - complex_variable: ":{;\t$()" diff --git a/.github/workflows/container-image-build-validation.yaml b/.github/workflows/container-image-build-validation.yaml index 59ebf0b..242ad8f 100644 --- a/.github/workflows/container-image-build-validation.yaml +++ b/.github/workflows/container-image-build-validation.yaml @@ -18,7 +18,7 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v2 - name: Test build - uses: docker/build-push-action@v4.1.0 + uses: docker/build-push-action@v4 with: push: false load: false @@ -26,7 +26,7 @@ jobs: platforms: linux/amd64, linux/arm64 tags: container-build:test - name: Test build and export for further 
validation - uses: docker/build-push-action@v4.1.0 + uses: docker/build-push-action@v4 with: push: false load: true @@ -34,14 +34,14 @@ jobs: tags: container-build:test outputs: type=docker,dest=/tmp/container.tar - name: Upload container image as artifact - uses: actions/upload-artifact@v3.1.2 + uses: actions/upload-artifact@v3 with: name: container-build path: /tmp/container.tar scan: name: Container vulnerability scan needs: container-build - uses: cybcon/github_workflows/.github/workflows/container-vulnerability-scan.yaml@v1.1.10 + uses: cybcon/github_workflows/.github/workflows/container-vulnerability-scan.yaml@v1.4.0 with: image_name: container-build:test image_artifact_filename: container.tar diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index 9b14ed9..32d7f81 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -6,4 +6,4 @@ on: - main jobs: pre-commit: - uses: cybcon/github_workflows/.github/workflows/pre-commit.yaml@v1.1.10 + uses: cybcon/github_workflows/.github/workflows/pre-commit.yaml@v1.4.0 diff --git a/.github/workflows/release-from-label.yaml b/.github/workflows/release-from-label.yaml index bc24c07..714ed98 100644 --- a/.github/workflows/release-from-label.yaml +++ b/.github/workflows/release-from-label.yaml @@ -5,4 +5,4 @@ on: - closed jobs: release: - uses: cybcon/github_workflows/.github/workflows/release-from-label.yaml@v1.1.10 + uses: cybcon/github_workflows/.github/workflows/release-from-label.yaml@v1.4.0 diff --git a/.github/workflows/release-label-validation.yaml b/.github/workflows/release-label-validation.yaml index b85a46a..3ad8986 100644 --- a/.github/workflows/release-label-validation.yaml +++ b/.github/workflows/release-label-validation.yaml @@ -10,4 +10,4 @@ on: - unlabeled jobs: release-label-validation: - uses: cybcon/github_workflows/.github/workflows/release-label-validation.yaml@v1.1.10 + uses: 
cybcon/github_workflows/.github/workflows/release-label-validation.yaml@v1.4.0 diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 0000000..19df923 --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,35 @@ +name: Run tests + +on: + pull_request: + push: + branches: [ main ] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12"] + + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install ruff pytest + cd src/ && pip install -r requirements.txt + - name: Lint with ruff + run: | + # stop the build if there are Python syntax errors or undefined names + ruff check --select=E9,F63,F7,F82 --target-version=py311 . + # default set of ruff rules with GitHub Annotations + ruff check --target-version=py311 . 
+ - name: Test with pytest + run: | + pytest diff --git a/.linter-config/.checkov.yml b/.linter-config/.checkov.yml deleted file mode 100644 index 552b198..0000000 --- a/.linter-config/.checkov.yml +++ /dev/null @@ -1,15 +0,0 @@ -block-list-secret-scan: [] -branch: master -download-external-modules: false -evaluate-variables: true -external-modules-download-path: .external_modules -framework: - - all -mask: [] -output: - - cli -quiet: true -secrets-history-timeout: 12h -secrets-scan-file-type: [] -summary-position: top -skip-check: [] diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a658b7a..1a218c1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,12 +1,10 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v5.0.0 hooks: - id: fix-byte-order-marker - id: check-json - id: check-yaml - # args: - # - "-c __GIT_WORKING_DIR__/.linter-config/.yamllint" - id: end-of-file-fixer - id: trailing-whitespace - id: mixed-line-ending @@ -15,10 +13,38 @@ repos: - id: detect-aws-credentials args: ['--allow-missing-credentials'] - id: detect-private-key - - repo: https://github.com/antonbabenko/pre-commit-terraform - rev: v1.81.0 + - repo: https://github.com/myint/autoflake + rev: v2.3.1 hooks: - - id: terraform_checkov - exclude: "[examples|test]/.*$" + - id: autoflake args: - - "--args=--config-file __GIT_WORKING_DIR__/.linter-config/.checkov.yml" + - --in-place + - --remove-unused-variables + - --remove-all-unused-imports + - repo: https://github.com/hadolint/hadolint + rev: v2.12.0 + hooks: + - id: hadolint-docker + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: v0.8.4 + hooks: + - id: ruff + args: + - '--line-length=120' + - '--fix' + - '--exit-non-zero-on-fix' + - repo: https://github.com/pycqa/isort + rev: 5.13.2 + hooks: + - id: isort + name: isort (python) + args: + - '--profile' + - black + - '--filter-files' + - repo: https://github.com/psf/black + rev: 24.10.0 + hooks: + - id: 
black + args: + - '--line-length=120' diff --git a/.tool-versions b/.tool-versions index 839054c..e92cdd4 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1 +1,2 @@ -pre-commit 3.3.3 +python 3.11.4 +pre-commit 4.0.1 diff --git a/Dockerfile b/Dockerfile index 9dcb033..3715d54 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ -FROM alpine:3.19.0 +FROM alpine:3.21.0 LABEL maintainer="Michael Oberdorf IT-Consulting " -LABEL site.local.program.version="1.0.13" +LABEL site.local.program.version="1.0.14" # LOGLEVEL can be one of debug, info, warning , error ENV LOGLEVEL info @@ -10,16 +10,16 @@ COPY --chown=root:root /requirements.txt / RUN apk upgrade --available --no-cache --update \ && apk add --no-cache --update \ - libcurl=8.5.0-r0 \ - python3=3.11.6-r1 \ - py3-beautifulsoup4=4.12.2-r1 \ - py3-curl=7.45.2-r1 \ - py3-numpy=1.25.2-r0 \ - py3-packaging=23.2-r0 \ - py3-pandas=2.0.3-r0 \ - py3-pillow=10.1.0-r1 \ - py3-pip=23.3.1-r0 \ - py3-requests=2.31.0-r1 \ + libcurl=8.11.1-r0 \ + python3=3.12.8-r1 \ + py3-beautifulsoup4=4.12.3-r3 \ + py3-curl=7.45.3-r0 \ + py3-numpy=2.1.3-r0 \ + py3-packaging=24.2-r0 \ + py3-pandas=2.2.3-r0 \ + py3-pillow=11.0.0-r0 \ + py3-pip=24.3.1-r0 \ + py3-requests=2.32.3-r0\ && pip3 install --no-cache-dir --break-system-packages -r /requirements.txt COPY --chown=root:root /src / diff --git a/README.md b/README.md index 8104ced..dc2015a 100644 --- a/README.md +++ b/README.md @@ -6,10 +6,27 @@ Source code: [GitHub](https://github.com/cybcon/docker.dsb2pushover) Container image: [DockerHub](https://hub.docker.com/r/oitc/dsb2pushover) + +[![][github-action-test-shield]][github-action-test-link] +[![][github-action-release-shield]][github-action-release-link] +[![][github-release-shield]][github-release-link] +[![][github-releasedate-shield]][github-releasedate-link] +[![][github-stars-shield]][github-stars-link] +[![][github-forks-shield]][github-forks-link] +[![][github-issues-shield]][github-issues-link] 
+[![][github-license-shield]][github-license-link] + +[![][docker-release-shield]][docker-release-link] +[![][docker-pulls-shield]][docker-pulls-link] +[![][docker-stars-shield]][docker-stars-link] +[![][docker-size-shield]][docker-size-link] + + # Supported tags and respective `Dockerfile` links -* [`latest`, `1.0.13`](https://github.com/cybcon/docker.dsb2pushover/blob/v1.0.13/Dockerfile) +* [`latest`, `1.0.14`](https://github.com/cybcon/docker.dsb2pushover/blob/v1.0.14/Dockerfile) +* [`1.0.13`](https://github.com/cybcon/docker.dsb2pushover/blob/v1.0.13/Dockerfile) * [`1.0.12`](https://github.com/cybcon/docker.dsb2pushover/blob/v1.0.12/Dockerfile) * [`1.0.9`](https://github.com/cybcon/docker.dsb2pushover/blob/v1.0.9/Dockerfile) @@ -97,3 +114,30 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +[docker-pulls-link]: https://hub.docker.com/r/oitc/dsb2pushover +[docker-pulls-shield]: https://img.shields.io/docker/pulls/oitc/dsb2pushover?color=45cc11&labelColor=black&style=flat-square +[docker-release-link]: https://hub.docker.com/r/oitc/dsb2pushover +[docker-release-shield]: https://img.shields.io/docker/v/oitc/dsb2pushover?color=369eff&label=docker&labelColor=black&logo=docker&logoColor=white&style=flat-square +[docker-size-link]: https://hub.docker.com/r/oitc/dsb2pushover +[docker-size-shield]: https://img.shields.io/docker/image-size/oitc/dsb2pushover?color=369eff&labelColor=black&style=flat-square +[docker-stars-link]: https://hub.docker.com/r/oitc/dsb2pushover +[docker-stars-shield]: https://img.shields.io/docker/stars/oitc/dsb2pushover?color=45cc11&labelColor=black&style=flat-square +[github-action-release-link]: https://github.com/cybcon/docker.dsb2pushover/actions/workflows/release-from-label.yaml +[github-action-release-shield]: 
https://img.shields.io/github/actions/workflow/status/cybcon/docker.dsb2pushover/release-from-label.yaml?label=release&labelColor=black&logo=githubactions&logoColor=white&style=flat-square +[github-action-test-link]: https://github.com/cybcon/docker.dsb2pushover/actions/workflows/test.yaml +[github-action-test-shield-original]: https://github.com/cybcon/docker.dsb2pushover/actions/workflows/test.yaml/badge.svg +[github-action-test-shield]: https://img.shields.io/github/actions/workflow/status/cybcon/docker.dsb2pushover/test.yaml?label=tests&labelColor=black&logo=githubactions&logoColor=white&style=flat-square +[github-forks-link]: https://github.com/cybcon/docker.dsb2pushover/network/members +[github-forks-shield]: https://img.shields.io/github/forks/cybcon/docker.dsb2pushover?color=8ae8ff&labelColor=black&style=flat-square +[github-issues-link]: https://github.com/cybcon/docker.dsb2pushover/issues +[github-issues-shield]: https://img.shields.io/github/issues/cybcon/docker.dsb2pushover?color=ff80eb&labelColor=black&style=flat-square +[github-license-link]: https://github.com/cybcon/docker.dsb2pushover/blob/main/LICENSE +[github-license-shield]: https://img.shields.io/badge/license-MIT-blue?labelColor=black&style=flat-square +[github-release-link]: https://github.com/cybcon/docker.dsb2pushover/releases +[github-release-shield]: https://img.shields.io/github/v/release/cybcon/docker.dsb2pushover?color=369eff&labelColor=black&logo=github&style=flat-square +[github-releasedate-link]: https://github.com/cybcon/docker.dsb2pushover/releases +[github-releasedate-shield]: https://img.shields.io/github/release-date/cybcon/docker.dsb2pushover?labelColor=black&style=flat-square +[github-stars-link]: https://github.com/cybcon/docker.dsb2pushover +[github-stars-shield]: https://img.shields.io/github/stars/cybcon/docker.dsb2pushover?color=ffcb47&labelColor=black&style=flat-square diff --git a/azure-pipelines/dsb2pushover.yml b/azure-pipelines/dsb2pushover.yml index 
67e4d12..6766422 100644 --- a/azure-pipelines/dsb2pushover.yml +++ b/azure-pipelines/dsb2pushover.yml @@ -27,7 +27,7 @@ pool: vmImage: ubuntu-latest variables: - appVersion: 1.0.12 + appVersion: 1.0.14 REGISTRY: Docker Hub (OITC) REPOSITORY_NAME: oitc/dsb2pushover containerBuildPath: $(Build.Repository.Name)/resources diff --git a/requirements.txt b/requirements.txt index bc65519..2b8ad62 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ -datetime==5.3 +datetime>=5.3,<6 # needs to be fixed to 0.0.14 and \src\usr\local\lib\python3.9\site-packages\dsbapi\__init__.py needs to be removed dsbapipy>=0.0.13 -http.client==0.1.22 -pytesseract==0.3.10 +http.client>=0.1.22,<1 +pytesseract>=0.3.10,<1 diff --git a/src/app/dsb2pushover.py b/src/app/dsb2pushover.py index 34c01a9..434ce07 100644 --- a/src/app/dsb2pushover.py +++ b/src/app/dsb2pushover.py @@ -6,20 +6,26 @@ Author: Michael Oberdorf Datum: 2021-04-21 *************************************************************************** """ +import datetime +import logging import os import sys -import datetime -import pandas as pd + import dsbapi -import logging +import pandas as pd from packaging import version -VERSION='1.0.13' +VERSION = "1.0.14" -dsbapi_min_version = '0.0.14' +dsbapi_min_version = "0.0.14" if version.parse(dsbapi.__version__) < version.parse(dsbapi_min_version): - print('ERROR: DSBApi library version ' + dsbapi.__version__ + ' found but require minimum version ' + dsbapi_min_version) - sys.exit(1) + print( + "ERROR: DSBApi library version " + + dsbapi.__version__ + + " found but require minimum version " + + dsbapi_min_version + ) + sys.exit(1) """ @@ -27,6 +33,8 @@ # F U N C T I O N S ############################################################################### """ + + def parse_dsb_data(dsb_entries, filter_class=None, filter_date=None): """ parse the result from dsbclient.fetch_entries() and return a data frame @@ -39,33 +47,36 @@ def parse_dsb_data(dsb_entries, filter_class=None, 
filter_date=None): DF = list() for slice in dsb_entries: log.debug(slice) - DF.append(pd.DataFrame.from_dict(slice, orient='columns')) - df = pd.concat(DF,ignore_index=False, sort=False).reset_index(drop=True) #.replace('---',np.NaN) - with pd.option_context('display.max_rows', None, 'display.max_columns', None): log.debug(df) + DF.append(pd.DataFrame.from_dict(slice, orient="columns")) + df = pd.concat(DF, ignore_index=False, sort=False).reset_index(drop=True) # .replace('---',np.NaN) + with pd.option_context("display.max_rows", None, "display.max_columns", None): + log.debug(df) # set data types - #df[['date', 'updated']] = df[['date', 'updated']].astype('datetime64[ns]') - df['date'] = pd.to_datetime(df['date'], format='%d.%m.%Y') - df['updated'] = pd.to_datetime(df['updated'], format='%d.%m.%Y %H:%M') - df[['day', 'subject', 'new_subject', 'class']] = df[['day', 'subject', 'new_subject', 'class']].astype("category") + # df[['date', 'updated']] = df[['date', 'updated']].astype('datetime64[ns]') + df["date"] = pd.to_datetime(df["date"], format="%d.%m.%Y") + df["updated"] = pd.to_datetime(df["updated"], format="%d.%m.%Y %H:%M") + df[["day", "subject", "new_subject", "class"]] = df[["day", "subject", "new_subject", "class"]].astype("category") # filter relevant school class - if filter_class and 'class' in df.columns: - log.debug(' filter for school class: ' + filter_class) - #df = df[df['class'] == filter_class] - df = df[df['class'].str.match(r'(' + str(filter_class) + ')')==True] - df.drop(axis=1, columns=['class'], inplace=True) + if filter_class and "class" in df.columns: + log.debug(" filter for school class: " + filter_class) + # df = df[df['class'] == filter_class] + df = df[df["class"].str.match(r"(" + str(filter_class) + ")") == True] # noqa: E712 + df.drop(axis=1, columns=["class"], inplace=True) ## filter relevant date - if filter_date and 'date' in df.columns: - log.debug(' filter for date: ' + filter_date.strftime("%Y-%m-%d")) - df = df[df['date'] == 
pd.Timestamp(filter_date)] - df.drop(axis=1, columns=['date', 'day'], inplace=True) + if filter_date and "date" in df.columns: + log.debug(" filter for date: " + filter_date.strftime("%Y-%m-%d")) + df = df[df["date"] == pd.Timestamp(filter_date)] + df.drop(axis=1, columns=["date", "day"], inplace=True) log.debug(df.columns) log.debug(df.info()) - with pd.option_context('display.max_rows', None, 'display.max_columns', None): log.debug(df.head()) - return(df) + with pd.option_context("display.max_rows", None, "display.max_columns", None): + log.debug(df.head()) + return df + def render_payload(df): """ @@ -73,26 +84,27 @@ def render_payload(df): @param df: pandas.DataFrame(), the data frame with the entries from DSB @return string, the rendered payload """ - data=[] + data = [] for index, row in df.iterrows(): - #if DEBUG: print(row) - entry = '' - if 'class' in row: - entry = 'Klasse ' + row['class'] + ', ' - entry += row['type'] + ' in Stunde ' + row['lesson'] + ', Fach ' + row['subject'] - if row['subject'] != row['new_subject']: - entry += ' --> ' + row['new_subject'] - if row['room'] != '---': - entry += ', in Raum ' + row['room'] - if row['new_teacher'] != '---': - entry += ', Lehrer ' + row['new_teacher'] - if row['text'] != '---': - entry += ', ' + row['text'] + # if DEBUG: print(row) + entry = "" + if "class" in row: + entry = "Klasse " + row["class"] + ", " + entry += row["type"] + " in Stunde " + row["lesson"] + ", Fach " + row["subject"] + if row["subject"] != row["new_subject"]: + entry += " --> " + row["new_subject"] + if row["room"] != "---": + entry += ", in Raum " + row["room"] + if row["new_teacher"] != "---": + entry += ", Lehrer " + row["new_teacher"] + if row["text"] != "---": + entry += ", " + row["text"] log.debug(entry) data.append(entry) - return("\n".join(data)) + return "\n".join(data) + def send_pushover_message(userkey, apikey, title, message): """ @@ -102,17 +114,26 @@ def send_pushover_message(userkey, apikey, title, message): @param 
title: string, @param message: string, """ - import http.client, urllib + import http.client + import urllib + conn = http.client.HTTPSConnection("api.pushover.net:443") - conn.request("POST", "/1/messages.json", - urllib.parse.urlencode({ - "token": apikey, - "user": userkey, - "title": title, - "message": message, - }), { "Content-type": "application/x-www-form-urlencoded" }) + conn.request( + "POST", + "/1/messages.json", + urllib.parse.urlencode( + { + "token": apikey, + "user": userkey, + "title": title, + "message": message, + } + ), + {"Content-type": "application/x-www-form-urlencoded"}, + ) conn.getresponse() + """ ############################################################################### # M A I N @@ -120,93 +141,95 @@ def send_pushover_message(userkey, apikey, title, message): """ log = logging.getLogger() log_handler = logging.StreamHandler(sys.stdout) -if not 'LOGLEVEL' in os.environ: +if "LOGLEVEL" not in os.environ: log.setLevel(logging.INFO) log_handler.setLevel(logging.INFO) else: - if os.environ['LOGLEVEL'].lower() == 'debug': - log.setLevel(logging.DEBUG) - log_handler.setLevel(logging.DEBUG) - elif os.environ['LOGLEVEL'].lower() == 'info': - log.setLevel(logging.INFO) - log_handler.setLevel(logging.INFO) - elif os.environ['LOGLEVEL'].lower() == 'warning': - log.setLevel(logging.WARN) - log_handler.setLevel(logging.WARN) - elif os.environ['LOGLEVEL'].lower() == 'error': - log.setLevel(logging.ERROR) - log_handler.setLevel(logging.ERROR) - else: - log.setLevel(logging.INFO) - log_handler.setLevel(logging.INFO) -log_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + if os.environ["LOGLEVEL"].lower() == "debug": + log.setLevel(logging.DEBUG) + log_handler.setLevel(logging.DEBUG) + elif os.environ["LOGLEVEL"].lower() == "info": + log.setLevel(logging.INFO) + log_handler.setLevel(logging.INFO) + elif os.environ["LOGLEVEL"].lower() == "warning": + log.setLevel(logging.WARN) + log_handler.setLevel(logging.WARN) 
+ elif os.environ["LOGLEVEL"].lower() == "error": + log.setLevel(logging.ERROR) + log_handler.setLevel(logging.ERROR) + else: + log.setLevel(logging.INFO) + log_handler.setLevel(logging.INFO) +log_formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") log_handler.setFormatter(log_formatter) log.addHandler(log_handler) -log.info('DSB to Pushover version ' + VERSION + ' started') +log.info("DSB to Pushover version " + VERSION + " started") # Read environment variables -log.debug('Validate environment variables') -if not 'DSB_USERNAME' in os.environ: - log.error('Environment variable DSB_USERNAME not defined') - raise Exception('Environment variable DSB_USERNAME not defined') -if not 'DSB_PASSWORD' in os.environ: - log.error('Environment variable DSB_PASSWORD not defined') - raise Exception('Environment variable DSB_PASSWORD not defined') -if not 'PUSHOVER_USER_KEY' in os.environ: - log.error('Environment variable PUSHOVER_USER_KEY not defined') - raise Exception('Environment variable PUSHOVER_USER_KEY not defined') -if not 'PUSHOVER_API_KEY' in os.environ: - log.error('Environment variable PUSHOVER_API_KEY not defined') - raise Exception('Environment variable PUSHOVER_API_KEY not defined') -filter_date = None -filter_date = datetime.date.today() + datetime.timedelta(days=1) +log.debug("Validate environment variables") +if "DSB_USERNAME" not in os.environ: + log.error("Environment variable DSB_USERNAME not defined") + raise Exception("Environment variable DSB_USERNAME not defined") +if "DSB_PASSWORD" not in os.environ: + log.error("Environment variable DSB_PASSWORD not defined") + raise Exception("Environment variable DSB_PASSWORD not defined") +if "PUSHOVER_USER_KEY" not in os.environ: + log.error("Environment variable PUSHOVER_USER_KEY not defined") + raise Exception("Environment variable PUSHOVER_USER_KEY not defined") +if "PUSHOVER_API_KEY" not in os.environ: + log.error("Environment variable PUSHOVER_API_KEY not defined") + raise 
Exception("Environment variable PUSHOVER_API_KEY not defined") +filter_date = None +filter_date = datetime.date.today() + datetime.timedelta(days=1) filter_schoolclass = None -if not 'FILTER_SCHOOLCLASS' in os.environ: - log.info('Environment variable FILTER_SCHOOLCLASS not defined') +if "FILTER_SCHOOLCLASS" not in os.environ: + log.info("Environment variable FILTER_SCHOOLCLASS not defined") else: - filter_schoolclass = os.environ['FILTER_SCHOOLCLASS'] -if not 'DSB_TABLE_FIELDS' in os.environ: - tablemapper=['type','class','lesson','subject','room','new_subject','new_teacher','teacher'] + filter_schoolclass = os.environ["FILTER_SCHOOLCLASS"] +if "DSB_TABLE_FIELDS" not in os.environ: + tablemapper = ["type", "class", "lesson", "subject", "room", "new_subject", "new_teacher", "teacher"] else: - tablemapper=os.environ['DSB_TABLE_FIELDS'].replace(" ", "").split(',') -log.debug('Use following DSBMonile table fileds: ' + ",".join(tablemapper)) + tablemapper = os.environ["DSB_TABLE_FIELDS"].replace(" ", "").split(",") +log.debug("Use following DSBMonile table fileds: " + ",".join(tablemapper)) # Get data from DSB -log.debug('Request data from DSB service') -dsbclient = dsbapi.DSBApi(os.environ['DSB_USERNAME'], os.environ['DSB_PASSWORD'], tablemapper=tablemapper) +log.debug("Request data from DSB service") +dsbclient = dsbapi.DSBApi(os.environ["DSB_USERNAME"], os.environ["DSB_PASSWORD"], tablemapper=tablemapper) -entries = dsbclient.fetch_entries() # Rückgabe einer JSON Liste an Arrays +entries = dsbclient.fetch_entries() # Rückgabe einer JSON Liste an Arrays # Parse DSB data -log.debug('Parse received data') -df = parse_dsb_data(entries, filter_class=filter_schoolclass, filter_date=filter_date) # parse the data +log.debug("Parse received data") +df = parse_dsb_data(entries, filter_class=filter_schoolclass, filter_date=filter_date) # parse the data if len(df.index) <= 0: - log.info('Keine Einträge gefunden') - log.info('DSB to Pushover version ' + VERSION + ' ended') + 
log.info("Keine Einträge gefunden") + log.info("DSB to Pushover version " + VERSION + " ended") sys.exit() else: - log.info(str(len(df.index)) + ' Einträge gefunden') + log.info(str(len(df.index)) + " Einträge gefunden") # extract the timestamp when the last update was made -updated = df['updated'].max() -df.drop(axis=1, columns=['updated'], inplace=True) +updated = df["updated"].max() +df.drop(axis=1, columns=["updated"], inplace=True) # render the subject of the pushover message -log.debug('Render pushover message') -SUBJECT = 'Vertretungsplan ' +log.debug("Render pushover message") +SUBJECT = "Vertretungsplan " if filter_schoolclass: - SUBJECT += 'für die Klasse ' + filter_schoolclass + ' ' + SUBJECT += "für die Klasse " + filter_schoolclass + " " if filter_date: - SUBJECT += 'für den ' + filter_date.strftime("%d.%m.%Y") -PAYLOAD = 'Stand: ' + updated.strftime("%d.%m.%Y %H:%M") + "\n" -PAYLOAD += render_payload(df) # render the payload from DSB entries + SUBJECT += "für den " + filter_date.strftime("%d.%m.%Y") +PAYLOAD = "Stand: " + updated.strftime("%d.%m.%Y %H:%M") + "\n" +PAYLOAD += render_payload(df) # render the payload from DSB entries -log.debug('Send data to Pushover service:') +log.debug("Send data to Pushover service:") log.debug(SUBJECT) log.debug(PAYLOAD) -send_pushover_message(userkey=os.environ['PUSHOVER_USER_KEY'], apikey=os.environ['PUSHOVER_API_KEY'], title=SUBJECT, message=PAYLOAD) +send_pushover_message( + userkey=os.environ["PUSHOVER_USER_KEY"], apikey=os.environ["PUSHOVER_API_KEY"], title=SUBJECT, message=PAYLOAD +) -log.info('DSB to Pushover version ' + VERSION + ' ended') +log.info("DSB to Pushover version " + VERSION + " ended") sys.exit() diff --git a/src/usr/lib/python3.11/site-packages/dsbapi/__init__.py b/src/usr/lib/python3.12/site-packages/dsbapi/__init__.py similarity index 78% rename from src/usr/lib/python3.11/site-packages/dsbapi/__init__.py rename to src/usr/lib/python3.12/site-packages/dsbapi/__init__.py index 556a849..e3f9844 
100644 --- a/src/usr/lib/python3.11/site-packages/dsbapi/__init__.py +++ b/src/usr/lib/python3.12/site-packages/dsbapi/__init__.py @@ -3,26 +3,34 @@ DSBApi An API for the DSBMobile substitution plan solution, which many schools use. """ -__version_info__ = ('0', '0', '14') -__version__ = '.'.join(__version_info__) +__version_info__ = ("0", "0", "14") +__version__ = ".".join(__version_info__) -import bs4 -import json -import requests +import base64 import datetime import gzip +import json import uuid -import base64 + +import bs4 +import requests try: from PIL import Image except: import Image + import pytesseract import requests + class DSBApi: - def __init__(self, username, password, tablemapper=['type','class','lesson','subject','room','new_subject','new_teacher','teacher']): + def __init__( + self, + username, + password, + tablemapper=["type", "class", "lesson", "subject", "room", "new_subject", "new_teacher", "teacher"], + ): """ Class constructor for class DSBApi @param username: string, the username of the DSBMobile account @@ -35,19 +43,18 @@ def __init__(self, username, password, tablemapper=['type','class','lesson','sub self.username = username self.password = password if not isinstance(tablemapper, list): - raise TypeError('Attribute tablemapper is not of type list!') + raise TypeError("Attribute tablemapper is not of type list!") self.tablemapper = tablemapper # loop over tablemapper array and identify the keyword "class". 
The "class" will have a special operation in split up the datasets self.class_index = None i = 0 while i < len(self.tablemapper): - if self.tablemapper[i] == 'class': + if self.tablemapper[i] == "class": self.class_index = i break i += 1 - def fetch_entries(self, images=True): """ Fetch all the DSBMobile entries @@ -70,33 +77,33 @@ def fetch_entries(self, images=True): "Device": "SM-G930F", "BundleId": "de.heinekingmedia.dsbmobile", "Date": current_time, - "LastUpdate": current_time + "LastUpdate": current_time, } # Convert params into the right format - params_bytestring = json.dumps(params, separators=(',', ':')).encode("UTF-8") + params_bytestring = json.dumps(params, separators=(",", ":")).encode("UTF-8") params_compressed = base64.b64encode(gzip.compress(params_bytestring)).decode("UTF-8") # Send the request json_data = {"req": {"Data": params_compressed, "DataType": 1}} - timetable_data = requests.post(self.DATA_URL, json = json_data) + timetable_data = requests.post(self.DATA_URL, json=json_data) # Decompress response data_compressed = json.loads(timetable_data.content)["d"] data = json.loads(gzip.decompress(base64.b64decode(data_compressed))) # validate response before proceed - if data['Resultcode'] != 0: - raise Exception(data['ResultStatusInfo']) + if data["Resultcode"] != 0: + raise Exception(data["ResultStatusInfo"]) # Find the timetable page, and extract the timetable URL from it final = [] for page in data["ResultMenuItems"][0]["Childs"]: - for child in page["Root"]["Childs"]: - if isinstance(child["Childs"], list): - for sub_child in child["Childs"]: - final.append(sub_child["Detail"]) - else: - final.append(child["Childs"]["Detail"]) + for child in page["Root"]["Childs"]: + if isinstance(child["Childs"], list): + for sub_child in child["Childs"]: + final.append(sub_child["Detail"]) + else: + final.append(child["Childs"]["Detail"]) if not final: raise Exception("Timetable data could not be found") output = [] @@ -118,7 +125,6 @@ def 
fetch_entries(self, images=True): else: return output - def fetch_img(self, imgurl): """ Extract data from the image @@ -131,11 +137,10 @@ def fetch_img(self, imgurl): try: img = Image.open(io.BytesIO(requests.get(imgurl))) except: - return #haha this is quality coding surplus - + return # haha this is quality coding surplus try: - return pytesseract.image_to_string(img) + return pytesseract.image_to_string(img) except TesseractError: raise Exception("You have to make the tesseract command accessible and work!") return None @@ -150,10 +155,13 @@ def fetch_timetable(self, timetableurl): sauce = requests.get(timetableurl).text soupi = bs4.BeautifulSoup(sauce, "html.parser") ind = -1 - for soup in soupi.find_all('table', {'class': 'mon_list'}): + for soup in soupi.find_all("table", {"class": "mon_list"}): ind += 1 - updates = [o.p.findAll('span')[-1].next_sibling.split("Stand: ")[1] for o in soupi.findAll('table', {'class': 'mon_head'})][ind] - titles = [o.text for o in soupi.findAll('div', {'class': 'mon_title'})][ind] + updates = [ + o.p.findAll("span")[-1].next_sibling.split("Stand: ")[1] + for o in soupi.findAll("table", {"class": "mon_head"}) + ][ind] + titles = [o.text for o in soupi.findAll("div", {"class": "mon_title"})][ind] date = titles.split(" ")[0] day = titles.split(" ")[1].split(", ")[0].replace(",", "") entries = soup.find_all("tr") @@ -168,19 +176,19 @@ def fetch_timetable(self, timetableurl): class_array = infos[self.class_index].text.split(", ") else: # define a dummy value if we don't have a class column (with keyword "class") - class_array = [ '---' ] + class_array = ["---"] for class_ in class_array: new_entry = dict() new_entry["date"] = date - new_entry["day"] = day + new_entry["day"] = day new_entry["updated"] = updates i = 0 while i < len(infos): if i < len(self.tablemapper): attribute = self.tablemapper[i] else: - attribute = 'col' + str(i) - if attribute == 'class': + attribute = "col" + str(i) + if attribute == "class": 
new_entry[attribute] = class_ if infos[i].text != "\xa0" else "---" else: new_entry[attribute] = infos[i].text if infos[i].text != "\xa0" else "---" diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 From 30f5e19fdeaf9056281ae5a6c0956955d03aba86 Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 17:39:59 +0100 Subject: [PATCH 02/13] fixing LegacyKeyValueFormat and path to requirements.txt --- .github/workflows/test.yaml | 2 +- Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 19df923..2dcd150 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -23,7 +23,7 @@ jobs: run: | python -m pip install --upgrade pip pip install ruff pytest - cd src/ && pip install -r requirements.txt + pip install -r requirements.txt - name: Lint with ruff run: | # stop the build if there are Python syntax errors or undefined names diff --git a/Dockerfile b/Dockerfile index 3715d54..64d9b71 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,7 +4,7 @@ LABEL maintainer="Michael Oberdorf IT-Consulting " LABEL site.local.program.version="1.0.14" # LOGLEVEL can be one of debug, info, warning , error -ENV LOGLEVEL info +ENV LOGLEVEL=info COPY --chown=root:root /requirements.txt / From a6947106eb73d6fffe7a1b67d69d91e166124d3d Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 18:03:31 +0100 Subject: [PATCH 03/13] Adding Test --- tests/test_dsb2pushover.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 tests/test_dsb2pushover.py diff --git a/tests/test_dsb2pushover.py b/tests/test_dsb2pushover.py new file mode 100644 index 0000000..eb62f11 --- /dev/null +++ b/tests/test_dsb2pushover.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +import pandas as pd + +from src.app.dsb2pushover import render_payload + + +def test_render_payload(): + data = [ + { + "class": "6a", + 
"type": "entfall", + "lesson": "3", + "subject": "Math", + "new_subject": "Math", + "room": "101", + "new_teacher": "Mr. Text", + "text": "Some text", + } + ] + expected_result = "Klasse 6a, entfall in Stunde 3, Fach Math, in Raum 101, Lehrer Mr. Text, Some text" + + result = render_payload(pd.DataFrame.from_dict(data)) + + assert result == expected_result From 8bd7d331dae439874e68d3f4f7e1305cb088fbb7 Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 18:06:54 +0100 Subject: [PATCH 04/13] Adding pandas --- .github/workflows/test.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 2dcd150..3ea36ad 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -24,6 +24,7 @@ jobs: python -m pip install --upgrade pip pip install ruff pytest pip install -r requirements.txt + pip install pandas - name: Lint with ruff run: | # stop the build if there are Python syntax errors or undefined names From ea4b4348770febeefcd284fe1e52d8055c29d52b Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 18:10:39 +0100 Subject: [PATCH 05/13] Add python modules installed directly in Dockerfile as alpine package --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 3ea36ad..14f1d10 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -24,7 +24,7 @@ jobs: python -m pip install --upgrade pip pip install ruff pytest pip install -r requirements.txt - pip install pandas + pip install beautifulsoup4 curl numpy packaging pandas pillow requests - name: Lint with ruff run: | # stop the build if there are Python syntax errors or undefined names From b2c605501946d8c0444c4f36abe98970e7ff5bd4 Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 18:12:55 +0100 Subject: [PATCH 06/13] Add python modules installed directly in Dockerfile 
as alpine package --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 14f1d10..8272ce5 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -24,7 +24,7 @@ jobs: python -m pip install --upgrade pip pip install ruff pytest pip install -r requirements.txt - pip install beautifulsoup4 curl numpy packaging pandas pillow requests + pip install beautifulsoup4 numpy packaging pandas pillow requests - name: Lint with ruff run: | # stop the build if there are Python syntax errors or undefined names From 0bbf010c21fa8d1b77a7809b011cd1d6d23468b9 Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 18:17:00 +0100 Subject: [PATCH 07/13] Add dsbapi library to test path --- .github/workflows/test.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 8272ce5..615a2a1 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -25,6 +25,7 @@ jobs: pip install ruff pytest pip install -r requirements.txt pip install beautifulsoup4 numpy packaging pandas pillow requests + cp ./src/usr/lib/python3.12/site-packages/dsbapi/__init__.py /usr/lib/python${{ matrix.python-version }}/site-packages/dsbapi/__init__.py - name: Lint with ruff run: | # stop the build if there are Python syntax errors or undefined names From ad80fd6d1730b45f85a203d686c1ff2d72541431 Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 18:24:30 +0100 Subject: [PATCH 08/13] Add dsbapi library to test path --- .github/workflows/test.yaml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 615a2a1..db408d6 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -25,7 +25,16 @@ jobs: pip install ruff pytest pip install -r requirements.txt pip install 
beautifulsoup4 numpy packaging pandas pillow requests - cp ./src/usr/lib/python3.12/site-packages/dsbapi/__init__.py /usr/lib/python${{ matrix.python-version }}/site-packages/dsbapi/__init__.py + - name: Overwrite dsbapi from local version + run: | + # identify site-packages directory + sitepackages_dir=$(python -c 'import site; print(site.getsitepackages()[0])') + if [ -d "${sitepackages_dir}" ]; then + cp ./src/usr/lib/python3.12/site-packages/dsbapi/__init__.py ${sitepackages_dir}/dsbapi/__init__.py + else + echo "Directory not found: ${sitepackages_dir}" + exit 1 + fi - name: Lint with ruff run: | # stop the build if there are Python syntax errors or undefined names From 8ec1fe8896a33fb103f78092c561acc6aae14b30 Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 18:28:51 +0100 Subject: [PATCH 09/13] define mandatory environment variables --- tests/__init__.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/__init__.py b/tests/__init__.py index e69de29..2584d63 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +import os + +# Define mandatory environment variables +os.environ["DSB_USERNAME"] = "DSB_USERNAME" +os.environ["DSB_PASSWORD"] = "DSB_PASSWORD" +os.environ["PUSHOVER_USER_KEY"] = "PUSHOVER_USER_KEY" +os.environ["PUSHOVER_API_KEY"] = "PUSHOVER_API_KEY" From ac03dd9ff6f698456be68eda6245dd4e62e6c23c Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 18:33:53 +0100 Subject: [PATCH 10/13] change test --- tests/test_dsb2pushover.py | 24 ------------------------ tests/test_dsbapi.py | 10 ++++++++++ 2 files changed, 10 insertions(+), 24 deletions(-) delete mode 100644 tests/test_dsb2pushover.py create mode 100644 tests/test_dsbapi.py diff --git a/tests/test_dsb2pushover.py b/tests/test_dsb2pushover.py deleted file mode 100644 index eb62f11..0000000 --- a/tests/test_dsb2pushover.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -import pandas as pd - -from 
src.app.dsb2pushover import render_payload - - -def test_render_payload(): - data = [ - { - "class": "6a", - "type": "entfall", - "lesson": "3", - "subject": "Math", - "new_subject": "Math", - "room": "101", - "new_teacher": "Mr. Text", - "text": "Some text", - } - ] - expected_result = "Klasse 6a, entfall in Stunde 3, Fach Math, in Raum 101, Lehrer Mr. Text, Some text" - - result = render_payload(pd.DataFrame.from_dict(data)) - - assert result == expected_result diff --git a/tests/test_dsbapi.py b/tests/test_dsbapi.py new file mode 100644 index 0000000..59d235f --- /dev/null +++ b/tests/test_dsbapi.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +import dsbapi +from packaging import version + + +def test_dsbapi_version(): + expected_result = "0.0.14" + result = version.parse(dsbapi.__version__) + + assert result == expected_result From 2f80fe58e2a3e6d80d8dd6fca4d3dc0f40a6dd5a Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 18:36:24 +0100 Subject: [PATCH 11/13] change test --- tests/test_dsbapi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_dsbapi.py b/tests/test_dsbapi.py index 59d235f..b3ce76b 100644 --- a/tests/test_dsbapi.py +++ b/tests/test_dsbapi.py @@ -4,7 +4,7 @@ def test_dsbapi_version(): - expected_result = "0.0.14" + expected_result = version.parse("0.0.14") result = version.parse(dsbapi.__version__) assert result == expected_result From c5ccca7bc4e9fd02756c90770e6194163302229c Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 18:40:25 +0100 Subject: [PATCH 12/13] Upgrade artifact upload --- .github/workflows/container-image-build-validation.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/container-image-build-validation.yaml b/.github/workflows/container-image-build-validation.yaml index 242ad8f..0eab01a 100644 --- a/.github/workflows/container-image-build-validation.yaml +++ b/.github/workflows/container-image-build-validation.yaml @@ 
-34,7 +34,7 @@ jobs: tags: container-build:test outputs: type=docker,dest=/tmp/container.tar - name: Upload container image as artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: container-build path: /tmp/container.tar From 338009578b7d3ff716be7baa7f981c3f9f836bd6 Mon Sep 17 00:00:00 2001 From: Michael Oberdorf Date: Sun, 22 Dec 2024 18:45:16 +0100 Subject: [PATCH 13/13] Removing python --- .tool-versions | 1 - 1 file changed, 1 deletion(-) diff --git a/.tool-versions b/.tool-versions index e92cdd4..df76196 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,2 +1 @@ -python 3.11.4 pre-commit 4.0.1