diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..2a26c77 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,6 @@ +# Ignore all hidden files and folders +.* + +# But keep these specific hidden files/folders +!.web/bun.lockb +!.web/package.json diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4551d89..038e58d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -67,7 +67,7 @@ jobs: strategy: matrix: - python-version: [ "3.11", "3.12", "3.13" ] + python-version: [ "3.13" ] os: [ ubuntu-latest, windows-latest, macos-latest ] distribution: [ "${{ needs.build.outputs.WHL }}", "${{ needs.build.outputs.TARGZ }}" ] diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml new file mode 100644 index 0000000..ddde004 --- /dev/null +++ b/.github/workflows/docker-build.yml @@ -0,0 +1,97 @@ +name: Build and Push Docker Image + +on: + workflow_call: + inputs: + image_tag: + description: 'Docker image tag' + required: true + type: string + is_release: + description: 'Whether this is a release build' + required: false + type: boolean + default: false + push: + branches: + - main + pull_request: + branches: + - main + types: [opened, synchronize, reopened] + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + build-and-push: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set image tag + id: tag + run: | + if [ "${{ github.event_name }}" = "workflow_call" ]; then + echo "tag=${{ inputs.image_tag }}" >> $GITHUB_OUTPUT + echo "is_release=${{ inputs.is_release }}" >> $GITHUB_OUTPUT + # Extract version 
without 'v' prefix for release builds + if [ "${{ inputs.is_release }}" = "true" ]; then + VERSION_WITHOUT_V="${{ inputs.image_tag }}"; VERSION_WITHOUT_V=${VERSION_WITHOUT_V#v} + echo "version_without_v=$VERSION_WITHOUT_V" >> $GITHUB_OUTPUT + fi + elif [ "${{ github.ref }}" = "refs/heads/main" ]; then + echo "tag=dev" >> $GITHUB_OUTPUT + echo "is_release=false" >> $GITHUB_OUTPUT + else + echo "tag=pr-${{ github.event.number }}" >> $GITHUB_OUTPUT + echo "is_release=false" >> $GITHUB_OUTPUT + fi + - name: Extract metadata + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=pr + type=sha,prefix=sha- + type=raw,value=${{ steps.tag.outputs.tag }},enable=${{ steps.tag.outputs.is_release == 'true' }} + type=raw,value=${{ steps.tag.outputs.version_without_v }},enable=${{ steps.tag.outputs.is_release == 'true' }} + type=raw,value=latest,enable=${{ steps.tag.outputs.is_release == 'true' }} + - name: Build and push Docker image + uses: docker/build-push-action@v5 + with: + context: . 
+ platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Output image details + run: | + echo "Built and pushed images:" + echo "${{ steps.meta.outputs.tags }}" + echo "" + echo "Image digest:" + echo "${{ steps.meta.outputs.json }}" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index fc67055..7dff981 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,68 +1,160 @@ -name: Release package +name: Release Docker Image on: push: tags: - - "v[0-9]+.[0-9]+.[0-9]+" # normal release - - "v[0-9]+.[0-9]+.[0-9]+rc[0-9]+" # release candidate - - "v[0-9]+.[0-9]+.[0-9]+[ab][0-9]+" # alpha or beta release + - 'v*' + workflow_dispatch: + inputs: + version: + description: 'Version to release (e.g., v1.0.0)' + required: true + type: string jobs: - build: - uses: ./.github/workflows/build.yml - - upload: + release: runs-on: ubuntu-latest - needs: build - outputs: - DO_GITHUB_RELEASE: ${{ steps.detect-release.outputs.DO_GITHUB_RELEASE }} + permissions: + contents: read + packages: write steps: - - name: Download Artifact - uses: actions/download-artifact@v4 + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Extract version from tag + id: version + run: | + if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then + VERSION="${{ github.event.inputs.version }}" + else + VERSION="${{ github.ref_name }}" + fi + # Remove 'v' prefix if present + VERSION=${VERSION#v} + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "tag=v$VERSION" >> $GITHUB_OUTPUT + echo "Released version: $VERSION" + + - name: Call Docker Build Workflow + id: docker-build + uses: actions/github-script@v7 with: - name: distributions - path: dist + script: | + const { data } = await github.rest.actions.createWorkflowDispatch({ + owner: context.repo.owner, + repo: context.repo.repo, + workflow_id: 
'docker-build.yml', + ref: context.ref, + inputs: { + image_tag: '${{ steps.version.outputs.tag }}', + is_release: 'true' + } + }); + console.log('Docker build workflow triggered for release'); + return data; - - name: Publish package to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 + - name: Wait for Docker build to complete + uses: actions/github-script@v7 with: - password: ${{ secrets.PYPI_API_TOKEN }} - - name: Detect release version - id: detect-release - run: | - do_github_release=$((echo "${GITHUB_REF}" | grep -Eq "^refs\/tags\/v[0-9]+\.[0-9]+\.[0-9]+(rc[0-9]+)?$") && echo 1 || echo 0) - echo DO_GITHUB_RELEASE=$do_github_release >> $GITHUB_OUTPUT - echo DO_GITHUB_RELEASE=$do_github_release + script: | + const maxWaitTime = 10 * 60 * 1000; // 10 minutes + const checkInterval = 30 * 1000; // 30 seconds + const startTime = Date.now(); - publish: - runs-on: ubuntu-latest - needs: upload - if: needs.upload.outputs.DO_GITHUB_RELEASE == '1' - permissions: - contents: write + console.log('Waiting for Docker build workflow to complete...'); - steps: - - name: Check out Git repository - uses: actions/checkout@v4 + while (Date.now() - startTime < maxWaitTime) { + // Get the most recent workflow runs for docker-build.yml + const { data: runs } = await github.rest.actions.listWorkflowRuns({ + owner: context.repo.owner, + repo: context.repo.repo, + workflow_id: 'docker-build.yml', + per_page: 10 + }); - - name: Download Build - uses: actions/download-artifact@v4 - with: - name: distributions - path: dist + // Find the run that was triggered by our dispatch + // Look for runs within the last 5 minutes that are workflow_dispatch events + const recentRuns = runs.workflow_runs.filter(run => + run.event === 'workflow_dispatch' && + run.created_at > new Date(Date.now() - 5 * 60 * 1000).toISOString() + ); - - name: Detect prerelease + console.log(`Found ${recentRuns.length} recent workflow_dispatch runs`); + + // Find the most recent run that matches our criteria + const ourRun = 
recentRuns.find(run => { + // Check if this run has the expected inputs (is_release: true) + // We can't directly access inputs from the API, so we'll use heuristics + // Look for runs that are either in progress or recently completed + return run.status === 'in_progress' || + run.status === 'queued' || + (run.status === 'completed' && run.conclusion); + }); + + if (!ourRun) { + // If no active run found, check if there's a recently completed run + const completedRun = recentRuns.find(run => run.status === 'completed'); + if (completedRun) { + console.log(`Found completed run ${completedRun.id} with conclusion: ${completedRun.conclusion}`); + if (completedRun.conclusion === 'success') { + console.log('Docker build completed successfully'); + break; + } else { + throw new Error(`Docker build failed with conclusion: ${completedRun.conclusion}`); + } + } else { + console.log('No matching workflow run found, assuming completed'); + break; + } + } else { + console.log(`Workflow run ${ourRun.id} status: ${ourRun.status}, conclusion: ${ourRun.conclusion || 'pending'}`); + + if (ourRun.status === 'completed') { + if (ourRun.conclusion === 'success') { + console.log('Docker build completed successfully'); + break; + } else { + throw new Error(`Docker build failed with conclusion: ${ourRun.conclusion}`); + } + } + } + + console.log(`Waiting ${checkInterval / 1000} seconds before next check...`); + await new Promise(resolve => setTimeout(resolve, checkInterval)); + } + + if (Date.now() - startTime >= maxWaitTime) { + throw new Error('Timeout waiting for Docker build to complete'); + } + + - name: Create GitHub Release + if: github.event_name == 'push' run: | - do_prerelease=$((echo "${GITHUB_REF}" | grep -Eq "^refs\/tags\/v[0-9]+\.[0-9]+\.[0-9]+rc[0-9]+$") && echo 1 || echo 0) - echo DO_PRERELEASE=$do_prerelease >> $GITHUB_ENV - echo DO_PRERELEASE=$do_prerelease + gh release create "${{ steps.version.outputs.tag }}" \ + --title "Release ${{ steps.version.outputs.tag }}" \ + 
--notes "## Docker Image - - name: Attach artifacts to github release - uses: softprops/action-gh-release@v2 - with: - files: | - dist/*.whl - CHANGELOG.md - prerelease: ${{ env.DO_PRERELEASE == '1' }} - body_path: CHANGELOG.md + The Docker image has been built and pushed to GitHub Container Registry: + + \`\`\`bash + docker pull ghcr.io/${{ github.repository }}:${{ steps.version.outputs.tag }} + \`\`\` + + ### Available Tags + - \`${{ steps.version.outputs.tag }}\` - Specific version + - \`${{ steps.version.outputs.version }}\` - Version without 'v' prefix + - \`latest\` - Latest release (if this is the default branch) + + ### Multi-Architecture Support + This image supports both \`linux/amd64\` and \`linux/arm64\` architectures." \ + ${{ (contains(steps.version.outputs.tag, 'alpha') || contains(steps.version.outputs.tag, 'beta') || contains(steps.version.outputs.tag, 'rc')) && '--prerelease' || '' }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Output release details + run: | + echo "Release details:" + echo "Version: ${{ steps.version.outputs.version }}" + echo "Tag: ${{ steps.version.outputs.tag }}" + echo "Docker image: ghcr.io/${{ github.repository }}:${{ steps.version.outputs.tag }}" diff --git a/.gitignore b/.gitignore index b008d09..1d012d9 100644 --- a/.gitignore +++ b/.gitignore @@ -107,6 +107,7 @@ celerybeat.pid # Environments .env +.env.production .venv env/ venv/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 105ce54..dd94311 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,9 +5,23 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0) ## [Unreleased] -## [v3.0.0] - +### Added + +- **Docker Support**: Complete containerization with multi-architecture support (AMD64 + ARM64) +- **CI/CD Pipeline**: Automated GitHub Actions workflows for testing, building, and deployment +- **GitHub Container Registry**: Automated Docker image publishing to `ghcr.io/leon1995/fwtv` +- **Multi-Platform Testing**: Automated 
testing across Python 3.11, 3.12, 3.13 on Linux, Windows, and macOS +- **Release Automation**: Automated release creation with versioned Docker images +- **Development Docker Images**: Pre-built development images tagged as `dev` +- **Release Docker Images**: Versioned images with tags like `v1.0.0`, `1.0.0`, and `latest` + +### Changed -### Changed app to run in the browser using reflex +- **Deployment**: Added Docker-first deployment option for easier production setup +- **Documentation**: Comprehensive README update with Docker usage instructions +- **Workflow Optimization**: Optimized release pipeline to reuse Docker build workflow (DRY principle) +- **Build Process**: Enhanced build workflow to trigger Docker builds on main branch pushes +- Changed app to run in the browser using reflex ## [2.4.1] - 2025-07-07 diff --git a/Caddyfile b/Caddyfile new file mode 100644 index 0000000..3e08274 --- /dev/null +++ b/Caddyfile @@ -0,0 +1,14 @@ +http://:8080 # has to match the frontend port (exposed port by docker) + +encode gzip + +@backend_routes path /_event/* /ping /_upload /_upload/* +handle @backend_routes { + reverse_proxy localhost:8000 # has to match the backend port of the reflex server +} + +root * /srv +route { + try_files {path} {path} /404.html + file_server +} diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..b5be787 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,51 @@ +# This docker file is intended to be used with docker compose to deploy a production +# instance of a Reflex app. + +# Stage 1: builder +FROM ghcr.io/astral-sh/uv:debian AS builder + +# Copy local context to `/app` inside container (see .dockerignore) +WORKDIR /app +COPY . . 
+ +# Create virtual environment and install dependencies +RUN uv venv +RUN uv sync --frozen + +ENV UV_NO_SYNC=1 + +# Deploy templates and prepare app +RUN uv run reflex init + +# Install pre-cached frontend dependencies (if exist) +RUN if [ -f .web/bun.lockb ]; then cd .web && ~/.local/share/reflex/bun/bin/bun install --frozen-lockfile; fi + +# Export static copy of frontend to /srv +RUN uv run reflex export --loglevel debug --frontend-only --no-zip && mv .web/build/client/* /srv/ && rm -rf .web + +# Stage 2: final image +FROM ghcr.io/astral-sh/uv:debian-slim +WORKDIR /app +ENV UV_NO_SYNC=1 + +# Install libpq-dev for psycopg (skip if not using postgres) +RUN apt-get update -y && apt-get install -y caddy libpq-dev && rm -rf /var/lib/apt/lists/* + +# Copy application and virtual environment from builder +COPY --from=builder /app /app +COPY --from=builder /srv /srv + +# Create data directories +RUN mkdir -p /app/data /app/uploaded_files + +ENV PYTHONUNBUFFERED=1 + +# Needed until Reflex properly passes SIGTERM on backend. 
+STOPSIGNAL SIGKILL + +RUN uv sync --frozen + +# has to match the port specified in the Caddyfile +EXPOSE 8080 + +CMD ["sh", "-c", "[ -d alembic ] && uv run reflex db migrate; caddy start && exec uv run reflex run --env prod --backend-only"] diff --git a/README.md b/README.md index 658f77f..3e5e667 100644 --- a/README.md +++ b/README.md @@ -1,28 +1,169 @@ -# FactorialHR work time verification +# FactorialHR Work Time Verification -![pyversions](https://img.shields.io/pypi/pyversions/fwtv) ![implementation](https://img.shields.io/pypi/implementation/fwtv) ![status](https://img.shields.io/pypi/status/fwtv) ![pypi](https://img.shields.io/pypi/v/fwtv) ![dpm](https://img.shields.io/pypi/dm/fwtv) +![pyversions](https://img.shields.io/pypi/pyversions/fwtv) ![implementation](https://img.shields.io/pypi/implementation/fwtv) ![status](https://img.shields.io/pypi/status/fwtv) ![pypi](https://img.shields.io/pypi/v/fwtv) ![dpm](https://img.shields.io/pypi/dm/fwtv) ![docker](https://img.shields.io/docker/pulls/ghcr.io/leon1995/fwtv) -This script verifies attendances whether they comply with german law. In particular, the following rules are verified: -- Whether the work time is longer than 6 hours without a break of 30 min -- Whether the work time is longer than 9 hours without a break of 45 min -- Whether the work time is longer than 10 hours without a break of 11 hours -- Whether the work time is within the time of 6am and 10pm +A web application built with [Reflex](https://reflex.dev) that verifies employee attendance records against German labor law requirements. The application integrates with FactorialHR's API to fetch attendance data and provides compliance checking with an intuitive web interface. 
+ +## ๐ŸŽฏ Features + +- **German Labor Law Compliance**: Automatically verifies attendance against German work time regulations +- **FactorialHR Integration**: Seamless connection to FactorialHR API for data retrieval +- **Modern Web Interface**: Built with Reflex for a responsive, modern UI +- **Docker Support**: Containerized deployment with multi-architecture support +- **CI/CD Pipeline**: Automated testing, building, and deployment + +## ๐Ÿ“‹ Compliance Rules + +The application verifies the following German labor law requirements: + +- โฐ **6-hour rule**: Work time longer than 6 hours requires a 30-minute break +- โฐ **9-hour rule**: Work time longer than 9 hours requires a 45-minute break +- โฐ **10-hour rule**: Work time longer than 10 hours requires an 11-hour rest period +- ๐Ÿ•• **Time window**: Work time must be within 6:00 AM and 10:00 PM ![main_window](./docs/images/working_time_verification.png "Main Window") -## Disclaimer +## ๐Ÿš€ Quick Start + +### Option 1: Docker (Recommended) + +```bash +# Pull the latest development image +docker pull ghcr.io/leon1995/fwtv:dev + +# Run the container +docker run -p 8080:8080 \ + -e FACTORIAL_API_KEY=your_api_key \ + -e FACTORIAL_COMPANY_ID=your_company_id \ + ghcr.io/leon1995/fwtv:dev +``` + +### Option 2: Local Development + +```bash +# Clone the repository +git clone https://github.com/leon1995/fwtv.git +cd fwtv + +# Install dependencies +uv sync --frozen + +# Configure environment +cp .env.sample .env +# Edit .env with your FactorialHR credentials + +# Run the application +uv run reflex run --env prod +``` + +## ๐Ÿณ Docker Images + +The project provides pre-built Docker images for easy deployment: + +| Tag | Description | Usage | +|-----|-------------|-------| +| `dev` | Latest development build | `ghcr.io/leon1995/fwtv:dev` | +| `v1.0.0` | Specific release version | `ghcr.io/leon1995/fwtv:v1.0.0` | +| `latest` | Latest stable release | `ghcr.io/leon1995/fwtv:latest` | + +### Multi-Architecture Support + 
+All Docker images support both `linux/amd64` and `linux/arm64` architectures, making them compatible with: +- Intel/AMD x86_64 systems +- ARM64 systems (Apple Silicon, ARM servers) + +## โš™๏ธ Configuration + +### Environment Variables + +Create a `.env` file with the following variables from [`.env.sample`](.env.sample) + +### FactorialHR Setup + +1. Log in to your FactorialHR account +2. Navigate to Settings โ†’ API +3. Generate an API key +4. Note your Company ID from the URL or settings + +## ๐Ÿ—๏ธ CI/CD Pipeline + +The project includes automated CI/CD pipelines: + +### Development Pipeline +- **Triggers**: Push to main branch, pull requests +- **Actions**: Linting, testing, Docker image building +- **Output**: Development Docker images tagged as `dev` + +### Release Pipeline +- **Triggers**: Git tags (e.g., `v1.0.0`) +- **Actions**: Version extraction, Docker image building, GitHub release creation +- **Output**: Versioned Docker images and GitHub releases + +### Workflow Features +- โœ… Multi-architecture Docker builds (AMD64 + ARM64) +- โœ… Automated testing across Python 3.11, 3.12, 3.13 +- โœ… Cross-platform compatibility (Linux, Windows, macOS) +- โœ… GitHub Container Registry integration +- โœ… Automated release management + +## ๐Ÿงช Development + +### Prerequisites + +- Python 3.13 +- [uv](https://docs.astral.sh/uv/) package manager +- Docker (optional) + +### Setup Development Environment + +```bash +# Install development dependencies +uv sync --group dev + +# Run tests +uv run pytest + +# Run linting +uv run ruff check . +uv run ruff format . + +# Run pre-commit hooks +uv run pre-commit run --all-files +``` + +## ๐Ÿค Contributing + +We welcome contributions! Please follow these steps: + +1. **Fork** the repository +2. **Create** a feature branch (`git checkout -b feature/amazing-feature`) +3. **Install** development dependencies (`uv sync --group dev`) +4. **Make** your changes +5. **Run** tests (`uv run pytest`) +6. 
**Commit** your changes (`git commit -m 'Add amazing feature'`) +7. **Push** to the branch (`git push origin feature/amazing-feature`) +8. **Open** a Pull Request + +### Development Guidelines + +- Follow the existing code style (enforced by Ruff) +- Add tests for new functionality +- Update documentation as needed +- Ensure all CI checks pass + +## โš ๏ธ Disclaimer -I do not guarantee that this package complies with german law all the time. Changes may occur anytime. Use at your own risk. +**Important**: This application is provided for informational purposes only. While it implements German labor law requirements, I do not guarantee complete compliance with current regulations. Labor laws may change, and this tool should not be considered legal advice. -Errors where the time attended is 1 min above the limit are ignored, because factorial's automated time tracking is not precises enough. +**Use at your own risk**: Always consult with legal professionals for official compliance verification. -## Usage +## ๐Ÿ“„ License -- clone this repository -- install dependencies using `uv sync --frozen` -- copy [`.env.sample`](.env.sample) to `.env` and adjust the environment variables -- run app `uv run reflex run --env prod` +This project is licensed under the GNU Affero General Public License v3 - see the [LICENSE](LICENSE) file for details. -## Contributing +## ๐Ÿ”— Links -Feel free to contribute! Please fork this repository, install the development dependencies with `pip install -e ".[dev]"` and create pull request. 
+- **Homepage**: https://github.com/leon1995/fwtv +- **Bug Tracker**: https://github.com/leon1995/fwtv/issues +- **Changelog**: https://github.com/leon1995/fwtv/blob/main/CHANGELOG.md +- **Docker Hub**: https://github.com/leon1995/fwtv/pkgs/container/fwtv diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000..601552e --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,29 @@ +services: + #db: + # image: postgres + # restart: unless-stopped + # environment: + # POSTGRES_PASSWORD: secret + # volumes: + # - postgres-data:/var/lib/postgresql/data + + redis: + image: redis + restart: unless-stopped + app: + build: + context: . + env_file: .env + environment: + #REFLEX_DB_URL: postgresql+psycopg://postgres:secret@db/postgres + REFLEX_REDIS_URL: redis://redis:6379 + ports: + - 8080:8080 + volumes: + - upload-data:/app/uploaded_files + restart: unless-stopped + +volumes: + #postgres-data: + # Uploaded files + upload-data: diff --git a/factorialhr_analysis/components/__init__.py b/factorialhr_analysis/components/__init__.py index ecffeac..9d8bbd9 100644 --- a/factorialhr_analysis/components/__init__.py +++ b/factorialhr_analysis/components/__init__.py @@ -1,4 +1,12 @@ +from factorialhr_analysis.components.authentication_component import requires_authentication from factorialhr_analysis.components.date_range_selector import date_inputs, date_range_picker +from factorialhr_analysis.components.footer import footer from factorialhr_analysis.components.navbar import navbar -__all__ = ['date_inputs', 'date_range_picker', 'navbar'] +__all__ = [ + 'date_inputs', + 'date_range_picker', + 'footer', + 'navbar', + 'requires_authentication', +] diff --git a/factorialhr_analysis/components/authentication_component.py b/factorialhr_analysis/components/authentication_component.py new file mode 100644 index 0000000..c7c23a8 --- /dev/null +++ b/factorialhr_analysis/components/authentication_component.py @@ -0,0 +1,31 @@ +"""Authentication component.""" + 
+import functools +from collections.abc import Callable + +import reflex as rx + +from factorialhr_analysis import states + + +def requires_authentication(page: Callable[[], rx.Component]) -> Callable[[], rx.Component]: + """Require authentication before rendering a page. + + If the user is not authenticated, then redirect to the login page. + """ + + @functools.wraps(page) + def protected_page() -> rx.Component: + return rx.fragment( + rx.cond( + states.OAuthSessionState.is_hydrated, + rx.cond( + states.OAuthSessionState.is_session_authenticated, + page(), + rx.spinner(on_mount=states.OAuthSessionState.redir), + ), + rx.text('hydrating states...'), + ) + ) + + return protected_page diff --git a/factorialhr_analysis/components/footer.py b/factorialhr_analysis/components/footer.py new file mode 100644 index 0000000..503470b --- /dev/null +++ b/factorialhr_analysis/components/footer.py @@ -0,0 +1,38 @@ +"""Footer component.""" + +import reflex as rx + +from factorialhr_analysis import states + + +def refresh_data() -> rx.Component: + """Refresh data button.""" + return rx.hstack( + rx.button( + rx.icon('refresh-ccw'), + on_click=states.DataState.refresh_data, + loading=states.DataState.is_loading, + border_radius='1em', + ), + rx.text( + 'Last data update: ', + rx.cond( + states.DataState.last_updated.is_not_none(), + rx.moment(states.DataState.last_updated, from_now=True), + 'Never', + ), + ), + align='center', + ) + + +def footer() -> rx.Component: + """Footer component.""" + return rx.el.footer( + refresh_data(), + position='fixed', + padding='0.5em', + bottom='0', + width='100%', + bg=rx.color('gray', 3), + ) diff --git a/factorialhr_analysis/components/navbar.py b/factorialhr_analysis/components/navbar.py index d361f43..030d0bf 100644 --- a/factorialhr_analysis/components/navbar.py +++ b/factorialhr_analysis/components/navbar.py @@ -3,27 +3,34 @@ import reflex as rx from reflex.style import color_mode, set_color_mode -from factorialhr_analysis import routes, 
state +from factorialhr_analysis import routes, states + + +class NavbarState(rx.State): + """State for the navigation bar.""" + + @rx.event + async def logout(self): + """Log out the user.""" + yield [states.OAuthSessionState.delete_session, states.DataState.clear, rx.redirect(routes.INDEX)] def dark_mode_toggle() -> rx.Component: """Toggle for dark/light mode.""" return rx.segmented_control.root( rx.segmented_control.item( - rx.icon(tag='monitor', size=20), + rx.icon(tag='monitor'), value='system', ), rx.segmented_control.item( - rx.icon(tag='sun', size=20), + rx.icon(tag='sun'), value='light', ), rx.segmented_control.item( - rx.icon(tag='moon', size=20), + rx.icon(tag='moon'), value='dark', ), on_change=set_color_mode, - variant='classic', - radius='large', value=color_mode, ) @@ -33,33 +40,69 @@ def navbar_link(text: str, url: str) -> rx.Component: return rx.link(rx.text(text, size='4', weight='medium'), href=url) +def refresh_data() -> rx.Component: + """Refresh data button.""" + return rx.hstack( + rx.button( + rx.icon('refresh-ccw'), + on_click=states.DataState.refresh_data, + loading=states.DataState.is_loading, + aria_label='Refresh data', + ), + rx.text( + 'Last data update: ', + rx.cond( + states.DataState.last_updated.is_not_none(), + rx.moment(states.DataState.last_updated, from_now=True), + 'Never', + ), + ), + align='center', + ) + + +def icon_menu() -> rx.Component: + """Icon menu.""" + return ( + rx.menu.root( + rx.menu.trigger( + rx.icon_button( + rx.icon('user'), + size='2', + radius='full', + ) + ), + rx.menu.content( + rx.menu.item( + rx.link( + rx.text('Log out'), + href=routes.INDEX, + on_click=NavbarState.logout, + ) + ), + ), + justify='end', + ), + ) + + def navbar() -> rx.Component: """Navigation bar component.""" return rx.box( rx.desktop_only( rx.hstack( - rx.heading('Factorialhr analysis', size='7', weight='bold'), rx.hstack( - navbar_link('Working time verification', '/#'), - spacing='5', + rx.link(rx.heading('Working time analysis', 
size='5', weight='bold'), href=routes.INDEX), + navbar_link('Verification', '/verification'), + navbar_link('Projects', '/projects'), + align_items='center', ), rx.hstack( - rx.menu.root( - rx.menu.trigger( - rx.icon_button( - rx.icon('user'), - size='2', - radius='full', - ) - ), - rx.menu.content( - rx.menu.item( - rx.link(rx.text('Log out'), href=routes.INDEX, on_click=state.LoginState.logout) - ), - ), - justify='end', - ), + refresh_data(), + rx.spacer(), + icon_menu(), dark_mode_toggle(), + justify='between', ), justify='between', align_items='center', @@ -67,5 +110,5 @@ def navbar() -> rx.Component: ), bg=rx.color('accent', 3), padding='1em', - width='100%', + top='0', ) diff --git a/factorialhr_analysis/constants.py b/factorialhr_analysis/constants.py new file mode 100644 index 0000000..8c05c9e --- /dev/null +++ b/factorialhr_analysis/constants.py @@ -0,0 +1,10 @@ +"""Constants for the application.""" + +import os + +CLIENT_ID: str = os.environ.get('FACTORIALHR_CLIENT_ID', '') +CLIENT_SECRET: str = os.environ.get('FACTORIALHR_CLIENT_SECRET', '') +REDIRECT_URI: str = os.environ.get('FACTORIALHR_REDIRECT_URI', '') +ENVIRONMENT_URL: str = os.environ.get('FACTORIALHR_ENVIRONMENT_URL', 'https://api.factorialhr.com') +API_KEY: str = os.environ.get('FACTORIALHR_API_KEY', '') +SCOPE = 'read' diff --git a/factorialhr_analysis/factorialhr_analysis.py b/factorialhr_analysis/factorialhr_analysis.py index ee402d3..54e6851 100644 --- a/factorialhr_analysis/factorialhr_analysis.py +++ b/factorialhr_analysis/factorialhr_analysis.py @@ -1,13 +1,31 @@ """Main app file for the FactorialHR Analysis application.""" -import dotenv +import logging + import reflex as rx from factorialhr_analysis import pages, routes -dotenv.load_dotenv() +logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO) + + +def backend_exception_handler(exc: Exception) -> None: + """Handle backend exceptions.""" + logger = logging.getLogger(__name__) + 
logger.exception('Backend exception', exc_info=exc) + + +def frontend_exception_handler(exc: Exception) -> None: + """Handle frontend exceptions.""" + logger = logging.getLogger(__name__) + logger.exception('Frontend exception', exc_info=exc) app = rx.App() +# app.backend_exception_handler = backend_exception_handler # noqa: ERA001 +# app.frontend_exception_handler = frontend_exception_handler # noqa: ERA001 + app.add_page(pages.index_page, route=routes.INDEX) -app.add_page(pages.login_page, route=routes.LOGIN_ROUTE) +app.add_page(pages.working_time_verification_page, route=routes.VERIFICATION_ROUTE) +app.add_page(pages.authorize_oauth_page, route=routes.OAUTH_AUTHORIZE_ROUTE) +app.add_page(pages.start_oauth_process, route=routes.OAUTH_START_ROUTE) diff --git a/factorialhr_analysis/pages/__init__.py b/factorialhr_analysis/pages/__init__.py index 70e8f6b..808e599 100644 --- a/factorialhr_analysis/pages/__init__.py +++ b/factorialhr_analysis/pages/__init__.py @@ -1,4 +1,5 @@ -from factorialhr_analysis.pages.index import index_page -from factorialhr_analysis.pages.login import login_page +from factorialhr_analysis.pages.index_page import index_page +from factorialhr_analysis.pages.oauth_page import authorize_oauth_page, start_oauth_process +from factorialhr_analysis.pages.working_time_verification_page import working_time_verification_page -__all__ = ['index_page', 'login_page'] +__all__ = ['authorize_oauth_page', 'index_page', 'start_oauth_process', 'working_time_verification_page'] diff --git a/factorialhr_analysis/pages/index.py b/factorialhr_analysis/pages/index.py deleted file mode 100644 index f32f55f..0000000 --- a/factorialhr_analysis/pages/index.py +++ /dev/null @@ -1,478 +0,0 @@ -"""The main page of the app.""" - -import asyncio -import collections -import csv -import datetime -import io -import os -import typing -from collections.abc import Container, Sequence - -import anyio.from_thread -import factorialhr -import reflex as rx -from 
reflex.utils.prerequisites import get_app - -from factorialhr_analysis import state, templates, working_time_verification - - -def time_to_moment(time_: datetime.time | None) -> rx.MomentDelta: - """Convert a datetime.time to a rx.MomentDelta.""" - if time_ is None: - return rx.MomentDelta() - return rx.MomentDelta(hours=time_.hour, minutes=time_.minute, seconds=time_.second) - - -class Attendance(typing.TypedDict): - """TypedDict for attendance.""" - - date: datetime.date - clock_in: rx.MomentDelta - clock_out: rx.MomentDelta - minutes: rx.MomentDelta - - -class ErrorToShow(typing.TypedDict): - """TypedDict for errors to show.""" - - name: str - affected_days: str - error: str - cumulated_break: datetime.timedelta - cumulated_attendance: datetime.timedelta - - attendances: Sequence[Attendance] - - -class DataState(rx.State): - """State holding all the data.""" - - _shifts: collections.defaultdict[int, list[factorialhr.AttendanceShift]] = collections.defaultdict( # noqa: RUF012 - list - ) # employee id as key - _employees: list[factorialhr.Employee] = [] # noqa: RUF012 - _employee_team_name_mapping: dict[int, list[str]] = {} # noqa: RUF012 - calculated_errors: list[working_time_verification.Error] = [] # noqa: RUF012 - errors_to_show: list[ErrorToShow] = [] # noqa: RUF012 - start_date: str = '' - end_date: str = '' - is_loading: bool = False # Guard flag - tolerance: str = '' - processed_employees: int = 0 # Number of employees processed so far - - filter_value: str = '' # Placeholder for search functionality - - selected_error_ids: list[int] = [] # noqa: RUF012 - - @rx.var - def date_error(self) -> bool: - """Check if the end date is before the start date.""" - if not self.start_date or not self.end_date: - return False - return datetime.date.fromisoformat(self.end_date) < datetime.date.fromisoformat(self.start_date) - - @rx.var - def disable_submit(self) -> bool: - """Disable the submit button if there is a date error.""" - return self.date_error or not 
self.start_date or not self.end_date - - def _should_cancel(self) -> bool: - """Check if the current session is still valid.""" - return self.router.session.client_token not in get_app().app.event_namespace.token_to_sid - - def _cleanup(self): - """Cleanup method to reset state.""" - self._shifts = collections.defaultdict(list) - self.processed_employees = 0 - self.errors_to_show = [] - self._employees = [] - self._employee_team_name_mapping = {} - self.selected_error_ids = [] - self.is_loading = False - - @rx.var - def length_of_employees(self) -> int: - """Get the length of employees.""" - return len(self._employees) - - async def _handle_employee( - self, client: factorialhr.ApiClient, employee: factorialhr.Employee, teams: Sequence[factorialhr.Team] - ): - """Handle fetching shifts for an employee.""" - async with self: - self._employees.append(employee) - self._employee_team_name_mapping[employee.id] = [ - t.name for t in teams if t.employee_ids and employee.id in t.employee_ids - ] - shifts = await factorialhr.ShiftsEndpoint(client).all( - params={'start_on': self.start_date, 'end_on': self.end_date, 'employee_ids[]': [employee.id]}, - timeout=60, - ) - async with self: - for shift in shifts.data(): - if self._should_cancel(): - self._cleanup() - return - self._shifts[employee.id].append(shift) - - @rx.event(background=True) - async def handle_submit(self, form_data: dict): - """Fetch employees and teams data.""" - async with self: - if self.is_loading: - return - - self.start_date = form_data.get('start_date') - self.end_date = form_data.get('end_date') - self.is_loading = True - self._shifts = collections.defaultdict(list) - self.processed_employees = 0 - self.errors_to_show = [] - self._employees = [] - self._employee_team_name_mapping = {} - self.selected_error_ids = [] - yield # Send initial state update to frontend - - try: - async with self: - api_session = (await self.get_state(state.LoginState)).get_auth() - # API calls outside async with block - 
async with factorialhr.ApiClient(os.environ['FACTORIALHR_ENVIRONMENT_URL'], auth=api_session) as client: - employees = await factorialhr.EmployeesEndpoint(client).all() - teams = list((await factorialhr.TeamsEndpoint(client).all()).data()) - async with anyio.from_thread.create_task_group() as tg: - for employee in employees.data(): - tg.start_soon(self._handle_employee, client, employee, teams) - - async with self: - yield DataState.fill_errors_to_show # set is_loading to false - - except asyncio.CancelledError: - # Handle cancellation when page is reloaded/closed - async with self: - self._cleanup() - raise - except Exception: - # Handle other errors - async with self: - self._cleanup() - raise - - @rx.event - async def fill_errors_to_show(self): - """Fill the errors_to_show list based on the fetched data.""" - self.selected_error_ids = [] - self.errors_to_show = [] - self.processed_employees = 0 - self.is_loading = True - yield - tolerance = datetime.timedelta(minutes=int(self.tolerance) if self.tolerance.isdigit() else 0) - value = self.filter_value.lower() - for employee in self._employees: - teams = self._employee_team_name_mapping.get(employee.id, []) - if value in employee.full_name.lower() or any(value in team.lower() for team in teams): - for error in working_time_verification.get_error(self._shifts[employee.id], tolerance=tolerance): - if self._should_cancel(): - self._cleanup() - return - self.errors_to_show.append( - ErrorToShow( - name=employee.full_name, - affected_days=', '.join(str(d) for d in error.days_affected), - error=error.reason, - cumulated_break=error.break_time, - cumulated_attendance=error.time_attended, - attendances=[ - Attendance( - date=a.date, - clock_in=time_to_moment(a.clock_in) if a.clock_in is not None else None, - clock_out=time_to_moment(a.clock_out) if a.clock_out is not None else None, - minutes=rx.MomentDelta(minutes=a.minutes), - ) - for a in error.attendances - ], - ) - ) - yield - self.processed_employees += 1 - 
self.is_loading = False - - @rx.event - def filter_employees(self, value: str): - """Filter employees based on the search value.""" - self.filter_value = value - yield DataState.fill_errors_to_show - - @rx.event - def set_tolerance(self, value: str): - """Set the tolerance value.""" - if value == '' or value.isdigit(): - self.tolerance = value - yield DataState.fill_errors_to_show - - @rx.event - def select_row(self, index: int): - """Handle row selection.""" - if index in self.selected_error_ids: - self.selected_error_ids.remove(index) - else: - self.selected_error_ids.append(index) - - def _convert_to_csv(self, indices: Container[int]) -> str: - # Create a string buffer to hold the CSV data - with io.StringIO() as output: - writer = csv.DictWriter( - output, fieldnames=['Name', 'Affected Days', 'Cumulated Break', 'Cumulated Attendance', 'Error'] - ) - writer.writeheader() - for index, error in enumerate(self.errors_to_show): - if index in indices: - writer.writerow( - { - 'Name': error['name'], - 'Affected Days': error['affected_days'], - 'Cumulated Break': error['cumulated_break'], - 'Cumulated Attendance': error['cumulated_attendance'], - 'Error': error['error'], - } - ) - - # Get the CSV data as a string - return output.getvalue() - - @rx.event - def download(self, data: str): - """Download the given data as a CSV file.""" - file_name = ( - f'{self.start_date}-{self.end_date}_errors.csv' if self.start_date and self.end_date else 'errors.csv' - ) - yield rx.download( - data=data, - filename=file_name, - ) - - @rx.event - def download_all_errors(self): - """Download all errors as a CSV file.""" - csv_data = self._convert_to_csv(range(len(self.errors_to_show))) - yield self.download(csv_data) - - @rx.event - def download_selected_errors(self): - """Download selected errors as a CSV file.""" - csv_data = self._convert_to_csv(self.selected_error_ids) - yield self.download(csv_data) - - -def render_input() -> rx.Component: - """Render the date input form.""" - 
return rx.form( - rx.hstack( - rx.text('Start date'), - rx.input( - type='date', - name='start_date', - value=DataState.start_date, - on_change=DataState.set_start_date, - ), - rx.text('End date'), - rx.input( - type='date', - name='end_date', - value=DataState.end_date, - on_change=DataState.set_end_date, - ), - rx.button( - 'Submit', - type='submit', - loading=DataState.is_loading, - disabled=DataState.disable_submit, - ), - rx.cond( - DataState.date_error, - rx.text('End date must be after start date', color='red'), - ), - spacing='3', - align='center', - width='100%', - ), - on_submit=DataState.handle_submit, - width='100%', - ) - - -def render_export_buttons() -> rx.Component: - """Render the export buttons.""" - return rx.hstack( - rx.button( - 'Export Selected', - disabled=DataState.selected_error_ids.length() == 0, - on_click=DataState.download_selected_errors, - ), - rx.button( - 'Export All', disabled=DataState.errors_to_show.length() == 0, on_click=DataState.download_all_errors - ), - justify='center', - align='center', - width='100%', - ) - - -def render_search() -> rx.Component: - """Render the search input.""" - return rx.hstack( - rx.text('Search'), - rx.input( - value=DataState.filter_value, - on_change=DataState.filter_employees, - width='100%', - placeholder='Filter by name or team', - ), - width='50%', - align='center', - ) - - -def render_tolerance_input() -> rx.Component: - """Render the tolerance input.""" - return rx.hstack( - rx.text('Tolerance'), - rx.input( - placeholder='Minutes', - type='number', - value=DataState.tolerance, - on_change=DataState.set_tolerance, - width='100%', - regex=r'^\d*$', - min=0, - ), - width='25%', - align='center', - ) - - -def render_filters() -> rx.Component: - """Render the filters section.""" - return rx.hstack( - render_tolerance_input(), - render_search(), - width='100%', - align='center', - justify='end', - ) - - -def show_employee(error: rx.Var[ErrorToShow], index: int) -> rx.Component: - """Show a 
customer in a table row.""" - return rx.table.row( - rx.table.cell(error['name']), - rx.table.cell(error['affected_days']), - rx.table.cell(error['cumulated_break']), - rx.table.cell(error['cumulated_attendance']), - rx.table.cell(error['error'], align='left'), - rx.table.cell( - rx.alert_dialog.root( - rx.alert_dialog.trigger( - rx.icon_button('info'), - ), - rx.alert_dialog.content( - rx.alert_dialog.title('Relevant attendance records'), - rx.inset( - rx.table.root( - rx.table.header( - rx.table.row( - rx.table.column_header_cell('Date'), - rx.table.column_header_cell('Clock in'), - rx.table.column_header_cell('Clock out'), - rx.table.column_header_cell('Hours attended'), - ), - ), - rx.table.body( - rx.foreach( - error['attendances'], - lambda x: rx.table.row( - rx.table.cell(rx.moment(x['date'], format='YYYY-MM-DD')), - rx.table.cell( - rx.cond( - x['clock_in'].is_none(), - None, - rx.moment(x['date'], add=x['clock_in'], format='HH:mm'), - ) - ), - rx.table.cell( - rx.cond( - x['clock_out'].is_none(), - None, - rx.moment(x['date'], add=x['clock_out'], format='HH:mm'), - ) - ), - rx.table.cell(rx.moment(x['date'], add=x['minutes'], format='HH:mm')), - ), - ) - ), - ), - side='x', - margin_top='24px', - margin_bottom='24px', - ), - rx.flex( - rx.alert_dialog.cancel( - rx.button( - 'Close', - variant='soft', - ), - ), - justify='end', - ), - ), - ), - align='right', - ), - on_click=lambda: DataState.select_row(index), - background_color=rx.cond(DataState.selected_error_ids.contains(index), rx.color('blue', 3), 'transparent'), - ) - - -def render_table() -> rx.Component: - """Render the main table showing errors.""" - return rx.table.root( - rx.table.header( - rx.table.row( - rx.table.column_header_cell('Name', min_width='17.5%', max_width='17.5%'), - rx.table.column_header_cell('Affected Days', min_width='15%', max_width='15%'), - rx.table.column_header_cell('Cumulated Break', min_width='12.5%', max_width='12.5%'), - rx.table.column_header_cell('Cumulated 
Attendance', min_width='12.5%', max_width='12.5%'), - rx.table.column_header_cell('Error', min_width='40%', max_width='40%'), - rx.table.column_header_cell('Records', align='right', min_width='2.5%', max_width='2.5%'), - ), - ), - rx.table.body( - rx.foreach( - DataState.errors_to_show, - show_employee, - ) - ), - width='100%', - ) - - -def live_progress() -> rx.Component: - """Show a live progress bar when loading data.""" - return rx.cond( - ~DataState.is_loading, - rx.fragment(), - rx.progress(value=DataState.processed_employees, max=DataState.length_of_employees), - ) - - -@templates.template -@state.redirect_for_login -def index_page() -> rx.Component: - """Index page of the app.""" - return rx.vstack( - rx.hstack(render_input(), render_export_buttons(), render_filters(), justify='between', width='100%'), - live_progress(), - render_table(), - width='100%', - ) diff --git a/factorialhr_analysis/pages/index_page.py b/factorialhr_analysis/pages/index_page.py new file mode 100644 index 0000000..f6e303b --- /dev/null +++ b/factorialhr_analysis/pages/index_page.py @@ -0,0 +1,42 @@ +"""Index page of the application.""" + +import reflex as rx + +from factorialhr_analysis import states, templates + + +class IndexState(rx.State): + """State for the index page.""" + + is_loading: rx.Field[bool] = rx.field(default=False) + + +@templates.template +def index_page() -> rx.Component: + """Index page of the application.""" + return rx.vstack( + rx.heading('Welcome to FactorialHR Analysis', size='4'), + rx.hstack( + rx.button( + rx.icon('refresh-ccw'), + on_click=states.DataState.refresh_data, + loading=states.DataState.is_loading, + border_radius='1em', + ), + rx.text( + 'Last data update: ', + rx.cond( + states.DataState.last_updated.is_not_none(), + rx.moment(states.DataState.last_updated, from_now=True), + 'Never', + ), + ), + align='center', + border_radius='1em', + border='1px solid', + padding='0.5em', + ), + rx.text('loaded shifts:', states.DataState.len_of_shifts), + 
bg=rx.color('accent'), + align='center', + ) diff --git a/factorialhr_analysis/pages/login.py b/factorialhr_analysis/pages/login.py deleted file mode 100644 index 8b29739..0000000 --- a/factorialhr_analysis/pages/login.py +++ /dev/null @@ -1,133 +0,0 @@ -"""Login page.""" - -import functools -import typing - -import httpx -import reflex as rx - -from factorialhr_analysis import routes, state, templates - - -class LoginFormState(rx.State): - """State for the login form.""" - - user_entered_authorization_code: str - auth_code_invalid: bool = False - auth_code_invalid_message: str - - @rx.var - def authorization_code_empty(self) -> bool: - """Check if the authorization code input is empty.""" - return not self.user_entered_authorization_code.strip() - - @rx.var - def authorization_invalid(self) -> bool: - """Check if the authorization code is invalid.""" - return self.auth_code_invalid - - @rx.var - def authorization_code_invalid_message(self) -> str: - """Get the authorization code invalid message.""" - return self.auth_code_invalid_message - - @rx.event - async def handle_submit(self, form_data: dict[str, typing.Any]): - """Handle form submission.""" - self.auth_code_invalid = False - login_state = await self.get_state(state.LoginState) - try: - await login_state.login(form_data.get('authorization_code'), grant_type='authorization_code') - yield rx.redirect(routes.INDEX) - except httpx.HTTPStatusError as e: - self.auth_code_invalid = True - self.auth_code_invalid_message = f'Login failed with status code: {e.response.status_code}' - - -def authorization_code_form() -> rx.Component: - """Form for the authorization code input.""" - return rx.form.root( - rx.form.field( - rx.vstack( - rx.form.label( - 'Authorization Code', - size='3', - weight='medium', - text_align='left', - width='100%', - ), - rx.form.control( - rx.input( - name='authorization_code', - size='3', - width='100%', - on_change=LoginFormState.set_user_entered_authorization_code, - ), - as_child=True, - 
), - rx.cond( - LoginFormState.authorization_invalid, - rx.form.message( - LoginFormState.authorization_code_invalid_message, - color='var(--red-11)', - ), - ), - rx.button( - 'Sign in', size='2', width='100%', type='submit', disabled=LoginFormState.authorization_code_empty - ), - spacing='2', - width='100%', - ), - name='authorization_code', - server_invalid=LoginFormState.auth_code_invalid, - ), - on_submit=LoginFormState.handle_submit, - reset_on_submit=True, - ) - - -def redirect_if_authenticated(page: rx.app.ComponentCallable) -> rx.app.ComponentCallable: - """Redirect authenticated users away from login page.""" - - @functools.wraps(page) - def login_page_wrapper() -> rx.Component: - return rx.cond( - state.LoginState.is_hydrated, - rx.cond( - state.LoginState.is_authenticated, - rx.spinner(on_mount=state.LoginState.redir), - page(), # Show login form if not authenticated - ), - rx.spinner(), - ) - - return login_page_wrapper - - -@templates.template -@redirect_if_authenticated -def login_page() -> rx.Component: - """Login page.""" - return rx.center( - rx.card( - rx.vstack( - rx.center( - rx.heading( - 'Login to FactorialHR', - size='6', - as_='h2', - text_align='center', - width='100%', - ), - direction='column', - spacing='5', - width='100%', - ), - authorization_code_form(), - ), - size='4', - max_width='28em', - width='100%', - ), - height='100vh', - ) diff --git a/factorialhr_analysis/pages/oauth_page.py b/factorialhr_analysis/pages/oauth_page.py new file mode 100644 index 0000000..fb6e86e --- /dev/null +++ b/factorialhr_analysis/pages/oauth_page.py @@ -0,0 +1,99 @@ +"""Pages for the OAuth process.""" + +import functools +import logging +import secrets +import urllib.parse +from collections.abc import Callable + +import httpx +import reflex as rx + +from factorialhr_analysis import constants, states + + +class OAuthProcessState(rx.State): + """State to handle OAuth token processing.""" + + error: rx.Field[str | None] = rx.field(default=None) + 
expected_state: rx.Field[str | None] = rx.field(default=None) + + @rx.event + async def start_oauth_process(self): + """Redirect to the OAuth authorization URL.""" + if not self.expected_state: + self.expected_state = secrets.token_urlsafe(16) + auth_url = ( + f'{constants.ENVIRONMENT_URL}/oauth/authorize?' + f'response_type=code&' + f'client_id={constants.CLIENT_ID}&' + f'redirect_uri={urllib.parse.quote(constants.REDIRECT_URI)}&' + f'scope={constants.SCOPE}&' + f'state={self.expected_state}' + ) + yield rx.redirect(auth_url) + + @rx.event + async def process_oauth_response(self): + """Process the OAuth response to exchange code for an access token.""" + expected_state = self.router.url.query_parameters.get('state') + if not expected_state: + self.error = 'State is missing.' + self.expected_state = None + return + if self.expected_state != expected_state: + self.error = f'State mismatch error. Expected {self.expected_state} but got {expected_state}.' + self.expected_state = None + return + code = self.router.url.query_parameters.get('code') + if not code: + self.error = 'Authorization code is missing.' 
+ self.expected_state = None + return + oauth_session = await self.get_state(states.OAuthSessionState) + try: + await oauth_session.create_session(code, grant_type='authorization_code') + except (httpx.RequestError, httpx.HTTPStatusError) as e: + logging.getLogger(__name__).exception('error creating oauth session') + self.error = str(e) + else: + logging.getLogger(__name__).info('created oauth session') + self.error = '' + yield states.DataState.refresh_data + # Redirect to the main page after successful authentication + yield states.OAuthSessionState.redir + finally: + self.expected_state = '' + + +def redirect_if_authenticated(page: Callable[[], rx.Component]) -> Callable[[], rx.Component]: + """Redirect authenticated users away from login page.""" + + @functools.wraps(page) + def login_page_wrapper() -> rx.Component: + return rx.cond( + states.OAuthSessionState.is_hydrated, + rx.cond( + states.OAuthSessionState.is_session_authenticated, + rx.fragment(on_mount=states.OAuthSessionState.redir), + page(), + ), + rx.spinner(), + ) + + return login_page_wrapper + + +@redirect_if_authenticated +def start_oauth_process() -> rx.Component: + """Page to start the OAuth process.""" + return rx.text('Redirecting to factorialhr...', on_mount=OAuthProcessState.start_oauth_process) + + +def authorize_oauth_page() -> rx.Component: + """Page to authorize the OAuth process.""" + return rx.box( + rx.text('Validating response...'), + rx.text(OAuthProcessState.error, color='red'), + on_mount=OAuthProcessState.process_oauth_response, + ) diff --git a/factorialhr_analysis/pages/working_time_verification_page.py b/factorialhr_analysis/pages/working_time_verification_page.py new file mode 100644 index 0000000..04277a4 --- /dev/null +++ b/factorialhr_analysis/pages/working_time_verification_page.py @@ -0,0 +1,541 @@ +"""The main page of the app.""" + +import csv +import datetime +import io +import logging +import typing +from collections.abc import Container, Iterable, Sequence + +import 
anyio.from_thread +import factorialhr +import reflex as rx +from reflex.utils.prerequisites import get_app + +from factorialhr_analysis import components, states, templates, working_time_verification + + +class SettingsState(rx.State): + """State for managing verification settings.""" + + _start_date: datetime.date | None = None + _end_date: datetime.date | None = None + _tolerance: datetime.timedelta | None = None + + only_active: rx.Field[bool] = rx.field(default=True) + + @rx.var + def start_date(self) -> str: + """Get the start date as a string.""" + if self._start_date is None: + return '' + return self._start_date.isoformat() + + @rx.var + def end_date(self) -> str: + """Get the end date as a string.""" + if self._end_date is None: + return '' + return self._end_date.isoformat() + + @rx.var + def tolerance(self) -> str: + """Get the tolerance value.""" + return str(int(self._tolerance.total_seconds() / 60)) if self._tolerance is not None else '' + + @rx.event + def set_tolerance(self, value: str): + """Set the tolerance value.""" + if value.isdigit(): + self._tolerance = datetime.timedelta(minutes=int(value)) + else: + self._tolerance = None + + @rx.event + def set_start_date(self, date: str): + """Set the start date.""" + self._start_date = datetime.date.fromisoformat(date) + + @rx.event + def set_end_date(self, date: str): + """Set the end date.""" + self._end_date = datetime.date.fromisoformat(date) + + @rx.var + def date_error(self) -> bool: + """Check if the end date is before the start date.""" + if not self._start_date or not self._end_date: + return False + return self._end_date < self._start_date + + @rx.event + def set_only_active(self, active: bool): # noqa: FBT001 + """Set whether to only include active employees.""" + self.only_active = active + + +def time_to_moment(time_: datetime.time | None) -> rx.MomentDelta: + """Convert a datetime.time to a rx.MomentDelta. + + Args: + time_: The time to convert, or None for empty delta. 
+ + Returns: + A MomentDelta representing the time. + + """ + if time_ is None: + return rx.MomentDelta() + return rx.MomentDelta(hours=time_.hour, minutes=time_.minute, seconds=time_.second) + + +class Attendance(typing.TypedDict): + """TypedDict for attendance.""" + + date: datetime.date + clock_in: rx.MomentDelta + clock_out: rx.MomentDelta + minutes: rx.MomentDelta + + +class ErrorToShow(typing.TypedDict): + """TypedDict for errors to show.""" + + name: str + team_names: Iterable[str] + affected_days: str + error: str + cumulated_break: datetime.timedelta + cumulated_attendance: datetime.timedelta + + attendances: Sequence[Attendance] + + +def _filter_error(filter_value: str, error: ErrorToShow) -> bool: + """Filter error based on name or team names. + + Args: + filter_value: The filter string to search for. + error: The error to check against the filter. + + Returns: + True if the error matches the filter, False otherwise. + + """ + return filter_value in error['name'].lower() or any( + filter_value in team_name.lower() for team_name in error['team_names'] + ) + + +class DataStateDeprecated(rx.State): + """State holding all the data for working time verification.""" + + errors_to_show: rx.Field[list[ErrorToShow]] = rx.field(default_factory=list) + _calculated_errors: list[ErrorToShow] = [] # noqa: RUF012 + is_loading: rx.Field[bool] = rx.field(default=False) + processed_employees: rx.Field[int] = rx.field(0) # Number of employees processed so far + total_amount_of_employees: rx.Field[int] = rx.field(0) + + filter_value: rx.Field[str] = rx.field('') # Placeholder for search functionality + + selected_error_ids: rx.Field[list[int]] = rx.field(default_factory=list) + + def _should_cancel(self) -> bool: + """Check if the current session is still valid.""" + return self.router.session.client_token not in get_app().app.event_namespace.token_to_sid + + async def _handle_single_employee( + self, + employee: factorialhr.Employee, + teams: Sequence[factorialhr.Team], + 
shifts: Sequence[factorialhr.AttendanceShift], + tolerance: datetime.timedelta | None, + ): + """Handle a single employee.""" + for error in working_time_verification.get_error( + filter(lambda x: x.employee_id == employee.id, shifts), tolerance=tolerance + ): + async with self: + error_to_show = ErrorToShow( + name=employee.full_name, + team_names=[ + team.name + for team in teams + if team.employee_ids is not None and employee.id in team.employee_ids + ], + affected_days=', '.join(str(d) for d in error.days_affected), + error=error.reason, + cumulated_break=error.break_time, + cumulated_attendance=error.time_attended, + attendances=[ + Attendance( + date=a.date, + clock_in=time_to_moment(a.clock_in) if a.clock_in is not None else None, + clock_out=time_to_moment(a.clock_out) if a.clock_out is not None else None, + minutes=rx.MomentDelta(minutes=a.minutes), + ) + for a in error.attendances + ], + ) + self._calculated_errors.append(error_to_show) + async with self: + self.processed_employees += 1 + + @rx.event(background=True) + async def calculate_errors(self): + """Calculate errors based on the shifts.""" + async with self: + if self.is_loading: + return + self.is_loading = True + self.selected_error_ids.clear() + self.errors_to_show.clear() + self._calculated_errors.clear() + self.processed_employees = 0 + + # Get states once and store references + data_state = await self.get_state(states.DataState) + settings_state = await self.get_state(SettingsState) + + if settings_state._start_date is None or settings_state._end_date is None: # noqa: SLF001 + return + + # Filter employees and shifts outside of async context for better performance + employees = [ + employee + for employee in data_state._employees.values() # noqa: SLF001 + if not settings_state.only_active or employee.active + ] + + shifts = [ + shift + for shift in data_state._shifts.values() # noqa: SLF001 + if settings_state._start_date <= shift.date <= settings_state._end_date # noqa: SLF001 + ] + + # 
Update total count + async with self: + self.total_amount_of_employees = len(employees) + + # Process employees concurrently with proper error handling + try: + async with anyio.from_thread.create_task_group() as tg: + for employee in employees: + tg.start_soon( + self._handle_single_employee, + employee, + data_state._teams.values(), # noqa: SLF001 + shifts, + settings_state._tolerance, # noqa: SLF001 + ) + except ExceptionGroup as e: + # Log error and reset loading state + logging.getLogger(__name__).exception('error calculating errors', exc_info=e) + async with self: + self.is_loading = False + return + + # Apply filtering + async with self: + if not self.filter_value: + self.errors_to_show = self._calculated_errors[:] + else: + self.errors_to_show = [ + error for error in self._calculated_errors if _filter_error(self.filter_value.lower(), error) + ] + self.is_loading = False + + @rx.event + def set_filter_value(self, value: str): + """Filter employees based on the search value.""" + self.filter_value = value + self.errors_to_show.clear() + if not value: + self.errors_to_show = self._calculated_errors[:] + return + for error in self._calculated_errors: + if _filter_error(value.lower(), error): + self.errors_to_show.append(error) + yield + + @rx.event + def select_row(self, index: int): + """Handle row selection.""" + if index in self.selected_error_ids: + self.selected_error_ids.remove(index) + else: + self.selected_error_ids.append(index) + + def _convert_to_csv(self, indices: Container[int]) -> str: + # Create a string buffer to hold the CSV data + with io.StringIO() as output: + writer = csv.DictWriter( + output, fieldnames=['Name', 'Affected Days', 'Cumulated Break', 'Cumulated Attendance', 'Error'] + ) + writer.writeheader() + for index, error in enumerate(self.errors_to_show): + if index in indices: + writer.writerow( + { + 'Name': error['name'], + 'Affected Days': error['affected_days'], + 'Cumulated Break': error['cumulated_break'], + 'Cumulated 
Attendance': error['cumulated_attendance'], + 'Error': error['error'], + } + ) + + # Get the CSV data as a string + return output.getvalue() + + async def _file_name(self) -> str: + settings_state = await self.get_state(SettingsState) + return ( + f'{settings_state.start_date}-{settings_state.end_date}_errors.csv' + if settings_state.start_date and settings_state.end_date + else 'errors.csv' + ) + + @rx.event + async def download_all_errors(self): + """Download all errors as a CSV file.""" + csv_data = self._convert_to_csv(range(len(self.errors_to_show))) + yield rx.download( + data=csv_data, + filename=await self._file_name(), + ) + + @rx.event + async def download_selected_errors(self): + """Download selected errors as a CSV file.""" + csv_data = self._convert_to_csv(self.selected_error_ids) + yield rx.download( + data=csv_data, + filename=await self._file_name(), + ) + + +@rx.memo +def render_input() -> rx.Component: + """Render the date input form.""" + return rx.hstack( + rx.hstack( # Group "Start date" and its input + rx.text('Start date'), + rx.input( + type='date', + name='start_date', + value=SettingsState.start_date, + on_change=SettingsState.set_start_date, + ), + align='center', + spacing='1', + min_width='max-content', + ), + rx.hstack( # Group "End date" and its input + rx.text('End date'), + rx.input( + type='date', + name='end_date', + value=SettingsState.end_date, + on_change=SettingsState.set_end_date, + ), + align='center', + spacing='1', + min_width='max-content', + ), + rx.hstack( + rx.text('Only active'), + rx.checkbox(default_checked=SettingsState.only_active, on_change=SettingsState.set_only_active), + align='center', + min_width='max-content', + spacing='1', + ), + rx.hstack( + rx.text('Tolerance'), + rx.input( + placeholder='Minutes', + type='number', + value=SettingsState.tolerance, + on_change=SettingsState.set_tolerance, + width='100%', + regex=r'^\d*$', + min=0, + ), + align='center', + spacing='1', + min_width='max-content', + ), + 
rx.cond( + SettingsState.date_error, + rx.tooltip( + rx.button('Submit', disabled=True), + content='End date must be after start date.', + ), + rx.button( + 'Submit', + loading=DataStateDeprecated.is_loading, + on_click=DataStateDeprecated.calculate_errors, + ), + ), + spacing='3', + align='center', + width='100%', + ) + + +@rx.memo +def render_export_buttons() -> rx.Component: + """Render the export buttons.""" + return rx.hstack( + rx.button( + 'Export Selected', + disabled=DataStateDeprecated.selected_error_ids.length() == 0, + on_click=DataStateDeprecated.download_selected_errors, + ), + rx.button( + 'Export All', + disabled=DataStateDeprecated.errors_to_show.length() == 0, + on_click=DataStateDeprecated.download_all_errors, + ), + justify='center', + align='center', + width='100%', + ) + + +@rx.memo +def render_search() -> rx.Component: + """Render the search input.""" + return rx.hstack( + rx.text('Search'), + rx.input( + value=DataStateDeprecated.filter_value, + on_change=DataStateDeprecated.set_filter_value, + width='100%', + placeholder='Filter by name or team', + disabled=DataStateDeprecated.is_loading, + ), + width='50%', + align='center', + ) + + +def show_employee(error: rx.Var[ErrorToShow], index: int) -> rx.Component: + """Show a customer in a table row.""" + return rx.table.row( + rx.table.cell(error['name']), + rx.table.cell(error['affected_days']), + rx.table.cell(error['cumulated_break']), + rx.table.cell(error['cumulated_attendance']), + rx.table.cell(error['error'], align='left'), + rx.table.cell( + rx.alert_dialog.root( + rx.alert_dialog.trigger( + rx.icon_button('info'), + ), + rx.alert_dialog.content( + rx.alert_dialog.title('Relevant attendance records'), + rx.inset( + rx.table.root( + rx.table.header( + rx.table.row( + rx.table.column_header_cell('Date'), + rx.table.column_header_cell('Clock in'), + rx.table.column_header_cell('Clock out'), + rx.table.column_header_cell('Hours attended'), + ), + ), + rx.table.body( + rx.foreach( + 
error['attendances'], + lambda x: rx.table.row( + rx.table.cell(rx.moment(x['date'], format='YYYY-MM-DD')), + rx.table.cell( + rx.cond( + x['clock_in'].is_none(), + None, + rx.moment(x['date'], add=x['clock_in'], format='HH:mm'), + ) + ), + rx.table.cell( + rx.cond( + x['clock_out'].is_none(), + None, + rx.moment(x['date'], add=x['clock_out'], format='HH:mm'), + ) + ), + rx.table.cell(rx.moment(x['date'], add=x['minutes'], format='HH:mm')), + ), + ) + ), + ), + side='x', + margin_top='24px', + margin_bottom='24px', + ), + rx.flex( + rx.alert_dialog.cancel( + rx.button( + 'Close', + variant='soft', + ), + ), + justify='end', + ), + ), + ), + align='right', + ), + on_click=lambda: DataStateDeprecated.select_row(index), + background_color=rx.cond( + DataStateDeprecated.selected_error_ids.contains(index), rx.color('blue', 3), 'transparent' + ), + ) + + +def render_table() -> rx.Component: + """Render the main table showing errors.""" + return rx.table.root( + rx.table.header( + rx.table.row( + rx.table.column_header_cell('Name', min_width='17.5%', max_width='17.5%'), + rx.table.column_header_cell('Affected Days', min_width='15%', max_width='15%'), + rx.table.column_header_cell('Cumulated Break', min_width='12.5%', max_width='12.5%'), + rx.table.column_header_cell('Cumulated Attendance', min_width='12.5%', max_width='12.5%'), + rx.table.column_header_cell('Error', min_width='40%', max_width='40%'), + rx.table.column_header_cell('Records', align='right', min_width='2.5%', max_width='2.5%'), + ), + ), + rx.table.body( + rx.foreach( + DataStateDeprecated.errors_to_show, + show_employee, + ) + ), + width='100%', + ) + + +@rx.memo +def live_progress() -> rx.Component: + """Show a live progress bar when loading data.""" + return rx.cond( + ~DataStateDeprecated.is_loading, + rx.fragment(), + rx.progress( + value=DataStateDeprecated.processed_employees, + max=DataStateDeprecated.total_amount_of_employees, + ), + ) + + +@components.requires_authentication +@templates.template 
+def working_time_verification_page() -> rx.Component:
+    """Working-time verification page: controls row (input, export, search), live progress bar, results table."""
+    return rx.vstack(
+        rx.hstack(render_input(), render_export_buttons(), render_search(), justify='between', width='100%'),
+        live_progress(),
+        render_table(),
+        width='100%',
+    )
diff --git a/factorialhr_analysis/routes.py b/factorialhr_analysis/routes.py
index 13abc2f..441eb52 100644
--- a/factorialhr_analysis/routes.py
+++ b/factorialhr_analysis/routes.py
@@ -1,4 +1,6 @@
 """Routes for the application."""
 INDEX = '/'
-LOGIN_ROUTE = '/login'
+OAUTH_START_ROUTE = '/oauth/start'
+OAUTH_AUTHORIZE_ROUTE = '/oauth/authorize'
+VERIFICATION_ROUTE = '/verification'
diff --git a/factorialhr_analysis/state.py b/factorialhr_analysis/state.py
deleted file mode 100644
index 96dc35f..0000000
--- a/factorialhr_analysis/state.py
+++ /dev/null
@@ -1,182 +0,0 @@
-"""Login state."""
-
-import functools
-import logging
-import os
-import time
-import typing
-
-import factorialhr
-import httpx
-import pydantic
-import reflex as rx
-
-from factorialhr_analysis import routes
-
-# Set up logging
-logging.basicConfig(level=logging.WARNING)
-logger = logging.getLogger(__name__)
-
-
-class ApiSession(pydantic.BaseModel):
-    """Wrapper class for the API session cookie."""
-
-    access_token: str
-    refresh_token: str
-    created_at: int
-    token_type: str
-
-    def access_token_expiration(self) -> int:
-        """Get the expiration date of the access token."""
-        return self.created_at + 60 * 60  # access token is valid for 1 hour
-
-    def is_access_token_expired(self) -> bool:
-        """Determine whether the access token is expired or not."""
-        return self.access_token_expiration() <= time.time()
-
-    def refresh_token_expiration(self) -> int:
-        """Get the expiration date of the refresh token."""
-        return self.created_at + 7 * 24 * 60 * 60  # refresh token is valid for 1 week
-
-    def is_refresh_token_expired(self) -> bool:
-        """Determine whether the refresh token is expired or not."""
-        return self.refresh_token_expiration() <= time.time()
-
-
-class LoginState(rx.State):
-    """State for managing login and authentication."""
-
-    api_session_cookie: str = rx.Cookie(
-        name='api_session',
-        same_site='strict',
-    )
-    redirect_to: str = ''
-
-    def get_auth(self) -> factorialhr.AccessTokenAuth | factorialhr.ApiKeyAuth:
-        """Get the authentication object for the API session."""
-        if api_token := os.environ.get('FACTORIALHR_API_KEY'):
-            return factorialhr.ApiKeyAuth(api_key=api_token)
-        api_session = self.api_session()
-        if api_session is None:
-            msg = 'api_session_cookie must be valid'
-            raise RuntimeError(msg)
-        return factorialhr.AccessTokenAuth(
-            access_token=api_session.access_token,
-            token_type=api_session.token_type,
-        )
-
-    def api_session(self) -> ApiSession | None:
-        """Get the API session from the cookie."""
-        if not self.api_session_cookie:
-            return None
-        try:
-            return ApiSession.model_validate_json(self.api_session_cookie)
-        except pydantic.ValidationError as e:
-            logger.exception('parsing cookie failed', exc_info=e)
-            return None
-
-    @rx.event
-    async def login(self, token: str, *, grant_type: typing.Literal['refresh_token', 'authorization_code']) -> None:
-        """Log in to the API and store the session cookie."""
-        if grant_type == 'refresh_token':
-            data = {
-                'client_id': os.environ['FACTORIALHR_CLIENT_ID'],
-                'client_secret': os.environ['FACTORIALHR_CLIENT_SECRET'],
-                'grant_type': 'refresh_token',
-                'refresh_token': token,
-            }
-        else:
-            data = {
-                'client_id': os.environ['FACTORIALHR_CLIENT_ID'],
-                'client_secret': os.environ['FACTORIALHR_CLIENT_SECRET'],
-                'code': token,
-                'grant_type': 'authorization_code',
-                'redirect_uri': os.environ['FACTORIALHR_REDIRECT_URI'],
-            }
-        try:
-            async with httpx.AsyncClient() as client:
-                response = await client.post(
-                    os.environ['FACTORIALHR_ENVIRONMENT_URL'] + '/oauth/token',
-                    data=data,
-                )
-                response.raise_for_status()
-        except httpx.HTTPStatusError as e:
-            logger.exception('Login failed', exc_info=e)
-            raise
-        else:
-            self.api_session_cookie = ApiSession(**response.json()).model_dump_json()
-            logger.info('Refreshed access token')
-
-    @rx.var(cache=False)
-    async def is_authenticated(self) -> bool:
-        """Check if the user is authenticated."""
-        if os.environ.get('FACTORIALHR_API_KEY'):
-            return True  # If using API key, always authenticated
-        api_session = self.api_session()
-        if api_session is None:
-            return False
-        return not api_session.is_access_token_expired()
-
-    @rx.event
-    async def refresh(self) -> bool:
-        """Check if the user is authenticated."""
-        if not self.is_hydrated:
-            return False
-        api_session = self.api_session()
-        if api_session is None:
-            return False
-        if api_session.is_refresh_token_expired():
-            return False
-        if api_session.is_access_token_expired():
-            try:
-                await self.login(token=api_session.refresh_token, grant_type='refresh_token')
-            except httpx.HTTPStatusError:
-                return False
-        return True
-
-    @rx.event
-    async def redir(self):
-        """Redirect to the redirect_to route if logged in, or to the login page if not."""
-        if not self.is_hydrated:
-            yield self.redir()
-        page = self.router.url.path
-        is_authenticated = await self.is_authenticated
-        if not is_authenticated:
-            is_authenticated = await self.refresh()
-        if not is_authenticated:
-            self.redirect_to = page
-            return_value = []
-            if not self.api_session_cookie:
-                return_value.append(rx.remove_cookie('api_session'))
-            if page != routes.LOGIN_ROUTE:
-                yield [*return_value, rx.redirect(routes.LOGIN_ROUTE)]
-        if is_authenticated and page == routes.LOGIN_ROUTE:
-            yield rx.redirect(self.redirect_to or '/')
-
-    @rx.event
-    def logout(self):
-        """Log out the user."""
-        yield [rx.remove_cookie('api_session'), rx.redirect(routes.LOGIN_ROUTE)]
-
-
-def redirect_for_login(page: rx.app.ComponentCallable) -> rx.app.ComponentCallable:
-    """Require authentication before rendering a page.
-
-    If the user is not authenticated, then redirect to the login page.
- """ - - @functools.wraps(page) - def protected_page() -> rx.Component: - return rx.fragment( - rx.cond( - LoginState.is_hydrated, - rx.cond( - LoginState.is_authenticated, - page(), - rx.spinner(on_mount=LoginState.redir), - ), - rx.spinner(), # Show spinner while hydrating - ) - ) - - return protected_page diff --git a/factorialhr_analysis/states/__init__.py b/factorialhr_analysis/states/__init__.py new file mode 100644 index 0000000..262be16 --- /dev/null +++ b/factorialhr_analysis/states/__init__.py @@ -0,0 +1,4 @@ +from factorialhr_analysis.states.data_state import DataState +from factorialhr_analysis.states.oauth_state import OAuthSessionState + +__all__ = ['DataState', 'OAuthSessionState'] diff --git a/factorialhr_analysis/states/data_state.py b/factorialhr_analysis/states/data_state.py new file mode 100644 index 0000000..ee47193 --- /dev/null +++ b/factorialhr_analysis/states/data_state.py @@ -0,0 +1,95 @@ +"""State for managing data.""" + +import datetime +import logging + +import anyio +import factorialhr +import reflex as rx + +from factorialhr_analysis import constants, states + + +class DataState(rx.State): + """State for managing data.""" + + _employees: dict[int, factorialhr.Employee] = {} # noqa: RUF012 + _teams: dict[int, factorialhr.Team] = {} # noqa: RUF012 + _shifts: dict[int, factorialhr.AttendanceShift] = {} # noqa: RUF012 + _credentials: factorialhr.Credentials | None = None + + is_loading: rx.Field[bool] = rx.field(default=False) + last_updated: rx.Field[datetime.datetime | None] = rx.field(default=None) + + @rx.var + def len_of_shifts(self) -> int: + """Get the number of shifts.""" + return len(self._shifts) + + async def _load_employees(self, api_client: factorialhr.ApiClient): + employees = await factorialhr.EmployeesEndpoint(api_client).all() + async with self: + self._employees = {emp.id: emp for emp in employees.data()} + + async def _load_teams(self, api_client: factorialhr.ApiClient): + teams = await 
factorialhr.TeamsEndpoint(api_client).all() + async with self: + self._teams = {team.id: team for team in teams.data()} + + async def _load_shifts(self, api_client: factorialhr.ApiClient): + # all shifts are obtained in a single page and therefore requires a high timeout + shifts = await factorialhr.ShiftsEndpoint(api_client).all(timeout=100) + async with self: + self._shifts = {shift.id: shift for shift in shifts.data()} + + async def _load_credentials(self, api_client: factorialhr.ApiClient): + credentials = await factorialhr.CredentialsEndpoint(api_client).all() + async with self: + self._credentials = next(iter(credentials.data()), None) + + @rx.event + async def refresh_data(self): # noqa: ANN201 + """Refresh the data.""" + self.clear() + if constants.API_KEY: + return DataState.poll_data + auth_state = await self.get_state(states.OAuthSessionState) + if await auth_state.refresh_session(): + return DataState.poll_data + return states.OAuthSessionState.redir + + @rx.event(background=True) + async def poll_data(self): + """Poll the data.""" + async with self: + if self.is_loading: + return + self.is_loading = True + auth = (await self.get_state(states.OAuthSessionState)).get_auth() + try: + async with ( + factorialhr.ApiClient(constants.ENVIRONMENT_URL, auth=auth) as client, # pyright: ignore[reportArgumentType] + anyio.create_task_group() as tg, + ): + tg.start_soon(self._load_teams, client) + tg.start_soon(self._load_employees, client) + tg.start_soon(self._load_shifts, client) + tg.start_soon(self._load_credentials, client) + except Exception: + logging.getLogger(__name__).exception('error loading data') + raise + finally: + async with self: + self.is_loading = False + async with self: + self.last_updated = datetime.datetime.now(tz=datetime.UTC) + logging.getLogger(__name__).info('data loaded') + + @rx.event + def clear(self): + """Clear the data.""" + self.last_updated = None + self._employees.clear() + self._teams.clear() + self._shifts.clear() + 
self._credentials = None diff --git a/factorialhr_analysis/states/oauth_state.py b/factorialhr_analysis/states/oauth_state.py new file mode 100644 index 0000000..994d86a --- /dev/null +++ b/factorialhr_analysis/states/oauth_state.py @@ -0,0 +1,146 @@ +"""State for managing OAuth session and authentication.""" + +import os +import time +import typing + +import factorialhr +import httpx +import pydantic +import reflex as rx + +from factorialhr_analysis import constants, routes + + +class ApiSession(pydantic.BaseModel): + """Wrapper class for the API session cookie.""" + + access_token: str + refresh_token: str + created_at: int + token_type: str + + def access_token_expiration(self) -> int: + """Get the expiration date of the access token.""" + return self.created_at + 60 * 60 # access token is valid for 1 hour + + def is_access_token_expired(self) -> bool: + """Determine whether the access token is expired or not.""" + return self.access_token_expiration() <= time.time() + + def refresh_token_expiration(self) -> int: + """Get the expiration date of the refresh token.""" + return self.created_at + 7 * 24 * 60 * 60 # refresh token is valid for 1 week + + def is_refresh_token_expired(self) -> bool: + """Determine whether the refresh token is expired or not.""" + return self.refresh_token_expiration() <= time.time() + + +class OAuthSessionState(rx.State): + """State for managing OAuth session and authentication.""" + + api_session_cookie: str = rx.Cookie( + name='api_session', + same_site='strict', + secure=os.environ.get('REFLEX_ENV_MODE') == rx.constants.Env.PROD.value, + max_age=7 * 24 * 60 * 60, + ) + _redirect_to: str = '' + + def api_session(self) -> ApiSession | None: + """Get the API session from the cookie.""" + if not self.api_session_cookie: + return None + try: + return ApiSession.model_validate_json(self.api_session_cookie) + except pydantic.ValidationError: + return None + + @rx.event + async def create_session(self, token: str, grant_type: 
+            typing.Literal['refresh_token', 'authorization_code']):
+        """Log in to the API and store the session cookie."""
+        data = {
+            'client_id': constants.CLIENT_ID,
+            'client_secret': constants.CLIENT_SECRET,
+        }
+        if grant_type == 'refresh_token':
+            data.update(
+                {
+                    'grant_type': 'refresh_token',
+                    'refresh_token': token,
+                }
+            )
+        else:
+            data.update(
+                {
+                    'code': token,
+                    'grant_type': 'authorization_code',
+                    'redirect_uri': constants.REDIRECT_URI,
+                }
+            )
+        async with httpx.AsyncClient() as client:
+            response = await client.post(
+                f'{constants.ENVIRONMENT_URL}/oauth/token',
+                data=data,
+            )
+            # Raises httpx.HTTPStatusError on a 4xx/5xx token response.
+            response.raise_for_status()
+        self.api_session_cookie = ApiSession(**response.json()).model_dump_json()
+
+    @rx.event
+    def delete_session(self):
+        """Log out the user."""
+        yield rx.remove_cookie('api_session')
+
+    @rx.event
+    async def refresh_session(self) -> bool:
+        """Refresh the access token if it is expired."""
+        api_session = self.api_session()
+        if api_session is None:
+            return False
+        if api_session.is_refresh_token_expired():
+            return False
+        if api_session.is_access_token_expired():
+            try:
+                await self.create_session(token=api_session.refresh_token, grant_type='refresh_token')
+            # NOTE(review): httpx.RequestError is a subclass of httpx.HTTPError, so this tuple is
+            # redundant -- catching httpx.HTTPError alone covers both.
+            except (httpx.RequestError, httpx.HTTPError):
+                return False
+        return True
+
+    @rx.var(cache=False)
+    async def is_session_authenticated(self) -> bool:
+        """Check if the user is authenticated."""
+        if constants.API_KEY:
+            return True
+        api_session = self.api_session()
+        if api_session is None:
+            return False
+        return not api_session.is_access_token_expired()
+
+    @rx.event
+    async def redir(self):
+        """Redirect to the redirect_to route if logged in, or to the login page if not."""
+        if not self.is_hydrated:
+            # NOTE(review): re-dispatches this handler until hydration completes -- confirm this
+            # cannot loop indefinitely when the client never hydrates.
+            yield self.redir()
+        page = self.router.url.path
+        is_authenticated = await self.is_session_authenticated
+        if not is_authenticated:
+            is_authenticated = await self.refresh_session()
+        if not is_authenticated and page != routes.OAUTH_START_ROUTE:
+            self._redirect_to = page
+            yield rx.redirect(routes.OAUTH_START_ROUTE)
+        if is_authenticated and page in (routes.OAUTH_START_ROUTE, routes.OAUTH_AUTHORIZE_ROUTE):
+            yield rx.redirect(self._redirect_to or routes.INDEX)
+
+    def get_auth(self) -> factorialhr.AccessTokenAuth | factorialhr.ApiKeyAuth:
+        """Get the authentication object for the API session."""
+        if constants.API_KEY:
+            return factorialhr.ApiKeyAuth(api_key=constants.API_KEY)
+        api_session = self.api_session()
+        if api_session is None:
+            msg = 'api_session_cookie must be valid'
+            raise ValueError(msg)
+        return factorialhr.AccessTokenAuth(
+            access_token=api_session.access_token,
+            token_type=api_session.token_type,
+        )
diff --git a/factorialhr_analysis/templates.py b/factorialhr_analysis/templates.py
index 8377d9c..4fbc3fd 100644
--- a/factorialhr_analysis/templates.py
+++ b/factorialhr_analysis/templates.py
@@ -1,18 +1,28 @@
 """Templates for the web application."""
+import functools
+from collections.abc import Callable
+
 import reflex as rx
 from factorialhr_analysis import components
-def template(page: rx.app.ComponentCallable) -> rx.app.ComponentCallable:
+def template(page: Callable[[], rx.Component]) -> Callable[[], rx.Component]:
     """Wrap a page in the main template."""
-    return rx.fragment(
-        components.navbar(),
-        rx.box(
-            page(),
-            padding_top='1em',  # Space between navbar and content
-            padding_left='1em',
-            padding_right='1em',
-        ),
-    )
+
+    @functools.wraps(page)
+    def page_template() -> rx.Component:
+        return rx.fragment(
+            components.navbar(),
+            rx.box(
+                page(),
+                padding_top='1em',  # Space between navbar and content
+                padding_left='1em',
+                padding_right='1em',
+            ),
+            components.footer(),
+            width='100%',
+        )
+
+    return page_template
diff --git a/factorialhr_analysis/working_time_verification/verification.py b/factorialhr_analysis/working_time_verification/verification.py
index 0987973..9e5a51b 100644
--- a/factorialhr_analysis/working_time_verification/verification.py
+++
b/factorialhr_analysis/working_time_verification/verification.py @@ -71,13 +71,15 @@ def check_breaks_and_reset( def get_error( - attendances: Iterable[factorialhr.AttendanceShift], tolerance: datetime.timedelta + attendances: Iterable[factorialhr.AttendanceShift], + tolerance: datetime.timedelta | None = None, ) -> Iterator[helper.Error]: """Verification function. Iterates over attendances and yields any errors found. Splits logic into smaller helper functions for clarity and maintainability. """ + tolerance = tolerance or datetime.timedelta() current_attendances: list[factorialhr.AttendanceShift] = [] for attendance in attendances: # Validate clock-in/clock-out times diff --git a/pyproject.toml b/pyproject.toml index c5af558..2a38cb0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,8 @@ dependencies = [ "dotenv>=0.9.9", "factorialhr>=4.1.0", "httpx>=0.28.1", - "reflex==0.8.7", + "redis>=6.4.0", + "reflex==0.8.9", ] [dependency-groups] @@ -43,6 +44,7 @@ test = [ "pytest-cov>=5.0.0", ] dev = [ + "pyright>=1.1.405", "ruff>=0.12.9", { include-group = "test" }, ] diff --git a/rxconfig.py b/rxconfig.py index 8b3e4ef..79ea5cc 100644 --- a/rxconfig.py +++ b/rxconfig.py @@ -8,4 +8,7 @@ rx.plugins.SitemapPlugin(), rx.plugins.TailwindV4Plugin(), ], + telemetry_enabled=False, + env_file='.env', # ignored if not found + show_built_with_reflex=False, ) diff --git a/uv.lock b/uv.lock index 9738477..42372ad 100644 --- a/uv.lock +++ b/uv.lock @@ -141,11 +141,13 @@ dependencies = [ { name = "dotenv" }, { name = "factorialhr" }, { name = "httpx" }, + { name = "redis" }, { name = "reflex" }, ] [package.dev-dependencies] dev = [ + { name = "pyright" }, { name = "pytest" }, { name = "pytest-cov" }, { name = "pytest-html" }, @@ -163,11 +165,13 @@ requires-dist = [ { name = "dotenv", specifier = ">=0.9.9" }, { name = "factorialhr", specifier = ">=4.1.0" }, { name = "httpx", specifier = ">=0.28.1" }, - { name = "reflex", specifier = "==0.8.7" }, + { name = "redis", specifier 
= ">=6.4.0" }, + { name = "reflex", specifier = "==0.8.9" }, ] [package.metadata.requires-dev] dev = [ + { name = "pyright", specifier = ">=1.1.405" }, { name = "pytest", specifier = ">=8.4.1" }, { name = "pytest-cov", specifier = ">=5.0.0" }, { name = "pytest-html", specifier = ">=4" }, @@ -358,6 +362,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -447,6 +460,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] +[[package]] +name = "pyright" +version = "1.1.405" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/fb/6c/ba4bbee22e76af700ea593a1d8701e3225080956753bee9750dcc25e2649/pyright-1.1.405.tar.gz", hash = "sha256:5c2a30e1037af27eb463a1cc0b9f6d65fec48478ccf092c1ac28385a15c55763", size = 4068319, upload-time = "2025-09-04T03:37:06.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/1a/524f832e1ff1962a22a1accc775ca7b143ba2e9f5924bb6749dce566784a/pyright-1.1.405-py3-none-any.whl", hash = "sha256:a2cb13700b5508ce8e5d4546034cb7ea4aedb60215c6c33f56cec7f53996035a", size = 5905038, upload-time = "2025-09-04T03:37:04.913Z" }, +] + [[package]] name = "pytest" version = "8.4.1" @@ -557,14 +583,13 @@ wheels = [ [[package]] name = "reflex" -version = "0.8.7" +version = "0.8.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alembic" }, { name = "click" }, { name = "granian", extra = ["reload"] }, { name = "httpx" }, - { name = "jinja2" }, { name = "packaging" }, { name = "platformdirs" }, { name = "psutil", marker = "sys_platform == 'win32'" }, @@ -579,9 +604,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e9/3a/d9b54b0f5021ec99f28af5753275fc279d43b43b30fa5f7d71ecc3d3f80c/reflex-0.8.7.tar.gz", hash = "sha256:42af70890ea817e520e3c7a5dd0d94a56d43e393ff777dbec4253bc0b6bcac1c", size = 587068, upload-time = "2025-08-18T19:37:21.705Z" } +sdist = { url = "https://files.pythonhosted.org/packages/60/61/5a9dc5a79f919e9a8b75e1d09f80f848469d6a723f2e8555dfc82e9ca500/reflex-0.8.9.tar.gz", hash = "sha256:c9c7c4d23770269e7e2ca04e19d106ffe6d0e5dacc5dc0b5f830958f5b79687e", size = 597102, upload-time = "2025-09-02T20:30:56.563Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/14/b25f38370d96017ed02e5548012d898166d7e8da7bc0ee63bee04da2faea/reflex-0.8.7-py3-none-any.whl", hash = "sha256:ac0e3b56ec11a5d97d2cdec1c8aa2aaee501597a02a12381d62a2918ce6783a1", size = 883153, upload-time = "2025-08-18T19:37:19.433Z" }, 
+ { url = "https://files.pythonhosted.org/packages/18/2c/5f62538dfe482c0847585d71adcccc17d3ab0b2b79b47cb11b58214b8c9d/reflex-0.8.9-py3-none-any.whl", hash = "sha256:244b06078acf60d81515e89835eba9d25f981bd4bd4537fcbca18aac1b0c6135", size = 885247, upload-time = "2025-09-02T20:30:54.683Z" }, ] [[package]]