From de03aa0a72ca3ee64c8180db4cd63885407f0933 Mon Sep 17 00:00:00 2001 From: Leon Date: Fri, 5 Sep 2025 16:42:23 +0300 Subject: [PATCH 01/24] reworked oauth implementation for better structuring --- factorialhr_analysis/components/__init__.py | 3 +- .../components/authentication_component.py | 28 +++ factorialhr_analysis/components/navbar.py | 8 +- factorialhr_analysis/constants.py | 11 ++ factorialhr_analysis/factorialhr_analysis.py | 7 +- factorialhr_analysis/pages/__init__.py | 7 +- factorialhr_analysis/pages/index_page.py | 36 ++++ factorialhr_analysis/pages/login.py | 133 ------------- factorialhr_analysis/pages/oauth_page.py | 82 ++++++++ ...x.py => working_time_verification_page.py} | 8 +- factorialhr_analysis/routes.py | 4 +- factorialhr_analysis/state.py | 182 ------------------ factorialhr_analysis/states/__init__.py | 3 + factorialhr_analysis/states/oauth_state.py | 137 +++++++++++++ factorialhr_analysis/templates.py | 25 ++- 15 files changed, 337 insertions(+), 337 deletions(-) create mode 100644 factorialhr_analysis/components/authentication_component.py create mode 100644 factorialhr_analysis/constants.py create mode 100644 factorialhr_analysis/pages/index_page.py delete mode 100644 factorialhr_analysis/pages/login.py create mode 100644 factorialhr_analysis/pages/oauth_page.py rename factorialhr_analysis/pages/{index.py => working_time_verification_page.py} (98%) delete mode 100644 factorialhr_analysis/state.py create mode 100644 factorialhr_analysis/states/__init__.py create mode 100644 factorialhr_analysis/states/oauth_state.py diff --git a/factorialhr_analysis/components/__init__.py b/factorialhr_analysis/components/__init__.py index ecffeac..43fca57 100644 --- a/factorialhr_analysis/components/__init__.py +++ b/factorialhr_analysis/components/__init__.py @@ -1,4 +1,5 @@ +from factorialhr_analysis.components.authentication_component import requires_authentication from factorialhr_analysis.components.date_range_selector import date_inputs, date_range_picker from factorialhr_analysis.components.navbar import navbar -__all__ = ['date_inputs', 'date_range_picker', 'navbar'] +__all__ = ['date_inputs', 'date_range_picker', 'navbar', 'requires_authentication'] diff --git a/factorialhr_analysis/components/authentication_component.py b/factorialhr_analysis/components/authentication_component.py new file mode 100644 index 0000000..e0a485c --- /dev/null +++ b/factorialhr_analysis/components/authentication_component.py @@ -0,0 +1,28 @@ +import functools + +import reflex as rx + +from factorialhr_analysis import states + + +def requires_authentication(page: rx.app.ComponentCallable) -> rx.app.ComponentCallable: + """Require authentication before rendering a page. + + If the user is not authenticated, then redirect to the login page. 
+ """ + + @functools.wraps(page) + def protected_page() -> rx.Component: + return rx.fragment( + rx.cond( + states.OAuthSessionState.is_hydrated, + rx.cond( + states.OAuthSessionState.is_session_authenticated, + page(), + rx.spinner(on_mount=states.OAuthSessionState.redir), + ), + rx.text('hydrating states...'), + ) + ) + + return protected_page diff --git a/factorialhr_analysis/components/navbar.py b/factorialhr_analysis/components/navbar.py index d361f43..c9f74ab 100644 --- a/factorialhr_analysis/components/navbar.py +++ b/factorialhr_analysis/components/navbar.py @@ -3,7 +3,7 @@ import reflex as rx from reflex.style import color_mode, set_color_mode -from factorialhr_analysis import routes, state +from factorialhr_analysis import routes, states def dark_mode_toggle() -> rx.Component: @@ -54,7 +54,11 @@ def navbar() -> rx.Component: ), rx.menu.content( rx.menu.item( - rx.link(rx.text('Log out'), href=routes.INDEX, on_click=state.LoginState.logout) + rx.link( + rx.text('Log out'), + href=routes.INDEX, + on_click=states.OAuthSessionState.delete_session, + ) ), ), justify='end', diff --git a/factorialhr_analysis/constants.py b/factorialhr_analysis/constants.py new file mode 100644 index 0000000..55def21 --- /dev/null +++ b/factorialhr_analysis/constants.py @@ -0,0 +1,11 @@ +import os +import dotenv + +dotenv.load_dotenv() + +CLIENT_ID: str = os.environ.get('FACTORIALHR_CLIENT_ID', '') +CLIENT_SECRET: str = os.environ.get('FACTORIALHR_CLIENT_SECRET', '') +REDIRECT_URI: str = os.environ.get('FACTORIALHR_REDIRECT_URI', '') +ENVIRONMENT_URL: str = os.environ.get('FACTORIALHR_ENVIRONMENT_URL', '') +API_KEY: str = os.environ.get('FACTORIALHR_API_KEY', '') +SCOPE = 'read' diff --git a/factorialhr_analysis/factorialhr_analysis.py b/factorialhr_analysis/factorialhr_analysis.py index ee402d3..6e523f5 100644 --- a/factorialhr_analysis/factorialhr_analysis.py +++ b/factorialhr_analysis/factorialhr_analysis.py @@ -5,9 +5,12 @@ from factorialhr_analysis import pages, routes -dotenv.load_dotenv() + +# TODO: check if env variables in constants have been set app = rx.App() app.add_page(pages.index_page, route=routes.INDEX) -app.add_page(pages.login_page, route=routes.LOGIN_ROUTE) +app.add_page(pages.working_time_verification_page, route=routes.WORKING_TIME_VERIFICATION_ROUTE) +app.add_page(pages.authorize_oauth_page, route=routes.OAUTH_AUTHORIZE_ROUTE) +app.add_page(pages.start_oauth_process, route=routes.OAUTH_START_ROUTE) diff --git a/factorialhr_analysis/pages/__init__.py b/factorialhr_analysis/pages/__init__.py index 70e8f6b..f30cdfe 100644 --- a/factorialhr_analysis/pages/__init__.py +++ b/factorialhr_analysis/pages/__init__.py @@ -1,4 +1,5 @@ -from factorialhr_analysis.pages.index import index_page -from factorialhr_analysis.pages.login import login_page +from factorialhr_analysis.pages.working_time_verification_page import working_time_verification_page +from factorialhr_analysis.pages.oauth_page import authorize_oauth_page, start_oauth_process +from factorialhr_analysis.pages.index_page import index_page -__all__ = ['index_page', 'login_page'] +__all__ = ['working_time_verification_page', 'authorize_oauth_page', 'start_oauth_process', 'index_page'] diff --git a/factorialhr_analysis/pages/index_page.py b/factorialhr_analysis/pages/index_page.py new file mode 100644 index 0000000..81f83fe --- /dev/null +++ b/factorialhr_analysis/pages/index_page.py @@ -0,0 +1,36 @@ +import logging + +import factorialhr +import reflex as rx +from factorialhr_analysis import templates, constants, states + + +class 
IndexState(rx.State): + """State for the index page.""" + + @rx.event + async def get_credentials(self): + oauth_state = await self.get_state(states.OAuthSessionState) + async with factorialhr.ApiClient(base_url=constants.ENVIRONMENT_URL, auth=oauth_state.get_auth()) as api_client: + logging.getLogger(__name__).error(await factorialhr.CredentialsEndpoint(api_client).get()) + +@templates.template +def index_page() -> rx.Component: + """The index page of the application.""" + return rx.center( + rx.vstack( + rx.heading('Welcome to FactorialHR Analysis', size='4'), + rx.text('Analyze your FactorialHR data with ease.'), + rx.button( + 'Get Started', + as_='a', + href='/start-oauth', + color_scheme='teal', + size='2', + mt='4', + on_click=IndexState.get_credentials, + ), + ), + height='100vh', + bg='gray.50', + ) diff --git a/factorialhr_analysis/pages/login.py b/factorialhr_analysis/pages/login.py deleted file mode 100644 index 8b29739..0000000 --- a/factorialhr_analysis/pages/login.py +++ /dev/null @@ -1,133 +0,0 @@ -"""Login page.""" - -import functools -import typing - -import httpx -import reflex as rx - -from factorialhr_analysis import routes, state, templates - - -class LoginFormState(rx.State): - """State for the login form.""" - - user_entered_authorization_code: str - auth_code_invalid: bool = False - auth_code_invalid_message: str - - @rx.var - def authorization_code_empty(self) -> bool: - """Check if the authorization code input is empty.""" - return not self.user_entered_authorization_code.strip() - - @rx.var - def authorization_invalid(self) -> bool: - """Check if the authorization code is invalid.""" - return self.auth_code_invalid - - @rx.var - def authorization_code_invalid_message(self) -> str: - """Get the authorization code invalid message.""" - return self.auth_code_invalid_message - - @rx.event - async def handle_submit(self, form_data: dict[str, typing.Any]): - """Handle form submission.""" - self.auth_code_invalid = False - login_state = await self.get_state(state.LoginState) - try: - await login_state.login(form_data.get('authorization_code'), grant_type='authorization_code') - yield rx.redirect(routes.INDEX) - except httpx.HTTPStatusError as e: - self.auth_code_invalid = True - self.auth_code_invalid_message = f'Login failed with status code: {e.response.status_code}' - - -def authorization_code_form() -> rx.Component: - """Form for the authorization code input.""" - return rx.form.root( - rx.form.field( - rx.vstack( - rx.form.label( - 'Authorization Code', - size='3', - weight='medium', - text_align='left', - width='100%', - ), - rx.form.control( - rx.input( - name='authorization_code', - size='3', - width='100%', - on_change=LoginFormState.set_user_entered_authorization_code, - ), - as_child=True, - ), - rx.cond( - LoginFormState.authorization_invalid, - rx.form.message( - LoginFormState.authorization_code_invalid_message, - color='var(--red-11)', - ), - ), - rx.button( - 'Sign in', size='2', width='100%', type='submit', disabled=LoginFormState.authorization_code_empty - ), - spacing='2', - width='100%', - ), - name='authorization_code', - server_invalid=LoginFormState.auth_code_invalid, - ), - on_submit=LoginFormState.handle_submit, - reset_on_submit=True, - ) - - -def redirect_if_authenticated(page: rx.app.ComponentCallable) -> rx.app.ComponentCallable: - """Redirect authenticated users away from login page.""" - - @functools.wraps(page) - def login_page_wrapper() -> rx.Component: - return rx.cond( - state.LoginState.is_hydrated, - rx.cond( - 
state.LoginState.is_authenticated,
-                rx.spinner(on_mount=state.LoginState.redir),
-                page(),  # Show login form if not authenticated
-            ),
-            rx.spinner(),
-        )
-
-    return login_page_wrapper
-
-
-@templates.template
-@redirect_if_authenticated
-def login_page() -> rx.Component:
-    """Login page."""
-    return rx.center(
-        rx.card(
-            rx.vstack(
-                rx.center(
-                    rx.heading(
-                        'Login to FactorialHR',
-                        size='6',
-                        as_='h2',
-                        text_align='center',
-                        width='100%',
-                    ),
-                    direction='column',
-                    spacing='5',
-                    width='100%',
-                ),
-                authorization_code_form(),
-            ),
-            size='4',
-            max_width='28em',
-            width='100%',
-        ),
-        height='100vh',
-    )
diff --git a/factorialhr_analysis/pages/oauth_page.py b/factorialhr_analysis/pages/oauth_page.py
new file mode 100644
index 0000000..6811c9f
--- /dev/null
+++ b/factorialhr_analysis/pages/oauth_page.py
@@ -0,0 +1,82 @@
+import functools
+import logging
+import secrets
+import urllib.parse
+import httpx
+import reflex as rx
+from factorialhr_analysis import states, constants
+
+
+class OAuthProcessState(rx.State):
+    """State to handle OAuth token processing."""
+
+    error: str = ''
+    expected_state: str = ''
+
+    @rx.event
+    async def start_oauth_process(self):
+        """Redirect to the OAuth authorization URL."""
+        if not self.expected_state:
+            self.expected_state = secrets.token_urlsafe(16)
+        auth_url = (
+            f'{constants.ENVIRONMENT_URL}/oauth/authorize?'
+            f'response_type=code&'
+            f'client_id={constants.CLIENT_ID}&'
+            f'redirect_uri={urllib.parse.quote(constants.REDIRECT_URI)}&'
+            f'scope={constants.SCOPE}&'
+            f'state={self.expected_state}'
+        )
+        yield rx.redirect(auth_url)
+
+    @rx.event
+    async def process_oauth_response(self):
+        """Process the OAuth response to exchange code for an access token."""
+        # abort if the state parameter does not match the nonce we generated
+        if self.expected_state != self.router.url.query_parameters.get('state'):
+            self.error = 'State mismatch error.'
+            self.expected_state = ''
+            return
+        code = self.router.url.query_parameters.get('code', '')
+        if not code:
+            self.error = 'Authorization code is missing.'
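+            # the nonce is reset on every exit path so a stale value cannot be replayed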
+ self.expected_state = '' + return + oauth_session = await self.get_state(states.OAuthSessionState) + try: + await oauth_session.create_session(code, grant_type='authorization_code') + except httpx.HTTPStatusError as e: + self.error = str(e) + finally: + self.expected_state = '' + + +def redirect_if_authenticated(page: rx.app.ComponentCallable) -> rx.app.ComponentCallable: + """Redirect authenticated users away from login page.""" + + @functools.wraps(page) + def login_page_wrapper() -> rx.Component: + return rx.cond( + states.OAuthSessionState.is_hydrated, + rx.cond( + states.OAuthSessionState.is_session_authenticated, + rx.fragment(on_mount=states.OAuthSessionState.redir), + page(), + ), + rx.spinner(), + ) + + return login_page_wrapper + + +@redirect_if_authenticated +def start_oauth_process(): + return rx.text('Redirecting to factorialhr...', on_mount=OAuthProcessState.start_oauth_process) + + +@redirect_if_authenticated +def authorize_oauth_page(): + return rx.box( + rx.text('Validating response...'), + rx.text(OAuthProcessState.error, color='red'), + on_mount=OAuthProcessState.process_oauth_response, + ) diff --git a/factorialhr_analysis/pages/index.py b/factorialhr_analysis/pages/working_time_verification_page.py similarity index 98% rename from factorialhr_analysis/pages/index.py rename to factorialhr_analysis/pages/working_time_verification_page.py index f32f55f..c1f128b 100644 --- a/factorialhr_analysis/pages/index.py +++ b/factorialhr_analysis/pages/working_time_verification_page.py @@ -14,7 +14,7 @@ import reflex as rx from reflex.utils.prerequisites import get_app -from factorialhr_analysis import state, templates, working_time_verification +from factorialhr_analysis import templates, working_time_verification, components, states def time_to_moment(time_: datetime.time | None) -> rx.MomentDelta: @@ -136,7 +136,7 @@ async def handle_submit(self, form_data: dict): try: async with self: - api_session = (await self.get_state(state.LoginState)).get_auth() + api_session = (await self.get_state(states.OAuthSessionState)).get_auth() # API calls outside async with block async with factorialhr.ApiClient(os.environ['FACTORIALHR_ENVIRONMENT_URL'], auth=api_session) as client: employees = await factorialhr.EmployeesEndpoint(client).all() @@ -466,9 +466,9 @@ def live_progress() -> rx.Component: ) +@components.requires_authentication @templates.template -@state.redirect_for_login -def index_page() -> rx.Component: +def working_time_verification_page() -> rx.Component: """Index page of the app.""" return rx.vstack( rx.hstack(render_input(), render_export_buttons(), render_filters(), justify='between', width='100%'), diff --git a/factorialhr_analysis/routes.py b/factorialhr_analysis/routes.py index 13abc2f..8a5b0ca 100644 --- a/factorialhr_analysis/routes.py +++ b/factorialhr_analysis/routes.py @@ -1,4 +1,6 @@ """Routes for the application.""" INDEX = '/' -LOGIN_ROUTE = '/login' +OAUTH_START_ROUTE = '/oauth/start' +OAUTH_AUTHORIZE_ROUTE = '/oauth/authorize' +WORKING_TIME_VERIFICATION_ROUTE = '/working-time-verification' diff --git a/factorialhr_analysis/state.py b/factorialhr_analysis/state.py deleted file mode 100644 index 96dc35f..0000000 --- a/factorialhr_analysis/state.py +++ /dev/null @@ -1,182 +0,0 @@ -"""Login state.""" - -import functools -import logging -import os -import time -import typing - -import factorialhr -import httpx -import pydantic -import reflex as rx - -from factorialhr_analysis import routes - -# Set up logging -logging.basicConfig(level=logging.WARNING) -logger = 
logging.getLogger(__name__) - - -class ApiSession(pydantic.BaseModel): - """Wrapper class for the API session cookie.""" - - access_token: str - refresh_token: str - created_at: int - token_type: str - - def access_token_expiration(self) -> int: - """Get the expiration date of the access token.""" - return self.created_at + 60 * 60 # access token is valid for 1 hour - - def is_access_token_expired(self) -> bool: - """Determine whether the access token is expired or not.""" - return self.access_token_expiration() <= time.time() - - def refresh_token_expiration(self) -> int: - """Get the expiration date of the refresh token.""" - return self.created_at + 7 * 24 * 60 * 60 # refresh token is valid for 1 week - - def is_refresh_token_expired(self) -> bool: - """Determine whether the refresh token is expired or not.""" - return self.refresh_token_expiration() <= time.time() - - -class LoginState(rx.State): - """State for managing login and authentication.""" - - api_session_cookie: str = rx.Cookie( - name='api_session', - same_site='strict', - ) - redirect_to: str = '' - - def get_auth(self) -> factorialhr.AccessTokenAuth | factorialhr.ApiKeyAuth: - """Get the authentication object for the API session.""" - if api_token := os.environ.get('FACTORIALHR_API_KEY'): - return factorialhr.ApiKeyAuth(api_key=api_token) - api_session = self.api_session() - if api_session is None: - msg = 'api_session_cookie must be valid' - raise RuntimeError(msg) - return factorialhr.AccessTokenAuth( - access_token=api_session.access_token, - token_type=api_session.token_type, - ) - - def api_session(self) -> ApiSession | None: - """Get the API session from the cookie.""" - if not self.api_session_cookie: - return None - try: - return ApiSession.model_validate_json(self.api_session_cookie) - except pydantic.ValidationError as e: - logger.exception('parsing cookie failed', exc_info=e) - return None - - @rx.event - async def login(self, token: str, *, grant_type: typing.Literal['refresh_token', 'authorization_code']) -> None: - """Log in to the API and store the session cookie.""" - if grant_type == 'refresh_token': - data = { - 'client_id': os.environ['FACTORIALHR_CLIENT_ID'], - 'client_secret': os.environ['FACTORIALHR_CLIENT_SECRET'], - 'grant_type': 'refresh_token', - 'refresh_token': token, - } - else: - data = { - 'client_id': os.environ['FACTORIALHR_CLIENT_ID'], - 'client_secret': os.environ['FACTORIALHR_CLIENT_SECRET'], - 'code': token, - 'grant_type': 'authorization_code', - 'redirect_uri': os.environ['FACTORIALHR_REDIRECT_URI'], - } - try: - async with httpx.AsyncClient() as client: - response = await client.post( - os.environ['FACTORIALHR_ENVIRONMENT_URL'] + '/oauth/token', - data=data, - ) - response.raise_for_status() - except httpx.HTTPStatusError as e: - logger.exception('Login failed', exc_info=e) - raise - else: - self.api_session_cookie = ApiSession(**response.json()).model_dump_json() - logger.info('Refreshed access token') - - @rx.var(cache=False) - async def is_authenticated(self) -> bool: - """Check if the user is authenticated.""" - if os.environ.get('FACTORIALHR_API_KEY'): - return True # If using API key, always authenticated - api_session = self.api_session() - if api_session is None: - return False - return not api_session.is_access_token_expired() - - @rx.event - async def refresh(self) -> bool: - """Check if the user is authenticated.""" - if not self.is_hydrated: - return False - api_session = self.api_session() - if api_session is None: - return False - if 
api_session.is_refresh_token_expired(): - return False - if api_session.is_access_token_expired(): - try: - await self.login(token=api_session.refresh_token, grant_type='refresh_token') - except httpx.HTTPStatusError: - return False - return True - - @rx.event - async def redir(self): - """Redirect to the redirect_to route if logged in, or to the login page if not.""" - if not self.is_hydrated: - yield self.redir() - page = self.router.url.path - is_authenticated = await self.is_authenticated - if not is_authenticated: - is_authenticated = await self.refresh() - if not is_authenticated: - self.redirect_to = page - return_value = [] - if not self.api_session_cookie: - return_value.append(rx.remove_cookie('api_session')) - if page != routes.LOGIN_ROUTE: - yield [*return_value, rx.redirect(routes.LOGIN_ROUTE)] - if is_authenticated and page == routes.LOGIN_ROUTE: - yield rx.redirect(self.redirect_to or '/') - - @rx.event - def logout(self): - """Log out the user.""" - yield [rx.remove_cookie('api_session'), rx.redirect(routes.LOGIN_ROUTE)] - - -def redirect_for_login(page: rx.app.ComponentCallable) -> rx.app.ComponentCallable: - """Require authentication before rendering a page. - - If the user is not authenticated, then redirect to the login page. - """ - - @functools.wraps(page) - def protected_page() -> rx.Component: - return rx.fragment( - rx.cond( - LoginState.is_hydrated, - rx.cond( - LoginState.is_authenticated, - page(), - rx.spinner(on_mount=LoginState.redir), - ), - rx.spinner(), # Show spinner while hydrating - ) - ) - - return protected_page diff --git a/factorialhr_analysis/states/__init__.py b/factorialhr_analysis/states/__init__.py new file mode 100644 index 0000000..bcf2591 --- /dev/null +++ b/factorialhr_analysis/states/__init__.py @@ -0,0 +1,3 @@ +from factorialhr_analysis.states.oauth_state import OAuthSessionState + +__all__ = ['OAuthSessionState'] diff --git a/factorialhr_analysis/states/oauth_state.py b/factorialhr_analysis/states/oauth_state.py new file mode 100644 index 0000000..24afcb0 --- /dev/null +++ b/factorialhr_analysis/states/oauth_state.py @@ -0,0 +1,137 @@ +import logging +import time +import typing + +import factorialhr +import httpx +import pydantic +import reflex as rx +from factorialhr_analysis import constants, routes + + +class ApiSession(pydantic.BaseModel): + """Wrapper class for the API session cookie.""" + + access_token: str + refresh_token: str + created_at: int + token_type: str + + def access_token_expiration(self) -> int: + """Get the expiration date of the access token.""" + return self.created_at + 60 * 60 # access token is valid for 1 hour + + def is_access_token_expired(self) -> bool: + """Determine whether the access token is expired or not.""" + return self.access_token_expiration() <= time.time() + + def refresh_token_expiration(self) -> int: + """Get the expiration date of the refresh token.""" + return self.created_at + 7 * 24 * 60 * 60 # refresh token is valid for 1 week + + def is_refresh_token_expired(self) -> bool: + """Determine whether the refresh token is expired or not.""" + return self.refresh_token_expiration() <= time.time() + + +class OAuthSessionState(rx.State): + """State for managing OAuth session and authentication.""" + + api_session_cookie: str = rx.Cookie( + name='api_session', + same_site='strict', + ) + _redirect_to: str = '' + + def api_session(self) -> ApiSession | None: + """Get the API session from the cookie.""" + if not self.api_session_cookie: + return None + try: + return 
ApiSession.model_validate_json(self.api_session_cookie)
+        except pydantic.ValidationError:
+            return None
+
+    @rx.event
+    async def create_session(self, token: str, *, grant_type: typing.Literal['refresh_token', 'authorization_code']):
+        """Exchange the token for an access token and store it in the session cookie."""
+        if grant_type == 'refresh_token':
+            data = {
+                'client_id': constants.CLIENT_ID,
+                'client_secret': constants.CLIENT_SECRET,
+                'grant_type': 'refresh_token',
+                'refresh_token': token,
+            }
+        else:
+            data = {
+                'client_id': constants.CLIENT_ID,
+                'client_secret': constants.CLIENT_SECRET,
+                'code': token,
+                'grant_type': 'authorization_code',
+                'redirect_uri': constants.REDIRECT_URI,
+            }
+        async with httpx.AsyncClient() as client:
+            response = await client.post(
+                f'{constants.ENVIRONMENT_URL}/oauth/token',
+                data=data,
+            )
+            response.raise_for_status()
+        self.api_session_cookie = ApiSession(**response.json()).model_dump_json()
+
+    @rx.event
+    def delete_session(self):
+        """Log out the user."""
+        yield rx.remove_cookie('api_session')
+
+    @rx.event
+    async def refresh_session(self) -> bool:
+        """Refresh the access token if it is expired."""
+        api_session = self.api_session()
+        if api_session is None:
+            return False
+        if api_session.is_refresh_token_expired():
+            return False
+        if api_session.is_access_token_expired():
+            try:
+                await self.create_session(token=api_session.refresh_token, grant_type='refresh_token')
+            except httpx.HTTPStatusError:
+                return False
+        return True
+
+    @rx.var(cache=False)
+    async def is_session_authenticated(self) -> bool:
+        """Check if the user is authenticated."""
+        if constants.API_KEY:
+            return True
+        api_session = self.api_session()
+        if api_session is None:
+            return False
+        return not api_session.is_access_token_expired()
+
+    @rx.event
+    async def redir(self):
+        """Redirect to the stored _redirect_to route if authenticated, or to the OAuth start page if not."""
+        if not self.is_hydrated:
+            yield self.redir()
+        page = self.router.url.path
+        is_authenticated = await self.is_session_authenticated
+        if not is_authenticated:
+            is_authenticated = await self.refresh_session()
+        if not is_authenticated and page != routes.OAUTH_START_ROUTE:
+            self._redirect_to = page
+            yield rx.redirect(routes.OAUTH_START_ROUTE)
+        if is_authenticated and page in (routes.OAUTH_START_ROUTE, routes.OAUTH_AUTHORIZE_ROUTE):
+            yield rx.redirect(self._redirect_to or routes.INDEX)
+
+    def get_auth(self) -> factorialhr.AccessTokenAuth | factorialhr.ApiKeyAuth:
+        """Get the authentication object for the API session."""
+        if constants.API_KEY:
+            return factorialhr.ApiKeyAuth(api_key=constants.API_KEY)
+        api_session = self.api_session()
+        if api_session is None:
+            msg = 'api_session_cookie must be valid'
+            raise RuntimeError(msg)
+        return factorialhr.AccessTokenAuth(
+            access_token=api_session.access_token,
+            token_type=api_session.token_type,
+        )
diff --git a/factorialhr_analysis/templates.py b/factorialhr_analysis/templates.py
index 8377d9c..263441e 100644
--- a/factorialhr_analysis/templates.py
+++ b/factorialhr_analysis/templates.py
@@ -1,5 +1,7 @@
 """Templates for the web application."""
 
+import functools
+
 import reflex as rx
 
 from factorialhr_analysis import components
@@ -7,12 +9,17 @@
 
 def template(page: rx.app.ComponentCallable) -> rx.app.ComponentCallable:
     """Wrap a page in the main template."""
-    return rx.fragment(
-        components.navbar(),
-        rx.box(
-            page(),
-            padding_top='1em',  # Space between navbar and content
-            padding_left='1em',
-            padding_right='1em',
-        ),
-    )
+
+    @functools.wraps(page)
+    def page_template() -> 
rx.Component: + return rx.fragment( + components.navbar(), + rx.box( + page(), + padding_top='1em', # Space between navbar and content + padding_left='1em', + padding_right='1em', + ), + ) + + return page_template From b5d68372a148662db89df5bab80ccf6c72f220e3 Mon Sep 17 00:00:00 2001 From: Leon Date: Tue, 9 Sep 2025 14:15:50 +0200 Subject: [PATCH 02/24] optimizations + docker files --- .dockerignore | 8 + Caddy.Dockerfile | 4 + Caddyfile | 14 + Dockerfile | 52 +++ compose.prod.yaml | 25 ++ compose.yaml | 41 ++ factorialhr_analysis/components/__init__.py | 9 +- .../components/authentication_component.py | 3 +- factorialhr_analysis/components/footer.py | 35 ++ factorialhr_analysis/components/navbar.py | 83 ++-- factorialhr_analysis/constants.py | 3 +- factorialhr_analysis/factorialhr_analysis.py | 5 +- factorialhr_analysis/pages/__init__.py | 6 +- factorialhr_analysis/pages/index_page.py | 54 ++- factorialhr_analysis/pages/oauth_page.py | 8 +- .../pages/working_time_verification_page.py | 418 +++++++++--------- factorialhr_analysis/routes.py | 2 +- factorialhr_analysis/states/__init__.py | 3 +- factorialhr_analysis/states/data_state.py | 76 ++++ factorialhr_analysis/states/oauth_state.py | 2 +- factorialhr_analysis/templates.py | 5 +- .../working_time_verification/verification.py | 4 +- pyproject.toml | 3 +- rxconfig.py | 2 + uv.lock | 11 +- 25 files changed, 603 insertions(+), 273 deletions(-) create mode 100644 .dockerignore create mode 100644 Caddy.Dockerfile create mode 100644 Caddyfile create mode 100644 Dockerfile create mode 100644 compose.prod.yaml create mode 100644 compose.yaml create mode 100644 factorialhr_analysis/components/footer.py create mode 100644 factorialhr_analysis/states/data_state.py diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..e86a29e --- /dev/null +++ b/.dockerignore @@ -0,0 +1,8 @@ +.web +.git +__pycache__/* +Dockerfile +Caddy.Dockerfile +compose.yaml +compose.*.yaml +uploaded_files \ No newline at end of file diff --git a/Caddy.Dockerfile b/Caddy.Dockerfile new file mode 100644 index 0000000..76ae320 --- /dev/null +++ b/Caddy.Dockerfile @@ -0,0 +1,4 @@ +FROM library/caddy + +COPY --from=local/reflex-app /app/.web/build/client /srv +ADD Caddyfile /etc/caddy/Caddyfile \ No newline at end of file diff --git a/Caddyfile b/Caddyfile new file mode 100644 index 0000000..a77063d --- /dev/null +++ b/Caddyfile @@ -0,0 +1,14 @@ +{$DOMAIN} + +encode gzip + +@backend_routes path /_event/* /ping /_upload /_upload/* +handle @backend_routes { + reverse_proxy app:8000 +} + +root * /srv +route { + try_files {path} {path}/ /404.html + file_server +} \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..803f070 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,52 @@ +# This docker file is intended to be used with docker compose to deploy a production +# instance of a Reflex app. + +# Stage 1: init +FROM python:3.13 as init + +ARG uv=/root/.local/bin/uv + +# Install `uv` for faster package bootstrapping +ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh +RUN /install.sh && rm /install.sh + +# Copy local context to `/app` inside container (see .dockerignore) +WORKDIR /app +COPY . . 
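# Mount points for the compose volumes (db-data and upload-data in
# compose.yaml); the runtime stage below copies them along with the rest of /app.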
+RUN mkdir -p /app/data /app/uploaded_files + +# Create virtualenv which will be copied into final container +ENV VIRTUAL_ENV=/app/.venv +ENV PATH="$VIRTUAL_ENV/bin:$PATH" +RUN $uv venv + +# Install app requirements and reflex inside virtualenv +RUN $uv sync --frozen + +# Deploy templates and prepare app +RUN $uv run python -m reflex init + +# Export static copy of frontend to /app/.web/build/client +RUN $uv run python -m reflex export --frontend-only --no-zip + +# Copy static files out of /app to save space in backend image +RUN mv .web/build/client /tmp/client +RUN rm -rf .web && mkdir -p .web/build +RUN mv /tmp/client .web/build/client + +# Stage 2: copy artifacts into slim image +FROM python:3.13-slim +WORKDIR /app +RUN adduser --disabled-password --home /app reflex +COPY --chown=reflex --from=init /app /app +# Install libpq-dev for psycopg (skip if not using postgres). +RUN apt-get update -y && apt-get install -y libpq-dev && rm -rf /var/lib/apt/lists/* +USER reflex +ENV PATH="/app/.venv/bin:$PATH" PYTHONUNBUFFERED=1 + +# Needed until Reflex properly passes SIGTERM on backend. +STOPSIGNAL SIGKILL + +# Always apply migrations before starting the backend. +CMD [ -d alembic ] && reflex db migrate; \ + exec python -m reflex run --env prod --backend-only diff --git a/compose.prod.yaml b/compose.prod.yaml new file mode 100644 index 0000000..d728727 --- /dev/null +++ b/compose.prod.yaml @@ -0,0 +1,25 @@ +# Use this override file to run the app in prod mode with postgres and redis +# docker compose -f compose.yaml -f compose.prod.yaml up -d +services: + db: + image: postgres + restart: always + environment: + POSTGRES_PASSWORD: secret + volumes: + - postgres-data:/var/lib/postgresql/data + + redis: + image: redis + restart: always + + app: + environment: + REFLEX_DB_URL: postgresql+psycopg://postgres:secret@db/postgres + REFLEX_REDIS_URL: redis://redis:6379 + depends_on: + - db + - redis + +volumes: + postgres-data: \ No newline at end of file diff --git a/compose.yaml b/compose.yaml new file mode 100644 index 0000000..8974f43 --- /dev/null +++ b/compose.yaml @@ -0,0 +1,41 @@ +# Base compose file production deployment of reflex app with Caddy webserver +# providing TLS termination and reverse proxying. +# +# See `compose.prod.yaml` for more robust and performant deployment option. +# +# During build and run, set environment DOMAIN pointing +# to publicly accessible domain where app will be hosted +services: + app: + image: local/reflex-app + environment: + REFLEX_DB_URL: sqlite:///data/reflex.db + build: + context: . + volumes: + - db-data:/app/data + - upload-data:/app/uploaded_files + restart: always + + webserver: + environment: + DOMAIN: ${DOMAIN:-localhost} + ports: + - 443:443 + - 80:80 # For acme-challenge via HTTP. + build: + context: . 
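      # Caddy (see Caddyfile above) terminates TLS for $DOMAIN, serves the
      # exported static client from /srv, and reverse-proxies /_event, /ping
      # and /_upload to the backend service on app:8000.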
+ dockerfile: Caddy.Dockerfile + volumes: + - caddy-data:/root/.caddy + restart: always + depends_on: + - app + +volumes: + # SQLite data + db-data: + # Uploaded files + upload-data: + # TLS keys and certificates + caddy-data: \ No newline at end of file diff --git a/factorialhr_analysis/components/__init__.py b/factorialhr_analysis/components/__init__.py index 43fca57..9d8bbd9 100644 --- a/factorialhr_analysis/components/__init__.py +++ b/factorialhr_analysis/components/__init__.py @@ -1,5 +1,12 @@ from factorialhr_analysis.components.authentication_component import requires_authentication from factorialhr_analysis.components.date_range_selector import date_inputs, date_range_picker +from factorialhr_analysis.components.footer import footer from factorialhr_analysis.components.navbar import navbar -__all__ = ['date_inputs', 'date_range_picker', 'navbar', 'requires_authentication'] +__all__ = [ + 'date_inputs', + 'date_range_picker', + 'footer', + 'navbar', + 'requires_authentication', +] diff --git a/factorialhr_analysis/components/authentication_component.py b/factorialhr_analysis/components/authentication_component.py index e0a485c..2436880 100644 --- a/factorialhr_analysis/components/authentication_component.py +++ b/factorialhr_analysis/components/authentication_component.py @@ -1,11 +1,12 @@ import functools +from collections.abc import Callable import reflex as rx from factorialhr_analysis import states -def requires_authentication(page: rx.app.ComponentCallable) -> rx.app.ComponentCallable: +def requires_authentication(page: Callable[[], rx.Component]) -> Callable[[], rx.Component]: """Require authentication before rendering a page. If the user is not authenticated, then redirect to the login page. diff --git a/factorialhr_analysis/components/footer.py b/factorialhr_analysis/components/footer.py new file mode 100644 index 0000000..7846e80 --- /dev/null +++ b/factorialhr_analysis/components/footer.py @@ -0,0 +1,35 @@ +import reflex as rx + +from factorialhr_analysis import states + + +def refresh_data(): + return rx.hstack( + rx.button( + rx.icon('refresh-ccw'), + on_click=states.DataState.refresh_data, + loading=states.DataState.is_loading, + border_radius='1em', + ), + rx.text( + 'Last data update: ', + rx.cond( + states.DataState.last_updated.is_not_none(), + rx.moment(states.DataState.last_updated, from_now=True), + 'Never', + ), + ), + align='center', + ) + + +def footer() -> rx.Component: + return rx.el.footer( + refresh_data(), + position='fixed', + padding='0.5em', + bottom='0', + width='100%', + bg=rx.color('gray', 3), + # color="white", + ) diff --git a/factorialhr_analysis/components/navbar.py b/factorialhr_analysis/components/navbar.py index c9f74ab..16e0703 100644 --- a/factorialhr_analysis/components/navbar.py +++ b/factorialhr_analysis/components/navbar.py @@ -10,20 +10,18 @@ def dark_mode_toggle() -> rx.Component: """Toggle for dark/light mode.""" return rx.segmented_control.root( rx.segmented_control.item( - rx.icon(tag='monitor', size=20), + rx.icon(tag='monitor'), value='system', ), rx.segmented_control.item( - rx.icon(tag='sun', size=20), + rx.icon(tag='sun'), value='light', ), rx.segmented_control.item( - rx.icon(tag='moon', size=20), + rx.icon(tag='moon'), value='dark', ), on_change=set_color_mode, - variant='classic', - radius='large', value=color_mode, ) @@ -33,37 +31,66 @@ def navbar_link(text: str, url: str) -> rx.Component: return rx.link(rx.text(text, size='4', weight='medium'), href=url) +def refresh_data(): + return rx.hstack( + rx.button( + 
rx.icon('refresh-ccw'), + on_click=states.DataState.refresh_data, + loading=states.DataState.is_loading, + ), + rx.text( + 'Last data update: ', + rx.cond( + states.DataState.last_updated.is_not_none(), + rx.moment(states.DataState.last_updated, from_now=True), + 'Never', + ), + ), + align='center', + ) + + +def icon_menu(): + return ( + rx.menu.root( + rx.menu.trigger( + rx.icon_button( + rx.icon('user'), + size='2', + radius='full', + ) + ), + rx.menu.content( + rx.menu.item( + rx.link( + rx.text('Log out'), + href=routes.INDEX, + on_click=states.OAuthSessionState.delete_session, + ) + ), + ), + justify='end', + ), + ) + + def navbar() -> rx.Component: """Navigation bar component.""" return rx.box( rx.desktop_only( rx.hstack( - rx.heading('Factorialhr analysis', size='7', weight='bold'), rx.hstack( - navbar_link('Working time verification', '/#'), - spacing='5', + rx.link(rx.heading('Working time analysis', size='5', weight='bold'), href=routes.INDEX), + navbar_link('Verification', '/verification'), + navbar_link('Projects', '/projects'), + align_items='center', ), rx.hstack( - rx.menu.root( - rx.menu.trigger( - rx.icon_button( - rx.icon('user'), - size='2', - radius='full', - ) - ), - rx.menu.content( - rx.menu.item( - rx.link( - rx.text('Log out'), - href=routes.INDEX, - on_click=states.OAuthSessionState.delete_session, - ) - ), - ), - justify='end', - ), + refresh_data(), + rx.spacer(), + icon_menu(), dark_mode_toggle(), + justify='between', ), justify='between', align_items='center', @@ -71,5 +98,5 @@ def navbar() -> rx.Component: ), bg=rx.color('accent', 3), padding='1em', - width='100%', + top='0', ) diff --git a/factorialhr_analysis/constants.py b/factorialhr_analysis/constants.py index 55def21..8c481ab 100644 --- a/factorialhr_analysis/constants.py +++ b/factorialhr_analysis/constants.py @@ -1,7 +1,6 @@ import os -import dotenv -dotenv.load_dotenv() +# dotenv.load_dotenv() CLIENT_ID: str = os.environ.get('FACTORIALHR_CLIENT_ID', '') CLIENT_SECRET: str = os.environ.get('FACTORIALHR_CLIENT_SECRET', '') diff --git a/factorialhr_analysis/factorialhr_analysis.py b/factorialhr_analysis/factorialhr_analysis.py index 6e523f5..9ea8a5c 100644 --- a/factorialhr_analysis/factorialhr_analysis.py +++ b/factorialhr_analysis/factorialhr_analysis.py @@ -1,16 +1,13 @@ """Main app file for the FactorialHR Analysis application.""" -import dotenv import reflex as rx from factorialhr_analysis import pages, routes - # TODO: check if env variables in constants have been set - app = rx.App() app.add_page(pages.index_page, route=routes.INDEX) -app.add_page(pages.working_time_verification_page, route=routes.WORKING_TIME_VERIFICATION_ROUTE) +app.add_page(pages.working_time_verification_page, route=routes.VERIFICATION_ROUTE) app.add_page(pages.authorize_oauth_page, route=routes.OAUTH_AUTHORIZE_ROUTE) app.add_page(pages.start_oauth_process, route=routes.OAUTH_START_ROUTE) diff --git a/factorialhr_analysis/pages/__init__.py b/factorialhr_analysis/pages/__init__.py index f30cdfe..808e599 100644 --- a/factorialhr_analysis/pages/__init__.py +++ b/factorialhr_analysis/pages/__init__.py @@ -1,5 +1,5 @@ -from factorialhr_analysis.pages.working_time_verification_page import working_time_verification_page -from factorialhr_analysis.pages.oauth_page import authorize_oauth_page, start_oauth_process from factorialhr_analysis.pages.index_page import index_page +from factorialhr_analysis.pages.oauth_page import authorize_oauth_page, start_oauth_process +from factorialhr_analysis.pages.working_time_verification_page import 
working_time_verification_page -__all__ = ['working_time_verification_page', 'authorize_oauth_page', 'start_oauth_process', 'index_page'] +__all__ = ['authorize_oauth_page', 'index_page', 'start_oauth_process', 'working_time_verification_page'] diff --git a/factorialhr_analysis/pages/index_page.py b/factorialhr_analysis/pages/index_page.py index 81f83fe..5cfc2ca 100644 --- a/factorialhr_analysis/pages/index_page.py +++ b/factorialhr_analysis/pages/index_page.py @@ -1,36 +1,54 @@ -import logging - import factorialhr import reflex as rx -from factorialhr_analysis import templates, constants, states + +from factorialhr_analysis import constants, states, templates class IndexState(rx.State): """State for the index page.""" + is_loading: rx.Field[bool] = rx.field(default=False) + @rx.event async def get_credentials(self): + self.is_loading = True + yield oauth_state = await self.get_state(states.OAuthSessionState) - async with factorialhr.ApiClient(base_url=constants.ENVIRONMENT_URL, auth=oauth_state.get_auth()) as api_client: - logging.getLogger(__name__).error(await factorialhr.CredentialsEndpoint(api_client).get()) + try: + async with factorialhr.ApiClient( + base_url=constants.ENVIRONMENT_URL, auth=oauth_state.get_auth() + ) as api_client: + shifts = await factorialhr.ShiftsEndpoint(api_client).all(timeout=100) + finally: + self.is_loading = False + @templates.template def index_page() -> rx.Component: """The index page of the application.""" - return rx.center( - rx.vstack( - rx.heading('Welcome to FactorialHR Analysis', size='4'), - rx.text('Analyze your FactorialHR data with ease.'), + return rx.vstack( + rx.heading('Welcome to FactorialHR Analysis', size='4'), + rx.hstack( rx.button( - 'Get Started', - as_='a', - href='/start-oauth', - color_scheme='teal', - size='2', - mt='4', - on_click=IndexState.get_credentials, + rx.icon('refresh-ccw'), + on_click=states.DataState.refresh_data, + loading=states.DataState.is_loading, + border_radius='1em', + ), + rx.text( + 'Last data update: ', + rx.cond( + states.DataState.last_updated.is_not_none(), + rx.moment(states.DataState.last_updated, from_now=True), + 'Never', + ), ), + align='center', + border_radius='1em', + border='1px solid', + padding='0.5em', ), - height='100vh', - bg='gray.50', + rx.text('loaded shifts:', states.DataState.len_of_shifts), + bg=rx.color('accent'), + align='center', ) diff --git a/factorialhr_analysis/pages/oauth_page.py b/factorialhr_analysis/pages/oauth_page.py index 6811c9f..d0f3bc2 100644 --- a/factorialhr_analysis/pages/oauth_page.py +++ b/factorialhr_analysis/pages/oauth_page.py @@ -1,10 +1,12 @@ import functools -import logging import secrets import urllib.parse +from collections.abc import Callable + import httpx import reflex as rx -from factorialhr_analysis import states, constants + +from factorialhr_analysis import constants, states class OAuthProcessState(rx.State): @@ -50,7 +52,7 @@ async def process_oauth_response(self): self.expected_state = '' -def redirect_if_authenticated(page: rx.app.ComponentCallable) -> rx.app.ComponentCallable: +def redirect_if_authenticated(page: Callable[[], rx.Component]) -> Callable[[], rx.Component]: """Redirect authenticated users away from login page.""" @functools.wraps(page) diff --git a/factorialhr_analysis/pages/working_time_verification_page.py b/factorialhr_analysis/pages/working_time_verification_page.py index c1f128b..149e999 100644 --- a/factorialhr_analysis/pages/working_time_verification_page.py +++ b/factorialhr_analysis/pages/working_time_verification_page.py 
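The hunk below pulls the form settings out of the data-fetching state into a dedicated SettingsState that keeps typed values (datetime.date, datetime.timedelta) in private backing vars and exposes plain strings to the inputs. A minimal sketch of that pattern, reduced to the start-date field (the class name is illustrative):

    import datetime

    import reflex as rx


    class SettingsSketch(rx.State):
        # The typed value stays server-side in a private backing var.
        _start_date: datetime.date | None = None

        @rx.var
        def start_date(self) -> str:
            # The frontend only ever sees the ISO string ('' while unset).
            return self._start_date.isoformat() if self._start_date else ''

        @rx.event
        def set_start_date(self, date: str):
            # Date inputs hand back strings; parse once at the boundary.
            self._start_date = datetime.date.fromisoformat(date)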
@@ -1,20 +1,74 @@ """The main page of the app.""" -import asyncio -import collections import csv import datetime import io -import os import typing -from collections.abc import Container, Sequence +from collections.abc import Container, Iterable, Sequence import anyio.from_thread import factorialhr import reflex as rx from reflex.utils.prerequisites import get_app -from factorialhr_analysis import templates, working_time_verification, components, states +from factorialhr_analysis import components, states, templates, working_time_verification + + +class SettingsState(rx.State): + _start_date: datetime.date | None = None + _end_date: datetime.date | None = None + _tolerance: datetime.timedelta | None = None + + only_active: rx.Field[bool] = rx.field(default=True) + + @rx.var + def start_date(self) -> str: + """Get the start date as a string.""" + if self._start_date is None: + return '' + return self._start_date.isoformat() + + @rx.var + def end_date(self) -> str: + """Get the end date as a string.""" + if self._end_date is None: + return '' + return self._end_date.isoformat() + + @rx.var + def tolerance(self) -> str: + """Get the tolerance value.""" + return str(int(self._tolerance.total_seconds() / 60)) if self._tolerance is not None else '' + + @rx.event + def set_tolerance(self, value: str): + """Set the tolerance value.""" + if value.isdigit(): + self._tolerance = datetime.timedelta(minutes=int(value)) + else: + self._tolerance = None + + @rx.event + def set_start_date(self, date: str): + """Set the start date.""" + self._start_date = datetime.date.fromisoformat(date) + + @rx.event + def set_end_date(self, date: str): + """Set the end date.""" + self._end_date = datetime.date.fromisoformat(date) + + @rx.var + def date_error(self) -> bool: + """Check if the end date is before the start date.""" + if not self._start_date or not self._end_date: + return False + return self._end_date < self._start_date + + @rx.event + def set_only_active(self, active: bool): # noqa: FBT001 + """Set whether to only include active employees.""" + self.only_active = active def time_to_moment(time_: datetime.time | None) -> rx.MomentDelta: @@ -37,6 +91,7 @@ class ErrorToShow(typing.TypedDict): """TypedDict for errors to show.""" name: str + team_names: Iterable[str] affected_days: str error: str cumulated_break: datetime.timedelta @@ -45,171 +100,121 @@ class ErrorToShow(typing.TypedDict): attendances: Sequence[Attendance] -class DataState(rx.State): - """State holding all the data.""" +def _filter_error(filter_value: str, error: ErrorToShow) -> bool: + return filter_value in error['name'].lower() or any( + filter_value in team_name.lower() for team_name in error['team_names'] + ) - _shifts: collections.defaultdict[int, list[factorialhr.AttendanceShift]] = collections.defaultdict( # noqa: RUF012 - list - ) # employee id as key - _employees: list[factorialhr.Employee] = [] # noqa: RUF012 - _employee_team_name_mapping: dict[int, list[str]] = {} # noqa: RUF012 - calculated_errors: list[working_time_verification.Error] = [] # noqa: RUF012 - errors_to_show: list[ErrorToShow] = [] # noqa: RUF012 - start_date: str = '' - end_date: str = '' - is_loading: bool = False # Guard flag - tolerance: str = '' - processed_employees: int = 0 # Number of employees processed so far - filter_value: str = '' # Placeholder for search functionality +class DataStateDeprecated(rx.State): + """State holding all the data.""" - selected_error_ids: list[int] = [] # noqa: RUF012 + errors_to_show: rx.Field[list[ErrorToShow]] = 
rx.field(default_factory=list) + _calculated_errors: list[ErrorToShow] = [] # noqa: RUF012 + is_loading: rx.Field[bool] = rx.field(default=False) + processed_employees: rx.Field[int] = rx.field(0) # Number of employees processed so far + total_amount_of_employees: rx.Field[int] = rx.field(0) - @rx.var - def date_error(self) -> bool: - """Check if the end date is before the start date.""" - if not self.start_date or not self.end_date: - return False - return datetime.date.fromisoformat(self.end_date) < datetime.date.fromisoformat(self.start_date) + filter_value: rx.Field[str] = rx.field('') # Placeholder for search functionality - @rx.var - def disable_submit(self) -> bool: - """Disable the submit button if there is a date error.""" - return self.date_error or not self.start_date or not self.end_date + selected_error_ids: rx.Field[list[int]] = rx.field(default_factory=list) def _should_cancel(self) -> bool: """Check if the current session is still valid.""" return self.router.session.client_token not in get_app().app.event_namespace.token_to_sid - def _cleanup(self): - """Cleanup method to reset state.""" - self._shifts = collections.defaultdict(list) - self.processed_employees = 0 - self.errors_to_show = [] - self._employees = [] - self._employee_team_name_mapping = {} - self.selected_error_ids = [] - self.is_loading = False - - @rx.var - def length_of_employees(self) -> int: - """Get the length of employees.""" - return len(self._employees) - - async def _handle_employee( - self, client: factorialhr.ApiClient, employee: factorialhr.Employee, teams: Sequence[factorialhr.Team] + async def _handle_single_employee( + self, + employee: factorialhr.Employee, + teams: Sequence[factorialhr.Team], + shifts: Sequence[factorialhr.AttendanceShift], + tolerance: datetime.timedelta | None, ): - """Handle fetching shifts for an employee.""" - async with self: - self._employees.append(employee) - self._employee_team_name_mapping[employee.id] = [ - t.name for t in teams if t.employee_ids and employee.id in t.employee_ids - ] - shifts = await factorialhr.ShiftsEndpoint(client).all( - params={'start_on': self.start_date, 'end_on': self.end_date, 'employee_ids[]': [employee.id]}, - timeout=60, - ) + """Handle a single employee.""" + for error in working_time_verification.get_error( + filter(lambda x: x.employee_id == employee.id, shifts), tolerance=tolerance + ): + async with self: + error_to_show = ErrorToShow( + name=employee.full_name, + team_names=[team.name for team in teams if employee.id in team.employee_ids], + affected_days=', '.join(str(d) for d in error.days_affected), + error=error.reason, + cumulated_break=error.break_time, + cumulated_attendance=error.time_attended, + attendances=[ + Attendance( + date=a.date, + clock_in=time_to_moment(a.clock_in) if a.clock_in is not None else None, + clock_out=time_to_moment(a.clock_out) if a.clock_out is not None else None, + minutes=rx.MomentDelta(minutes=a.minutes), + ) + for a in error.attendances + ], + ) + self._calculated_errors.append(error_to_show) async with self: - for shift in shifts.data(): - if self._should_cancel(): - self._cleanup() - return - self._shifts[employee.id].append(shift) + self.processed_employees += 1 @rx.event(background=True) - async def handle_submit(self, form_data: dict): - """Fetch employees and teams data.""" + async def calculate_errors(self): + """Calculate errors based on the shifts.""" async with self: if self.is_loading: return - - self.start_date = form_data.get('start_date') - self.end_date = 
form_data.get('end_date') self.is_loading = True - self._shifts = collections.defaultdict(list) + self.selected_error_ids.clear() + self.errors_to_show.clear() + self._calculated_errors.clear() self.processed_employees = 0 - self.errors_to_show = [] - self._employees = [] - self._employee_team_name_mapping = {} - self.selected_error_ids = [] - yield # Send initial state update to frontend + data_state = await self.get_state(states.DataState) + settings_state = await self.get_state(SettingsState) - try: - async with self: - api_session = (await self.get_state(states.OAuthSessionState)).get_auth() - # API calls outside async with block - async with factorialhr.ApiClient(os.environ['FACTORIALHR_ENVIRONMENT_URL'], auth=api_session) as client: - employees = await factorialhr.EmployeesEndpoint(client).all() - teams = list((await factorialhr.TeamsEndpoint(client).all()).data()) - async with anyio.from_thread.create_task_group() as tg: - for employee in employees.data(): - tg.start_soon(self._handle_employee, client, employee, teams) - - async with self: - yield DataState.fill_errors_to_show # set is_loading to false - - except asyncio.CancelledError: - # Handle cancellation when page is reloaded/closed - async with self: - self._cleanup() - raise - except Exception: - # Handle other errors + async with self: + employees = [ + employee + for employee in data_state._employees.values() + if not settings_state.only_active or employee.active + ] + self.total_amount_of_employees = len(employees) + shifts = [ + shift + for shift in data_state._shifts.values() + if settings_state._start_date <= shift.date <= settings_state._end_date + ] + async with anyio.from_thread.create_task_group() as tg: + for employee in employees: + tg.start_soon( + self._handle_single_employee, + employee, + data_state._teams.values(), + shifts, + settings_state._tolerance, + ) + + if not self.filter_value: async with self: - self._cleanup() - raise - - @rx.event - async def fill_errors_to_show(self): - """Fill the errors_to_show list based on the fetched data.""" - self.selected_error_ids = [] - self.errors_to_show = [] - self.processed_employees = 0 - self.is_loading = True - yield - tolerance = datetime.timedelta(minutes=int(self.tolerance) if self.tolerance.isdigit() else 0) - value = self.filter_value.lower() - for employee in self._employees: - teams = self._employee_team_name_mapping.get(employee.id, []) - if value in employee.full_name.lower() or any(value in team.lower() for team in teams): - for error in working_time_verification.get_error(self._shifts[employee.id], tolerance=tolerance): - if self._should_cancel(): - self._cleanup() - return - self.errors_to_show.append( - ErrorToShow( - name=employee.full_name, - affected_days=', '.join(str(d) for d in error.days_affected), - error=error.reason, - cumulated_break=error.break_time, - cumulated_attendance=error.time_attended, - attendances=[ - Attendance( - date=a.date, - clock_in=time_to_moment(a.clock_in) if a.clock_in is not None else None, - clock_out=time_to_moment(a.clock_out) if a.clock_out is not None else None, - minutes=rx.MomentDelta(minutes=a.minutes), - ) - for a in error.attendances - ], - ) - ) - yield - self.processed_employees += 1 - self.is_loading = False + self.errors_to_show = self._calculated_errors[:] + else: + for error_to_show in self._calculated_errors: + if not self.filter_value or _filter_error(self.filter_value.lower(), error_to_show): + async with self: + self.errors_to_show.append(error_to_show) + async with self: + self.is_loading = False 
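calculate_errors above follows the contract for Reflex background events: the handler runs outside the normal per-event state lock, so every mutation sits inside an `async with self:` block, which re-acquires the state and pushes a delta to the client when the block exits. The same pattern in isolation (field name and loop are illustrative):

    import asyncio

    import reflex as rx


    class ProgressSketch(rx.State):
        processed: int = 0

        @rx.event(background=True)
        async def run(self):
            for _ in range(10):
                await asyncio.sleep(0.1)  # slow work happens outside the lock
                async with self:  # take the state lock to mutate safely
                    self.processed += 1  # each block exit sends an update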
@rx.event - def filter_employees(self, value: str): + def set_filter_value(self, value: str): """Filter employees based on the search value.""" self.filter_value = value - yield DataState.fill_errors_to_show - - @rx.event - def set_tolerance(self, value: str): - """Set the tolerance value.""" - if value == '' or value.isdigit(): - self.tolerance = value - yield DataState.fill_errors_to_show + self.errors_to_show.clear() + if not value: + self.errors_to_show = self._calculated_errors[:] + return + for error in self._calculated_errors: + if _filter_error(value.lower(), error): + self.errors_to_show.append(error) + yield @rx.event def select_row(self, index: int): @@ -267,37 +272,67 @@ def download_selected_errors(self): def render_input() -> rx.Component: """Render the date input form.""" - return rx.form( - rx.hstack( + return rx.hstack( + rx.hstack( # Group "Start date" and its input rx.text('Start date'), rx.input( type='date', name='start_date', - value=DataState.start_date, - on_change=DataState.set_start_date, + value=SettingsState.start_date, + on_change=SettingsState.set_start_date, ), + align='center', + spacing='1', + min_width='max-content', + ), + rx.hstack( # Group "End date" and its input rx.text('End date'), rx.input( type='date', name='end_date', - value=DataState.end_date, - on_change=DataState.set_end_date, + value=SettingsState.end_date, + on_change=SettingsState.set_end_date, + ), + align='center', + spacing='1', + min_width='max-content', + ), + rx.hstack( + rx.text('Only active'), + rx.checkbox(default_checked=SettingsState.only_active, on_change=SettingsState.set_only_active), + align='center', + min_width='max-content', + spacing='1', + ), + rx.hstack( + rx.text('Tolerance'), + rx.input( + placeholder='Minutes', + type='number', + value=SettingsState.tolerance, + on_change=SettingsState.set_tolerance, + width='100%', + regex=r'^\d*$', + min=0, + ), + align='center', + spacing='1', + min_width='max-content', + ), + rx.cond( + SettingsState.date_error, + rx.tooltip( + rx.button('Submit', disabled=True), + content='End date must be after start date.', ), rx.button( 'Submit', - type='submit', - loading=DataState.is_loading, - disabled=DataState.disable_submit, - ), - rx.cond( - DataState.date_error, - rx.text('End date must be after start date', color='red'), + loading=DataStateDeprecated.is_loading, + on_click=DataStateDeprecated.calculate_errors, ), - spacing='3', - align='center', - width='100%', ), - on_submit=DataState.handle_submit, + spacing='3', + align='center', width='100%', ) @@ -307,11 +342,13 @@ def render_export_buttons() -> rx.Component: return rx.hstack( rx.button( 'Export Selected', - disabled=DataState.selected_error_ids.length() == 0, - on_click=DataState.download_selected_errors, + disabled=DataStateDeprecated.selected_error_ids.length() == 0, + on_click=DataStateDeprecated.download_selected_errors, ), rx.button( - 'Export All', disabled=DataState.errors_to_show.length() == 0, on_click=DataState.download_all_errors + 'Export All', + disabled=DataStateDeprecated.errors_to_show.length() == 0, + on_click=DataStateDeprecated.download_all_errors, ), justify='center', align='center', @@ -324,45 +361,17 @@ def render_search() -> rx.Component: return rx.hstack( rx.text('Search'), rx.input( - value=DataState.filter_value, - on_change=DataState.filter_employees, + value=DataStateDeprecated.filter_value, + on_change=DataStateDeprecated.set_filter_value, width='100%', placeholder='Filter by name or team', + disabled=DataStateDeprecated.is_loading, ), width='50%', 
align='center',
    )


-def render_tolerance_input() -> rx.Component:
-    """Render the tolerance input."""
-    return rx.hstack(
-        rx.text('Tolerance'),
-        rx.input(
-            placeholder='Minutes',
-            type='number',
-            value=DataState.tolerance,
-            on_change=DataState.set_tolerance,
-            width='100%',
-            regex=r'^\d*$',
-            min=0,
-        ),
-        width='25%',
-        align='center',
-    )
-
-
-def render_filters() -> rx.Component:
-    """Render the filters section."""
-    return rx.hstack(
-        render_tolerance_input(),
-        render_search(),
-        width='100%',
-        align='center',
-        justify='end',
-    )
-
-
 def show_employee(error: rx.Var[ErrorToShow], index: int) -> rx.Component:
    """Show an employee's error in a table row."""
    return rx.table.row(
@@ -429,8 +438,10 @@ def show_employee(error: rx.Var[ErrorToShow], index: int) -> rx.Component:
            ),
            align='right',
        ),
-        on_click=lambda: DataState.select_row(index),
-        background_color=rx.cond(DataState.selected_error_ids.contains(index), rx.color('blue', 3), 'transparent'),
+        on_click=lambda: DataStateDeprecated.select_row(index),
+        background_color=rx.cond(
+            DataStateDeprecated.selected_error_ids.contains(index), rx.color('blue', 3), 'transparent'
+        ),
    )


@@ -449,7 +460,7 @@ def render_table() -> rx.Component:
        ),
        rx.table.body(
            rx.foreach(
-                DataState.errors_to_show,
+                DataStateDeprecated.errors_to_show,
                show_employee,
            )
        ),
@@ -460,9 +471,12 @@ def render_table() -> rx.Component:
 def live_progress() -> rx.Component:
    """Show a live progress bar when loading data."""
    return rx.cond(
-        ~DataState.is_loading,
+        ~DataStateDeprecated.is_loading,
        rx.fragment(),
-        rx.progress(value=DataState.processed_employees, max=DataState.length_of_employees),
+        rx.progress(
+            value=DataStateDeprecated.processed_employees,
+            max=DataStateDeprecated.total_amount_of_employees,
+        ),
    )


@@ -471,7 +485,7 @@ def working_time_verification_page() -> rx.Component:
    """Working time verification page of the app."""
    return rx.vstack(
-        rx.hstack(render_input(), render_export_buttons(), render_filters(), justify='between', width='100%'),
+        rx.hstack(render_input(), render_export_buttons(), render_search(), justify='between', width='100%'),
        live_progress(),
        render_table(),
        width='100%',
diff --git a/factorialhr_analysis/routes.py b/factorialhr_analysis/routes.py
index 8a5b0ca..441eb52 100644
--- a/factorialhr_analysis/routes.py
+++ b/factorialhr_analysis/routes.py
@@ -3,4 +3,4 @@
 INDEX = '/'
 OAUTH_START_ROUTE = '/oauth/start'
 OAUTH_AUTHORIZE_ROUTE = '/oauth/authorize'
-WORKING_TIME_VERIFICATION_ROUTE = '/working-time-verification'
+VERIFICATION_ROUTE = '/verification'
diff --git a/factorialhr_analysis/states/__init__.py b/factorialhr_analysis/states/__init__.py
index bcf2591..262be16 100644
--- a/factorialhr_analysis/states/__init__.py
+++ b/factorialhr_analysis/states/__init__.py
@@ -1,3 +1,4 @@
+from factorialhr_analysis.states.data_state import DataState
 from factorialhr_analysis.states.oauth_state import OAuthSessionState

-__all__ = ['OAuthSessionState']
+__all__ = ['DataState', 'OAuthSessionState']
diff --git a/factorialhr_analysis/states/data_state.py b/factorialhr_analysis/states/data_state.py
new file mode 100644
index 0000000..f383ba6
--- /dev/null
+++ b/factorialhr_analysis/states/data_state.py
@@ -0,0 +1,76 @@
+import datetime
+
+import anyio.abc
+import anyio.from_thread
+import factorialhr
+import reflex as rx
+
+from factorialhr_analysis import constants, states
+
+
+class DataState(rx.State):
+    """State for managing data."""
+
+    _employees: dict[int, factorialhr.Employee] = {}  # noqa: RUF012
+    _teams: dict[int, 
factorialhr.Team] = {}  # noqa: RUF012
+    _shifts: dict[int, factorialhr.AttendanceShift] = {}  # noqa: RUF012
+    _credentials: factorialhr.Credentials | None = None
+
+    is_loading: rx.Field[bool] = rx.field(default=False)
+    last_updated: rx.Field[datetime.datetime | None] = rx.field(default=None)
+
+    @rx.var
+    def len_of_shifts(self) -> int:
+        """Get the number of shifts."""
+        return len(self._shifts)
+
+    async def _load_employees(self, api_client: factorialhr.ApiClient):
+        employees = await factorialhr.EmployeesEndpoint(api_client).all()
+        async with self:
+            self._employees = {emp.id: emp for emp in employees.data()}
+
+    async def _load_teams(self, api_client: factorialhr.ApiClient):
+        teams = await factorialhr.TeamsEndpoint(api_client).all()
+        async with self:
+            self._teams = {team.id: team for team in teams.data()}
+
+    async def _load_shifts(self, api_client: factorialhr.ApiClient):
+        # all shifts are fetched in a single page, which requires a high timeout
+        shifts = await factorialhr.ShiftsEndpoint(api_client).all(timeout=100)
+        async with self:
+            self._shifts = {shift.id: shift for shift in shifts.data()}
+
+    async def _load_credentials(self, api_client: factorialhr.ApiClient):
+        credentials = await factorialhr.CredentialsEndpoint(api_client).all()
+        async with self:
+            self._credentials = next(iter(credentials.data()), None)
+
+    @rx.event(background=True)
+    async def refresh_data(self):
+        """Refresh the data."""
+        async with self:
+            if self.is_loading:
+                return
+            self.is_loading = True
+        auth = (await self.get_state(states.OAuthSessionState)).get_auth()
+        try:
+            async with (
+                factorialhr.ApiClient(constants.ENVIRONMENT_URL, auth=auth) as client,
+                anyio.create_task_group() as tg,  # the task-group API lives on the top-level anyio module
+            ):
+                tg.start_soon(self._load_teams, client)
+                tg.start_soon(self._load_employees, client)
+                tg.start_soon(self._load_shifts, client)
+                tg.start_soon(self._load_credentials, client)
+        finally:
+            async with self:
+                self.is_loading = False
+        async with self:
+            self.last_updated = datetime.datetime.now(tz=datetime.UTC)
+
+    @rx.event
+    def clear(self):
+        self.last_updated = None
+        self._employees.clear()
+        self._teams.clear()
+        self._shifts.clear()
diff --git a/factorialhr_analysis/states/oauth_state.py b/factorialhr_analysis/states/oauth_state.py
index 24afcb0..cf32007 100644
--- a/factorialhr_analysis/states/oauth_state.py
+++ b/factorialhr_analysis/states/oauth_state.py
@@ -1,4 +1,3 @@
-import logging
 import time
 import typing

@@ -6,6 +5,7 @@
 import httpx
 import pydantic
 import reflex as rx
+
 from factorialhr_analysis import constants, routes


diff --git a/factorialhr_analysis/templates.py b/factorialhr_analysis/templates.py
index 263441e..4fbc3fd 100644
--- a/factorialhr_analysis/templates.py
+++ b/factorialhr_analysis/templates.py
@@ -1,13 +1,14 @@
 """Templates for the web application."""

 import functools
+from collections.abc import Callable

 import reflex as rx

 from factorialhr_analysis import components


-def template(page: rx.app.ComponentCallable) -> rx.app.ComponentCallable:
+def template(page: Callable[[], rx.Component]) -> Callable[[], rx.Component]:
    """Wrap a page in the main template."""

    @functools.wraps(page)
@@ -20,6 +21,8 @@ def page_template() -> rx.Component:
            padding_left='1em',
            padding_right='1em',
        ),
+        components.footer(),
+        width='100%',
    )

    return page_template
diff --git a/factorialhr_analysis/working_time_verification/verification.py b/factorialhr_analysis/working_time_verification/verification.py
index 0987973..9e5a51b 100644
--- 
a/factorialhr_analysis/working_time_verification/verification.py +++ b/factorialhr_analysis/working_time_verification/verification.py @@ -71,13 +71,15 @@ def check_breaks_and_reset( def get_error( - attendances: Iterable[factorialhr.AttendanceShift], tolerance: datetime.timedelta + attendances: Iterable[factorialhr.AttendanceShift], + tolerance: datetime.timedelta | None = None, ) -> Iterator[helper.Error]: """Verification function. Iterates over attendances and yields any errors found. Splits logic into smaller helper functions for clarity and maintainability. """ + tolerance = tolerance or datetime.timedelta() current_attendances: list[factorialhr.AttendanceShift] = [] for attendance in attendances: # Validate clock-in/clock-out times diff --git a/pyproject.toml b/pyproject.toml index c5af558..8df8aba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,8 @@ dependencies = [ "dotenv>=0.9.9", "factorialhr>=4.1.0", "httpx>=0.28.1", - "reflex==0.8.7", + "redis>=6.4.0", + "reflex==0.8.9", ] [dependency-groups] diff --git a/rxconfig.py b/rxconfig.py index 8b3e4ef..112327f 100644 --- a/rxconfig.py +++ b/rxconfig.py @@ -8,4 +8,6 @@ rx.plugins.SitemapPlugin(), rx.plugins.TailwindV4Plugin(), ], + telemetry_enabled=False, + env_file='.env', ) diff --git a/uv.lock b/uv.lock index 9738477..d860a3c 100644 --- a/uv.lock +++ b/uv.lock @@ -141,6 +141,7 @@ dependencies = [ { name = "dotenv" }, { name = "factorialhr" }, { name = "httpx" }, + { name = "redis" }, { name = "reflex" }, ] @@ -163,7 +164,8 @@ requires-dist = [ { name = "dotenv", specifier = ">=0.9.9" }, { name = "factorialhr", specifier = ">=4.1.0" }, { name = "httpx", specifier = ">=0.28.1" }, - { name = "reflex", specifier = "==0.8.7" }, + { name = "redis", specifier = ">=6.4.0" }, + { name = "reflex", specifier = "==0.8.9" }, ] [package.metadata.requires-dev] @@ -557,14 +559,13 @@ wheels = [ [[package]] name = "reflex" -version = "0.8.7" +version = "0.8.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alembic" }, { name = "click" }, { name = "granian", extra = ["reload"] }, { name = "httpx" }, - { name = "jinja2" }, { name = "packaging" }, { name = "platformdirs" }, { name = "psutil", marker = "sys_platform == 'win32'" }, @@ -579,9 +580,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e9/3a/d9b54b0f5021ec99f28af5753275fc279d43b43b30fa5f7d71ecc3d3f80c/reflex-0.8.7.tar.gz", hash = "sha256:42af70890ea817e520e3c7a5dd0d94a56d43e393ff777dbec4253bc0b6bcac1c", size = 587068, upload-time = "2025-08-18T19:37:21.705Z" } +sdist = { url = "https://files.pythonhosted.org/packages/60/61/5a9dc5a79f919e9a8b75e1d09f80f848469d6a723f2e8555dfc82e9ca500/reflex-0.8.9.tar.gz", hash = "sha256:c9c7c4d23770269e7e2ca04e19d106ffe6d0e5dacc5dc0b5f830958f5b79687e", size = 597102, upload-time = "2025-09-02T20:30:56.563Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/14/b25f38370d96017ed02e5548012d898166d7e8da7bc0ee63bee04da2faea/reflex-0.8.7-py3-none-any.whl", hash = "sha256:ac0e3b56ec11a5d97d2cdec1c8aa2aaee501597a02a12381d62a2918ce6783a1", size = 883153, upload-time = "2025-08-18T19:37:19.433Z" }, + { url = "https://files.pythonhosted.org/packages/18/2c/5f62538dfe482c0847585d71adcccc17d3ab0b2b79b47cb11b58214b8c9d/reflex-0.8.9-py3-none-any.whl", hash = "sha256:244b06078acf60d81515e89835eba9d25f981bd4bd4537fcbca18aac1b0c6135", size = 885247, upload-time = "2025-09-02T20:30:54.683Z" }, ] [[package]] From 
c08fe4466990debe7f9337b5d00a18e3af23c2a1 Mon Sep 17 00:00:00 2001
From: Leon
Date: Thu, 11 Sep 2025 10:12:02 +0200
Subject: [PATCH 03/24] minor improvements

---
 .gitignore                                 |   1 +
 cloud.yml                                  |   4 +
 factorialhr_analysis/components/navbar.py  |  11 +-
 factorialhr_analysis/pages/oauth_page.py   |   2 +
 factorialhr_analysis/states/data_state.py  |  11 +-
 factorialhr_analysis/states/oauth_state.py |   2 +-
 requirements.txt                           | 396 +++++++++++++++++++++
 rxconfig.py                                |   1 +
 8 files changed, 425 insertions(+), 3 deletions(-)
 create mode 100644 cloud.yml
 create mode 100644 requirements.txt

diff --git a/.gitignore b/.gitignore
index b008d09..1d012d9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -107,6 +107,7 @@ celerybeat.pid

 # Environments
 .env
+.env.production
 .venv
 env/
 venv/
diff --git a/cloud.yml b/cloud.yml
new file mode 100644
index 0000000..2ba0f67
--- /dev/null
+++ b/cloud.yml
@@ -0,0 +1,4 @@
+name: factorialhr_analysis
+regions:
+  fra: 1
+envfile: .env.production
\ No newline at end of file
diff --git a/factorialhr_analysis/components/navbar.py b/factorialhr_analysis/components/navbar.py
index 16e0703..6c0e337 100644
--- a/factorialhr_analysis/components/navbar.py
+++ b/factorialhr_analysis/components/navbar.py
@@ -6,6 +6,15 @@
 from factorialhr_analysis import routes, states


+class NavbarState(rx.State):
+    """State for the navigation bar."""
+
+    @rx.event
+    async def logout(self):
+        """Log out the user."""
+        yield [states.OAuthSessionState.delete_session, states.DataState.clear, rx.redirect(routes.INDEX)]
+
+
 def dark_mode_toggle() -> rx.Component:
    """Toggle for dark/light mode."""
    return rx.segmented_control.root(
@@ -65,7 +74,7 @@ def icon_menu():
                    rx.link(
                        rx.text('Log out'),
                        href=routes.INDEX,
-                        on_click=states.OAuthSessionState.delete_session,
+                        on_click=NavbarState.logout,
                    )
                ),
            ),
diff --git a/factorialhr_analysis/pages/oauth_page.py b/factorialhr_analysis/pages/oauth_page.py
index d0f3bc2..4f30f03 100644
--- a/factorialhr_analysis/pages/oauth_page.py
+++ b/factorialhr_analysis/pages/oauth_page.py
@@ -48,6 +48,8 @@ async def process_oauth_response(self):
            await oauth_session.create_session(code, grant_type='authorization_code')
        except httpx.HTTPStatusError as e:
            self.error = str(e)
+        else:
+            yield states.DataState.refresh_data
        finally:
            self.expected_state = ''

diff --git a/factorialhr_analysis/states/data_state.py b/factorialhr_analysis/states/data_state.py
index f383ba6..21085e3 100644
--- a/factorialhr_analysis/states/data_state.py
+++ b/factorialhr_analysis/states/data_state.py
@@ -45,9 +45,18 @@ async def _load_credentials(self, api_client: factorialhr.ApiClient):
        async with self:
            self._credentials = next(iter(credentials.data()), None)

-    @rx.event(background=True)
+    @rx.event
    async def refresh_data(self):
        """Refresh the data."""
+        auth_state = await self.get_state(states.OAuthSessionState)
+        if await auth_state.refresh_session():
+            return DataState.poll_data
+        self.clear()
+        return states.OAuthSessionState.redir
+
+    @rx.event(background=True)
+    async def poll_data(self):
+        """Poll the data."""
        async with self:
            if self.is_loading:
                return
diff --git a/factorialhr_analysis/states/oauth_state.py b/factorialhr_analysis/states/oauth_state.py
index cf32007..3502b34 100644
--- a/factorialhr_analysis/states/oauth_state.py
+++ b/factorialhr_analysis/states/oauth_state.py
@@ -130,7 +130,7 @@ def get_auth(self) -> factorialhr.AccessTokenAuth | factorialhr.ApiKeyAuth:
        api_session = self.api_session()
        if api_session is None:
            msg = 'api_session_cookie must be valid'
-            raise RuntimeError(msg)
+            raise ValueError(msg)
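        # Expected call pattern (a sketch mirroring DataState.refresh_data in
        # states/data_state.py; a missing or invalid session cookie surfaces as
        # the ValueError raised above):
        #   auth = (await self.get_state(states.OAuthSessionState)).get_auth()
        #   async with factorialhr.ApiClient(constants.ENVIRONMENT_URL, auth=auth) as client:
        #       ...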
return factorialhr.AccessTokenAuth( access_token=api_session.access_token, token_type=api_session.token_type, diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..5d0f76f --- /dev/null +++ b/requirements.txt @@ -0,0 +1,396 @@ +# This file was autogenerated by uv via the following command: +# uv export --format requirements-txt +-e . +alembic==1.16.4 \ + --hash=sha256:b05e51e8e82efc1abd14ba2af6392897e145930c3e0a2faf2b0da2f7f7fd660d \ + --hash=sha256:efab6ada0dd0fae2c92060800e0bf5c1dc26af15a10e02fb4babff164b4725e2 + # via reflex +annotated-types==0.7.0 \ + --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \ + --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89 + # via pydantic +anyio==4.10.0 \ + --hash=sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6 \ + --hash=sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1 + # via + # factorialhr + # fwtv + # httpx + # starlette + # watchfiles +bidict==0.23.1 \ + --hash=sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71 \ + --hash=sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5 + # via python-socketio +certifi==2025.8.3 \ + --hash=sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407 \ + --hash=sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5 + # via + # httpcore + # httpx +click==8.2.1 \ + --hash=sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202 \ + --hash=sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b + # via + # granian + # reflex + # reflex-hosting-cli +colorama==0.4.6 ; sys_platform == 'win32' \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via + # click + # pytest +coverage==7.10.5 \ + --hash=sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7 \ + --hash=sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae \ + --hash=sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a \ + --hash=sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4 \ + --hash=sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760 \ + --hash=sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5 \ + --hash=sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2 \ + --hash=sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44 \ + --hash=sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee \ + --hash=sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235 \ + --hash=sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c \ + --hash=sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db \ + --hash=sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14 \ + --hash=sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f \ + --hash=sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc \ + --hash=sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e \ + --hash=sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff \ + --hash=sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869 \ + --hash=sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b \ + 
--hash=sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5 \ + --hash=sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84 \ + --hash=sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b \ + --hash=sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c \ + --hash=sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6 + # via pytest-cov +dotenv==0.9.9 \ + --hash=sha256:29cf74a087b31dafdb5a446b6d7e11cbce8ed2741540e2339c69fbef92c94ce9 + # via fwtv +factorialhr==4.1.0 \ + --hash=sha256:594f52d0e134bec2e02fce9282b236a2a8329d163576333f85462177e7aee6cc \ + --hash=sha256:b0eee684c32982fd297de2d7fcc8c70a34e856121ec5aa19cd3da8837907563c + # via fwtv +granian==2.5.0 \ + --hash=sha256:3152037d799ea97e5736de054a48bf75368fb79b7cfee7e6aa46de1076a43882 \ + --hash=sha256:31705782cd616b9b70536c1b61b7f15815ebc4dcccdb72f58aa806ba7ac5dfa1 \ + --hash=sha256:36493c4f2b672d027eb11b05ca6660f9fd4944452841d213cb0cb64da869539b \ + --hash=sha256:50d4dc74ab763c1bf396cf85d93a8202bf1bfb74150b03f9fd62b600cd0c777c \ + --hash=sha256:74601bda3aedb249a3d5059d48108acfa61d6f71686162bda0bedc013a443efb \ + --hash=sha256:76dc084d1c528683a88c2d1a00786c9bc013b695b1776ad8a3c71419c45e1df0 \ + --hash=sha256:879fdeb71fe279175a25d709d95dd2db01eb67cd12d300e51e3dc704ca5e52fd \ + --hash=sha256:8f9918bee3c21eb1410f4323440d76eaa0c2d2e6ca4fa3e3a20d07cc54b788f6 \ + --hash=sha256:944ea3bd400a7ccc8129835eda65bd6a37f8fb77828f4e6ded2f06827d6ec25f \ + --hash=sha256:9a53151c2d31dbcf1acbe6af89ce0282387614b6401650d511ca4260ba0e03c1 \ + --hash=sha256:9f6d080e45735dd93e4c60a79e42ee9ed37124a9580a08292d83b0961c705e39 \ + --hash=sha256:af272218076663280fdc293b7da3adb716f23d54211cefad92fcf7e01b3eed19 \ + --hash=sha256:afafac4908d5931e4b2c2a09612e063d7ccd05e531f16b7f11e3bccc4ca8972c \ + --hash=sha256:bbc4ebc727202ad4b3073ca8148c2af49904710d6fce84872191b2dd5cd36916 \ + --hash=sha256:bed0d047c9c0c6c6a5a85ee5b3c7e2683fc63e03ac032eaf3d7654fa96bde102 \ + --hash=sha256:c28a34951c1ed8eea97948882bdbc374ce111be5a59293693613d25043ba1313 \ + --hash=sha256:f371dd9eedae26158901fee3eb934e8fa61491cc78d234470ce364b989c78a1f \ + --hash=sha256:f7bf7ed30bcda9bbc9962f187081c5dfa6aa07e06c3a59486bc573b5def35914 \ + --hash=sha256:fb157c3d66301ffad4113da4c51aed4d56006b9ebe9d0892c682a634b5fff773 + # via reflex +greenlet==3.2.4 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' \ + --hash=sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b \ + --hash=sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d \ + --hash=sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31 \ + --hash=sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671 \ + --hash=sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b \ + --hash=sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc \ + --hash=sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a \ + --hash=sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945 \ + --hash=sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae \ + --hash=sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504 + # via sqlalchemy +h11==0.16.0 \ + --hash=sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1 \ + 
--hash=sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86 + # via + # httpcore + # wsproto +httpcore==1.0.9 \ + --hash=sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55 \ + --hash=sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8 + # via httpx +httpx==0.28.1 \ + --hash=sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc \ + --hash=sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad + # via + # factorialhr + # fwtv + # reflex + # reflex-hosting-cli +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 + # via + # anyio + # httpx +iniconfig==2.1.0 \ + --hash=sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7 \ + --hash=sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760 + # via pytest +jinja2==3.1.6 \ + --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \ + --hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67 + # via pytest-html +mako==1.3.10 \ + --hash=sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28 \ + --hash=sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59 + # via alembic +markdown-it-py==4.0.0 \ + --hash=sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147 \ + --hash=sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3 + # via rich +markupsafe==3.0.2 \ + --hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \ + --hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \ + --hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \ + --hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \ + --hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \ + --hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \ + --hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \ + --hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \ + --hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \ + --hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \ + --hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \ + --hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \ + --hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \ + --hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \ + --hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \ + --hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \ + --hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \ + --hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \ + --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 \ + --hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \ + --hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 + # via + # jinja2 + # mako +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py 
+packaging==25.0 \ + --hash=sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484 \ + --hash=sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f + # via + # pytest + # reflex + # reflex-hosting-cli +platformdirs==4.3.8 \ + --hash=sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc \ + --hash=sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4 + # via + # reflex + # reflex-hosting-cli +pluggy==1.6.0 \ + --hash=sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3 \ + --hash=sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746 + # via + # pytest + # pytest-cov +psutil==7.0.0 ; sys_platform == 'win32' \ + --hash=sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553 \ + --hash=sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456 \ + --hash=sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99 + # via reflex +pydantic==2.11.7 \ + --hash=sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db \ + --hash=sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b + # via + # factorialhr + # reflex + # sqlmodel +pydantic-core==2.33.2 \ + --hash=sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56 \ + --hash=sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef \ + --hash=sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a \ + --hash=sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f \ + --hash=sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916 \ + --hash=sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a \ + --hash=sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849 \ + --hash=sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e \ + --hash=sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac \ + --hash=sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162 \ + --hash=sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc \ + --hash=sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5 \ + --hash=sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d \ + --hash=sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9 \ + --hash=sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9 \ + --hash=sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5 \ + --hash=sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9 \ + --hash=sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6 + # via pydantic +pygments==2.19.2 \ + --hash=sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887 \ + --hash=sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b + # via + # pytest + # rich +pytest==8.4.1 \ + --hash=sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7 \ + --hash=sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c + # via + # pytest-cov + # pytest-html + # pytest-metadata +pytest-cov==6.2.1 \ + --hash=sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2 \ + --hash=sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5 +pytest-html==4.1.1 \ + --hash=sha256:70a01e8ae5800f4a074b56a4cb1025c8f4f9b038bba5fe31e3c98eb996686f07 \ + 
--hash=sha256:c8152cea03bd4e9bee6d525573b67bbc6622967b72b9628dda0ea3e2a0b5dd71 +pytest-metadata==3.1.1 \ + --hash=sha256:c8e0844db684ee1c798cfa38908d20d67d0463ecb6137c72e91f418558dd5f4b \ + --hash=sha256:d2a29b0355fbc03f168aa96d41ff88b1a3b44a3b02acbe491801c98a048017c8 + # via pytest-html +python-dotenv==1.1.1 \ + --hash=sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc \ + --hash=sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab + # via dotenv +python-engineio==4.12.2 \ + --hash=sha256:8218ab66950e179dfec4b4bbb30aecf3f5d86f5e58e6fc1aa7fde2c698b2804f \ + --hash=sha256:e7e712ffe1be1f6a05ee5f951e72d434854a32fcfc7f6e4d9d3cae24ec70defa + # via python-socketio +python-multipart==0.0.20 \ + --hash=sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104 \ + --hash=sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13 + # via reflex +python-socketio==5.13.0 \ + --hash=sha256:51f68d6499f2df8524668c24bcec13ba1414117cfb3a90115c559b601ab10caf \ + --hash=sha256:ac4e19a0302ae812e23b712ec8b6427ca0521f7c582d6abb096e36e24a263029 + # via reflex +redis==6.4.0 \ + --hash=sha256:b01bc7282b8444e28ec36b261df5375183bb47a07eb9c603f284e89cbc5ef010 \ + --hash=sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f + # via + # fwtv + # reflex +reflex==0.8.9 \ + --hash=sha256:244b06078acf60d81515e89835eba9d25f981bd4bd4537fcbca18aac1b0c6135 \ + --hash=sha256:c9c7c4d23770269e7e2ca04e19d106ffe6d0e5dacc5dc0b5f830958f5b79687e + # via fwtv +reflex-hosting-cli==0.1.55 \ + --hash=sha256:84e78715a1f112996a0a1f8c5503958931a7fc6f0f61b7332645b8fde96c3b17 \ + --hash=sha256:f5d5b0c26cefdc7ba3356954b7be6bfa8ec50267c13a2247f6cf18a140e95918 + # via reflex +rich==14.1.0 \ + --hash=sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f \ + --hash=sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8 + # via + # reflex + # reflex-hosting-cli +ruff==0.12.10 \ + --hash=sha256:059e863ea3a9ade41407ad71c1de2badfbe01539117f38f763ba42a1206f7559 \ + --hash=sha256:141ce3d88803c625257b8a6debf4a0473eb6eed9643a6189b68838b43e78165a \ + --hash=sha256:189ab65149d11ea69a2d775343adf5f49bb2426fc4780f65ee33b423ad2e47f9 \ + --hash=sha256:1bef6161e297c68908b7218fa6e0e93e99a286e5ed9653d4be71e687dff101cf \ + --hash=sha256:1f68433c4fbc63efbfa3ba5db31727db229fa4e61000f452c540474b03de52a9 \ + --hash=sha256:2c6f4064c69d2542029b2a61d39920c85240c39837599d7f2e32e80d36401d6e \ + --hash=sha256:37b4a64f4062a50c75019c61c7017ff598cb444984b638511f48539d3a1c98db \ + --hash=sha256:4f1345fbf8fb0531cd722285b5f15af49b2932742fc96b633e883da8d841896b \ + --hash=sha256:7837eca8787f076f67aba2ca559cefd9c5cbc3a9852fd66186f4201b87c1563e \ + --hash=sha256:7d1a4e0bdfafcd2e3e235ecf50bf0176f74dd37902f241588ae1f6c827a36c56 \ + --hash=sha256:822d9677b560f1fdeab69b89d1f444bf5459da4aa04e06e766cf0121771ab844 \ + --hash=sha256:8b593cb0fb55cc8692dac7b06deb29afda78c721c7ccfed22db941201b7b8f7b \ + --hash=sha256:9de785e95dc2f09846c5e6e1d3a3d32ecd0b283a979898ad427a9be7be22b266 \ + --hash=sha256:ae479e1a18b439c59138f066ae79cc0f3ee250712a873d00dbafadaad9481e5b \ + --hash=sha256:cc138cc06ed9d4bfa9d667a65af7172b47840e1a98b02ce7011c391e54635ffc \ + --hash=sha256:d59e58586829f8e4a9920788f6efba97a13d1fa320b047814e8afede381c6839 \ + --hash=sha256:e67d96827854f50b9e3e8327b031647e7bcc090dbe7bb11101a81a3a2cbf1cc9 \ + --hash=sha256:ebb7333a45d56efc7c110a46a69a1b32365d5c5161e7244aaf3aa20ce62399c1 \ + --hash=sha256:f3fc21178cd44c98142ae7590f42ddcb587b8e09a3b849cbc84edb62ee95de60 
+simple-websocket==1.1.0 \ + --hash=sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c \ + --hash=sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4 + # via python-engineio +sniffio==1.3.1 \ + --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ + --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc + # via anyio +sqlalchemy==2.0.43 \ + --hash=sha256:14111d22c29efad445cd5021a70a8b42f7d9152d8ba7f73304c4d82460946aaa \ + --hash=sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc \ + --hash=sha256:21b27b56eb2f82653168cefe6cb8e970cdaf4f3a6cb2c5e3c3c1cf3158968ff9 \ + --hash=sha256:5d79f9fdc9584ec83d1b3c75e9f4595c49017f5594fee1a2217117647225d738 \ + --hash=sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417 \ + --hash=sha256:7f1ac7828857fcedb0361b48b9ac4821469f7694089d15550bbcf9ab22564a1d \ + --hash=sha256:971ba928fcde01869361f504fcff3b7143b47d30de188b11c6357c0505824197 \ + --hash=sha256:9c5a9da957c56e43d72126a3f5845603da00e0293720b03bde0aacffcf2dc04f \ + --hash=sha256:9df7126fd9db49e3a5a3999442cc67e9ee8971f3cb9644250107d7296cb2a164 \ + --hash=sha256:e7c08f57f75a2bb62d7ee80a89686a5e5669f199235c6d1dac75cd59374091c3 + # via + # alembic + # sqlmodel +sqlmodel==0.0.24 \ + --hash=sha256:6778852f09370908985b667d6a3ab92910d0d5ec88adcaf23dbc242715ff7193 \ + --hash=sha256:cc5c7613c1a5533c9c7867e1aab2fd489a76c9e8a061984da11b4e613c182423 + # via reflex +starlette==0.47.2 \ + --hash=sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8 \ + --hash=sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b + # via reflex +typing-extensions==4.14.1 \ + --hash=sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36 \ + --hash=sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76 + # via + # alembic + # pydantic + # pydantic-core + # reflex + # sqlalchemy + # typing-inspection +typing-inspection==0.4.1 \ + --hash=sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51 \ + --hash=sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28 + # via pydantic +watchfiles==1.1.0 \ + --hash=sha256:12b0a02a91762c08f7264e2e79542f76870c3040bbc847fb67410ab81474932a \ + --hash=sha256:17ab167cca6339c2b830b744eaf10803d2a5b6683be4d79d8475d88b4a8a4be1 \ + --hash=sha256:20ecc8abbd957046f1fe9562757903f5eaf57c3bce70929fda6c7711bb58074a \ + --hash=sha256:239736577e848678e13b201bba14e89718f5c2133dfd6b1f7846fa1b58a8532b \ + --hash=sha256:29e7bc2eee15cbb339c68445959108803dc14ee0c7b4eea556400131a8de462b \ + --hash=sha256:328dbc9bff7205c215a7807da7c18dce37da7da718e798356212d22696404339 \ + --hash=sha256:32d6d4e583593cb8576e129879ea0991660b935177c0f93c6681359b3654bfa9 \ + --hash=sha256:5007f860c7f1f8df471e4e04aaa8c43673429047d63205d1630880f7637bca30 \ + --hash=sha256:5366164391873ed76bfdf618818c82084c9db7fac82b64a20c44d335eec9ced5 \ + --hash=sha256:60022527e71d1d1fda67a33150ee42869042bce3d0fcc9cc49be009a9cded3fb \ + --hash=sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575 \ + --hash=sha256:80f811146831c8c86ab17b640801c25dc0a88c630e855e2bef3568f30434d52b \ + --hash=sha256:8c5701dc474b041e2934a26d31d39f90fac8a3dee2322b39f7729867f932b1d4 \ + --hash=sha256:9c733cda03b6d636b4219625a4acb5c6ffb10803338e437fb614fef9516825ef \ + --hash=sha256:a8f6f72974a19efead54195bc9bed4d850fc047bb7aa971268fd9a8387c89011 \ + --hash=sha256:adb4167043d3a78280d5d05ce0ba22055c266cf8655ce942f2fb881262ff3cdf \ + 
--hash=sha256:b067915e3c3936966a8607f6fe5487df0c9c4afb85226613b520890049deea20 \ + --hash=sha256:cc08ef8b90d78bfac66f0def80240b0197008e4852c9f285907377b2947ffdcb \ + --hash=sha256:d181ef50923c29cf0450c3cd47e2f0557b62218c50b2ab8ce2ecaa02bd97e670 \ + --hash=sha256:d9481174d3ed982e269c090f780122fb59cee6c3796f74efe74e70f7780ed94c \ + --hash=sha256:eff4b8d89f444f7e49136dc695599a591ff769300734446c0a86cba2eb2f9895 \ + --hash=sha256:f21af781a4a6fbad54f03c598ab620e3a77032c5878f3d780448421a6e1818c7 \ + --hash=sha256:f2f0498b7d2a3c072766dba3274fe22a183dbea1f99d188f1c6c72209a1063dc \ + --hash=sha256:f7208ab6e009c627b7557ce55c465c98967e8caa8b11833531fdf95799372633 + # via granian +wrapt==1.17.3 \ + --hash=sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d \ + --hash=sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7 \ + --hash=sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb \ + --hash=sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa \ + --hash=sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8 \ + --hash=sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77 \ + --hash=sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277 \ + --hash=sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22 \ + --hash=sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16 \ + --hash=sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0 \ + --hash=sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050 \ + --hash=sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0 + # via reflex +wsproto==1.2.0 \ + --hash=sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065 \ + --hash=sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736 + # via simple-websocket diff --git a/rxconfig.py b/rxconfig.py index 112327f..ff5d9d4 100644 --- a/rxconfig.py +++ b/rxconfig.py @@ -10,4 +10,5 @@ ], telemetry_enabled=False, env_file='.env', + show_built_with_reflex=False, ) From abc55c938e56cb18704688c170b255216f793dd1 Mon Sep 17 00:00:00 2001 From: Leon Date: Thu, 11 Sep 2025 16:31:42 +0200 Subject: [PATCH 04/24] logging + one port docker --- .dockerignore | 9 +-- Caddy.Dockerfile | 4 - Caddyfile | 4 +- Dockerfile | 78 +++++++++++--------- compose.prod.yaml | 25 ------- compose.yaml | 41 ---------- factorialhr_analysis/components/navbar.py | 1 + factorialhr_analysis/constants.py | 1 + factorialhr_analysis/factorialhr_analysis.py | 3 + factorialhr_analysis/pages/oauth_page.py | 6 +- factorialhr_analysis/states/data_state.py | 4 + factorialhr_analysis/states/oauth_state.py | 3 +- 12 files changed, 64 insertions(+), 115 deletions(-) delete mode 100644 Caddy.Dockerfile delete mode 100644 compose.prod.yaml delete mode 100644 compose.yaml diff --git a/.dockerignore b/.dockerignore index e86a29e..2352827 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,8 +1,3 @@ .web -.git -__pycache__/* -Dockerfile -Caddy.Dockerfile -compose.yaml -compose.*.yaml -uploaded_files \ No newline at end of file +!.web/bun.lockb +!.web/package.json \ No newline at end of file diff --git a/Caddy.Dockerfile b/Caddy.Dockerfile deleted file mode 100644 index 76ae320..0000000 --- a/Caddy.Dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -FROM library/caddy - -COPY --from=local/reflex-app /app/.web/build/client /srv -ADD Caddyfile /etc/caddy/Caddyfile \ No newline at end of file diff --git a/Caddyfile b/Caddyfile index 
a77063d..13d94ce 100644
--- a/Caddyfile
+++ b/Caddyfile
@@ -1,10 +1,10 @@
-{$DOMAIN}
+:{$PORT}

 encode gzip

 @backend_routes path /_event/* /ping /_upload /_upload/*
 handle @backend_routes {
-    reverse_proxy app:8000
+    reverse_proxy localhost:8000
 }

 root * /srv
diff --git a/Dockerfile b/Dockerfile
index 803f070..0c6d579 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,52 +1,62 @@
-# This docker file is intended to be used with docker compose to deploy a production
-# instance of a Reflex app.
+# This Dockerfile is used to deploy a single-container Reflex app instance
+# to services like Render, Railway, Heroku, GCP, and others.

-# Stage 1: init
-FROM python:3.13 as init
+# If the service expects a different port, provide it here (e.g. Render expects port 10000)
+ARG PORT=8080
+# Only set for local/direct access. When TLS is used, the API_URL is assumed to be the same as the frontend.
+ARG API_URL

-ARG uv=/root/.local/bin/uv
+# It uses a reverse proxy to serve the frontend statically and proxy to the backend
+# from a single exposed port, expecting TLS termination to be handled at the
+# edge by the given platform.
+FROM python:3.13 as builder

-# Install `uv` for faster package bootstrapping
-ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh
-RUN /install.sh && rm /install.sh
+RUN mkdir -p /app/.web
+RUN python -m venv /app/.venv
+ENV PATH="/app/.venv/bin:$PATH"

-# Copy local context to `/app` inside container (see .dockerignore)
 WORKDIR /app
-COPY . .
-RUN mkdir -p /app/data /app/uploaded_files

-# Create virtualenv which will be copied into final container
-ENV VIRTUAL_ENV=/app/.venv
-ENV PATH="$VIRTUAL_ENV/bin:$PATH"
-RUN $uv venv
+# Install Python app requirements and reflex in the container
+COPY requirements.txt .
+RUN pip install -r requirements.txt
+
+# Install reflex helper utilities like bun/node
+COPY rxconfig.py ./
+RUN reflex init

-# Install app requirements and reflex inside virtualenv
-RUN $uv sync --frozen
+# Install pre-cached frontend dependencies (if they exist)
+COPY *.web/bun.lockb *.web/package.json .web/
+RUN if [ -f .web/bun.lockb ]; then cd .web && ~/.local/share/reflex/bun/bin/bun install --frozen-lockfile; fi

-# Deploy templates and prepare app
-RUN $uv run python -m reflex init
+# Copy local context to `/app` inside container (see .dockerignore)
+COPY . .

-# Export static copy of frontend to /app/.web/build/client
-RUN $uv run python -m reflex export --frontend-only --no-zip
+ARG PORT API_URL
+# Download other npm dependencies and compile frontend
+RUN REFLEX_API_URL=${API_URL:-http://localhost:$PORT} reflex export --loglevel debug --frontend-only --no-zip && mv .web/build/client/* /srv/ && rm -rf .web

-# Copy static files out of /app to save space in backend image
-RUN mv .web/build/client /tmp/client
-RUN rm -rf .web && mkdir -p .web/build
-RUN mv /tmp/client .web/build/client

-# Stage 2: copy artifacts into slim image
+
+# Final image with only necessary files
 FROM python:3.13-slim
+
+# Install Caddy and redis server inside image
+RUN apt-get update -y && apt-get install -y caddy redis-server && rm -rf /var/lib/apt/lists/*
+
+ARG PORT API_URL
+ENV PATH="/app/.venv/bin:$PATH" PORT=$PORT REFLEX_API_URL=${API_URL:-http://localhost:$PORT} REFLEX_REDIS_URL=redis://localhost PYTHONUNBUFFERED=1
+
 WORKDIR /app
-RUN adduser --disabled-password --home /app reflex
-COPY --chown=reflex --from=init /app /app

-# Install libpq-dev for psycopg (skip if not using postgres). 
-RUN apt-get update -y && apt-get install -y libpq-dev && rm -rf /var/lib/apt/lists/* -USER reflex -ENV PATH="/app/.venv/bin:$PATH" PYTHONUNBUFFERED=1 +COPY --from=builder /app /app +COPY --from=builder /srv /srv # Needed until Reflex properly passes SIGTERM on backend. STOPSIGNAL SIGKILL -# Always apply migrations before starting the backend. +EXPOSE $PORT + +# Apply migrations before starting the backend. CMD [ -d alembic ] && reflex db migrate; \ - exec python -m reflex run --env prod --backend-only + caddy start && \ + redis-server --daemonize yes && \ + exec reflex run --env prod --backend-only \ No newline at end of file diff --git a/compose.prod.yaml b/compose.prod.yaml deleted file mode 100644 index d728727..0000000 --- a/compose.prod.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# Use this override file to run the app in prod mode with postgres and redis -# docker compose -f compose.yaml -f compose.prod.yaml up -d -services: - db: - image: postgres - restart: always - environment: - POSTGRES_PASSWORD: secret - volumes: - - postgres-data:/var/lib/postgresql/data - - redis: - image: redis - restart: always - - app: - environment: - REFLEX_DB_URL: postgresql+psycopg://postgres:secret@db/postgres - REFLEX_REDIS_URL: redis://redis:6379 - depends_on: - - db - - redis - -volumes: - postgres-data: \ No newline at end of file diff --git a/compose.yaml b/compose.yaml deleted file mode 100644 index 8974f43..0000000 --- a/compose.yaml +++ /dev/null @@ -1,41 +0,0 @@ -# Base compose file production deployment of reflex app with Caddy webserver -# providing TLS termination and reverse proxying. -# -# See `compose.prod.yaml` for more robust and performant deployment option. -# -# During build and run, set environment DOMAIN pointing -# to publicly accessible domain where app will be hosted -services: - app: - image: local/reflex-app - environment: - REFLEX_DB_URL: sqlite:///data/reflex.db - build: - context: . - volumes: - - db-data:/app/data - - upload-data:/app/uploaded_files - restart: always - - webserver: - environment: - DOMAIN: ${DOMAIN:-localhost} - ports: - - 443:443 - - 80:80 # For acme-challenge via HTTP. - build: - context: . 
-      dockerfile: Caddy.Dockerfile
-    volumes:
-      - caddy-data:/root/.caddy
-    restart: always
-    depends_on:
-      - app
-
-volumes:
-  # SQLite data
-  db-data:
-  # Uploaded files
-  upload-data:
-  # TLS keys and certificates
-  caddy-data:
\ No newline at end of file
diff --git a/factorialhr_analysis/components/navbar.py b/factorialhr_analysis/components/navbar.py
index 6c0e337..7142a43 100644
--- a/factorialhr_analysis/components/navbar.py
+++ b/factorialhr_analysis/components/navbar.py
@@ -46,6 +46,7 @@ def refresh_data():
                rx.icon('refresh-ccw'),
                on_click=states.DataState.refresh_data,
                loading=states.DataState.is_loading,
+                aria_label='Refresh data',
            ),
            rx.text(
                'Last data update: ',
diff --git a/factorialhr_analysis/constants.py b/factorialhr_analysis/constants.py
index 8c481ab..d685865 100644
--- a/factorialhr_analysis/constants.py
+++ b/factorialhr_analysis/constants.py
@@ -8,3 +8,4 @@
 ENVIRONMENT_URL: str = os.environ.get('FACTORIALHR_ENVIRONMENT_URL', '')
 API_KEY: str = os.environ.get('FACTORIALHR_API_KEY', '')
 SCOPE = 'read'
+API_TIMEOUT = 60
diff --git a/factorialhr_analysis/factorialhr_analysis.py b/factorialhr_analysis/factorialhr_analysis.py
index 9ea8a5c..39238ee 100644
--- a/factorialhr_analysis/factorialhr_analysis.py
+++ b/factorialhr_analysis/factorialhr_analysis.py
@@ -3,6 +3,9 @@
 import reflex as rx

 from factorialhr_analysis import pages, routes
+import logging
+
+logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)


 # TODO: check if env variables in constants have been set
diff --git a/factorialhr_analysis/pages/oauth_page.py b/factorialhr_analysis/pages/oauth_page.py
index 4f30f03..cbf2ba9 100644
--- a/factorialhr_analysis/pages/oauth_page.py
+++ b/factorialhr_analysis/pages/oauth_page.py
@@ -1,4 +1,5 @@
 import functools
+import logging
 import secrets
 import urllib.parse
 from collections.abc import Callable
@@ -46,11 +47,14 @@ async def process_oauth_response(self):
        oauth_session = await self.get_state(states.OAuthSessionState)
+        self.error = ''  # reset any stale error before processing; on failure the message below must survive
        try:
            await oauth_session.create_session(code, grant_type='authorization_code')
-        except httpx.HTTPStatusError as e:
+        except (httpx.RequestError, httpx.HTTPStatusError) as e:
+            logging.getLogger(__name__).exception('error creating oauth session')
            self.error = str(e)
        else:
+            logging.getLogger(__name__).info('created oauth session')
            yield states.DataState.refresh_data
        finally:
            self.expected_state = ''
diff --git a/factorialhr_analysis/states/data_state.py b/factorialhr_analysis/states/data_state.py
index 21085e3..09eb3e7 100644
--- a/factorialhr_analysis/states/data_state.py
+++ b/factorialhr_analysis/states/data_state.py
@@ -1,4 +1,5 @@
 import datetime
+import logging

 import anyio.abc
 import anyio.from_thread
 import factorialhr
@@ -71,11 +72,14 @@ async def poll_data(self):
                tg.start_soon(self._load_employees, client)
                tg.start_soon(self._load_shifts, client)
                tg.start_soon(self._load_credentials, client)
+        except Exception:
+            logging.getLogger(__name__).exception('error loading data')
        finally:
            async with self:
                self.is_loading = False
        async with self:
            self.last_updated = datetime.datetime.now(tz=datetime.UTC)
+        logging.getLogger(__name__).info('data loaded')

    @rx.event
    def clear(self):
diff --git a/factorialhr_analysis/states/oauth_state.py b/factorialhr_analysis/states/oauth_state.py
index 3502b34..af36ca7 100644
--- a/factorialhr_analysis/states/oauth_state.py
+++ b/factorialhr_analysis/states/oauth_state.py
@@ -40,6 +40,7 @@ class OAuthSessionState(rx.State):
    api_session_cookie: str = rx.Cookie( 
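        # Cookie holding the serialized OAuth session; same_site='strict' keeps it
        # first-party, and the secure flag added below restricts it to HTTPS.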
name='api_session',
        same_site='strict',
+        secure=True,
    )

    _redirect_to: str = ''

@@ -94,7 +95,7 @@ async def refresh_session(self) -> bool:
        if api_session.is_access_token_expired():
            try:
                await self.create_session(token=api_session.refresh_token, grant_type='refresh_token')
-            except httpx.HTTPStatusError:
+            except (httpx.RequestError, httpx.HTTPError):
                return False
        return True

From 924530e52c000128f1befbedc49b0af821dd814e Mon Sep 17 00:00:00 2001
From: Leon
Date: Thu, 11 Sep 2025 16:43:25 +0200
Subject: [PATCH 05/24] docker one-port + requirements without hashes

---
 Dockerfile                                 |  49 +--
 factorialhr_analysis/states/oauth_state.py |   1 +
 requirements.txt                           | 402 +--------------------
 3 files changed, 21 insertions(+), 431 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 0c6d579..5ed77a8 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,54 +1,33 @@
 # This Dockerfile is used to deploy a single-container Reflex app instance
 # to services like Render, Railway, Heroku, GCP, and others.
+# It uses a reverse proxy to serve the frontend statically and proxy to the backend
+# from a single exposed port, expecting TLS termination to be handled at the
+# edge by the given platform.
+FROM python:3.13
+
 # If the service expects a different port, provide it here (e.g. Render expects port 10000)
 ARG PORT=8080
 # Only set for local/direct access. When TLS is used, the API_URL is assumed to be the same as the frontend.
 ARG API_URL
+ENV PORT=$PORT REFLEX_API_URL=${API_URL:-http://localhost:$PORT} REFLEX_REDIS_URL=redis://localhost PYTHONUNBUFFERED=1

-# It uses a reverse proxy to serve the frontend statically and proxy to the backend
-# from a single exposed port, expecting TLS termination to be handled at the
-# edge by the given platform.
-FROM python:3.13 as builder
-
-RUN mkdir -p /app/.web
-RUN python -m venv /app/.venv
-ENV PATH="/app/.venv/bin:$PATH"
+# Install Caddy and redis server inside image
+RUN apt-get update -y && apt-get install -y caddy redis-server && rm -rf /var/lib/apt/lists/*

 WORKDIR /app

-# Install Python app requirements and reflex in the container
-COPY requirements.txt .
-RUN pip install -r requirements.txt
-
-# Install reflex helper utilities like bun/node
-COPY rxconfig.py ./
-RUN reflex init
-
-# Install pre-cached frontend dependencies (if they exist)
-COPY *.web/bun.lockb *.web/package.json .web/
-RUN if [ -f .web/bun.lockb ]; then cd .web && ~/.local/share/reflex/bun/bin/bun install --frozen-lockfile; fi
-
 # Copy local context to `/app` inside container (see .dockerignore)
 COPY . .

-ARG PORT API_URL
-# Download other npm dependencies and compile frontend
-RUN REFLEX_API_URL=${API_URL:-http://localhost:$PORT} reflex export --loglevel debug --frontend-only --no-zip && mv .web/build/client/* /srv/ && rm -rf .web
-
-
-# Final image with only necessary files
-FROM python:3.13-slim
-
-# Install Caddy and redis server inside image
-RUN apt-get update -y && apt-get install -y caddy redis-server && rm -rf /var/lib/apt/lists/*
+# Install app requirements and reflex in the container
+RUN pip install -r requirements.txt

-ARG PORT API_URL
-ENV PATH="/app/.venv/bin:$PATH" PORT=$PORT REFLEX_API_URL=${API_URL:-http://localhost:$PORT} REFLEX_REDIS_URL=redis://localhost PYTHONUNBUFFERED=1
+# Deploy templates and prepare app
+RUN reflex init

-WORKDIR /app
-COPY --from=builder /app /app
-COPY --from=builder /srv /srv
+# Download all npm dependencies and compile frontend
+RUN reflex export --frontend-only --no-zip && mv .web/build/client/* /srv/ && rm -rf .web

 # Needed until Reflex properly passes SIGTERM on backend. 
STOPSIGNAL SIGKILL diff --git a/factorialhr_analysis/states/oauth_state.py b/factorialhr_analysis/states/oauth_state.py index af36ca7..919cbf6 100644 --- a/factorialhr_analysis/states/oauth_state.py +++ b/factorialhr_analysis/states/oauth_state.py @@ -41,6 +41,7 @@ class OAuthSessionState(rx.State): name='api_session', same_site='strict', secure=True, + max_age=7 * 24 * 60 * 60, ) _redirect_to: str = '' diff --git a/requirements.txt b/requirements.txt index 5d0f76f..19ce30d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,396 +1,6 @@ -# This file was autogenerated by uv via the following command: -# uv export --format requirements-txt --e . -alembic==1.16.4 \ - --hash=sha256:b05e51e8e82efc1abd14ba2af6392897e145930c3e0a2faf2b0da2f7f7fd660d \ - --hash=sha256:efab6ada0dd0fae2c92060800e0bf5c1dc26af15a10e02fb4babff164b4725e2 - # via reflex -annotated-types==0.7.0 \ - --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \ - --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89 - # via pydantic -anyio==4.10.0 \ - --hash=sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6 \ - --hash=sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1 - # via - # factorialhr - # fwtv - # httpx - # starlette - # watchfiles -bidict==0.23.1 \ - --hash=sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71 \ - --hash=sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5 - # via python-socketio -certifi==2025.8.3 \ - --hash=sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407 \ - --hash=sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5 - # via - # httpcore - # httpx -click==8.2.1 \ - --hash=sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202 \ - --hash=sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b - # via - # granian - # reflex - # reflex-hosting-cli -colorama==0.4.6 ; sys_platform == 'win32' \ - --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ - --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 - # via - # click - # pytest -coverage==7.10.5 \ - --hash=sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7 \ - --hash=sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae \ - --hash=sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a \ - --hash=sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4 \ - --hash=sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760 \ - --hash=sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5 \ - --hash=sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2 \ - --hash=sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44 \ - --hash=sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee \ - --hash=sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235 \ - --hash=sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c \ - --hash=sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db \ - --hash=sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14 \ - --hash=sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f \ - --hash=sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc \ - 
--hash=sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e \ - --hash=sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff \ - --hash=sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869 \ - --hash=sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b \ - --hash=sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5 \ - --hash=sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84 \ - --hash=sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b \ - --hash=sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c \ - --hash=sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6 - # via pytest-cov -dotenv==0.9.9 \ - --hash=sha256:29cf74a087b31dafdb5a446b6d7e11cbce8ed2741540e2339c69fbef92c94ce9 - # via fwtv -factorialhr==4.1.0 \ - --hash=sha256:594f52d0e134bec2e02fce9282b236a2a8329d163576333f85462177e7aee6cc \ - --hash=sha256:b0eee684c32982fd297de2d7fcc8c70a34e856121ec5aa19cd3da8837907563c - # via fwtv -granian==2.5.0 \ - --hash=sha256:3152037d799ea97e5736de054a48bf75368fb79b7cfee7e6aa46de1076a43882 \ - --hash=sha256:31705782cd616b9b70536c1b61b7f15815ebc4dcccdb72f58aa806ba7ac5dfa1 \ - --hash=sha256:36493c4f2b672d027eb11b05ca6660f9fd4944452841d213cb0cb64da869539b \ - --hash=sha256:50d4dc74ab763c1bf396cf85d93a8202bf1bfb74150b03f9fd62b600cd0c777c \ - --hash=sha256:74601bda3aedb249a3d5059d48108acfa61d6f71686162bda0bedc013a443efb \ - --hash=sha256:76dc084d1c528683a88c2d1a00786c9bc013b695b1776ad8a3c71419c45e1df0 \ - --hash=sha256:879fdeb71fe279175a25d709d95dd2db01eb67cd12d300e51e3dc704ca5e52fd \ - --hash=sha256:8f9918bee3c21eb1410f4323440d76eaa0c2d2e6ca4fa3e3a20d07cc54b788f6 \ - --hash=sha256:944ea3bd400a7ccc8129835eda65bd6a37f8fb77828f4e6ded2f06827d6ec25f \ - --hash=sha256:9a53151c2d31dbcf1acbe6af89ce0282387614b6401650d511ca4260ba0e03c1 \ - --hash=sha256:9f6d080e45735dd93e4c60a79e42ee9ed37124a9580a08292d83b0961c705e39 \ - --hash=sha256:af272218076663280fdc293b7da3adb716f23d54211cefad92fcf7e01b3eed19 \ - --hash=sha256:afafac4908d5931e4b2c2a09612e063d7ccd05e531f16b7f11e3bccc4ca8972c \ - --hash=sha256:bbc4ebc727202ad4b3073ca8148c2af49904710d6fce84872191b2dd5cd36916 \ - --hash=sha256:bed0d047c9c0c6c6a5a85ee5b3c7e2683fc63e03ac032eaf3d7654fa96bde102 \ - --hash=sha256:c28a34951c1ed8eea97948882bdbc374ce111be5a59293693613d25043ba1313 \ - --hash=sha256:f371dd9eedae26158901fee3eb934e8fa61491cc78d234470ce364b989c78a1f \ - --hash=sha256:f7bf7ed30bcda9bbc9962f187081c5dfa6aa07e06c3a59486bc573b5def35914 \ - --hash=sha256:fb157c3d66301ffad4113da4c51aed4d56006b9ebe9d0892c682a634b5fff773 - # via reflex -greenlet==3.2.4 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' \ - --hash=sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b \ - --hash=sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d \ - --hash=sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31 \ - --hash=sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671 \ - --hash=sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b \ - --hash=sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc \ - --hash=sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a \ - 
--hash=sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945 \ - --hash=sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae \ - --hash=sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504 - # via sqlalchemy -h11==0.16.0 \ - --hash=sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1 \ - --hash=sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86 - # via - # httpcore - # wsproto -httpcore==1.0.9 \ - --hash=sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55 \ - --hash=sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8 - # via httpx -httpx==0.28.1 \ - --hash=sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc \ - --hash=sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad - # via - # factorialhr - # fwtv - # reflex - # reflex-hosting-cli -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via - # anyio - # httpx -iniconfig==2.1.0 \ - --hash=sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7 \ - --hash=sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760 - # via pytest -jinja2==3.1.6 \ - --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \ - --hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67 - # via pytest-html -mako==1.3.10 \ - --hash=sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28 \ - --hash=sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59 - # via alembic -markdown-it-py==4.0.0 \ - --hash=sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147 \ - --hash=sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3 - # via rich -markupsafe==3.0.2 \ - --hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \ - --hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \ - --hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \ - --hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \ - --hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \ - --hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \ - --hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \ - --hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \ - --hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \ - --hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \ - --hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \ - --hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \ - --hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \ - --hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \ - --hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \ - --hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \ - --hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \ - --hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \ - --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 \ - 
--hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \ - --hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 - # via - # jinja2 - # mako -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -packaging==25.0 \ - --hash=sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484 \ - --hash=sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f - # via - # pytest - # reflex - # reflex-hosting-cli -platformdirs==4.3.8 \ - --hash=sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc \ - --hash=sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4 - # via - # reflex - # reflex-hosting-cli -pluggy==1.6.0 \ - --hash=sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3 \ - --hash=sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746 - # via - # pytest - # pytest-cov -psutil==7.0.0 ; sys_platform == 'win32' \ - --hash=sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553 \ - --hash=sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456 \ - --hash=sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99 - # via reflex -pydantic==2.11.7 \ - --hash=sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db \ - --hash=sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b - # via - # factorialhr - # reflex - # sqlmodel -pydantic-core==2.33.2 \ - --hash=sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56 \ - --hash=sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef \ - --hash=sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a \ - --hash=sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f \ - --hash=sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916 \ - --hash=sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a \ - --hash=sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849 \ - --hash=sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e \ - --hash=sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac \ - --hash=sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162 \ - --hash=sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc \ - --hash=sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5 \ - --hash=sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d \ - --hash=sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9 \ - --hash=sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9 \ - --hash=sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5 \ - --hash=sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9 \ - --hash=sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6 - # via pydantic -pygments==2.19.2 \ - --hash=sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887 \ - --hash=sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b - # via - # pytest - # rich -pytest==8.4.1 \ - --hash=sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7 \ - --hash=sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c - # via - # 
pytest-cov - # pytest-html - # pytest-metadata -pytest-cov==6.2.1 \ - --hash=sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2 \ - --hash=sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5 -pytest-html==4.1.1 \ - --hash=sha256:70a01e8ae5800f4a074b56a4cb1025c8f4f9b038bba5fe31e3c98eb996686f07 \ - --hash=sha256:c8152cea03bd4e9bee6d525573b67bbc6622967b72b9628dda0ea3e2a0b5dd71 -pytest-metadata==3.1.1 \ - --hash=sha256:c8e0844db684ee1c798cfa38908d20d67d0463ecb6137c72e91f418558dd5f4b \ - --hash=sha256:d2a29b0355fbc03f168aa96d41ff88b1a3b44a3b02acbe491801c98a048017c8 - # via pytest-html -python-dotenv==1.1.1 \ - --hash=sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc \ - --hash=sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab - # via dotenv -python-engineio==4.12.2 \ - --hash=sha256:8218ab66950e179dfec4b4bbb30aecf3f5d86f5e58e6fc1aa7fde2c698b2804f \ - --hash=sha256:e7e712ffe1be1f6a05ee5f951e72d434854a32fcfc7f6e4d9d3cae24ec70defa - # via python-socketio -python-multipart==0.0.20 \ - --hash=sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104 \ - --hash=sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13 - # via reflex -python-socketio==5.13.0 \ - --hash=sha256:51f68d6499f2df8524668c24bcec13ba1414117cfb3a90115c559b601ab10caf \ - --hash=sha256:ac4e19a0302ae812e23b712ec8b6427ca0521f7c582d6abb096e36e24a263029 - # via reflex -redis==6.4.0 \ - --hash=sha256:b01bc7282b8444e28ec36b261df5375183bb47a07eb9c603f284e89cbc5ef010 \ - --hash=sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f - # via - # fwtv - # reflex -reflex==0.8.9 \ - --hash=sha256:244b06078acf60d81515e89835eba9d25f981bd4bd4537fcbca18aac1b0c6135 \ - --hash=sha256:c9c7c4d23770269e7e2ca04e19d106ffe6d0e5dacc5dc0b5f830958f5b79687e - # via fwtv -reflex-hosting-cli==0.1.55 \ - --hash=sha256:84e78715a1f112996a0a1f8c5503958931a7fc6f0f61b7332645b8fde96c3b17 \ - --hash=sha256:f5d5b0c26cefdc7ba3356954b7be6bfa8ec50267c13a2247f6cf18a140e95918 - # via reflex -rich==14.1.0 \ - --hash=sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f \ - --hash=sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8 - # via - # reflex - # reflex-hosting-cli -ruff==0.12.10 \ - --hash=sha256:059e863ea3a9ade41407ad71c1de2badfbe01539117f38f763ba42a1206f7559 \ - --hash=sha256:141ce3d88803c625257b8a6debf4a0473eb6eed9643a6189b68838b43e78165a \ - --hash=sha256:189ab65149d11ea69a2d775343adf5f49bb2426fc4780f65ee33b423ad2e47f9 \ - --hash=sha256:1bef6161e297c68908b7218fa6e0e93e99a286e5ed9653d4be71e687dff101cf \ - --hash=sha256:1f68433c4fbc63efbfa3ba5db31727db229fa4e61000f452c540474b03de52a9 \ - --hash=sha256:2c6f4064c69d2542029b2a61d39920c85240c39837599d7f2e32e80d36401d6e \ - --hash=sha256:37b4a64f4062a50c75019c61c7017ff598cb444984b638511f48539d3a1c98db \ - --hash=sha256:4f1345fbf8fb0531cd722285b5f15af49b2932742fc96b633e883da8d841896b \ - --hash=sha256:7837eca8787f076f67aba2ca559cefd9c5cbc3a9852fd66186f4201b87c1563e \ - --hash=sha256:7d1a4e0bdfafcd2e3e235ecf50bf0176f74dd37902f241588ae1f6c827a36c56 \ - --hash=sha256:822d9677b560f1fdeab69b89d1f444bf5459da4aa04e06e766cf0121771ab844 \ - --hash=sha256:8b593cb0fb55cc8692dac7b06deb29afda78c721c7ccfed22db941201b7b8f7b \ - --hash=sha256:9de785e95dc2f09846c5e6e1d3a3d32ecd0b283a979898ad427a9be7be22b266 \ - --hash=sha256:ae479e1a18b439c59138f066ae79cc0f3ee250712a873d00dbafadaad9481e5b \ - --hash=sha256:cc138cc06ed9d4bfa9d667a65af7172b47840e1a98b02ce7011c391e54635ffc \ 
- --hash=sha256:d59e58586829f8e4a9920788f6efba97a13d1fa320b047814e8afede381c6839 \ - --hash=sha256:e67d96827854f50b9e3e8327b031647e7bcc090dbe7bb11101a81a3a2cbf1cc9 \ - --hash=sha256:ebb7333a45d56efc7c110a46a69a1b32365d5c5161e7244aaf3aa20ce62399c1 \ - --hash=sha256:f3fc21178cd44c98142ae7590f42ddcb587b8e09a3b849cbc84edb62ee95de60 -simple-websocket==1.1.0 \ - --hash=sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c \ - --hash=sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4 - # via python-engineio -sniffio==1.3.1 \ - --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ - --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc - # via anyio -sqlalchemy==2.0.43 \ - --hash=sha256:14111d22c29efad445cd5021a70a8b42f7d9152d8ba7f73304c4d82460946aaa \ - --hash=sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc \ - --hash=sha256:21b27b56eb2f82653168cefe6cb8e970cdaf4f3a6cb2c5e3c3c1cf3158968ff9 \ - --hash=sha256:5d79f9fdc9584ec83d1b3c75e9f4595c49017f5594fee1a2217117647225d738 \ - --hash=sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417 \ - --hash=sha256:7f1ac7828857fcedb0361b48b9ac4821469f7694089d15550bbcf9ab22564a1d \ - --hash=sha256:971ba928fcde01869361f504fcff3b7143b47d30de188b11c6357c0505824197 \ - --hash=sha256:9c5a9da957c56e43d72126a3f5845603da00e0293720b03bde0aacffcf2dc04f \ - --hash=sha256:9df7126fd9db49e3a5a3999442cc67e9ee8971f3cb9644250107d7296cb2a164 \ - --hash=sha256:e7c08f57f75a2bb62d7ee80a89686a5e5669f199235c6d1dac75cd59374091c3 - # via - # alembic - # sqlmodel -sqlmodel==0.0.24 \ - --hash=sha256:6778852f09370908985b667d6a3ab92910d0d5ec88adcaf23dbc242715ff7193 \ - --hash=sha256:cc5c7613c1a5533c9c7867e1aab2fd489a76c9e8a061984da11b4e613c182423 - # via reflex -starlette==0.47.2 \ - --hash=sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8 \ - --hash=sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b - # via reflex -typing-extensions==4.14.1 \ - --hash=sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36 \ - --hash=sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76 - # via - # alembic - # pydantic - # pydantic-core - # reflex - # sqlalchemy - # typing-inspection -typing-inspection==0.4.1 \ - --hash=sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51 \ - --hash=sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28 - # via pydantic -watchfiles==1.1.0 \ - --hash=sha256:12b0a02a91762c08f7264e2e79542f76870c3040bbc847fb67410ab81474932a \ - --hash=sha256:17ab167cca6339c2b830b744eaf10803d2a5b6683be4d79d8475d88b4a8a4be1 \ - --hash=sha256:20ecc8abbd957046f1fe9562757903f5eaf57c3bce70929fda6c7711bb58074a \ - --hash=sha256:239736577e848678e13b201bba14e89718f5c2133dfd6b1f7846fa1b58a8532b \ - --hash=sha256:29e7bc2eee15cbb339c68445959108803dc14ee0c7b4eea556400131a8de462b \ - --hash=sha256:328dbc9bff7205c215a7807da7c18dce37da7da718e798356212d22696404339 \ - --hash=sha256:32d6d4e583593cb8576e129879ea0991660b935177c0f93c6681359b3654bfa9 \ - --hash=sha256:5007f860c7f1f8df471e4e04aaa8c43673429047d63205d1630880f7637bca30 \ - --hash=sha256:5366164391873ed76bfdf618818c82084c9db7fac82b64a20c44d335eec9ced5 \ - --hash=sha256:60022527e71d1d1fda67a33150ee42869042bce3d0fcc9cc49be009a9cded3fb \ - --hash=sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575 \ - --hash=sha256:80f811146831c8c86ab17b640801c25dc0a88c630e855e2bef3568f30434d52b \ - 
--hash=sha256:8c5701dc474b041e2934a26d31d39f90fac8a3dee2322b39f7729867f932b1d4 \ - --hash=sha256:9c733cda03b6d636b4219625a4acb5c6ffb10803338e437fb614fef9516825ef \ - --hash=sha256:a8f6f72974a19efead54195bc9bed4d850fc047bb7aa971268fd9a8387c89011 \ - --hash=sha256:adb4167043d3a78280d5d05ce0ba22055c266cf8655ce942f2fb881262ff3cdf \ - --hash=sha256:b067915e3c3936966a8607f6fe5487df0c9c4afb85226613b520890049deea20 \ - --hash=sha256:cc08ef8b90d78bfac66f0def80240b0197008e4852c9f285907377b2947ffdcb \ - --hash=sha256:d181ef50923c29cf0450c3cd47e2f0557b62218c50b2ab8ce2ecaa02bd97e670 \ - --hash=sha256:d9481174d3ed982e269c090f780122fb59cee6c3796f74efe74e70f7780ed94c \ - --hash=sha256:eff4b8d89f444f7e49136dc695599a591ff769300734446c0a86cba2eb2f9895 \ - --hash=sha256:f21af781a4a6fbad54f03c598ab620e3a77032c5878f3d780448421a6e1818c7 \ - --hash=sha256:f2f0498b7d2a3c072766dba3274fe22a183dbea1f99d188f1c6c72209a1063dc \ - --hash=sha256:f7208ab6e009c627b7557ce55c465c98967e8caa8b11833531fdf95799372633 - # via granian -wrapt==1.17.3 \ - --hash=sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d \ - --hash=sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7 \ - --hash=sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb \ - --hash=sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa \ - --hash=sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8 \ - --hash=sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77 \ - --hash=sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277 \ - --hash=sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22 \ - --hash=sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16 \ - --hash=sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0 \ - --hash=sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050 \ - --hash=sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0 - # via reflex -wsproto==1.2.0 \ - --hash=sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065 \ - --hash=sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736 - # via simple-websocket +anyio>=4.9.0 +dotenv>=0.9.9 +factorialhr>=4.1.0 +httpx>=0.28.1 +redis>=6.4.0 +reflex==0.8.9 \ No newline at end of file From c1265fa0abd236b4ab1de3fce90dbb7cf4ad0cc7 Mon Sep 17 00:00:00 2001 From: Leon Date: Fri, 12 Sep 2025 10:42:11 +0200 Subject: [PATCH 06/24] fixes --- .dockerignore | 9 ++- Caddy.Dockerfile | 4 ++ Caddyfile | 2 +- Dockerfile | 61 ++++++++++---------- cloud.yml | 4 -- docker-compose.yaml | 37 ++++++++++++ factorialhr_analysis/constants.py | 4 +- factorialhr_analysis/factorialhr_analysis.py | 3 +- factorialhr_analysis/states/data_state.py | 14 +++-- factorialhr_analysis/states/oauth_state.py | 32 +++++----- pyproject.toml | 1 + requirements.txt | 6 -- rxconfig.py | 2 +- uv.lock | 24 ++++++++ 14 files changed, 136 insertions(+), 67 deletions(-) create mode 100644 Caddy.Dockerfile delete mode 100644 cloud.yml create mode 100644 docker-compose.yaml delete mode 100644 requirements.txt diff --git a/.dockerignore b/.dockerignore index 2352827..642bcb9 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,3 +1,6 @@ -.web -!.web/bun.lockb -!.web/package.json \ No newline at end of file +.* +__pycache__/* +Dockerfile +Caddy.Dockerfile +docker-compose.yaml +uploaded_files \ No newline at end of file diff --git a/Caddy.Dockerfile b/Caddy.Dockerfile new file mode 100644 index 
0000000..76ae320 --- /dev/null +++ b/Caddy.Dockerfile @@ -0,0 +1,4 @@ +FROM library/caddy + +COPY --from=local/reflex-app /app/.web/build/client /srv +ADD Caddyfile /etc/caddy/Caddyfile \ No newline at end of file diff --git a/Caddyfile b/Caddyfile index 13d94ce..24d4542 100644 --- a/Caddyfile +++ b/Caddyfile @@ -4,7 +4,7 @@ encode gzip @backend_routes path /_event/* /ping /_upload /_upload/* handle @backend_routes { - reverse_proxy localhost:8000 + reverse_proxy app:8000 } root * /srv diff --git a/Dockerfile b/Dockerfile index 5ed77a8..16e1412 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,41 +1,44 @@ -# This Dockerfile is used to deploy a single-container Reflex app instance -# to services like Render, Railway, Heroku, GCP, and others. +# This docker file is intended to be used with docker compose to deploy a production +# instance of a Reflex app. -# It uses a reverse proxy to serve the frontend statically and proxy to backend -# from a single exposed port, expecting TLS termination to be handled at the -# edge by the given platform. -FROM python:3.13 - -# If the service expects a different port, provide it here (f.e Render expects port 10000) -ARG PORT=8080 -# Only set for local/direct access. When TLS is used, the API_URL is assumed to be the same as the frontend. -ARG API_URL -ENV PORT=$PORT REFLEX_API_URL=${API_URL:-http://localhost:$PORT} REFLEX_REDIS_URL=redis://localhost PYTHONUNBUFFERED=1 - -# Install Caddy and redis server inside image -RUN apt-get update -y && apt-get install -y caddy redis-server && rm -rf /var/lib/apt/lists/* - -WORKDIR /app +# Stage 1: init +FROM ghcr.io/astral-sh/uv:debian AS init # Copy local context to `/app` inside container (see .dockerignore) +WORKDIR /app COPY . . +RUN mkdir -p /app/data /app/uploaded_files + +RUN uv venv -# Install app requirements and reflex in the container -RUN pip install -r requirements.txt +# Install app requirements and reflex inside virtualenv +RUN uv sync --frozen # Deploy templates and prepare app -RUN reflex init +RUN uv run reflex init -# Download all npm dependencies and compile frontend -RUN reflex export --frontend-only --no-zip && mv .web/build/client/* /srv/ && rm -rf .web +# Export static copy of frontend to /app/.web/build/client +RUN uv run reflex export --frontend-only --no-zip + +# Copy static files out of /app to save space in backend image +RUN mv .web/build/client /tmp/client +RUN rm -rf .web && mkdir -p .web/build +RUN mv /tmp/client .web/build/client + +# Stage 2: copy artifacts into slim image +FROM ghcr.io/astral-sh/uv:debian-slim +WORKDIR /app +RUN adduser --disabled-password --home /app reflex +COPY --chown=reflex --from=init /app /app +# Install libpq-dev for psycopg (skip if not using postgres). +RUN apt-get update -y && apt-get install -y libpq-dev && rm -rf /var/lib/apt/lists/* + +RUN uv build +ENV PYTHONUNBUFFERED=1 # Needed until Reflex properly passes SIGTERM on backend. STOPSIGNAL SIGKILL -EXPOSE $PORT - -# Apply migrations before starting the backend. -CMD [ -d alembic ] && reflex db migrate; \ - caddy start && \ - redis-server --daemonize yes && \ - exec reflex run --env prod --backend-only \ No newline at end of file +# Always apply migrations before starting the backend. 
+RUN if [ -d alembic ]; then uv run reflex db migrate; fi +CMD ["uv", "run", "--no-sync", "reflex", "run", "--env", "prod", "--backend-only"] diff --git a/cloud.yml b/cloud.yml deleted file mode 100644 index 2ba0f67..0000000 --- a/cloud.yml +++ /dev/null @@ -1,4 +0,0 @@ -name: factorialhr_analysis -regions: - fra: 1 -envfile: .env.production \ No newline at end of file diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000..872c6a9 --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,37 @@ +services: + #db: + # image: postgres + # restart: unless-stopped + # environment: + # POSTGRES_PASSWORD: secret + # volumes: + # - postgres-data:/var/lib/postgresql/data + + redis: + image: redis + restart: unless-stopped + app: + image: local/reflex-app + environment: + #REFLEX_DB_URL: postgresql+psycopg://postgres:secret@db/postgres + REFLEX_REDIS_URL: redis://redis:6379 + build: + context: . + volumes: + - upload-data:/app/uploaded_files + restart: unless-stopped + + webserver: + ports: + - 8080:80 + build: + context: . + dockerfile: Caddy.Dockerfile + restart: unless-stopped + depends_on: + - app + +volumes: + #postgres-data: + # Uploaded files + upload-data: \ No newline at end of file diff --git a/factorialhr_analysis/constants.py b/factorialhr_analysis/constants.py index d685865..a9bc6e8 100644 --- a/factorialhr_analysis/constants.py +++ b/factorialhr_analysis/constants.py @@ -1,11 +1,9 @@ import os -# dotenv.load_dotenv() - CLIENT_ID: str = os.environ.get('FACTORIALHR_CLIENT_ID', '') CLIENT_SECRET: str = os.environ.get('FACTORIALHR_CLIENT_SECRET', '') REDIRECT_URI: str = os.environ.get('FACTORIALHR_REDIRECT_URI', '') -ENVIRONMENT_URL: str = os.environ.get('FACTORIALHR_ENVIRONMENT_URL', '') +ENVIRONMENT_URL: str = os.environ.get('FACTORIALHR_ENVIRONMENT_URL', 'https://api.factorialhr.com') API_KEY: str = os.environ.get('FACTORIALHR_API_KEY', '') SCOPE = 'read' API_TIMEOUT = 60 diff --git a/factorialhr_analysis/factorialhr_analysis.py b/factorialhr_analysis/factorialhr_analysis.py index 39238ee..b6d0165 100644 --- a/factorialhr_analysis/factorialhr_analysis.py +++ b/factorialhr_analysis/factorialhr_analysis.py @@ -1,9 +1,10 @@ """Main app file for the FactorialHR Analysis application.""" +import logging + import reflex as rx from factorialhr_analysis import pages, routes -import logging logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO) diff --git a/factorialhr_analysis/states/data_state.py b/factorialhr_analysis/states/data_state.py index 09eb3e7..6399239 100644 --- a/factorialhr_analysis/states/data_state.py +++ b/factorialhr_analysis/states/data_state.py @@ -1,8 +1,9 @@ +"""State for managing data.""" + import datetime import logging -import anyio.abc -import anyio.from_thread +import anyio import factorialhr import reflex as rx @@ -47,7 +48,7 @@ async def _load_credentials(self, api_client: factorialhr.ApiClient): self._credentials = next(iter(credentials.data()), None) @rx.event - async def refresh_data(self): + async def refresh_data(self): # noqa: ANN201 """Refresh the data.""" auth_state = await self.get_state(states.OAuthSessionState) if await auth_state.refresh_session(): @@ -65,8 +66,8 @@ async def poll_data(self): auth = (await self.get_state(states.OAuthSessionState)).get_auth() try: async with ( - factorialhr.ApiClient(constants.ENVIRONMENT_URL, auth=auth) as client, - anyio.from_thread.create_task_group() as tg, + factorialhr.ApiClient(constants.ENVIRONMENT_URL, auth=auth) as client, # pyright: 
ignore[reportArgumentType] + anyio.create_task_group() as tg, ): tg.start_soon(self._load_teams, client) tg.start_soon(self._load_employees, client) @@ -74,6 +75,7 @@ async def poll_data(self): tg.start_soon(self._load_credentials, client) except Exception: logging.getLogger(__name__).exception('error loading data') + raise finally: async with self: self.is_loading = False @@ -83,7 +85,9 @@ async def poll_data(self): @rx.event def clear(self): + """Clear the data.""" self.last_updated = None self._employees.clear() self._teams.clear() self._shifts.clear() + self._credentials = None diff --git a/factorialhr_analysis/states/oauth_state.py b/factorialhr_analysis/states/oauth_state.py index 919cbf6..d97a09a 100644 --- a/factorialhr_analysis/states/oauth_state.py +++ b/factorialhr_analysis/states/oauth_state.py @@ -55,23 +55,27 @@ def api_session(self) -> ApiSession | None: return None @rx.event - async def create_session(self, token: str, *, grant_type: typing.Literal['refresh_token', 'authorization_code']): + async def create_session(self, token: str, grant_type: typing.Literal['refresh_token', 'authorization_code']): """Log in to the API and store the session cookie.""" + data = { + 'client_id': constants.CLIENT_ID, + 'client_secret': constants.CLIENT_SECRET, + } if grant_type == 'refresh_token': - data = { - 'client_id': constants.CLIENT_ID, - 'client_secret': constants.CLIENT_SECRET, - 'grant_type': 'refresh_token', - 'refresh_token': token, - } + data.update( + { + 'grant_type': 'refresh_token', + 'refresh_token': token, + } + ) else: - data = { - 'client_id': constants.CLIENT_ID, - 'client_secret': constants.CLIENT_SECRET, - 'code': token, - 'grant_type': 'authorization_code', - 'redirect_uri': constants.REDIRECT_URI, - } + data.update( + { + 'code': token, + 'grant_type': 'authorization_code', + 'redirect_uri': constants.REDIRECT_URI, + } + ) async with httpx.AsyncClient() as client: response = await client.post( f'{constants.ENVIRONMENT_URL}/oauth/token', diff --git a/pyproject.toml b/pyproject.toml index 8df8aba..2a38cb0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ test = [ "pytest-cov>=5.0.0", ] dev = [ + "pyright>=1.1.405", "ruff>=0.12.9", { include-group = "test" }, ] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 19ce30d..0000000 --- a/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -anyio>=4.9.0 -dotenv>=0.9.9 -factorialhr>=4.1.0 -httpx>=0.28.1 -redis>=6.4.0 -reflex==0.8.9 \ No newline at end of file diff --git a/rxconfig.py b/rxconfig.py index ff5d9d4..79ea5cc 100644 --- a/rxconfig.py +++ b/rxconfig.py @@ -9,6 +9,6 @@ rx.plugins.TailwindV4Plugin(), ], telemetry_enabled=False, - env_file='.env', + env_file='.env', # ignored if not found show_built_with_reflex=False, ) diff --git a/uv.lock b/uv.lock index d860a3c..42372ad 100644 --- a/uv.lock +++ b/uv.lock @@ -147,6 +147,7 @@ dependencies = [ [package.dev-dependencies] dev = [ + { name = "pyright" }, { name = "pytest" }, { name = "pytest-cov" }, { name = "pytest-html" }, @@ -170,6 +171,7 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ + { name = "pyright", specifier = ">=1.1.405" }, { name = "pytest", specifier = ">=8.4.1" }, { name = "pytest-cov", specifier = ">=5.0.0" }, { name = "pytest-html", specifier = ">=4" }, @@ -360,6 +362,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = 
"sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -449,6 +460,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] +[[package]] +name = "pyright" +version = "1.1.405" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/6c/ba4bbee22e76af700ea593a1d8701e3225080956753bee9750dcc25e2649/pyright-1.1.405.tar.gz", hash = "sha256:5c2a30e1037af27eb463a1cc0b9f6d65fec48478ccf092c1ac28385a15c55763", size = 4068319, upload-time = "2025-09-04T03:37:06.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/1a/524f832e1ff1962a22a1accc775ca7b143ba2e9f5924bb6749dce566784a/pyright-1.1.405-py3-none-any.whl", hash = "sha256:a2cb13700b5508ce8e5d4546034cb7ea4aedb60215c6c33f56cec7f53996035a", size = 5905038, upload-time = "2025-09-04T03:37:04.913Z" }, +] + [[package]] name = "pytest" version = "8.4.1" From 22281f3e448bfe83c3bb999e8e175da7500f4d4c Mon Sep 17 00:00:00 2001 From: Leon Date: Fri, 12 Sep 2025 12:22:52 +0200 Subject: [PATCH 07/24] Refactor Docker setup: remove Caddyfile, update docker-compose for single app service, and enhance .dockerignore to retain specific hidden files. Adjust Caddyfile for backend proxying and modify Dockerfile for improved build process and dependency management. 
--- .dockerignore | 10 +++++----- Caddy.Dockerfile | 4 ---- Caddyfile | 4 ++-- Dockerfile | 47 ++++++++++++++++++++++++++------------------- docker-compose.yaml | 17 ++++------------ 5 files changed, 38 insertions(+), 44 deletions(-) delete mode 100644 Caddy.Dockerfile diff --git a/.dockerignore b/.dockerignore index 642bcb9..2a26c77 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,6 +1,6 @@ +# Ignore all hidden files and folders .* -__pycache__/* -Dockerfile -Caddy.Dockerfile -docker-compose.yaml -uploaded_files \ No newline at end of file + +# But keep these specific hidden files/folders +!.web/bun.lockb +!.web/package.json diff --git a/Caddy.Dockerfile b/Caddy.Dockerfile deleted file mode 100644 index 76ae320..0000000 --- a/Caddy.Dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -FROM library/caddy - -COPY --from=local/reflex-app /app/.web/build/client /srv -ADD Caddyfile /etc/caddy/Caddyfile \ No newline at end of file diff --git a/Caddyfile b/Caddyfile index 24d4542..96219a4 100644 --- a/Caddyfile +++ b/Caddyfile @@ -1,10 +1,10 @@ -:{$PORT} +http://:8080 # has to match the frontend port (exposed port by docker) encode gzip @backend_routes path /_event/* /ping /_upload /_upload/* handle @backend_routes { - reverse_proxy app:8000 + reverse_proxy localhost:8000 # has to match the backend port of the reflex server } root * /srv diff --git a/Dockerfile b/Dockerfile index 16e1412..b5be787 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,44 +1,51 @@ # This docker file is intended to be used with docker compose to deploy a production # instance of a Reflex app. -# Stage 1: init -FROM ghcr.io/astral-sh/uv:debian AS init +# Stage 1: builder +FROM ghcr.io/astral-sh/uv:debian AS builder # Copy local context to `/app` inside container (see .dockerignore) WORKDIR /app COPY . . -RUN mkdir -p /app/data /app/uploaded_files +# Create virtual environment and install dependencies RUN uv venv - -# Install app requirements and reflex inside virtualenv RUN uv sync --frozen +ENV UV_NO_SYNC=1 + # Deploy templates and prepare app RUN uv run reflex init -# Export static copy of frontend to /app/.web/build/client -RUN uv run reflex export --frontend-only --no-zip +# Install pre-cached frontend dependencies (if exist) +RUN if [ -f .web/bun.lockb ]; then cd .web && ~/.local/share/reflex/bun/bin/bun install --frozen-lockfile; fi -# Copy static files out of /app to save space in backend image -RUN mv .web/build/client /tmp/client -RUN rm -rf .web && mkdir -p .web/build -RUN mv /tmp/client .web/build/client +# Export static copy of frontend to /srv +RUN uv run reflex export --loglevel debug --frontend-only --no-zip && mv .web/build/client/* /srv/ && rm -rf .web -# Stage 2: copy artifacts into slim image +# Stage 2: final image FROM ghcr.io/astral-sh/uv:debian-slim WORKDIR /app -RUN adduser --disabled-password --home /app reflex -COPY --chown=reflex --from=init /app /app -# Install libpq-dev for psycopg (skip if not using postgres). -RUN apt-get update -y && apt-get install -y libpq-dev && rm -rf /var/lib/apt/lists/* +ENV UV_NO_SYNC=1 + +# Install libpq-dev for psycopg (skip if not using postgres) +RUN apt-get update -y && apt-get install -y caddy libpq-dev && rm -rf /var/lib/apt/lists/* + +# Copy application and virtual environment from builder +COPY --from=builder /app /app +COPY --from=builder /srv /srv + +# Create data directories +RUN mkdir -p /app/data /app/uploaded_files -RUN uv build ENV PYTHONUNBUFFERED=1 # Needed until Reflex properly passes SIGTERM on backend. 
STOPSIGNAL SIGKILL -# Always apply migrations before starting the backend. -RUN if [ -d alembic ]; then uv run reflex db migrate; fi -CMD ["uv", "run", "--no-sync", "reflex", "run", "--env", "prod", "--backend-only"] +RUN uv sync --frozen + +# has to match the port specified in the Caddyfile +EXPOSE 8080 + +CMD ["sh", "-c", "[ -d alembic ] && uv run reflex db migrate; caddy start && exec uv run reflex run --env prod --backend-only"] diff --git a/docker-compose.yaml b/docker-compose.yaml index 872c6a9..9e7a802 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -11,26 +11,17 @@ services: image: redis restart: unless-stopped app: - image: local/reflex-app + build: + context: . environment: #REFLEX_DB_URL: postgresql+psycopg://postgres:secret@db/postgres REFLEX_REDIS_URL: redis://redis:6379 - build: - context: . + ports: + - 8080:8080 volumes: - upload-data:/app/uploaded_files restart: unless-stopped - webserver: - ports: - - 8080:80 - build: - context: . - dockerfile: Caddy.Dockerfile - restart: unless-stopped - depends_on: - - app - volumes: #postgres-data: # Uploaded files From ca46b4395e31ad1fe51b185062e0f5463b86c165 Mon Sep 17 00:00:00 2001 From: Leon Date: Fri, 12 Sep 2025 13:17:00 +0200 Subject: [PATCH 08/24] more debugging --- docker-compose.yaml | 1 + factorialhr_analysis/pages/oauth_page.py | 20 ++++++++++++-------- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index 9e7a802..373c2b6 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -13,6 +13,7 @@ services: app: build: context: . + env_file: .env environment: #REFLEX_DB_URL: postgresql+psycopg://postgres:secret@db/postgres REFLEX_REDIS_URL: redis://redis:6379 diff --git a/factorialhr_analysis/pages/oauth_page.py b/factorialhr_analysis/pages/oauth_page.py index cbf2ba9..ca508b2 100644 --- a/factorialhr_analysis/pages/oauth_page.py +++ b/factorialhr_analysis/pages/oauth_page.py @@ -13,8 +13,8 @@ class OAuthProcessState(rx.State): """State to handle OAuth token processing.""" - error: str = '' - expected_state: str = '' + error: rx.Field[str | None] = rx.field(default=None) + expected_state: rx.Field[str | None] = rx.field(default=None) @rx.event async def start_oauth_process(self): @@ -34,15 +34,19 @@ async def start_oauth_process(self): @rx.event async def process_oauth_response(self): """Process the OAuth response to exchange code for an access token.""" - # states missmatch - if self.expected_state != self.router.url.query_parameters.get('state'): - self.error = 'State mismatch error.' - self.expected_state = '' + expected_state = self.router.url.query_parameters.get('state') + if not expected_state: + self.error = 'State is missing.' + self.expected_state = None + return + if self.expected_state != expected_state: + self.error = f'State mismatch error. Expected {self.expected_state} but got {expected_state}.' + self.expected_state = None return - code = self.router.url.query_parameters.get('code', '') + code = self.router.url.query_parameters.get('code') if not code: self.error = 'Authorization code is missing.' 
- self.expected_state = '' + self.expected_state = None return oauth_session = await self.get_state(states.OAuthSessionState) try: From e1b057b57e5f859c8970d3c1936f4235b5a626d7 Mon Sep 17 00:00:00 2001 From: Leon Date: Fri, 12 Sep 2025 13:48:44 +0200 Subject: [PATCH 09/24] Add redirect to main page after successful OAuth authentication --- factorialhr_analysis/pages/oauth_page.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/factorialhr_analysis/pages/oauth_page.py b/factorialhr_analysis/pages/oauth_page.py index ca508b2..8204db3 100644 --- a/factorialhr_analysis/pages/oauth_page.py +++ b/factorialhr_analysis/pages/oauth_page.py @@ -57,6 +57,8 @@ async def process_oauth_response(self): else: logging.getLogger(__name__).info('created oauth session') yield states.DataState.refresh_data + # Redirect to the main page after successful authentication + yield states.OAuthSessionState.redir finally: self.error = '' self.expected_state = '' @@ -85,7 +87,6 @@ def start_oauth_process(): return rx.text('Redirecting to factorialhr...', on_mount=OAuthProcessState.start_oauth_process) -@redirect_if_authenticated def authorize_oauth_page(): return rx.box( rx.text('Validating response...'), From 8ab089897afeb56110f226076b07e9dbf65182e6 Mon Sep 17 00:00:00 2001 From: Leon Date: Fri, 12 Sep 2025 14:31:55 +0200 Subject: [PATCH 10/24] fix redirect paths --- Caddyfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Caddyfile b/Caddyfile index 96219a4..3d8ab1f 100644 --- a/Caddyfile +++ b/Caddyfile @@ -9,6 +9,6 @@ handle @backend_routes { root * /srv route { - try_files {path} {path}/ /404.html + try_files {path} {path} /404.html file_server } \ No newline at end of file From 36077eb8eb11738bde7377d24936d71b151b312c Mon Sep 17 00:00:00 2001 From: Leon Date: Fri, 12 Sep 2025 15:51:14 +0200 Subject: [PATCH 11/24] Refactor CSV download functionality to use async methods and improve file naming logic based on settings state. 
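
The naming rule is small enough to pin down in isolation. A standalone sketch of the intended behavior (a plain function for illustration only; the real logic lives in the async `_file_name` helper below and pulls both dates from `SettingsState`):

```python
import datetime


def csv_file_name(start: datetime.date | None, end: datetime.date | None) -> str:
    """Use a date-range prefix when both dates are set, a generic name otherwise."""
    if start and end:
        return f'{start}-{end}_errors.csv'
    return 'errors.csv'


# Both download paths ("all errors" and "selected errors") share this name.
assert csv_file_name(datetime.date(2025, 1, 1), datetime.date(2025, 1, 31)) == '2025-01-01-2025-01-31_errors.csv'
assert csv_file_name(None, None) == 'errors.csv'
```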
--- .../pages/working_time_verification_page.py | 29 ++++++++++--------- factorialhr_analysis/states/data_state.py | 4 ++- 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/factorialhr_analysis/pages/working_time_verification_page.py b/factorialhr_analysis/pages/working_time_verification_page.py index 149e999..2ac6fb2 100644 --- a/factorialhr_analysis/pages/working_time_verification_page.py +++ b/factorialhr_analysis/pages/working_time_verification_page.py @@ -246,28 +246,31 @@ def _convert_to_csv(self, indices: Container[int]) -> str: # Get the CSV data as a string return output.getvalue() - @rx.event - def download(self, data: str): - """Download the given data as a CSV file.""" - file_name = ( - f'{self.start_date}-{self.end_date}_errors.csv' if self.start_date and self.end_date else 'errors.csv' - ) - yield rx.download( - data=data, - filename=file_name, + async def _file_name(self) -> str: + settings_state = await self.get_state(SettingsState) + return ( + f'{settings_state.start_date}-{settings_state.end_date}_errors.csv' + if settings_state.start_date and settings_state.end_date + else 'errors.csv' ) @rx.event - def download_all_errors(self): + async def download_all_errors(self): """Download all errors as a CSV file.""" csv_data = self._convert_to_csv(range(len(self.errors_to_show))) - yield self.download(csv_data) + yield rx.download( + data=csv_data, + filename=await self._file_name(), + ) @rx.event - def download_selected_errors(self): + async def download_selected_errors(self): """Download selected errors as a CSV file.""" csv_data = self._convert_to_csv(self.selected_error_ids) - yield self.download(csv_data) + yield rx.download( + data=csv_data, + filename=await self._file_name(), + ) def render_input() -> rx.Component: diff --git a/factorialhr_analysis/states/data_state.py b/factorialhr_analysis/states/data_state.py index 6399239..ee47193 100644 --- a/factorialhr_analysis/states/data_state.py +++ b/factorialhr_analysis/states/data_state.py @@ -50,10 +50,12 @@ async def _load_credentials(self, api_client: factorialhr.ApiClient): @rx.event async def refresh_data(self): # noqa: ANN201 """Refresh the data.""" + self.clear() + if constants.API_KEY: + return DataState.poll_data auth_state = await self.get_state(states.OAuthSessionState) if await auth_state.refresh_session(): return DataState.poll_data - self.clear() return states.OAuthSessionState.redir @rx.event(background=True) From a37116cf70df6da709a5f3a3b00c37a07528aefd Mon Sep 17 00:00:00 2001 From: Leon Date: Sun, 21 Sep 2025 10:45:44 +0200 Subject: [PATCH 12/24] Add docstrings to components and functions for better clarity; refactor exception handling in the main application file. 
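
The two handlers added to the main module are defined but left unregistered (the assignments stay commented out below). For reference, a sketch of how they could be wired up once enabled; passing them to the `rx.App` constructor is an assumption about the Reflex 0.8 API, not something this patch does:

```python
# Sketch only: registering the exception handlers defined in this patch.
import logging

import reflex as rx


def backend_exception_handler(exc: Exception) -> None:
    """Log unhandled exceptions raised by backend event handlers."""
    logging.getLogger(__name__).exception('Backend exception', exc_info=exc)


def frontend_exception_handler(exc: Exception) -> None:
    """Log exceptions reported by the frontend."""
    logging.getLogger(__name__).exception('Frontend exception', exc_info=exc)


# Assumed API: rx.App accepts custom exception handlers as keyword arguments.
app = rx.App(
    backend_exception_handler=backend_exception_handler,
    frontend_exception_handler=frontend_exception_handler,
)
```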
--- .../components/authentication_component.py | 2 + factorialhr_analysis/components/footer.py | 7 +- factorialhr_analysis/components/navbar.py | 6 +- factorialhr_analysis/constants.py | 3 +- factorialhr_analysis/factorialhr_analysis.py | 16 +++- factorialhr_analysis/pages/index_page.py | 20 +--- factorialhr_analysis/pages/oauth_page.py | 8 +- .../pages/working_time_verification_page.py | 95 +++++++++++++------ factorialhr_analysis/states/oauth_state.py | 2 + 9 files changed, 107 insertions(+), 52 deletions(-) diff --git a/factorialhr_analysis/components/authentication_component.py b/factorialhr_analysis/components/authentication_component.py index 2436880..c7c23a8 100644 --- a/factorialhr_analysis/components/authentication_component.py +++ b/factorialhr_analysis/components/authentication_component.py @@ -1,3 +1,5 @@ +"""Authentication component.""" + import functools from collections.abc import Callable diff --git a/factorialhr_analysis/components/footer.py b/factorialhr_analysis/components/footer.py index 7846e80..503470b 100644 --- a/factorialhr_analysis/components/footer.py +++ b/factorialhr_analysis/components/footer.py @@ -1,9 +1,12 @@ +"""Footer component.""" + import reflex as rx from factorialhr_analysis import states -def refresh_data(): +def refresh_data() -> rx.Component: + """Refresh data button.""" return rx.hstack( rx.button( rx.icon('refresh-ccw'), @@ -24,6 +27,7 @@ def refresh_data(): def footer() -> rx.Component: + """Footer component.""" return rx.el.footer( refresh_data(), position='fixed', @@ -31,5 +35,4 @@ def footer() -> rx.Component: bottom='0', width='100%', bg=rx.color('gray', 3), - # color="white", ) diff --git a/factorialhr_analysis/components/navbar.py b/factorialhr_analysis/components/navbar.py index 7142a43..030d0bf 100644 --- a/factorialhr_analysis/components/navbar.py +++ b/factorialhr_analysis/components/navbar.py @@ -40,7 +40,8 @@ def navbar_link(text: str, url: str) -> rx.Component: return rx.link(rx.text(text, size='4', weight='medium'), href=url) -def refresh_data(): +def refresh_data() -> rx.Component: + """Refresh data button.""" return rx.hstack( rx.button( rx.icon('refresh-ccw'), @@ -60,7 +61,8 @@ def refresh_data(): ) -def icon_menu(): +def icon_menu() -> rx.Component: + """Icon menu.""" return ( rx.menu.root( rx.menu.trigger( diff --git a/factorialhr_analysis/constants.py b/factorialhr_analysis/constants.py index a9bc6e8..8c05c9e 100644 --- a/factorialhr_analysis/constants.py +++ b/factorialhr_analysis/constants.py @@ -1,3 +1,5 @@ +"""Constants for the application.""" + import os CLIENT_ID: str = os.environ.get('FACTORIALHR_CLIENT_ID', '') @@ -6,4 +8,3 @@ ENVIRONMENT_URL: str = os.environ.get('FACTORIALHR_ENVIRONMENT_URL', 'https://api.factorialhr.com') API_KEY: str = os.environ.get('FACTORIALHR_API_KEY', '') SCOPE = 'read' -API_TIMEOUT = 60 diff --git a/factorialhr_analysis/factorialhr_analysis.py b/factorialhr_analysis/factorialhr_analysis.py index b6d0165..54e6851 100644 --- a/factorialhr_analysis/factorialhr_analysis.py +++ b/factorialhr_analysis/factorialhr_analysis.py @@ -8,9 +8,23 @@ logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO) -# TODO: check if env variables in constants have been set + +def backend_exception_handler(exc: Exception) -> None: + """Handle backend exceptions.""" + logger = logging.getLogger(__name__) + logger.exception('Backend exception', exc_info=exc) + + +def frontend_exception_handler(exc: Exception) -> None: + """Handle frontend exceptions.""" + logger = 
logging.getLogger(__name__) + logger.exception('Frontend exception', exc_info=exc) + app = rx.App() +# app.backend_exception_handler = backend_exception_handler # noqa: ERA001 +# app.frontend_exception_handler = frontend_exception_handler # noqa: ERA001 + app.add_page(pages.index_page, route=routes.INDEX) app.add_page(pages.working_time_verification_page, route=routes.VERIFICATION_ROUTE) app.add_page(pages.authorize_oauth_page, route=routes.OAUTH_AUTHORIZE_ROUTE) diff --git a/factorialhr_analysis/pages/index_page.py b/factorialhr_analysis/pages/index_page.py index 5cfc2ca..f6e303b 100644 --- a/factorialhr_analysis/pages/index_page.py +++ b/factorialhr_analysis/pages/index_page.py @@ -1,7 +1,8 @@ -import factorialhr +"""Index page of the application.""" + import reflex as rx -from factorialhr_analysis import constants, states, templates +from factorialhr_analysis import states, templates class IndexState(rx.State): @@ -9,23 +10,10 @@ class IndexState(rx.State): is_loading: rx.Field[bool] = rx.field(default=False) - @rx.event - async def get_credentials(self): - self.is_loading = True - yield - oauth_state = await self.get_state(states.OAuthSessionState) - try: - async with factorialhr.ApiClient( - base_url=constants.ENVIRONMENT_URL, auth=oauth_state.get_auth() - ) as api_client: - shifts = await factorialhr.ShiftsEndpoint(api_client).all(timeout=100) - finally: - self.is_loading = False - @templates.template def index_page() -> rx.Component: - """The index page of the application.""" + """Index page of the application.""" return rx.vstack( rx.heading('Welcome to FactorialHR Analysis', size='4'), rx.hstack( diff --git a/factorialhr_analysis/pages/oauth_page.py b/factorialhr_analysis/pages/oauth_page.py index 8204db3..f86024d 100644 --- a/factorialhr_analysis/pages/oauth_page.py +++ b/factorialhr_analysis/pages/oauth_page.py @@ -1,3 +1,5 @@ +"""Pages for the OAuth process.""" + import functools import logging import secrets @@ -83,11 +85,13 @@ def login_page_wrapper() -> rx.Component: @redirect_if_authenticated -def start_oauth_process(): +def start_oauth_process() -> rx.Component: + """Page to start the OAuth process.""" return rx.text('Redirecting to factorialhr...', on_mount=OAuthProcessState.start_oauth_process) -def authorize_oauth_page(): +def authorize_oauth_page() -> rx.Component: + """Page to authorize the OAuth process.""" return rx.box( rx.text('Validating response...'), rx.text(OAuthProcessState.error, color='red'), diff --git a/factorialhr_analysis/pages/working_time_verification_page.py b/factorialhr_analysis/pages/working_time_verification_page.py index 2ac6fb2..7df8c64 100644 --- a/factorialhr_analysis/pages/working_time_verification_page.py +++ b/factorialhr_analysis/pages/working_time_verification_page.py @@ -3,6 +3,7 @@ import csv import datetime import io +import logging import typing from collections.abc import Container, Iterable, Sequence @@ -15,6 +16,8 @@ class SettingsState(rx.State): + """State for managing verification settings.""" + _start_date: datetime.date | None = None _end_date: datetime.date | None = None _tolerance: datetime.timedelta | None = None @@ -72,7 +75,15 @@ def set_only_active(self, active: bool): # noqa: FBT001 def time_to_moment(time_: datetime.time | None) -> rx.MomentDelta: - """Convert a datetime.time to a rx.MomentDelta.""" + """Convert a datetime.time to a rx.MomentDelta. + + Args: + time_: The time to convert, or None for empty delta. + + Returns: + A MomentDelta representing the time. 
+ + """ if time_ is None: return rx.MomentDelta() return rx.MomentDelta(hours=time_.hour, minutes=time_.minute, seconds=time_.second) @@ -101,13 +112,23 @@ class ErrorToShow(typing.TypedDict): def _filter_error(filter_value: str, error: ErrorToShow) -> bool: + """Filter error based on name or team names. + + Args: + filter_value: The filter string to search for. + error: The error to check against the filter. + + Returns: + True if the error matches the filter, False otherwise. + + """ return filter_value in error['name'].lower() or any( filter_value in team_name.lower() for team_name in error['team_names'] ) class DataStateDeprecated(rx.State): - """State holding all the data.""" + """State holding all the data for working time verification.""" errors_to_show: rx.Field[list[ErrorToShow]] = rx.field(default_factory=list) _calculated_errors: list[ErrorToShow] = [] # noqa: RUF012 @@ -167,40 +188,54 @@ async def calculate_errors(self): self.errors_to_show.clear() self._calculated_errors.clear() self.processed_employees = 0 + + # Get states once and store references data_state = await self.get_state(states.DataState) settings_state = await self.get_state(SettingsState) + # Filter employees and shifts outside of async context for better performance + employees = [ + employee + for employee in data_state._employees.values() # noqa: SLF001 + if not settings_state.only_active or employee.active + ] + + shifts = [ + shift + for shift in data_state._shifts.values() # noqa: SLF001 + if settings_state._start_date <= shift.date <= settings_state._end_date # noqa: SLF001 + ] + + # Update total count async with self: - employees = [ - employee - for employee in data_state._employees.values() - if not settings_state.only_active or employee.active - ] self.total_amount_of_employees = len(employees) - shifts = [ - shift - for shift in data_state._shifts.values() - if settings_state._start_date <= shift.date <= settings_state._end_date - ] - async with anyio.from_thread.create_task_group() as tg: - for employee in employees: - tg.start_soon( - self._handle_single_employee, - employee, - data_state._teams.values(), - shifts, - settings_state._tolerance, - ) - if not self.filter_value: + # Process employees concurrently with proper error handling + try: + async with anyio.from_thread.create_task_group() as tg: + for employee in employees: + tg.start_soon( + self._handle_single_employee, + employee, + data_state._teams.values(), # noqa: SLF001 + shifts, + settings_state._tolerance, # noqa: SLF001 + ) + except ExceptionGroup as e: + # Log error and reset loading state + logging.getLogger(__name__).exception('error calculating errors', exc_info=e) async with self: - self.errors_to_show = self._calculated_errors[:] - else: - for error_to_show in self._calculated_errors: - if not self.filter_value or _filter_error(self.filter_value.lower(), error_to_show): - async with self: - self.errors_to_show.append(error_to_show) + self.is_loading = False + return + + # Apply filtering async with self: + if not self.filter_value: + self.errors_to_show = self._calculated_errors[:] + else: + self.errors_to_show = [ + error for error in self._calculated_errors if _filter_error(self.filter_value.lower(), error) + ] self.is_loading = False @rx.event @@ -273,6 +308,7 @@ async def download_selected_errors(self): ) +@rx.memo def render_input() -> rx.Component: """Render the date input form.""" return rx.hstack( @@ -340,6 +376,7 @@ def render_input() -> rx.Component: ) +@rx.memo def render_export_buttons() -> rx.Component: """Render 
the export buttons.""" return rx.hstack( @@ -359,6 +396,7 @@ def render_export_buttons() -> rx.Component: ) +@rx.memo def render_search() -> rx.Component: """Render the search input.""" return rx.hstack( @@ -471,6 +509,7 @@ def render_table() -> rx.Component: ) +@rx.memo def live_progress() -> rx.Component: """Show a live progress bar when loading data.""" return rx.cond( diff --git a/factorialhr_analysis/states/oauth_state.py b/factorialhr_analysis/states/oauth_state.py index d97a09a..085cc7e 100644 --- a/factorialhr_analysis/states/oauth_state.py +++ b/factorialhr_analysis/states/oauth_state.py @@ -1,3 +1,5 @@ +"""State for managing OAuth session and authentication.""" + import time import typing From 660ac14208d935ec88cd6f873bad64a37f572827 Mon Sep 17 00:00:00 2001 From: Leon Date: Sun, 21 Sep 2025 11:16:42 +0200 Subject: [PATCH 13/24] Update CHANGELOG and README for Docker support and CI/CD enhancements; improve documentation on compliance features and usage instructions. --- .github/workflows/docker-build.yml | 97 ++++++++++++++++ .github/workflows/release.yml | 139 ++++++++++++++--------- CHANGELOG.md | 18 ++- README.md | 175 ++++++++++++++++++++++++++--- 4 files changed, 359 insertions(+), 70 deletions(-) create mode 100644 .github/workflows/docker-build.yml diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml new file mode 100644 index 0000000..6fbf399 --- /dev/null +++ b/.github/workflows/docker-build.yml @@ -0,0 +1,97 @@ +name: Build and Push Docker Image + +on: + workflow_call: + inputs: + image_tag: + description: 'Docker image tag' + required: true + type: string + is_release: + description: 'Whether this is a release build' + required: false + type: boolean + default: false + push: + branches: + - main + pull_request: + branches: + - main + types: [opened, synchronize, reopened] + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + build-and-push: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set image tag + id: tag + run: | + if [ "${{ github.event_name }}" = "workflow_call" ]; then + echo "tag=${{ inputs.image_tag }}" >> $GITHUB_OUTPUT + echo "is_release=${{ inputs.is_release }}" >> $GITHUB_OUTPUT + # Extract version without 'v' prefix for release builds + if [ "${{ inputs.is_release }}" = "true" ]; then + VERSION_WITHOUT_V=${inputs.image_tag#v} + echo "version_without_v=$VERSION_WITHOUT_V" >> $GITHUB_OUTPUT + fi + elif [ "${{ github.ref }}" = "refs/heads/main" ]; then + echo "tag=dev" >> $GITHUB_OUTPUT + echo "is_release=false" >> $GITHUB_OUTPUT + else + echo "tag=pr-${{ github.event.number }}" >> $GITHUB_OUTPUT + echo "is_release=false" >> $GITHUB_OUTPUT + fi + + - name: Extract metadata + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=pr + type=sha,prefix={{branch}}- + type=raw,value=${{ steps.tag.outputs.tag }},enable={{ steps.tag.outputs.is_release == 'true' }} + type=raw,value=${{ steps.tag.outputs.version_without_v }},enable={{ steps.tag.outputs.is_release == 'true' }} + type=raw,value=latest,enable={{ steps.tag.outputs.is_release == 'true' }} + + - 
name: Build and push Docker image + uses: docker/build-push-action@v5 + with: + context: . + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Output image details + run: | + echo "Built and pushed images:" + echo "${{ steps.meta.outputs.tags }}" + echo "" + echo "Image digest:" + echo "${{ steps.meta.outputs.json }}" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index fc67055..9efb6d4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,68 +1,105 @@ -name: Release package +name: Release Docker Image on: push: tags: - - "v[0-9]+.[0-9]+.[0-9]+" # normal release - - "v[0-9]+.[0-9]+.[0-9]+rc[0-9]+" # release candidate - - "v[0-9]+.[0-9]+.[0-9]+[ab][0-9]+" # alpha or beta release + - 'v*' + workflow_dispatch: + inputs: + version: + description: 'Version to release (e.g., v1.0.0)' + required: true + type: string jobs: - build: - uses: ./.github/workflows/build.yml - - upload: + release: runs-on: ubuntu-latest - needs: build - outputs: - DO_GITHUB_RELEASE: ${{ steps.detect-release.outputs.DO_GITHUB_RELEASE }} + permissions: + contents: read + packages: write steps: - - name: Download Artifact - uses: actions/download-artifact@v4 - with: - name: distributions - path: dist + - name: Checkout repository + uses: actions/checkout@v4 - - name: Publish package to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - password: ${{ secrets.PYPI_API_TOKEN }} - - name: Detect release version - id: detect-release + - name: Extract version from tag + id: version run: | - do_github_release=$((echo "${GITHUB_REF}" | grep -Eq "^refs\/tags\/v[0-9]+\.[0-9]+\.[0-9]+(rc[0-9]+)?$") && echo 1 || echo 0) - echo DO_GITHUB_RELEASE=$do_github_release >> $GITHUB_OUTPUT - echo DO_GITHUB_RELEASE=$do_github_release + if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then + VERSION="${{ github.event.inputs.version }}" + else + VERSION="${{ github.ref_name }}" + fi + # Remove 'v' prefix if present + VERSION=${VERSION#v} + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "tag=v$VERSION" >> $GITHUB_OUTPUT + echo "Released version: $VERSION" - publish: - runs-on: ubuntu-latest - needs: upload - if: needs.upload.outputs.DO_GITHUB_RELEASE == '1' - permissions: - contents: write + - name: Call Docker Build Workflow + uses: actions/github-script@v7 + with: + script: | + const { data } = await github.rest.actions.createWorkflowDispatch({ + owner: context.repo.owner, + repo: context.repo.repo, + workflow_id: 'docker-build.yml', + ref: context.ref, + inputs: { + image_tag: '${{ steps.version.outputs.tag }}', + is_release: 'true' + } + }); + console.log('Docker build workflow triggered for release'); - steps: - - name: Check out Git repository - uses: actions/checkout@v4 + - name: Wait for Docker build to complete + uses: actions/github-script@v7 + with: + script: | + const { data: runs } = await github.rest.actions.listWorkflowRuns({ + owner: context.repo.owner, + repo: context.repo.repo, + workflow_id: 'docker-build.yml', + status: 'in_progress', + per_page: 1 + }); + + if (runs.length > 0) { + console.log('Waiting for Docker build workflow to complete...'); + // Wait for workflow to complete (simplified - in practice you might want more sophisticated waiting) + await new Promise(resolve => setTimeout(resolve, 30000)); // Wait 30 seconds + } - - name: Download Build - uses: actions/download-artifact@v4 + - name: Create 
GitHub Release + if: github.event_name == 'push' + uses: actions/create-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: - name: distributions - path: dist + tag_name: ${{ steps.version.outputs.tag }} + release_name: Release ${{ steps.version.outputs.tag }} + body: | + ## Docker Image + + The Docker image has been built and pushed to GitHub Container Registry: + + ```bash + docker pull ghcr.io/${{ github.repository }}:${{ steps.version.outputs.tag }} + ``` + + ### Available Tags + - `${{ steps.version.outputs.tag }}` - Specific version + - `${{ steps.version.outputs.version }}` - Version without 'v' prefix + - `latest` - Latest release (if this is the default branch) + + ### Multi-Architecture Support + This image supports both `linux/amd64` and `linux/arm64` architectures. + draft: false + prerelease: ${{ contains(steps.version.outputs.tag, 'alpha') || contains(steps.version.outputs.tag, 'beta') || contains(steps.version.outputs.tag, 'rc') }} - - name: Detect prerelease + - name: Output release details run: | - do_prerelease=$((echo "${GITHUB_REF}" | grep -Eq "^refs\/tags\/v[0-9]+\.[0-9]+\.[0-9]+rc[0-9]+$") && echo 1 || echo 0) - echo DO_PRERELEASE=$do_prerelease >> $GITHUB_ENV - echo DO_PRERELEASE=$do_prerelease - - - name: Attach artifacts to github release - uses: softprops/action-gh-release@v2 - with: - files: | - dist/*.whl - CHANGELOG.md - prerelease: ${{ env.DO_PRERELEASE == '1' }} - body_path: CHANGELOG.md + echo "Release details:" + echo "Version: ${{ steps.version.outputs.version }}" + echo "Tag: ${{ steps.version.outputs.tag }}" + echo "Docker image: ghcr.io/${{ github.repository }}:${{ steps.version.outputs.tag }}" \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 105ce54..dd94311 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,9 +5,23 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0) ## [Unreleased] -## [v3.0.0] - +### Added + +- **Docker Support**: Complete containerization with multi-architecture support (AMD64 + ARM64) +- **CI/CD Pipeline**: Automated GitHub Actions workflows for testing, building, and deployment +- **GitHub Container Registry**: Automated Docker image publishing to `ghcr.io/leon1995/fwtv` +- **Multi-Platform Testing**: Automated testing across Python 3.11, 3.12, 3.13 on Linux, Windows, and macOS +- **Release Automation**: Automated release creation with versioned Docker images +- **Development Docker Images**: Pre-built development images tagged as `dev` +- **Release Docker Images**: Versioned images with tags like `v1.0.0`, `1.0.0`, and `latest` + +### Changed -### Changed app to run in the browser using reflex +- **Deployment**: Added Docker-first deployment option for easier production setup +- **Documentation**: Comprehensive README update with Docker usage instructions +- **Workflow Optimization**: Optimized release pipeline to reuse Docker build workflow (DRY principle) +- **Build Process**: Enhanced build workflow to trigger Docker builds on main branch pushes +- Changed app to run in the browser using reflex ## [2.4.1] - 2025-07-07 diff --git a/README.md b/README.md index 658f77f..6c2dd97 100644 --- a/README.md +++ b/README.md @@ -1,28 +1,169 @@ -# FactorialHR work time verification +# FactorialHR Work Time Verification -![pyversions](https://img.shields.io/pypi/pyversions/fwtv) ![implementation](https://img.shields.io/pypi/implementation/fwtv) ![status](https://img.shields.io/pypi/status/fwtv) ![pypi](https://img.shields.io/pypi/v/fwtv) 
![dpm](https://img.shields.io/pypi/dm/fwtv) +![pyversions](https://img.shields.io/pypi/pyversions/fwtv) ![implementation](https://img.shields.io/pypi/implementation/fwtv) ![status](https://img.shields.io/pypi/status/fwtv) ![pypi](https://img.shields.io/pypi/v/fwtv) ![dpm](https://img.shields.io/pypi/dm/fwtv) ![docker](https://img.shields.io/docker/pulls/ghcr.io/leon1995/fwtv) -This script verifies attendances whether they comply with german law. In particular, the following rules are verified: -- Whether the work time is longer than 6 hours without a break of 30 min -- Whether the work time is longer than 9 hours without a break of 45 min -- Whether the work time is longer than 10 hours without a break of 11 hours -- Whether the work time is within the time of 6am and 10pm +A web application built with [Reflex](https://reflex.dev) that verifies employee attendance records against German labor law requirements. The application integrates with FactorialHR's API to fetch attendance data and provides compliance checking with an intuitive web interface. + +## ๐ŸŽฏ Features + +- **German Labor Law Compliance**: Automatically verifies attendance against German work time regulations +- **FactorialHR Integration**: Seamless connection to FactorialHR API for data retrieval +- **Modern Web Interface**: Built with Reflex for a responsive, modern UI +- **Docker Support**: Containerized deployment with multi-architecture support +- **CI/CD Pipeline**: Automated testing, building, and deployment + +## ๐Ÿ“‹ Compliance Rules + +The application verifies the following German labor law requirements: + +- โฐ **6-hour rule**: Work time longer than 6 hours requires a 30-minute break +- โฐ **9-hour rule**: Work time longer than 9 hours requires a 45-minute break +- โฐ **10-hour rule**: Work time longer than 10 hours requires an 11-hour rest period +- ๐Ÿ•• **Time window**: Work time must be within 6:00 AM and 10:00 PM ![main_window](./docs/images/working_time_verification.png "Main Window") -## Disclaimer +## ๐Ÿš€ Quick Start + +### Option 1: Docker (Recommended) + +```bash +# Pull the latest development image +docker pull ghcr.io/leon1995/fwtv:dev + +# Run the container +docker run -p 8080:8080 \ + -e FACTORIAL_API_KEY=your_api_key \ + -e FACTORIAL_COMPANY_ID=your_company_id \ + ghcr.io/leon1995/fwtv:dev +``` + +### Option 2: Local Development + +```bash +# Clone the repository +git clone https://github.com/leon1995/fwtv.git +cd fwtv + +# Install dependencies +uv sync --frozen + +# Configure environment +cp .env.sample .env +# Edit .env with your FactorialHR credentials + +# Run the application +uv run reflex run --env prod +``` + +## ๐Ÿณ Docker Images + +The project provides pre-built Docker images for easy deployment: + +| Tag | Description | Usage | +|-----|-------------|-------| +| `dev` | Latest development build | `ghcr.io/leon1995/fwtv:dev` | +| `v1.0.0` | Specific release version | `ghcr.io/leon1995/fwtv:v1.0.0` | +| `latest` | Latest stable release | `ghcr.io/leon1995/fwtv:latest` | + +### Multi-Architecture Support + +All Docker images support both `linux/amd64` and `linux/arm64` architectures, making them compatible with: +- Intel/AMD x86_64 systems +- ARM64 systems (Apple Silicon, ARM servers) + +## โš™๏ธ Configuration + +### Environment Variables + +Create a `.env` file with the following variables from [`.env.sample`](.env.sample) + +### FactorialHR Setup + +1. Log in to your FactorialHR account +2. Navigate to Settings โ†’ API +3. Generate an API key +4. 
Note your Company ID from the URL or settings + +## ๐Ÿ—๏ธ CI/CD Pipeline + +The project includes automated CI/CD pipelines: + +### Development Pipeline +- **Triggers**: Push to main branch, pull requests +- **Actions**: Linting, testing, Docker image building +- **Output**: Development Docker images tagged as `dev` + +### Release Pipeline +- **Triggers**: Git tags (e.g., `v1.0.0`) +- **Actions**: Version extraction, Docker image building, GitHub release creation +- **Output**: Versioned Docker images and GitHub releases + +### Workflow Features +- โœ… Multi-architecture Docker builds (AMD64 + ARM64) +- โœ… Automated testing across Python 3.11, 3.12, 3.13 +- โœ… Cross-platform compatibility (Linux, Windows, macOS) +- โœ… GitHub Container Registry integration +- โœ… Automated release management + +## ๐Ÿงช Development + +### Prerequisites + +- Python 3.13 +- [uv](https://docs.astral.sh/uv/) package manager +- Docker (optional) + +### Setup Development Environment + +```bash +# Install development dependencies +uv sync --group dev + +# Run tests +uv run pytest + +# Run linting +uv run ruff check . +uv run ruff format . + +# Run pre-commit hooks +uv run pre-commit run --all-files +``` + +## ๐Ÿค Contributing + +We welcome contributions! Please follow these steps: + +1. **Fork** the repository +2. **Create** a feature branch (`git checkout -b feature/amazing-feature`) +3. **Install** development dependencies (`uv sync --group dev`) +4. **Make** your changes +5. **Run** tests (`uv run pytest`) +6. **Commit** your changes (`git commit -m 'Add amazing feature'`) +7. **Push** to the branch (`git push origin feature/amazing-feature`) +8. **Open** a Pull Request + +### Development Guidelines + +- Follow the existing code style (enforced by Ruff) +- Add tests for new functionality +- Update documentation as needed +- Ensure all CI checks pass + +## โš ๏ธ Disclaimer -I do not guarantee that this package complies with german law all the time. Changes may occur anytime. Use at your own risk. +**Important**: This application is provided for informational purposes only. While it implements German labor law requirements, I do not guarantee complete compliance with current regulations. Labor laws may change, and this tool should not be considered legal advice. -Errors where the time attended is 1 min above the limit are ignored, because factorial's automated time tracking is not precises enough. +**Use at your own risk**: Always consult with legal professionals for official compliance verification. -## Usage +## ๐Ÿ“„ License -- clone this repository -- install dependencies using `uv sync --frozen` -- copy [`.env.sample`](.env.sample) to `.env` and adjust the environment variables -- run app `uv run reflex run --env prod` +This project is licensed under the GNU Affero General Public License v3 - see the [LICENSE](LICENSE) file for details. -## Contributing +## ๐Ÿ”— Links -Feel free to contribute! Please fork this repository, install the development dependencies with `pip install -e ".[dev]"` and create pull request. 
+- **Homepage**: https://github.com/leon1995/fwtv +- **Bug Tracker**: https://github.com/leon1995/fwtv/issues +- **Changelog**: https://github.com/leon1995/fwtv/blob/main/CHANGELOG.md +- **Docker Hub**: https://github.com/leon1995/fwtv/pkgs/container/fwtv From 8c79c6044d11876a9019a368f1d06f2638d3a7c8 Mon Sep 17 00:00:00 2001 From: Leon Date: Sun, 21 Sep 2025 11:19:03 +0200 Subject: [PATCH 14/24] Fix formatting issues in Caddyfile, docker-compose.yaml, README.md, and release.yml; ensure consistent newline usage and improve readability. --- .github/workflows/release.yml | 12 ++++++------ Caddyfile | 2 +- README.md | 4 ++-- docker-compose.yaml | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9efb6d4..98b62eb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -63,7 +63,7 @@ jobs: status: 'in_progress', per_page: 1 }); - + if (runs.length > 0) { console.log('Waiting for Docker build workflow to complete...'); // Wait for workflow to complete (simplified - in practice you might want more sophisticated waiting) @@ -80,18 +80,18 @@ jobs: release_name: Release ${{ steps.version.outputs.tag }} body: | ## Docker Image - + The Docker image has been built and pushed to GitHub Container Registry: - + ```bash docker pull ghcr.io/${{ github.repository }}:${{ steps.version.outputs.tag }} ``` - + ### Available Tags - `${{ steps.version.outputs.tag }}` - Specific version - `${{ steps.version.outputs.version }}` - Version without 'v' prefix - `latest` - Latest release (if this is the default branch) - + ### Multi-Architecture Support This image supports both `linux/amd64` and `linux/arm64` architectures. draft: false @@ -102,4 +102,4 @@ jobs: echo "Release details:" echo "Version: ${{ steps.version.outputs.version }}" echo "Tag: ${{ steps.version.outputs.tag }}" - echo "Docker image: ghcr.io/${{ github.repository }}:${{ steps.version.outputs.tag }}" \ No newline at end of file + echo "Docker image: ghcr.io/${{ github.repository }}:${{ steps.version.outputs.tag }}" diff --git a/Caddyfile b/Caddyfile index 3d8ab1f..3e08274 100644 --- a/Caddyfile +++ b/Caddyfile @@ -11,4 +11,4 @@ root * /srv route { try_files {path} {path} /404.html file_server -} \ No newline at end of file +} diff --git a/README.md b/README.md index 6c2dd97..3e5e667 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ A web application built with [Reflex](https://reflex.dev) that verifies employee The application verifies the following German labor law requirements: - โฐ **6-hour rule**: Work time longer than 6 hours requires a 30-minute break -- โฐ **9-hour rule**: Work time longer than 9 hours requires a 45-minute break +- โฐ **9-hour rule**: Work time longer than 9 hours requires a 45-minute break - โฐ **10-hour rule**: Work time longer than 10 hours requires an 11-hour rest period - ๐Ÿ•• **Time window**: Work time must be within 6:00 AM and 10:00 PM @@ -94,7 +94,7 @@ The project includes automated CI/CD pipelines: - **Actions**: Linting, testing, Docker image building - **Output**: Development Docker images tagged as `dev` -### Release Pipeline +### Release Pipeline - **Triggers**: Git tags (e.g., `v1.0.0`) - **Actions**: Version extraction, Docker image building, GitHub release creation - **Output**: Versioned Docker images and GitHub releases diff --git a/docker-compose.yaml b/docker-compose.yaml index 373c2b6..601552e 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -26,4 +26,4 @@ services: 
 volumes:
   #postgres-data:
   # Uploaded files
-  upload-data:
\ No newline at end of file
+  upload-data:

From 1c1e0ce3d9a8acdd90cc2cae48995d4ce584d496 Mon Sep 17 00:00:00 2001
From: Leon
Date: Sun, 21 Sep 2025 11:23:46 +0200
Subject: [PATCH 15/24] fix conditional tag expressions in Docker metadata
 action

---
 .github/workflows/docker-build.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml
index 6fbf399..a595007 100644
--- a/.github/workflows/docker-build.yml
+++ b/.github/workflows/docker-build.yml
@@ -73,9 +73,9 @@ jobs:
             type=ref,event=branch
             type=ref,event=pr
             type=sha,prefix={{branch}}-
-            type=raw,value=${{ steps.tag.outputs.tag }},enable={{ steps.tag.outputs.is_release == 'true' }}
-            type=raw,value=${{ steps.tag.outputs.version_without_v }},enable={{ steps.tag.outputs.is_release == 'true' }}
-            type=raw,value=latest,enable={{ steps.tag.outputs.is_release == 'true' }}
+            type=raw,value=${{ steps.tag.outputs.tag }},enable=${{ steps.tag.outputs.is_release == 'true' }}
+            type=raw,value=${{ steps.tag.outputs.version_without_v }},enable=${{ steps.tag.outputs.is_release == 'true' }}
+            type=raw,value=latest,enable=${{ steps.tag.outputs.is_release == 'true' }}
 
       - name: Build and push Docker image
         uses: docker/build-push-action@v5

From 463493226684da54c48420b612b9e7bf0f9e96bf Mon Sep 17 00:00:00 2001
From: Leon
Date: Sun, 21 Sep 2025 11:33:18 +0200
Subject: [PATCH 16/24] Update Docker build workflow to change SHA tag prefix
 from branch name to 'sha'; streamline tagging process for releases.

---
 .github/workflows/docker-build.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml
index a595007..ddde004 100644
--- a/.github/workflows/docker-build.yml
+++ b/.github/workflows/docker-build.yml
@@ -72,7 +72,7 @@ jobs:
           tags: |
             type=ref,event=branch
             type=ref,event=pr
-            type=sha,prefix={{branch}}-
+            type=sha,prefix=sha-
             type=raw,value=${{ steps.tag.outputs.tag }},enable=${{ steps.tag.outputs.is_release == 'true' }}
             type=raw,value=${{ steps.tag.outputs.version_without_v }},enable=${{ steps.tag.outputs.is_release == 'true' }}
             type=raw,value=latest,enable=${{ steps.tag.outputs.is_release == 'true' }}

From f216b950deb6c5a58497551cc261c9ae09cec433 Mon Sep 17 00:00:00 2001
From: Leon
Date: Sun, 21 Sep 2025 11:39:13 +0200
Subject: [PATCH 17/24] do not test incompatible python versions

---
 .github/workflows/build.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 4551d89..038e58d 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -67,7 +67,7 @@ jobs:
 
     strategy:
       matrix:
-        python-version: [ "3.11", "3.12", "3.13" ]
+        python-version: [ "3.13" ]
         os: [ ubuntu-latest, windows-latest, macos-latest ]
         distribution: [ "${{ needs.build.outputs.WHL }}", "${{ needs.build.outputs.TARGZ }}" ]
 

From cd24961e9623de28e71c64d049241bcd95570ede Mon Sep 17 00:00:00 2001
From: Leon
Date: Sun, 21 Sep 2025 12:06:05 +0200
Subject: [PATCH 18/24] Refactor OAuth session handling to ensure error state
 is reset correctly; update secure cookie setting based on environment mode.
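
Context for reviewers: the `finally` block previously cleared `self.error`
unconditionally, wiping the message set in the `except` branch before the UI
could render it; the reset now happens only on the success path, while the
one-time `expected_state` is still cleared in every case. The cookie change
keeps `secure=True` in production but allows the session cookie over plain
HTTP on localhost during development. A minimal sketch of the intended
control flow (illustrative names, not the real handler):

```python
def process_oauth_response(code: str) -> tuple[str, str]:
    """Sketch: `except` records the error, `else` runs only on success,
    and `finally` runs in every case."""
    error, expected_state = '', 'one-time-nonce'
    try:
        if not code:  # stand-in for the real token exchange, which may raise
            raise ValueError('missing authorization code')
    except ValueError as e:
        error = str(e)  # keep the message so the UI can display it
    else:
        error = ''  # success: clear any stale error from a previous attempt
    finally:
        expected_state = ''  # always invalidate the one-time OAuth state
    return error, expected_state
```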
--- factorialhr_analysis/pages/oauth_page.py | 2 +- factorialhr_analysis/states/oauth_state.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/factorialhr_analysis/pages/oauth_page.py b/factorialhr_analysis/pages/oauth_page.py index f86024d..fb6e86e 100644 --- a/factorialhr_analysis/pages/oauth_page.py +++ b/factorialhr_analysis/pages/oauth_page.py @@ -58,11 +58,11 @@ async def process_oauth_response(self): self.error = str(e) else: logging.getLogger(__name__).info('created oauth session') + self.error = '' yield states.DataState.refresh_data # Redirect to the main page after successful authentication yield states.OAuthSessionState.redir finally: - self.error = '' self.expected_state = '' diff --git a/factorialhr_analysis/states/oauth_state.py b/factorialhr_analysis/states/oauth_state.py index 085cc7e..37392f4 100644 --- a/factorialhr_analysis/states/oauth_state.py +++ b/factorialhr_analysis/states/oauth_state.py @@ -1,5 +1,6 @@ """State for managing OAuth session and authentication.""" +import os import time import typing @@ -42,7 +43,7 @@ class OAuthSessionState(rx.State): api_session_cookie: str = rx.Cookie( name='api_session', same_site='strict', - secure=True, + secure=os.environ.get('REFLEX_ENV_MODE') == rx.constants.Env.PROD, max_age=7 * 24 * 60 * 60, ) _redirect_to: str = '' From 6f8241903c929e9813e6bc688cfa4a1f29c44010 Mon Sep 17 00:00:00 2001 From: Leon Date: Sun, 21 Sep 2025 12:17:08 +0200 Subject: [PATCH 19/24] Enhance Docker build workflow by adding a wait mechanism for build completion; improve release creation process with better logging and formatting for release notes. --- .github/workflows/release.yml | 95 +++++++++++++++++++++++------------ 1 file changed, 63 insertions(+), 32 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 98b62eb..a6779ef 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -37,6 +37,7 @@ jobs: echo "Released version: $VERSION" - name: Call Docker Build Workflow + id: docker-build uses: actions/github-script@v7 with: script: | @@ -51,51 +52,81 @@ jobs: } }); console.log('Docker build workflow triggered for release'); + return data; - name: Wait for Docker build to complete uses: actions/github-script@v7 with: script: | - const { data: runs } = await github.rest.actions.listWorkflowRuns({ - owner: context.repo.owner, - repo: context.repo.repo, - workflow_id: 'docker-build.yml', - status: 'in_progress', - per_page: 1 - }); - - if (runs.length > 0) { - console.log('Waiting for Docker build workflow to complete...'); - // Wait for workflow to complete (simplified - in practice you might want more sophisticated waiting) - await new Promise(resolve => setTimeout(resolve, 30000)); // Wait 30 seconds + const maxWaitTime = 10 * 60 * 1000; // 10 minutes + const checkInterval = 30 * 1000; // 30 seconds + const startTime = Date.now(); + + console.log('Waiting for Docker build workflow to complete...'); + + while (Date.now() - startTime < maxWaitTime) { + // Get the most recent workflow runs for docker-build.yml + const { data: runs } = await github.rest.actions.listWorkflowRuns({ + owner: context.repo.owner, + repo: context.repo.repo, + workflow_id: 'docker-build.yml', + per_page: 5 + }); + + // Find the run that was triggered by our dispatch + const ourRun = runs.find(run => + run.event === 'workflow_dispatch' && + run.status !== 'completed' && + run.created_at > new Date(Date.now() - 5 * 60 * 1000).toISOString() // Within last 5 minutes + ); + + if (!ourRun) { + 
console.log('No matching workflow run found, assuming completed'); + break; + } + + console.log(`Workflow run ${ourRun.id} status: ${ourRun.status}, conclusion: ${ourRun.conclusion || 'pending'}`); + + if (ourRun.status === 'completed') { + if (ourRun.conclusion === 'success') { + console.log('Docker build completed successfully'); + break; + } else { + throw new Error(`Docker build failed with conclusion: ${ourRun.conclusion}`); + } + } + + console.log(`Waiting ${checkInterval / 1000} seconds before next check...`); + await new Promise(resolve => setTimeout(resolve, checkInterval)); + } + + if (Date.now() - startTime >= maxWaitTime) { + throw new Error('Timeout waiting for Docker build to complete'); } - name: Create GitHub Release if: github.event_name == 'push' - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag_name: ${{ steps.version.outputs.tag }} - release_name: Release ${{ steps.version.outputs.tag }} - body: | - ## Docker Image + run: | + gh release create "${{ steps.version.outputs.tag }}" \ + --title "Release ${{ steps.version.outputs.tag }}" \ + --notes "## Docker Image - The Docker image has been built and pushed to GitHub Container Registry: + The Docker image has been built and pushed to GitHub Container Registry: - ```bash - docker pull ghcr.io/${{ github.repository }}:${{ steps.version.outputs.tag }} - ``` + \`\`\`bash + docker pull ghcr.io/${{ github.repository }}:${{ steps.version.outputs.tag }} + \`\`\` - ### Available Tags - - `${{ steps.version.outputs.tag }}` - Specific version - - `${{ steps.version.outputs.version }}` - Version without 'v' prefix - - `latest` - Latest release (if this is the default branch) + ### Available Tags + - \`${{ steps.version.outputs.tag }}\` - Specific version + - \`${{ steps.version.outputs.version }}\` - Version without 'v' prefix + - \`latest\` - Latest release (if this is the default branch) - ### Multi-Architecture Support - This image supports both `linux/amd64` and `linux/arm64` architectures. - draft: false - prerelease: ${{ contains(steps.version.outputs.tag, 'alpha') || contains(steps.version.outputs.tag, 'beta') || contains(steps.version.outputs.tag, 'rc') }} + ### Multi-Architecture Support + This image supports both \`linux/amd64\` and \`linux/arm64\` architectures." 
\ + ${{ contains(steps.version.outputs.tag, 'alpha') || contains(steps.version.outputs.tag, 'beta') || contains(steps.version.outputs.tag, 'rc') && '--prerelease' || '' }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Output release details run: | From 9d76fc1dce4af219cd2d19781e2748ec594dcb51 Mon Sep 17 00:00:00 2001 From: Leon Date: Sun, 21 Sep 2025 12:23:55 +0200 Subject: [PATCH 20/24] lint --- .github/workflows/release.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a6779ef..15880b5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -61,9 +61,9 @@ jobs: const maxWaitTime = 10 * 60 * 1000; // 10 minutes const checkInterval = 30 * 1000; // 30 seconds const startTime = Date.now(); - + console.log('Waiting for Docker build workflow to complete...'); - + while (Date.now() - startTime < maxWaitTime) { // Get the most recent workflow runs for docker-build.yml const { data: runs } = await github.rest.actions.listWorkflowRuns({ @@ -72,21 +72,21 @@ jobs: workflow_id: 'docker-build.yml', per_page: 5 }); - + // Find the run that was triggered by our dispatch - const ourRun = runs.find(run => - run.event === 'workflow_dispatch' && + const ourRun = runs.find(run => + run.event === 'workflow_dispatch' && run.status !== 'completed' && run.created_at > new Date(Date.now() - 5 * 60 * 1000).toISOString() // Within last 5 minutes ); - + if (!ourRun) { console.log('No matching workflow run found, assuming completed'); break; } - + console.log(`Workflow run ${ourRun.id} status: ${ourRun.status}, conclusion: ${ourRun.conclusion || 'pending'}`); - + if (ourRun.status === 'completed') { if (ourRun.conclusion === 'success') { console.log('Docker build completed successfully'); @@ -95,11 +95,11 @@ jobs: throw new Error(`Docker build failed with conclusion: ${ourRun.conclusion}`); } } - + console.log(`Waiting ${checkInterval / 1000} seconds before next check...`); await new Promise(resolve => setTimeout(resolve, checkInterval)); } - + if (Date.now() - startTime >= maxWaitTime) { throw new Error('Timeout waiting for Docker build to complete'); } From fe45a3a3b631e2b1b8f4aaec91b39c64472d1414 Mon Sep 17 00:00:00 2001 From: Leon Date: Sun, 21 Sep 2025 12:27:00 +0200 Subject: [PATCH 21/24] Fix logical condition in release workflow for prerelease tagging; update secure cookie setting in OAuth session state to use correct environment mode value. --- .github/workflows/release.yml | 2 +- factorialhr_analysis/states/oauth_state.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 15880b5..67d558e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -124,7 +124,7 @@ jobs: ### Multi-Architecture Support This image supports both \`linux/amd64\` and \`linux/arm64\` architectures." 
\ - ${{ contains(steps.version.outputs.tag, 'alpha') || contains(steps.version.outputs.tag, 'beta') || contains(steps.version.outputs.tag, 'rc') && '--prerelease' || '' }} + ${{ (contains(steps.version.outputs.tag, 'alpha') || contains(steps.version.outputs.tag, 'beta') || contains(steps.version.outputs.tag, 'rc')) && '--prerelease' || '' }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/factorialhr_analysis/states/oauth_state.py b/factorialhr_analysis/states/oauth_state.py index 37392f4..994d86a 100644 --- a/factorialhr_analysis/states/oauth_state.py +++ b/factorialhr_analysis/states/oauth_state.py @@ -43,7 +43,7 @@ class OAuthSessionState(rx.State): api_session_cookie: str = rx.Cookie( name='api_session', same_site='strict', - secure=os.environ.get('REFLEX_ENV_MODE') == rx.constants.Env.PROD, + secure=os.environ.get('REFLEX_ENV_MODE') == rx.constants.Env.PROD.value, max_age=7 * 24 * 60 * 60, ) _redirect_to: str = '' From bec37dd9c3ca16c766b15525e2d0a5ae6ec3d811 Mon Sep 17 00:00:00 2001 From: Leon Date: Sun, 21 Sep 2025 13:20:57 +0200 Subject: [PATCH 22/24] Improve team filtering logic in working time verification page; add checks for None values in employee IDs and ensure early return if date settings are not defined. --- .../pages/working_time_verification_page.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/factorialhr_analysis/pages/working_time_verification_page.py b/factorialhr_analysis/pages/working_time_verification_page.py index 7df8c64..04277a4 100644 --- a/factorialhr_analysis/pages/working_time_verification_page.py +++ b/factorialhr_analysis/pages/working_time_verification_page.py @@ -158,7 +158,11 @@ async def _handle_single_employee( async with self: error_to_show = ErrorToShow( name=employee.full_name, - team_names=[team.name for team in teams if employee.id in team.employee_ids], + team_names=[ + team.name + for team in teams + if team.employee_ids is not None and employee.id in team.employee_ids + ], affected_days=', '.join(str(d) for d in error.days_affected), error=error.reason, cumulated_break=error.break_time, @@ -193,6 +197,9 @@ async def calculate_errors(self): data_state = await self.get_state(states.DataState) settings_state = await self.get_state(SettingsState) + if settings_state._start_date is None or settings_state._end_date is None: # noqa: SLF001 + return + # Filter employees and shifts outside of async context for better performance employees = [ employee From f20b5b4faf8b4359ba2b3e1b6188137ed04748cd Mon Sep 17 00:00:00 2001 From: Leon Date: Sun, 21 Sep 2025 13:27:10 +0200 Subject: [PATCH 23/24] Refactor release workflow to enhance run detection logic; increase per_page limit for workflow runs and improve handling of recent workflow_dispatch events for better build status reporting. 
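
Reviewer note: this step assumes (worth verifying) that
`github.rest.actions.listWorkflowRuns` resolves with
`{ data: { total_count, workflow_runs } }`; destructuring `data` straight
into `runs` and calling array methods on it would then throw at runtime. A
corrected skeleton of the detection step under that assumption, with
illustrative constants:

```javascript
// Sketch for actions/github-script: `github` and `context` are injected by
// the action. Pull the nested `workflow_runs` array out of `data` first.
const { data } = await github.rest.actions.listWorkflowRuns({
  owner: context.repo.owner,
  repo: context.repo.repo,
  workflow_id: 'docker-build.yml',
  per_page: 10,
});
const cutoff = new Date(Date.now() - 5 * 60 * 1000).toISOString();
const recentRuns = data.workflow_runs.filter(
  (run) => run.event === 'workflow_dispatch' && run.created_at > cutoff,
);
console.log(`Found ${recentRuns.length} recent workflow_dispatch runs`);
```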
--- .github/workflows/release.yml | 52 +++++++++++++++++++++++++---------- 1 file changed, 38 insertions(+), 14 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 67d558e..dcf120f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -70,29 +70,53 @@ jobs: owner: context.repo.owner, repo: context.repo.repo, workflow_id: 'docker-build.yml', - per_page: 5 + per_page: 10 }); // Find the run that was triggered by our dispatch - const ourRun = runs.find(run => + // Look for runs within the last 5 minutes that are workflow_dispatch events + const recentRuns = runs.filter(run => run.event === 'workflow_dispatch' && - run.status !== 'completed' && - run.created_at > new Date(Date.now() - 5 * 60 * 1000).toISOString() // Within last 5 minutes + run.created_at > new Date(Date.now() - 5 * 60 * 1000).toISOString() ); - if (!ourRun) { - console.log('No matching workflow run found, assuming completed'); - break; - } + console.log(`Found ${recentRuns.length} recent workflow_dispatch runs`); - console.log(`Workflow run ${ourRun.id} status: ${ourRun.status}, conclusion: ${ourRun.conclusion || 'pending'}`); + // Find the most recent run that matches our criteria + const ourRun = recentRuns.find(run => { + // Check if this run has the expected inputs (is_release: true) + // We can't directly access inputs from the API, so we'll use heuristics + // Look for runs that are either in progress or recently completed + return run.status === 'in_progress' || + run.status === 'queued' || + (run.status === 'completed' && run.conclusion); + }); - if (ourRun.status === 'completed') { - if (ourRun.conclusion === 'success') { - console.log('Docker build completed successfully'); - break; + if (!ourRun) { + // If no active run found, check if there's a recently completed run + const completedRun = recentRuns.find(run => run.status === 'completed'); + if (completedRun) { + console.log(`Found completed run ${completedRun.id} with conclusion: ${completedRun.conclusion}`); + if (completedRun.conclusion === 'success') { + console.log('Docker build completed successfully'); + break; + } else { + throw new Error(`Docker build failed with conclusion: ${completedRun.conclusion}`); + } } else { - throw new Error(`Docker build failed with conclusion: ${ourRun.conclusion}`); + console.log('No matching workflow run found, assuming completed'); + break; + } + } else { + console.log(`Workflow run ${ourRun.id} status: ${ourRun.status}, conclusion: ${ourRun.conclusion || 'pending'}`); + + if (ourRun.status === 'completed') { + if (ourRun.conclusion === 'success') { + console.log('Docker build completed successfully'); + break; + } else { + throw new Error(`Docker build failed with conclusion: ${ourRun.conclusion}`); + } } } From 2d8b1146162d765d0ae78b23a7710c2262ad0f1c Mon Sep 17 00:00:00 2001 From: Leon Date: Sun, 21 Sep 2025 13:27:27 +0200 Subject: [PATCH 24/24] lint --- .github/workflows/release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index dcf120f..7dff981 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -87,8 +87,8 @@ jobs: // Check if this run has the expected inputs (is_release: true) // We can't directly access inputs from the API, so we'll use heuristics // Look for runs that are either in progress or recently completed - return run.status === 'in_progress' || - run.status === 'queued' || + return run.status === 'in_progress' || + 
run.status === 'queued' || (run.status === 'completed' && run.conclusion); });
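
End-to-end, the release flow this series converges on is tag-driven; a usage
sketch (assuming the `v*` tag pattern and the `version` dispatch input
defined in `release.yml` above):

```bash
# Pushing a tag triggers release.yml, which dispatches docker-build.yml,
# waits for the image build to finish, then publishes the GitHub release.
git tag v1.0.0
git push origin v1.0.0

# Alternatively, trigger the workflow by hand with an explicit version:
gh workflow run release.yml -f version=v1.0.0
```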