diff --git a/.gitignore b/.gitignore index 4a2cd9974..b469a8613 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,6 @@ .DS_Store /target /.env -/data /uploads /nb-configuration.xml /node @@ -12,4 +11,5 @@ /ssl /http-tests/ssl /http-tests/datasets -/http-tests/uploads \ No newline at end of file +/http-tests/uploads +/fuseki diff --git a/CHANGELOG.md b/CHANGELOG.md index bc6bea103..cc263639f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,67 @@ +## [5.1.0] - 2025-12-12 +### Added +- ORCID OpenID Connect login support with JWT token verification +- `CORSFilter` response filter for cross-origin resource sharing on static assets +- Cache invalidation (BAN requests) for agent and user account lookup queries +- New `Application::normalizeOrigin` method for origin normalization +- `ldh:parent-origin` XPath function for parent origin retrieval +- HTTP tests for CORS functionality, internal IP blocking, and form proxying +- `ForbiddenExceptionMapper` for handling forbidden exceptions +- `Content-Security-Policy` header for uploaded files to prevent XSS attacks +- Sticky left and right navigation panels +- Support for recursive content blocks +- Docker volume for Varnish cache file persistence + +### Changed +- **BREAKING**: Admin application moved from `/admin/` path to `admin.` subdomain +- **BREAKING**: Replaced `ldt:base` with `ldh:origin` in configuration (now uses absolute URIs with full domain names) +- Refactored OAuth2 authentication with extracted base classes `AuthorizeBase`, `LoginBase`, and `JWTVerifier` +- Provider-specific implementations for Google and ORCID OAuth flows in separate packages +- Authorization queries now isolated by dataspace using `FILTER(strstarts(str(?g), str($base)))` +- Optimized Varnish caching for authenticated requests with proper cache bypass for user-specific content +- Root domain extraction logic replaced with configured `BASE_URI` from `Application.getBaseURI()` +- Eliminated unnecessary wrapper methods 
(`getRootContextURI()`) in favor of direct `getSystem().getBaseURI()` calls +- Client-side XSLT now uses `ldt:base()` function instead of `$ldt:base` parameter +- OAuth and access request endpoints moved to end-user dataspace (no longer extend `GraphStoreImpl` or `SPARQLEndpointImpl`) +- ID tokens now returned via URL fragment instead of query parameters +- CLI scripts refactored: `--fragment` parameter renamed to `--uri` +- Nginx configuration now exempts internal requests from rate limiting +- Parameterized nginx and Varnish configurations for better flexibility +- Improved `ClientUriRewriteFilter` to use configured host instead of hardcoded localhost +- Agent metadata and authorizations now managed per-app in entrypoint.sh +- Separated templates for owner and secretary authorizations +- Fuseki data directory changed in Docker configuration +- `$ORIGIN` environment variable now excludes default ports (80/443) +- WYMEditor cross-origin compatibility fixes +- Replaced `ldh:new` with `ixsl:new` in client-side code + +### Fixed +- Fixed security vulnerability [LNK-002 (cache poisoning)](https://github.com/AtomGraph/LinkedDataHub/issues/253) +- Fixed security vulnerability [LNK-004 (path traversal)](https://github.com/AtomGraph/LinkedDataHub/issues/252) +- Fixed security vulnerability [LNK-009 (SSRF - internal IP address proxying)](https://github.com/AtomGraph/LinkedDataHub/issues/250) +- Fixed security vulnerability [LNK-011 (XSS via uploaded files)](https://github.com/AtomGraph/LinkedDataHub/issues/254) +- Fixed Billion Laughs [XML entity expansion exploit](https://github.com/AtomGraph/LinkedDataHub/issues/249) by excluding Xerces dependency +- Fixed OpenLayers map dragging functionality +- Fixed graph layout rendering issues +- Fixed SPARQL update and `application/x-www-form-urlencoded` proxying +- Fixed access request URL building and modal form display +- Fixed `ldh:Shape` mode in XSLT +- Fixed HTML reloading after OAuth login +- Improved SHACL support in UI with 
better form controls +- Fixed performance regression in `ClientUriRewriteFilter` for production deployments +- Fixed agent and user account duplicate creation via proper cache invalidation +- Fixed same-site URI resolution for XSLT document loading across subdomains +- Fixed entrypoint to load datasets for all configured apps +- Fixed authorization filter to handle non-existent dataspaces (throws `NotFoundException`) + +### Removed +- Removed `RequestAccess` resource from admin package (moved to end-user) +- Removed `admin/oauth2` package (OAuth moved to end-user dataspace) +- Removed XOM dependency +- Removed rate limiting tests from HTTP test suite +- Removed debug output from entrypoint and test scripts +- Removed unused namespace declarations + ## [5.0.23] - 2025-09-11 ### Added - Drag handles for content blocks - blocks can now only be dragged by their dedicated drag handles diff --git a/Dockerfile b/Dockerfile index b7d00c864..e5df99152 100644 --- a/Dockerfile +++ b/Dockerfile @@ -22,7 +22,7 @@ RUN mvn -Pstandalone clean install # ============================== -FROM atomgraph/letsencrypt-tomcat:10.1.34 +FROM atomgraph/letsencrypt-tomcat:10.1.46 LABEL maintainer="martynas@atomgraph.com" @@ -72,14 +72,12 @@ ENV OWNER_CERT_ALIAS=root-owner ENV OWNER_KEYSTORE=/var/linkeddatahub/ssl/owner/keystore.p12 ENV OWNER_CERT=/var/linkeddatahub/ssl/owner/cert.pem ENV OWNER_PUBLIC_KEY=/var/linkeddatahub/ssl/owner/public.pem -ENV OWNER_PRIVATE_KEY=/var/linkeddatahub/ssl/owner/private.key ENV SECRETARY_COMMON_NAME=LinkedDataHub ENV SECRETARY_CERT_ALIAS=root-secretary ENV SECRETARY_KEYSTORE=/var/linkeddatahub/ssl/secretary/keystore.p12 ENV SECRETARY_CERT=/var/linkeddatahub/ssl/secretary/cert.pem ENV SECRETARY_PUBLIC_KEY=/var/linkeddatahub/ssl/secretary/public.pem -ENV SECRETARY_PRIVATE_KEY=/var/linkeddatahub/ssl/secretary/private.key ENV CLIENT_KEYSTORE_MOUNT=/var/linkeddatahub/ssl/secretary/keystore.p12 ENV CLIENT_KEYSTORE="$CATALINA_HOME/webapps/ROOT/WEB-INF/keystore.p12" @@ 
-147,12 +145,22 @@ COPY platform/import-letsencrypt-stg-roots.sh import-letsencrypt-stg-roots.sh COPY platform/select-root-services.rq select-root-services.rq -# copy the metadata of the built-in secretary agent +COPY platform/select-agent-metadata.rq select-agent-metadata.rq + +# copy the metadata of built-in agents COPY platform/root-secretary.trig.template root-secretary.trig.template COPY platform/root-owner.trig.template root-owner.trig.template +COPY platform/root-secretary-authorization.trig.template root-secretary-authorization.trig.template + +COPY platform/root-owner-authorization.trig.template root-owner-authorization.trig.template + +# copy the metadata of the namespace ontology + +COPY platform/namespace-ontology.trig.template namespace-ontology.trig.template + # copy default datasets COPY platform/datasets/admin.trig /var/linkeddatahub/datasets/admin.trig @@ -197,7 +205,7 @@ RUN useradd --no-log-init -U ldh && \ RUN ./import-letsencrypt-stg-roots.sh HEALTHCHECK --start-period=80s --retries=5 \ - CMD curl -f -I "http://localhost:${HTTP_PORT}/ns" -H "Accept: application/n-triples" || exit 1 # relies on public access to the namespace document + CMD curl -f -I "http://localhost:7070/ns" -H "Accept: application/n-triples" || exit 1 # relies on public access to the namespace document USER ldh diff --git a/README.md b/README.md index fe1f520e3..18ee07e3e 100644 --- a/README.md +++ b/README.md @@ -28,10 +28,17 @@ It takes a few clicks and filling out a form to install the product into your ow ### Prerequisites * `bash` shell 4.x. It should be included by default on Linux. On Windows you can install the [Windows Subsystem for Linux](https://docs.microsoft.com/en-us/windows/wsl/install-win10). -* [`openssl`](https://www.openssl.org/) available on `$PATH` * [Docker](https://docs.docker.com/install/) installed. At least 8GB of memory dedicated to Docker is recommended. 
* [Docker Compose](https://docs.docker.com/compose/install/) installed +#### CLI scripts + +The following tools are required for CLI scripts in the `bin/` directory: + +* [`curl`](https://curl.se/) +* [`openssl`](https://www.openssl.org/) +* `python` 3.x + ### Steps 1. [Fork](https://guides.github.com/activities/forking/) this repository and clone the fork into a folder @@ -270,11 +277,11 @@ LinkedDataHub includes an HTTP [test suite](https://github.com/AtomGraph/LinkedD * [SPARQLBuilder](https://github.com/AtomGraph/sparql-builder) * [OpenLayers](https://openlayers.org) * [Google Charts](https://developers.google.com/chart) +* [xml-c14n-sync](https://github.com/AtomGraph/xml-c14n-sync) ### Java * [Jersey](https://eclipse-ee4j.github.io/jersey/) -* [XOM](http://www.xom.nu) * [JavaMail](https://javaee.github.io/javamail/) * [Guava](https://github.com/google/guava) * [java-jwt](https://github.com/auth0/java-jwt) diff --git a/bin/add-generic-service.sh b/bin/add-generic-service.sh index 645c33998..98f0086f1 100755 --- a/bin/add-generic-service.sh +++ b/bin/add-generic-service.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -16,6 +17,7 @@ print_usage() printf " --description DESCRIPTION Description of the service (optional)\n" printf " --slug SLUG String that will be used as URI path segment (optional)\n" printf "\n" + printf " --uri URI URI of the service (optional)\n" printf " --endpoint ENDPOINT_URI Endpoint URI\n" printf " --graph-store GRAPH_STORE_URI Graph Store URI (optional)\n" printf " --auth-user AUTH_USER Authorization username (optional)\n" @@ -43,6 +45,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --title) title="$2" shift # past argument @@ -53,8 +60,8 @@ do shift # past argument shift # past value ;; - --fragment) - fragment="$2" + --uri) + uri="$2" shift # past argument shift # past value ;; @@ -69,7 +76,8 @@ do shift # past value ;; --auth-user) 
- auth_user=true + auth_user="$2" + shift # past argument shift # past value ;; --auth-pwd) @@ -85,6 +93,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -112,10 +122,13 @@ args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi -if [ -n "$fragment" ] ; then - # relative URI that will be resolved against the request URI - subject="<#${fragment}>" +if [ -n "$uri" ] ; then + subject="<${uri}>" else subject="_:subject" fi @@ -142,8 +155,8 @@ if [ -n "$auth_pwd" ] ; then turtle+="${subject} a:authPwd \"${auth_pwd}\" .\n" fi if [ -n "$description" ] ; then - turtle+="_:query dct:description \"${description}\" .\n" + turtle+="${subject} dct:description \"${description}\" .\n" fi # submit Turtle doc to the server -echo -e "$turtle" | post.sh "${args[@]}" +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" diff --git a/bin/add-result-set-chart.sh b/bin/add-result-set-chart.sh index b4a0c7d7e..5cb08da22 100755 --- a/bin/add-result-set-chart.sh +++ b/bin/add-result-set-chart.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -14,7 +15,7 @@ print_usage() printf "\n" printf " --title TITLE Title of the chart\n" printf " --description DESCRIPTION Description of the chart (optional)\n" - printf " --fragment STRING String that will be used as URI fragment identifier (optional)\n" + printf " --uri URI URI of the chart (optional)\n" printf "\n" printf " --query QUERY_URI URI of the SELECT query\n" printf " --chart-type TYPE_URI URI of the chart type\n" @@ -43,6 +44,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --title) title="$2" shift # past argument @@ -53,8 +59,8 @@ do shift # past argument shift # past value ;; - --fragment) - fragment="$2" + --uri) + uri="$2" shift # past argument shift # 
past value ;; @@ -86,6 +92,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -125,10 +133,13 @@ args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi -if [ -n "$fragment" ] ; then - # relative URI that will be resolved against the request URI - subject="<#${fragment}>" +if [ -n "$uri" ] ; then + subject="<${uri}>" else subject="_:subject" fi @@ -148,4 +159,4 @@ if [ -n "$description" ] ; then fi # submit Turtle doc to the server -echo -e "$turtle" | post.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" \ No newline at end of file diff --git a/bin/add-select.sh b/bin/add-select.sh index 0d48ab075..db1d995da 100755 --- a/bin/add-select.sh +++ b/bin/add-select.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -14,7 +15,7 @@ print_usage() printf "\n" printf " --title TITLE Title of the chart\n" printf " --description DESCRIPTION Description of the chart (optional)\n" - printf " --fragment STRING String that will be used as URI fragment identifier (optional)\n" + printf " --uri URI URI of the query (optional)\n" printf "\n" printf " --query-file ABS_PATH Absolute path to the text file with the SPARQL query string\n" printf " --service SERVICE_URI URI of the SPARQL service specific to this query (optional)\n" @@ -41,6 +42,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --title) title="$2" shift # past argument @@ -51,8 +57,8 @@ do shift # past argument shift # past value ;; - --fragment) - fragment="$2" + --uri) + uri="$2" shift # past argument shift # past value ;; @@ -74,6 +80,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -103,10 +111,13 @@ args+=("-p") 
args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi -if [ -n "$fragment" ] ; then - # relative URI that will be resolved against the request URI - subject="<#${fragment}>" +if [ -n "$uri" ] ; then + subject="<${uri}>" else subject="_:subject" fi @@ -126,4 +137,4 @@ if [ -n "$description" ] ; then fi # submit Turtle doc to the server -echo -e "$turtle" | post.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" \ No newline at end of file diff --git a/bin/add-view.sh b/bin/add-view.sh index 24827c982..42fed9e99 100755 --- a/bin/add-view.sh +++ b/bin/add-view.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -14,7 +15,7 @@ print_usage() printf "\n" printf " --title TITLE Title of the view\n" printf " --description DESCRIPTION Description of the view (optional)\n" - printf " --fragment STRING String that will be used as URI fragment identifier (optional)\n" + printf " --uri URI URI of the view (optional)\n" printf "\n" printf " --query QUERY_URI URI of the SELECT query\n" printf " --mode MODE_URI URI of the block mode (list, grid etc.) 
(optional)\n" @@ -41,6 +42,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --title) title="$2" shift # past argument @@ -51,8 +57,8 @@ do shift # past argument shift # past value ;; - --fragment) - fragment="$2" + --uri) + uri="$2" shift # past argument shift # past value ;; @@ -74,6 +80,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -97,10 +105,13 @@ args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi -if [ -n "$fragment" ] ; then - # relative URI that will be resolved against the request URI - subject="<#${fragment}>" +if [ -n "$uri" ] ; then + subject="<${uri}>" else subject="_:subject" fi @@ -123,4 +134,4 @@ if [ -n "$mode" ] ; then fi # submit Turtle doc to the server -echo -e "$turtle" | post.sh "${args[@]}" +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" diff --git a/bin/admin/acl/add-agent-to-group.sh b/bin/admin/acl/add-agent-to-group.sh index 0cc212b19..b7e8abb3a 100755 --- a/bin/admin/acl/add-agent-to-group.sh +++ b/bin/admin/acl/add-agent-to-group.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -79,4 +80,4 @@ sparql+="}\n" # PATCH SPARQL to the named graph -echo -e "$sparql" | curl -X PATCH --data-binary @- -s -k -E "$cert_pem_file":"$cert_password" "$target" -H "Content-Type: application/sparql-update" \ No newline at end of file +echo -e "$sparql" | curl -f -X PATCH --data-binary @- -s -k -E "$cert_pem_file":"$cert_password" "$target" -H "Content-Type: application/sparql-update" \ No newline at end of file diff --git a/bin/admin/acl/create-authorization.sh b/bin/admin/acl/create-authorization.sh index 82e5cd909..eb10a21ef 100755 --- a/bin/admin/acl/create-authorization.sh +++ b/bin/admin/acl/create-authorization.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash 
+set -eo pipefail print_usage() { @@ -56,6 +57,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --label) label="$2" shift # past argument @@ -172,13 +178,19 @@ else auth="_:auth" # blank node fi +target="${container}${encoded_slug}/" + args+=("-f") args+=("$cert_pem_file") args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type -args+=("${container}${encoded_slug}/") +args+=("$target") +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi turtle+="@prefix dh: .\n" turtle+="@prefix rdfs: .\n" @@ -187,9 +199,9 @@ turtle+="@prefix dct: .\n" turtle+="@prefix foaf: .\n" turtle+="${auth} a acl:Authorization .\n" turtle+="${auth} rdfs:label \"${label}\" .\n" -turtle+="<${container}${encoded_slug}/> a dh:Item .\n" -turtle+="<${container}${encoded_slug}/> foaf:primaryTopic ${auth} .\n" -turtle+="<${container}${encoded_slug}/> dct:title \"${label}\" .\n" +turtle+="<${target}> a dh:Item .\n" +turtle+="<${target}> foaf:primaryTopic ${auth} .\n" +turtle+="<${target}> dct:title \"${label}\" .\n" if [ -n "$comment" ] ; then turtle+="${auth} rdfs:comment \"${comment}\" .\n" @@ -230,4 +242,4 @@ if [ -n "$write" ] ; then fi # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$base" | put.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}" \ No newline at end of file diff --git a/bin/admin/acl/create-group.sh b/bin/admin/acl/create-group.sh index 12972ddf6..c283f8e0a 100755 --- a/bin/admin/acl/create-group.sh +++ b/bin/admin/acl/create-group.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -48,6 +49,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --name) name="$2" shift # past argument @@ -116,22 +122,28 @@ else group="_:auth" # blank node fi +target="${container}${encoded_slug}/" 
+ args+=("-f") args+=("$cert_pem_file") args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type -args+=("${container}${encoded_slug}/") +args+=("$target") +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi turtle+="@prefix dh: .\n" turtle+="@prefix dct: .\n" turtle+="@prefix foaf: .\n" turtle+="${group} a foaf:Group .\n" turtle+="${group} foaf:name \"${label}\" .\n" -turtle+="<${container}${encoded_slug}/> a dh:Item .\n" -turtle+="<${container}${encoded_slug}/> foaf:primaryTopic ${group} .\n" -turtle+="<${container}${encoded_slug}/> dct:title \"${label}\" .\n" +turtle+="<${target}> a dh:Item .\n" +turtle+="<${target}> foaf:primaryTopic ${group} .\n" +turtle+="<${target}> dct:title \"${label}\" .\n" if [ -n "$description" ] ; then turtle+="${group} dct:description \"${description}\" .\n" @@ -143,4 +155,4 @@ do done # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$base" | put.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}" \ No newline at end of file diff --git a/bin/admin/acl/make-public.sh b/bin/admin/acl/make-public.sh index 95b1ee1ec..c0f5d39c8 100755 --- a/bin/admin/acl/make-public.sh +++ b/bin/admin/acl/make-public.sh @@ -9,7 +9,7 @@ print_usage() printf "Options:\n" printf " -f, --cert-pem-file CERT_FILE .pem file with the WebID certificate of the agent\n" printf " -p, --cert-password CERT_PASSWORD Password of the WebID certificate\n" - printf " -b, --base BASE_URI Base URI of the admin application\n" + printf " -b, --base BASE_URI Base URI of the end-user application\n" printf " --proxy PROXY_URL The host this request will be proxied through (optional)\n" } @@ -58,12 +58,19 @@ if [ -z "$base" ] ; then exit 1 fi -target="${base}admin/acl/authorizations/public/" +admin_uri() { + local uri="$1" + echo "$uri" | sed 's|://|://admin.|' +} + +admin_base=$(admin_uri "$base") +target="${admin_base}acl/authorizations/public/" if [ -n 
"$proxy" ]; then - # rewrite target hostname to proxy hostname + # rewrite target hostname to proxy hostname (also convert proxy to admin subdomain) + admin_proxy=$(admin_uri "$proxy") target_host=$(echo "$target" | cut -d '/' -f 1,2,3) - proxy_host=$(echo "$proxy" | cut -d '/' -f 1,2,3) + proxy_host=$(echo "$admin_proxy" | cut -d '/' -f 1,2,3) target="${target/$target_host/$proxy_host}" fi @@ -73,7 +80,7 @@ curl -X PATCH \ -H "Content-Type: application/sparql-update" \ "$target" \ --data-binary @- < +BASE <${admin_base}> PREFIX acl: PREFIX def: @@ -84,10 +91,10 @@ PREFIX foaf: INSERT { acl:accessToClass def:Root, dh:Container, dh:Item, nfo:FileDataObject ; - acl:accessTo <../sparql> . + acl:accessTo <${base}sparql> . a acl:Authorization ; - acl:accessTo <../sparql> ; + acl:accessTo <${base}sparql> ; acl:mode acl:Append ; acl:agentClass foaf:Agent, acl:AuthenticatedAgent . # hacky way to allow queries over POST } diff --git a/bin/admin/add-ontology-import.sh b/bin/admin/add-ontology-import.sh index 3333c9ab4..df5ce800d 100755 --- a/bin/admin/add-ontology-import.sh +++ b/bin/admin/add-ontology-import.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -80,4 +81,4 @@ sparql+="}\n" # PATCH SPARQL to the named graph -echo -e "$sparql" | curl -X PATCH --data-binary @- -v -k -E "$cert_pem_file":"$cert_password" "$target" -H "Content-Type: application/sparql-update" \ No newline at end of file +echo -e "$sparql" | curl -f -X PATCH --data-binary @- -v -k -E "$cert_pem_file":"$cert_password" "$target" -H "Content-Type: application/sparql-update" \ No newline at end of file diff --git a/bin/admin/model/add-class.sh b/bin/admin/model/add-class.sh index 9d8abd639..b729e988a 100755 --- a/bin/admin/model/add-class.sh +++ b/bin/admin/model/add-class.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -46,6 +47,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + 
;; --label) label="$2" shift # past argument @@ -84,6 +90,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -118,6 +126,10 @@ args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi turtle+="@prefix dh: .\n" turtle+="@prefix owl: .\n" @@ -146,4 +158,4 @@ do done # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$base" | post.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" \ No newline at end of file diff --git a/bin/admin/model/add-construct.sh b/bin/admin/model/add-construct.sh index b3a00cd1c..f6c59f65e 100755 --- a/bin/admin/model/add-construct.sh +++ b/bin/admin/model/add-construct.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -42,6 +43,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --label) label="$2" shift # past argument @@ -70,6 +76,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -110,6 +118,10 @@ args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi turtle+="@prefix sp: .\n" turtle+="@prefix rdfs: .\n" @@ -122,4 +134,4 @@ if [ -n "$comment" ] ; then fi # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$base" | post.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" \ No newline at end of file diff --git a/bin/admin/model/add-property-constraint.sh b/bin/admin/model/add-property-constraint.sh index 05787dc11..382f998aa 100755 --- a/bin/admin/model/add-property-constraint.sh +++ b/bin/admin/model/add-property-constraint.sh @@ -1,4 +1,5 @@ 
#!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -42,6 +43,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --label) label="$2" shift # past argument @@ -70,6 +76,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -108,6 +116,10 @@ args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi turtle+="@prefix ldh: .\n" turtle+="@prefix rdfs: .\n" @@ -121,4 +133,4 @@ if [ -n "$comment" ] ; then fi # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$base" | post.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" \ No newline at end of file diff --git a/bin/admin/model/add-restriction.sh b/bin/admin/model/add-restriction.sh index 6284baae9..05985e9ca 100755 --- a/bin/admin/model/add-restriction.sh +++ b/bin/admin/model/add-restriction.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -44,6 +45,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --label) label="$2" shift # past argument @@ -82,6 +88,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -116,6 +124,10 @@ args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi turtle+="@prefix owl: .\n" turtle+="@prefix rdfs: .\n" @@ -138,4 +150,4 @@ if [ -n "$has_value" ] ; then fi # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$base" | post.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" \ No newline at end of file diff --git 
a/bin/admin/model/add-select.sh b/bin/admin/model/add-select.sh index db82da765..03f89049e 100755 --- a/bin/admin/model/add-select.sh +++ b/bin/admin/model/add-select.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -43,6 +44,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --label) label="$2" shift # past argument @@ -76,6 +82,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -116,6 +124,10 @@ args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi turtle+="@prefix ldh: .\n" turtle+="@prefix sp: .\n" @@ -133,4 +145,4 @@ if [ -n "$service" ] ; then fi # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$base" | post.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" \ No newline at end of file diff --git a/bin/admin/model/create-ontology.sh b/bin/admin/model/create-ontology.sh index d691b286d..c0a8e2df6 100755 --- a/bin/admin/model/create-ontology.sh +++ b/bin/admin/model/create-ontology.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -47,6 +48,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --label) label="$2" shift # past argument @@ -75,6 +81,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -113,6 +121,10 @@ args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type args+=("${container}${encoded_slug}/") +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi turtle+="@prefix dh: .\n" turtle+="@prefix owl: .\n" @@ -131,4 +143,4 @@ if [ -n "$comment" ] ; then fi # submit Turtle doc to the server -echo -e "$turtle" 
| turtle --base="$base" | put.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}" \ No newline at end of file diff --git a/bin/admin/model/import-ontology.sh b/bin/admin/model/import-ontology.sh index ef116dd73..4aa33fdd7 100755 --- a/bin/admin/model/import-ontology.sh +++ b/bin/admin/model/import-ontology.sh @@ -98,4 +98,4 @@ turtle+="_:arg <${graph}> turtle+="_:arg <${base}queries/construct-constructors/#this> .\n" # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$base" | curl -s -k -E "$cert_pem_file":"$cert_password" -d @- -H "Content-Type: $content_type" -H "Accept: text/turtle" "$target" -s -D - \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | curl -s -k -E "$cert_pem_file":"$cert_password" -d @- -H "Content-Type: $content_type" -H "Accept: text/turtle" "$target" -s -D - \ No newline at end of file diff --git a/bin/content/add-object-block.sh b/bin/content/add-object-block.sh index 16b64532d..1ecaa997f 100755 --- a/bin/content/add-object-block.sh +++ b/bin/content/add-object-block.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -14,7 +15,7 @@ print_usage() printf "\n" printf " --title TITLE Title\n" printf " --description DESCRIPTION Description(optional)\n" - printf " --fragment STRING String that will be used as URI fragment identifier (optional)\n" + printf " --uri URI URI of the object block (optional)\n" printf "\n" printf " --value RESOURCE_URI URI of the object resource\n" printf " --mode MODE_URI URI of the block mode (list, grid etc.) 
(optional)\n" @@ -56,8 +57,8 @@ do shift # past argument shift # past value ;; - --fragment) - fragment="$2" + --uri) + uri="$2" shift # past argument shift # past value ;; @@ -106,7 +107,13 @@ ntriples=$(get.sh \ "$target") # extract the numbers from the sequence properties -sequence_number=$(echo "$ntriples" | grep "<${target}> ' -f 1 | sort -nr | head -n1) +sequence_number=$(echo "$ntriples" | grep "<${target}> ' -f 1 | sort -nr | head -n1 || echo "0") + +# Handle empty sequence_number (no existing sequence properties) +if [ -z "$sequence_number" ]; then + sequence_number=0 +fi + sequence_number=$((sequence_number + 1)) # increase the counter sequence_property="http://www.w3.org/1999/02/22-rdf-syntax-ns#_${sequence_number}" @@ -116,12 +123,13 @@ args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type -args+=("--proxy") -args+=("$proxy") # tunnel the proxy param +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi -if [ -n "$fragment" ] ; then - # relative URI that will be resolved against the request URI - subject="<#${fragment}>" +if [ -n "$uri" ] ; then + subject="<${uri}>" else subject="_:subject" fi @@ -145,4 +153,4 @@ if [ -n "$mode" ] ; then fi # submit Turtle doc to the server -echo -e "$turtle" | post.sh "${args[@]}" +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" diff --git a/bin/content/add-xhtml-block.sh b/bin/content/add-xhtml-block.sh index 3af9cf508..4ca7c2f05 100755 --- a/bin/content/add-xhtml-block.sh +++ b/bin/content/add-xhtml-block.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -14,7 +15,7 @@ print_usage() printf "\n" printf " --title TITLE Title\n" printf " --description DESCRIPTION Description(optional)\n" - printf " --fragment STRING String that will be used as URI fragment identifier (optional)\n" + printf " --uri URI URI of the XHTML block (optional)\n" printf "\n" printf " --value XHTML XHTML as canonical XML\n" } @@ -55,8 +56,8 @@ do shift # 
past argument shift # past value ;; - --fragment) - fragment="$2" + --uri) + uri="$2" shift # past argument shift # past value ;; @@ -100,7 +101,13 @@ ntriples=$(get.sh \ "$target") # extract the numbers from the sequence properties -sequence_number=$(echo "$ntriples" | grep "<${target}> ' -f 1 | sort -nr | head -n1) +sequence_number=$(echo "$ntriples" | grep "<${target}> ' -f 1 | sort -nr | head -n1 || echo "0") + +# Handle empty sequence_number (no existing sequence properties) +if [ -z "$sequence_number" ]; then + sequence_number=0 +fi + sequence_number=$((sequence_number + 1)) # increase the counter sequence_property="http://www.w3.org/1999/02/22-rdf-syntax-ns#_${sequence_number}" @@ -110,12 +117,13 @@ args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type -args+=("--proxy") -args+=("$proxy") # tunnel the proxy param +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi -if [ -n "$fragment" ] ; then - # relative URI that will be resolved against the request URI - subject="<#${fragment}>" +if [ -n "$uri" ] ; then + subject="<${uri}>" else subject="_:subject" fi @@ -135,4 +143,4 @@ if [ -n "$description" ] ; then fi # submit Turtle doc to the server -echo -e "$turtle" | post.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" \ No newline at end of file diff --git a/bin/create-container.sh b/bin/create-container.sh index 29ef0bdde..029098de4 100755 --- a/bin/create-container.sh +++ b/bin/create-container.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -144,4 +145,4 @@ if [ -n "$description" ] ; then turtle+="<${target}> dct:description \"${description}\" .\n" fi -echo -e "$turtle" | turtle --base="$base" | put.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}" \ No newline at end of file diff --git a/bin/create-item.sh b/bin/create-item.sh index e66398cb7..b5470eb65 
100755 --- a/bin/create-item.sh +++ b/bin/create-item.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -119,4 +120,4 @@ if [ -n "$description" ] ; then turtle+="<${target}> dct:description \"${description}\" .\n" fi -echo -e "$turtle" | turtle --base="$base" | put.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}" \ No newline at end of file diff --git a/bin/get.sh b/bin/get.sh index 50f43f3c9..09ebb262f 100755 --- a/bin/get.sh +++ b/bin/get.sh @@ -84,7 +84,7 @@ fi # GET RDF document if [ -n "$head" ] ; then - curl -v -k -E "$cert_pem_file":"$cert_password" -H "Accept: ${accept}" "$target" --head + curl -f -v -k -E "$cert_pem_file":"$cert_password" -H "Accept: ${accept}" "$target" --head else - curl -v -k -E "$cert_pem_file":"$cert_password" -H "Accept: ${accept}" "$target" + curl -f -v -k -E "$cert_pem_file":"$cert_password" -H "Accept: ${accept}" "$target" fi \ No newline at end of file diff --git a/bin/imports/create-csv-import.sh b/bin/imports/create-csv-import.sh index f77e41a3b..f7edac6cd 100755 --- a/bin/imports/create-csv-import.sh +++ b/bin/imports/create-csv-import.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -49,6 +50,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --title) title="$2" shift # past argument @@ -123,13 +129,19 @@ encoded_slug=$(urlencode "$slug") container="${base}imports/" +target="${container}${encoded_slug}/" + args+=("-f") args+=("$cert_pem_file") args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type -args+=("${container}${encoded_slug}/") +args+=("$target") +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi turtle+="@prefix ldh: .\n" turtle+="@prefix dh: .\n" @@ -141,13 +153,13 @@ turtle+="_:import dct:title \"${title}\" .\n" turtle+="_:import spin:query <${query}> .\n" turtle+="_:import 
ldh:file <${file}> .\n" turtle+="_:import ldh:delimiter \"${delimiter}\" .\n" -turtle+="<${container}${encoded_slug}/> a dh:Item .\n" -turtle+="<${container}${encoded_slug}/> foaf:primaryTopic _:import .\n" -turtle+="<${container}${encoded_slug}/> dct:title \"${title}\" .\n" +turtle+="<${target}> a dh:Item .\n" +turtle+="<${target}> foaf:primaryTopic _:import .\n" +turtle+="<${target}> dct:title \"${title}\" .\n" if [ -n "$description" ] ; then turtle+="_:import dct:description \"${description}\" .\n" fi # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$base" | put.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}" \ No newline at end of file diff --git a/bin/imports/create-file.sh b/bin/imports/create-file.sh index bbb21670d..36413d34c 100755 --- a/bin/imports/create-file.sh +++ b/bin/imports/create-file.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -176,7 +177,7 @@ if [ -n "$proxy" ]; then fi # POST RDF/POST multipart form and capture the effective URL -effective_url=$(echo -e "$rdf_post" | curl -w '%{url_effective}' -v -s -k -X PUT -H "Accept: text/turtle" -E "$cert_pem_file":"$cert_password" -o /dev/null --config - "$target") +effective_url=$(echo -e "$rdf_post" | curl -w '%{url_effective}' -f -v -s -k -X PUT -H "Accept: text/turtle" -E "$cert_pem_file":"$cert_password" -o /dev/null --config - "$target") # If using proxy, rewrite the effective URL back to original hostname if [ -n "$proxy" ]; then diff --git a/bin/imports/create-query.sh b/bin/imports/create-query.sh index 990edf959..f9d793498 100755 --- a/bin/imports/create-query.sh +++ b/bin/imports/create-query.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -47,6 +48,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --title) title="$2" shift # past argument @@ -104,13 +110,19 @@ 
encoded_slug=$(urlencode "$slug") container="${base}queries/" query=$(<"$query_file") # read query string from file +target="${container}${encoded_slug}/" + args+=("-f") args+=("$cert_pem_file") args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type -args+=("${container}${encoded_slug}/") +args+=("$target") +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi turtle+="@prefix ldh: .\n" turtle+="@prefix dh: .\n" @@ -120,13 +132,13 @@ turtle+="@prefix sp: .\n" turtle+="_:query a sp:Construct .\n" turtle+="_:query dct:title \"${title}\" .\n" turtle+="_:query sp:text \"\"\"${query}\"\"\" .\n" -turtle+="<${container}${encoded_slug}/> a dh:Item .\n" -turtle+="<${container}${encoded_slug}/> foaf:primaryTopic _:query .\n" -turtle+="<${container}${encoded_slug}/> dct:title \"${title}\" .\n" +turtle+="<${target}> a dh:Item .\n" +turtle+="<${target}> foaf:primaryTopic _:query .\n" +turtle+="<${target}> dct:title \"${title}\" .\n" if [ -n "$description" ] ; then turtle+="_:query dct:description \"${description}\" .\n" fi # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$base" | put.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}" \ No newline at end of file diff --git a/bin/imports/create-rdf-import.sh b/bin/imports/create-rdf-import.sh index ccbb24c4a..8d76b5e48 100755 --- a/bin/imports/create-rdf-import.sh +++ b/bin/imports/create-rdf-import.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash +set -eo pipefail print_usage() { @@ -49,6 +50,11 @@ do shift # past argument shift # past value ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; --title) title="$2" shift # past argument @@ -115,13 +121,19 @@ encoded_slug=$(urlencode "$slug") container="${base}imports/" +target="${container}${encoded_slug}/" + args+=("-f") args+=("$cert_pem_file") args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type 
-args+=("${container}${encoded_slug}/") +args+=("$target") +if [ -n "$proxy" ]; then + args+=("--proxy") + args+=("$proxy") +fi turtle+="@prefix ldh: .\n" turtle+="@prefix dh: .\n" @@ -130,9 +142,9 @@ turtle+="@prefix foaf: .\n" turtle+="_:import a ldh:RDFImport .\n" turtle+="_:import dct:title \"${title}\" .\n" turtle+="_:import ldh:file <${file}> .\n" -turtle+="<${container}${encoded_slug}/> a dh:Item .\n" -turtle+="<${container}${encoded_slug}/> foaf:primaryTopic _:import .\n" -turtle+="<${container}${encoded_slug}/> dct:title \"${title}\" .\n" +turtle+="<${target}> a dh:Item .\n" +turtle+="<${target}> foaf:primaryTopic _:import .\n" +turtle+="<${target}> dct:title \"${title}\" .\n" if [ -n "$graph" ] ; then turtle+="@prefix sd: .\n" @@ -147,4 +159,4 @@ if [ -n "$description" ] ; then fi # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$base" | put.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}" \ No newline at end of file diff --git a/bin/patch.sh b/bin/patch.sh index 93c821c08..e4fa67b83 100755 --- a/bin/patch.sh +++ b/bin/patch.sh @@ -70,4 +70,4 @@ fi # resolve SPARQL update from stdin against base URL and PATCH it to the server # uparse currently does not support --base: https://github.com/apache/jena/issues/3296 -cat - | curl -v -k -E "$cert_pem_file":"$cert_password" --data-binary @- -H "Content-Type: application/sparql-update" -X PATCH -o /dev/null "$final_url" +cat - | curl -f -v -k -E "$cert_pem_file":"$cert_password" --data-binary @- -H "Content-Type: application/sparql-update" -X PATCH -o /dev/null "$final_url" diff --git a/bin/post.sh b/bin/post.sh index a820065d7..54e49eafe 100755 --- a/bin/post.sh +++ b/bin/post.sh @@ -80,7 +80,7 @@ else fi # resolve RDF document from stdin against base URL and POST to the server and print request URL -effective_url=$(cat - | turtle --base="$url" | curl -w '%{url_effective}' -v -k -E "$cert_pem_file":"$cert_password" -d @- -H 
"Content-Type: ${content_type}" -H "Accept: text/turtle" -o /dev/null "$final_url") +effective_url=$(cat - | turtle --base="$url" | curl -w '%{url_effective}' -f -v -k -E "$cert_pem_file":"$cert_password" -d @- -H "Content-Type: ${content_type}" -H "Accept: text/turtle" -o /dev/null "$final_url") || exit $? # If using proxy, rewrite the effective URL back to original hostname if [ -n "$proxy" ]; then diff --git a/bin/put.sh b/bin/put.sh index 3f890a369..799d81d2b 100755 --- a/bin/put.sh +++ b/bin/put.sh @@ -80,7 +80,7 @@ else fi # resolve RDF document from stdin against base URL and PUT to the server and print request URL -effective_url=$(cat - | turtle --base="$url" | curl -w '%{url_effective}' -v -k -E "$cert_pem_file":"$cert_password" -d @- -X PUT -H "Content-Type: ${content_type}" -H "Accept: text/turtle" -o /dev/null "$final_url") +effective_url=$(cat - | turtle --base="$url" | curl -w '%{url_effective}' -f -v -k -E "$cert_pem_file":"$cert_password" -d @- -X PUT -H "Content-Type: ${content_type}" -H "Accept: text/turtle" -o /dev/null "$final_url") || exit $? 
# If using proxy, rewrite the effective URL back to original hostname if [ -n "$proxy" ]; then diff --git a/bin/webid-keygen-pem.sh b/bin/webid-keygen-pem.sh index cc7d8c2ee..d5b93ac82 100755 --- a/bin/webid-keygen-pem.sh +++ b/bin/webid-keygen-pem.sh @@ -4,7 +4,7 @@ if [ "$#" -ne 6 ]; then echo "Usage: $0" '$alias $cert_file $keystore_password $key_password $webid_uri $validity' >&2 - echo "Example: $0 martynas martynas.localhost.p12 Martynas Martynas https://localhost:4443/admin/acl/agents/ce84eb31-cc1e-41f4-9e29-dacd417b9818/#this 365" >&2 + echo "Example: $0 martynas martynas.localhost.p12 Martynas Martynas https://admin.localhost:4443/acl/agents/ce84eb31-cc1e-41f4-9e29-dacd417b9818/#this 365" >&2 exit 1 fi diff --git a/bin/webid-keygen.sh b/bin/webid-keygen.sh index 7d7fc8594..787180f22 100755 --- a/bin/webid-keygen.sh +++ b/bin/webid-keygen.sh @@ -4,7 +4,7 @@ if [ "$#" -ne 6 ]; then echo "Usage: $0" '$alias $cert_file $keystore_password $key_password $webid_uri $validity' >&2 - echo "Example: $0 martynas martynas.localhost.p12 Password Password https://localhost:4443/admin/acl/agents/ce84eb31-cc1e-41f4-9e29-dacd417b9818/#this 3650" >&2 + echo "Example: $0 martynas martynas.localhost.p12 Password Password https://admin.localhost:4443/acl/agents/ce84eb31-cc1e-41f4-9e29-dacd417b9818/#this 3650" >&2 exit 1 fi diff --git a/config/system.trig b/config/system.trig index 2fdf7c99c..647f582c7 100644 --- a/config/system.trig +++ b/config/system.trig @@ -1,4 +1,5 @@ @prefix lapp: . +@prefix ldh: . @prefix a: . @prefix ac: . @prefix rdf: . 
@@ -16,7 +17,8 @@ a lapp:Application, lapp:AdminApplication ; dct:title "LinkedDataHub admin" ; - ldt:base ; + # ldt:base ; + ldh:origin ; ldt:ontology ; ldt:service ; ac:stylesheet ; @@ -35,8 +37,9 @@ a lapp:Application, lapp:EndUserApplication ; dct:title "LinkedDataHub" ; - ldt:base <> ; - ldt:ontology ; + # ldt:base ; + ldh:origin ; + ldt:ontology ; ldt:service ; lapp:adminApplication ; lapp:frontendProxy ; diff --git a/docker-compose.yml b/docker-compose.yml index 0e6d3ce14..7b8fffd36 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,10 +5,16 @@ secrets: file: ./secrets/secretary_cert_password.txt client_truststore_password: file: ./secrets/client_truststore_password.txt - #google_client_id: - # file: ./secrets/google_client_id.txt - #google_client_secret: - # file: ./secrets/google_client_secret.txt + # google_client_id: + # file: ./secrets/google/client_id.txt + # google_client_secret: + # file: ./secrets/google/client_secret.txt + # orcid_client_id: + # file: ./secrets/orcid/client_id.txt + # orcid_client_secret: + # file: ./secrets/orcid/client_secret.txt +volumes: + varnish_frontend_cache: services: nginx: image: nginx:1.23.3 @@ -56,6 +62,11 @@ services: - MAIL_SMTP_HOST=email-server - MAIL_SMTP_PORT=25 - MAIL_USER=linkeddatahub@localhost + - REMOTE_IP_VALVE=true + - REMOTE_IP_VALVE_PROTOCOL_HEADER=X-Forwarded-Proto + - REMOTE_IP_VALVE_PORT_HEADER=X-Forwarded-Port + - REMOTE_IP_VALVE_REMOTE_IP_HEADER=X-Forwarded-For + - REMOTE_IP_VALVE_HOST_HEADER=X-Forwarded-Host - OWNER_MBOX=${OWNER_MBOX} #- OWNER_URI=${OWNER_URI} - OWNER_GIVEN_NAME=${OWNER_GIVEN_NAME} @@ -69,8 +80,10 @@ services: - owner_cert_password - secretary_cert_password - client_truststore_password - #- google_client_id - #- google_client_secret + # - google_client_id + # - google_client_secret + # - orcid_client_id + # - orcid_client_secret volumes: - /var/linkeddatahub/oidc - ./ssl/server:/var/linkeddatahub/ssl/server:ro @@ -88,7 +101,7 @@ services: - 3030 volumes: - 
./config/fuseki/config.ttl:/fuseki/config.ttl:ro - - ./data/admin:/fuseki/databases + - ./fuseki/admin:/fuseki/databases command: [ "--config", "/fuseki/config.ttl" ] fuseki-end-user: image: atomgraph/fuseki:4.7.0 @@ -97,7 +110,7 @@ services: - 3030 volumes: - ./config/fuseki/config.ttl:/fuseki/config.ttl:ro - - ./data/end-user:/fuseki/databases + - ./fuseki/end-user:/fuseki/databases command: [ "--config", "/fuseki/config.ttl" ] varnish-frontend: image: varnish:7.3.0 @@ -108,10 +121,10 @@ services: depends_on: - linkeddatahub tmpfs: /var/lib/varnish/varnishd:exec - environment: - - VARNISH_HTTP_PORT=6060 - - VARNISH_SIZE=1G - command: [ "-t", "86400" ] # time to live + volumes: + - varnish_frontend_cache:/var/lib/varnish + entrypoint: varnishd + command: [ "-F", "-f", "/etc/varnish/default.vcl", "-a", "http=:6060,HTTP", "-a", "proxy=:8443,PROXY", "-p", "feature=+http2", "-s", "file,/var/lib/varnish/storage.bin,3G", "-t", "86400" ] # -F: foreground, -f: config, -a: listeners, -p: http2, -s: storage, -t: TTL varnish-admin: image: varnish:7.3.0 user: root # otherwise the varnish user does not have permissions to the mounted folder which is owner by root @@ -121,9 +134,8 @@ services: depends_on: - linkeddatahub tmpfs: /var/lib/varnish/varnishd:exec - environment: - - VARNISH_SIZE=1G - command: [ "-t", "86400", "-p", "timeout_idle=60s" ] # time to live + entrypoint: varnishd + command: [ "-F", "-f", "/etc/varnish/default.vcl", "-a", "http=:80,HTTP", "-a", "proxy=:8443,PROXY", "-p", "feature=+http2", "-s", "malloc,1G", "-t", "86400", "-p", "timeout_idle=60s" ] # -F: foreground, -f: config, -a: listeners, -p: http2 + idle timeout, -s: storage, -t: TTL varnish-end-user: image: varnish:7.3.0 user: root # otherwise varnish user does not have permissions to the mounted folder which is owner by root @@ -133,9 +145,8 @@ services: depends_on: - linkeddatahub tmpfs: /var/lib/varnish/varnishd:exec - environment: - - VARNISH_SIZE=1G - command: [ "-t", "86400", "-p", 
"timeout_idle=60s" ] # time to live + entrypoint: varnishd + command: [ "-F", "-f", "/etc/varnish/default.vcl", "-a", "http=:80,HTTP", "-a", "proxy=:8443,PROXY", "-p", "feature=+http2", "-s", "malloc,1G", "-t", "86400", "-p", "timeout_idle=60s" ] # -F: foreground, -f: config, -a: listeners, -p: http2 + idle timeout, -s: storage, -t: TTL email-server: image: namshi/smtp environment: @@ -151,11 +162,22 @@ configs: http { upstream linkeddatahub { - server varnish-frontend:6060; + server ${NGINX_UPSTREAM_SERVER:-varnish-frontend:6060}; + } + + # Exempt internal requests from the linkeddatahub container from rate limiting + # Use RFC 1918 private IP ranges (same logic as InetAddress.isSiteLocalAddress()) + map $$remote_addr $$limit_key { + default $$binary_remote_addr; + ~^10\. ""; # RFC 1918: 10.0.0.0/8 + ~^172\.1[6-9]\. ""; # RFC 1918: 172.16.0.0/12 (172.16-19.x.x) + ~^172\.2[0-9]\. ""; # RFC 1918: 172.16.0.0/12 (172.20-29.x.x) + ~^172\.3[0-1]\. ""; # RFC 1918: 172.16.0.0/12 (172.30-31.x.x) + ~^192\.168\. 
""; # RFC 1918: 192.168.0.0/16 } - limit_req_zone $$binary_remote_addr zone=linked_data:10m rate=15r/s; - limit_req_zone $$binary_remote_addr zone=static_files:10m rate=20r/s; + limit_req_zone $$limit_key zone=linked_data:10m rate=15r/s; + limit_req_zone $$limit_key zone=static_files:10m rate=20r/s; limit_req_status 429; client_max_body_size ${MAX_CONTENT_LENGTH:-2097152}; @@ -163,18 +185,23 @@ configs: # server with optional client cert authentication server { listen 8443 ssl; - server_name ${HOST}; + server_name *.${HOST} ${HOST}; ssl_certificate /etc/nginx/ssl/server.crt; ssl_certificate_key /etc/nginx/ssl/server.key; ssl_session_cache shared:SSL:1m; ssl_prefer_server_ciphers on; - ssl_verify_client optional_no_ca; + ssl_verify_client ${NGINX_SSL_VERIFY_CLIENT:-optional_no_ca}; location / { proxy_pass http://linkeddatahub; #proxy_cache backcache; limit_req zone=linked_data burst=30 nodelay; + proxy_set_header Host $$host; + proxy_set_header X-Forwarded-Host $$host; + proxy_set_header X-Forwarded-Proto $$scheme; + proxy_set_header X-Forwarded-Port ${HTTPS_PORT}; + proxy_set_header Client-Cert ''; proxy_set_header Client-Cert $$ssl_client_escaped_cert; @@ -185,6 +212,11 @@ configs: proxy_pass http://linkeddatahub; limit_req zone=static_files burst=20 nodelay; + proxy_set_header Host $$host; + proxy_set_header X-Forwarded-Host $$host; + proxy_set_header X-Forwarded-Proto $$scheme; + proxy_set_header X-Forwarded-Port ${HTTPS_PORT}; + proxy_set_header Client-Cert ''; proxy_set_header Client-Cert $$ssl_client_escaped_cert; @@ -202,7 +234,7 @@ configs: # server with client cert authentication on server { listen 9443 ssl; - server_name ${HOST}; + server_name *.${HOST} ${HOST}; ssl_certificate /etc/nginx/ssl/server.crt; ssl_certificate_key /etc/nginx/ssl/server.key; ssl_session_cache shared:SSL:1m; @@ -214,6 +246,11 @@ configs: #proxy_cache backcache; limit_req zone=linked_data burst=30 nodelay; + proxy_set_header Host $$host; + proxy_set_header X-Forwarded-Host $$host; 
+ proxy_set_header X-Forwarded-Proto $$scheme; + proxy_set_header X-Forwarded-Port ${HTTPS_PORT}; + proxy_set_header Client-Cert ''; proxy_set_header Client-Cert $$ssl_client_escaped_cert; } @@ -226,7 +263,7 @@ configs: server { listen 8080; - server_name ${HOST}; + server_name *.${HOST} ${HOST}; location / { return 301 https://$$server_name:${HTTPS_PORT}$$request_uri; @@ -241,13 +278,14 @@ configs: import std; backend default { - .host = "linkeddatahub"; + .host = "${VARNISH_FRONTEND_BACKEND_HOST:-linkeddatahub}"; .port = "7070"; .first_byte_timeout = 60s; } acl local { "localhost"; + "linkeddatahub"; } acl remote { @@ -289,10 +327,31 @@ configs: /* We only deal with GET and HEAD by default */ return (pass); } + if (req.http.Client-Cert) { - /* Authenticated requests are not cacheable */ - return (pass); + # Authenticated HTML is user-specific → never cache + if (req.http.Accept ~ "text/html" || + req.http.Accept ~ "application/xhtml+xml") { + return (pass); + } + + # Conditional requests must reach backend for validation + if (req.http.If-Match || req.http.If-None-Match || + req.http.If-Modified-Since || req.http.If-Unmodified-Since) { + return (pass); + } + + # /access endpoint returns agent-specific group memberships + if (req.url ~ "^/access") { + return (pass); + } + + # SPARQL referencing /acl/agents/ depends on agent identity → don't cache + if (req.url ~ "%2Facl%2Fagents%2F") { + return (pass); + } } + if (req.http.Cookie) { # explicitly allow only cookies required by LDH server-side set req.http.Cookie = ";" + req.http.Cookie; @@ -310,6 +369,15 @@ configs: } sub vcl_backend_response { + /* Add Vary: Origin for static files to enable proper CORS caching */ + if (bereq.url ~ "^/static/") { + if (beresp.http.Vary) { + set beresp.http.Vary = beresp.http.Vary + ", Origin"; + } else { + set beresp.http.Vary = "Origin"; + } + } + /* purge URLs after updates */ if ((beresp.status == 200 || beresp.status == 201 || beresp.status == 204) && bereq.method ~ 
"POST|PUT|DELETE|PATCH") { set beresp.http.X-LinkedDataHub = "Banned"; @@ -325,7 +393,7 @@ configs: import std; backend default { - .host = "fuseki-admin"; + .host = "${VARNISH_ADMIN_BACKEND_HOST:-fuseki-admin}"; .port = "3030"; .first_byte_timeout = 60s; } @@ -391,7 +459,7 @@ configs: import std; backend default { - .host = "fuseki-end-user"; + .host = "${VARNISH_END_USER_BACKEND_HOST:-fuseki-end-user}"; .port = "3030"; .first_byte_timeout = 60s; } diff --git a/http-tests/access/POST-request-access.sh b/http-tests/access/POST-request-access.sh index 8d618724b..76777e56e 100755 --- a/http-tests/access/POST-request-access.sh +++ b/http-tests/access/POST-request-access.sh @@ -30,5 +30,5 @@ curl -w "%{http_code}\n" -o /dev/null -k -s \ --data-urlencode "ol=Access request by Test Agent" \ --data-urlencode "pu=http://www.w3.org/ns/auth/acl#agent" \ --data-urlencode "ou=${AGENT_URI}" \ - "${ADMIN_BASE_URL}access/request" \ + "${END_USER_BASE_URL}access/request" \ | grep -q "$STATUS_OK" \ No newline at end of file diff --git a/http-tests/access/group-authorization.sh b/http-tests/access/group-authorization.sh index eb91aa837..76950bfed 100755 --- a/http-tests/access/group-authorization.sh +++ b/http-tests/access/group-authorization.sh @@ -16,10 +16,10 @@ ntriples=$(curl -k -s -G \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ -H "Accept: application/n-triples" \ --data "this=${container}" \ - "${ADMIN_BASE_URL}access" + "${END_USER_BASE_URL}access" ) -if echo "$ntriples" | grep -q ' '; then +if echo "$ntriples" | grep -q " <${ADMIN_BASE_URL}acl/groups/writers/#this>"; then exit 1 fi @@ -47,9 +47,9 @@ ntriples=$(curl -k -s -G \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ -H "Accept: application/n-triples" \ --data "this=${container}" \ - "${ADMIN_BASE_URL}access" + "${END_USER_BASE_URL}access" ) -if ! echo "$ntriples" | grep -q ' '; then +if ! 
echo "$ntriples" | grep -q " <${ADMIN_BASE_URL}acl/groups/writers/#this>"; then exit 1 fi diff --git a/http-tests/access/owner-authorization.sh b/http-tests/access/owner-authorization.sh index 8fe021186..b62766606 100755 --- a/http-tests/access/owner-authorization.sh +++ b/http-tests/access/owner-authorization.sh @@ -33,7 +33,7 @@ ntriples=$(curl -k -s -G \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ -H "Accept: application/n-triples" \ --data "this=${container}" \ - "${ADMIN_BASE_URL}access" + "${END_USER_BASE_URL}access" ) auth1=$(echo "$ntriples" | grep -F " " | cut -d' ' -f1) diff --git a/http-tests/admin/acl/add-delete-authorization.sh b/http-tests/admin/acl/add-delete-authorization.sh index 4e4cf1b19..0692735f7 100755 --- a/http-tests/admin/acl/add-delete-authorization.sh +++ b/http-tests/admin/acl/add-delete-authorization.sh @@ -28,7 +28,27 @@ container=$(create-container.sh \ --slug "$slug" \ --parent "$END_USER_BASE_URL") -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake DELETE authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to "$container" \ + --write + +# access is still denied (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + -X DELETE \ + "$container" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -39,7 +59,7 @@ create-authorization.sh \ --to "$container" \ --write -# access is allowed after authorization is created +# access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -s \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ diff --git a/http-tests/admin/acl/add-delete-class-authorization.sh 
b/http-tests/admin/acl/add-delete-class-authorization.sh index a814147e0..b763c5c5b 100755 --- a/http-tests/admin/acl/add-delete-class-authorization.sh +++ b/http-tests/admin/acl/add-delete-class-authorization.sh @@ -28,7 +28,27 @@ container=$(create-container.sh \ --slug "$slug" \ --parent "$END_USER_BASE_URL") -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake DELETE class authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to-all-in "https://www.w3.org/ns/ldt/document-hierarchy#Container" \ + --write + +# access is still denied (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + -X DELETE \ + "$container" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -39,7 +59,7 @@ create-authorization.sh \ --to-all-in "https://www.w3.org/ns/ldt/document-hierarchy#Container" \ --write -# access is allowed after authorization is created +# access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -s \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ diff --git a/http-tests/admin/acl/add-delete-group-authorization.sh b/http-tests/admin/acl/add-delete-group-authorization.sh index ae55921ca..c6fe39bff 100755 --- a/http-tests/admin/acl/add-delete-group-authorization.sh +++ b/http-tests/admin/acl/add-delete-group-authorization.sh @@ -44,7 +44,27 @@ container=$(create-container.sh \ --slug "$slug" \ --parent "$END_USER_BASE_URL") -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake DELETE 
group authorization from test.localhost" \ + --agent-group "$group" \ + --to "$container" \ + --write + +# access is still denied (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + -X DELETE \ + "$container" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -55,7 +75,7 @@ create-authorization.sh \ --to "$container" \ --write -# access is allowed after authorization is created +# access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -s \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ diff --git a/http-tests/admin/acl/add-get-authorization.sh b/http-tests/admin/acl/add-get-authorization.sh index 9273104e9..5f9b0c701 100755 --- a/http-tests/admin/acl/add-get-authorization.sh +++ b/http-tests/admin/acl/add-get-authorization.sh @@ -15,7 +15,26 @@ curl -k -w "%{http_code}\n" -o /dev/null -s \ "$END_USER_BASE_URL" \ | grep -q "$STATUS_FORBIDDEN" -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake GET authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to "$END_USER_BASE_URL" \ + --read + +# access is still denied (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + "$END_USER_BASE_URL" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -26,7 +45,7 @@ create-authorization.sh \ --to "$END_USER_BASE_URL" \ --read -# access is allowed after authorization is created +# access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -s \ 
-E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ diff --git a/http-tests/admin/acl/add-get-class-authorization.sh b/http-tests/admin/acl/add-get-class-authorization.sh index 0f2b099c1..2d975c739 100755 --- a/http-tests/admin/acl/add-get-class-authorization.sh +++ b/http-tests/admin/acl/add-get-class-authorization.sh @@ -15,7 +15,26 @@ curl -k -w "%{http_code}\n" -o /dev/null -s \ "$END_USER_BASE_URL" \ | grep -q "$STATUS_FORBIDDEN" -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake GET Container authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to-all-in "https://w3id.org/atomgraph/linkeddatahub/default#Root" \ + --read + +# access is still denied (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + "$END_USER_BASE_URL" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -26,7 +45,7 @@ create-authorization.sh \ --to-all-in "https://w3id.org/atomgraph/linkeddatahub/default#Root" \ --read -# access is allowed after authorization is created +# access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -s \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ diff --git a/http-tests/admin/acl/add-get-group-authorization.sh b/http-tests/admin/acl/add-get-group-authorization.sh index 8e99c9e8c..6c890a6ea 100755 --- a/http-tests/admin/acl/add-get-group-authorization.sh +++ b/http-tests/admin/acl/add-get-group-authorization.sh @@ -31,7 +31,26 @@ group=$(curl -s -k \ | cat \ | sed -rn "s/<${group_doc//\//\\/}> <(.*)> \./\1/p") -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ 
+ -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake GET group authorization from test.localhost" \ + --agent-group "$group" \ + --to "$END_USER_BASE_URL" \ + --read + +# access is still denied (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + "$END_USER_BASE_URL" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -42,7 +61,7 @@ create-authorization.sh \ --to "$END_USER_BASE_URL" \ --read -# access is allowed after authorization is created +# access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -s \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ diff --git a/http-tests/admin/acl/add-post-authorization.sh b/http-tests/admin/acl/add-post-authorization.sh index d6abdcf62..c07bcf864 100755 --- a/http-tests/admin/acl/add-post-authorization.sh +++ b/http-tests/admin/acl/add-post-authorization.sh @@ -22,7 +22,33 @@ EOF ) \ | grep -q "$STATUS_FORBIDDEN" -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake POST authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to "$END_USER_BASE_URL" \ + --append + +# access is still denied (fake authorization filtered out) + +( +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Content-Type: application/n-triples" \ + -H "Accept: application/n-triples" \ + -X POST \ + --data-binary @- \ + "$END_USER_BASE_URL" < . 
+EOF +) \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -33,7 +59,7 @@ create-authorization.sh \ --to "$END_USER_BASE_URL" \ --append -# access is allowed after authorization is created +# access is allowed after real authorization is created ( curl -k -w "%{http_code}\n" -o /dev/null -s \ diff --git a/http-tests/admin/acl/add-post-class-authorization.sh b/http-tests/admin/acl/add-post-class-authorization.sh index 50f4f304e..f09d3102c 100755 --- a/http-tests/admin/acl/add-post-class-authorization.sh +++ b/http-tests/admin/acl/add-post-class-authorization.sh @@ -22,7 +22,33 @@ EOF ) \ | grep -q "$STATUS_FORBIDDEN" -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake POST class authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to-all-in "https://w3id.org/atomgraph/linkeddatahub/default#Root" \ + --append + +# access is still denied (fake authorization filtered out) + +( +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Content-Type: application/n-triples" \ + -H "Accept: application/n-triples" \ + -X POST \ + --data-binary @- \ + "$END_USER_BASE_URL" < . 
+EOF +) \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -33,7 +59,7 @@ create-authorization.sh \ --to-all-in "https://w3id.org/atomgraph/linkeddatahub/default#Root" \ --append -# access is allowed after authorization is created +# access is allowed after real authorization is created ( curl -k -w "%{http_code}\n" -o /dev/null -s \ diff --git a/http-tests/admin/acl/add-post-group-authorization.sh b/http-tests/admin/acl/add-post-group-authorization.sh index e3e05ad9e..a6d048f7c 100755 --- a/http-tests/admin/acl/add-post-group-authorization.sh +++ b/http-tests/admin/acl/add-post-group-authorization.sh @@ -38,7 +38,33 @@ group=$(curl -s -k \ | cat \ | sed -rn "s/<${group_doc//\//\\/}> <(.*)> \./\1/p") -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake POST group authorization from test.localhost" \ + --agent-group "$group" \ + --to "$END_USER_BASE_URL" \ + --append + +# access is still denied (fake authorization filtered out) + +( +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Content-Type: application/n-triples" \ + -H "Accept: application/n-triples" \ + -X POST \ + --data-binary @- \ + "$END_USER_BASE_URL" < . 
+EOF +) \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -49,7 +75,7 @@ create-authorization.sh \ --to "$END_USER_BASE_URL" \ --append -# access is allowed after authorization is created +# access is allowed after real authorization is created ( curl -k -w "%{http_code}\n" -o /dev/null -s \ diff --git a/http-tests/admin/acl/add-put-authorization.sh b/http-tests/admin/acl/add-put-authorization.sh index 60340973a..f35bbc4b4 100755 --- a/http-tests/admin/acl/add-put-authorization.sh +++ b/http-tests/admin/acl/add-put-authorization.sh @@ -22,7 +22,33 @@ EOF ) \ | grep -q "$STATUS_FORBIDDEN" -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake PUT authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to "$END_USER_BASE_URL" \ + --write + +# access is still denied (fake authorization filtered out) + +( +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Content-Type: application/n-triples" \ + -H "Accept: application/n-triples" \ + -X PUT \ + --data-binary @- \ + "$END_USER_BASE_URL" < . 
+EOF +) \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -41,7 +67,7 @@ root_ntriples=$(get.sh \ --accept 'application/n-triples' \ "$END_USER_BASE_URL") -# access is allowed after authorization is created +# access is allowed after real authorization is created # request body with document instance is required echo "$root_ntriples" \ diff --git a/http-tests/admin/acl/add-put-class-authorization.sh b/http-tests/admin/acl/add-put-class-authorization.sh index 86a782969..a23c4cb75 100755 --- a/http-tests/admin/acl/add-put-class-authorization.sh +++ b/http-tests/admin/acl/add-put-class-authorization.sh @@ -22,7 +22,33 @@ EOF ) \ | grep -q "$STATUS_FORBIDDEN" -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake PUT class authorization from test.localhost" \ + --agent "$AGENT_URI" \ + --to-all-in "https://w3id.org/atomgraph/linkeddatahub/default#Root" \ + --write + +# access is still denied (fake authorization filtered out) + +( +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Content-Type: application/n-triples" \ + -H "Accept: application/n-triples" \ + -X PUT \ + --data-binary @- \ + "$END_USER_BASE_URL" < . 
+EOF +) \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -41,7 +67,7 @@ root_ntriples=$(get.sh \ --accept 'application/n-triples' \ "$END_USER_BASE_URL") -# access is allowed after authorization is created +# access is allowed after real authorization is created # request body with document instance is required echo "$root_ntriples" \ diff --git a/http-tests/admin/acl/add-put-group-authorization.sh b/http-tests/admin/acl/add-put-group-authorization.sh index d97d92caf..1d5ccf9d3 100755 --- a/http-tests/admin/acl/add-put-group-authorization.sh +++ b/http-tests/admin/acl/add-put-group-authorization.sh @@ -38,7 +38,33 @@ group=$(curl -s -k \ | cat \ | sed -rn "s/<${group_doc//\//\\/}> <(.*)> \./\1/p") -# create authorization +# create fake test.localhost authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake PUT group authorization from test.localhost" \ + --agent-group "$group" \ + --to "$END_USER_BASE_URL" \ + --write + +# access is still denied (fake authorization filtered out) + +( +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Content-Type: application/n-triples" \ + -H "Accept: application/n-triples" \ + -X PUT \ + --data-binary @- \ + "$END_USER_BASE_URL" < . 
+EOF +) \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -57,7 +83,7 @@ root_ntriples=$(get.sh \ --accept 'application/n-triples' \ "$END_USER_BASE_URL") -# access is allowed after authorization is created +# access is allowed after real authorization is created # request body with document instance is required echo "$root_ntriples" \ diff --git a/http-tests/admin/acl/make-public.sh b/http-tests/admin/acl/make-public.sh index b9398e029..a3900b107 100755 --- a/http-tests/admin/acl/make-public.sh +++ b/http-tests/admin/acl/make-public.sh @@ -14,7 +14,25 @@ curl -k -w "%{http_code}\n" -o /dev/null -v \ "$END_USER_BASE_URL" \ | grep -q "$STATUS_FORBIDDEN" -# create public authorization +# create fake test.localhost public authorization (should be filtered out) + +create-authorization.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + -b "https://admin.test.localhost:4443/" \ + --label "Fake public access from test.localhost" \ + --agent-class 'http://xmlns.com/foaf/0.1/Agent' \ + --to "$END_USER_BASE_URL" \ + --read + +# public access is still forbidden (fake authorization filtered out) + +curl -k -w "%{http_code}\n" -o /dev/null -v \ + -H "Accept: application/n-triples" \ + "$END_USER_BASE_URL" \ +| grep -q "$STATUS_FORBIDDEN" + +# create real localhost public authorization create-authorization.sh \ -f "$OWNER_CERT_FILE" \ @@ -25,7 +43,7 @@ create-authorization.sh \ --to "$END_USER_BASE_URL" \ --read -# public access is allowed after authorization is created +# public access is allowed after real authorization is created curl -k -w "%{http_code}\n" -o /dev/null -f -v \ -H "Accept: application/n-triples" \ diff --git a/http-tests/admin/model/add-property-constraint.sh b/http-tests/admin/model/add-property-constraint.sh index 7571a14fb..c5e179841 100755 --- a/http-tests/admin/model/add-property-constraint.sh +++ b/http-tests/admin/model/add-property-constraint.sh @@ -58,12 +58,16 @@ 
turtle+="_:item a <${namespace_doc}#ConstrainedClass> .\n" turtle+="_:item dct:title \"Failure\" .\n" turtle+="_:item sioc:has_container <${END_USER_BASE_URL}> .\n" +# Using direct curl instead of put.sh because put.sh uses -f flag which exits on 4xx errors, +# but this test expects to capture the 422 response response=$(echo -e "$turtle" \ | turtle --base="$END_USER_BASE_URL" \ -| put.sh \ - -f "$OWNER_CERT_FILE" \ - -p "$OWNER_CERT_PWD" \ - --content-type "text/turtle" \ +| curl -k -v \ + -E "$OWNER_CERT_FILE":"$OWNER_CERT_PWD" \ + -d @- \ + -X PUT \ + -H "Content-Type: text/turtle" \ + -H "Accept: text/turtle" \ "$END_USER_BASE_URL" \ 2>&1) # redirect output from stderr to stdout diff --git a/http-tests/config/system.trig b/http-tests/config/system.trig new file mode 100644 index 000000000..47ed5c76a --- /dev/null +++ b/http-tests/config/system.trig @@ -0,0 +1,92 @@ +@prefix lapp: . +@prefix ldh: . +@prefix a: . +@prefix ac: . +@prefix rdf: . +@prefix rdfs: . +@prefix xsd: . +@prefix ldt: . +@prefix sd: . +@prefix dct: . +@prefix foaf: . + +### do not use blank nodes to identify resources! ### +### urn: URI scheme is used because applications/services are not accessible in their own dataspace (under $BASE_URI) ### + +# root admin + + a lapp:Application, lapp:AdminApplication ; + dct:title "LinkedDataHub admin" ; + # ldt:base ; + ldh:origin ; + ldt:ontology ; + ldt:service ; + ac:stylesheet ; + lapp:endUserApplication ; + lapp:frontendProxy . + + a sd:Service ; + dct:title "LinkedDataHub admin service" ; + sd:supportedLanguage sd:SPARQL11Query, sd:SPARQL11Update ; + sd:endpoint ; + a:graphStore ; + a:quadStore ; + lapp:backendProxy . + +# root end-user + + a lapp:Application, lapp:EndUserApplication ; + dct:title "LinkedDataHub" ; + # ldt:base ; + ldh:origin ; + ldt:ontology ; + ldt:service ; + lapp:adminApplication ; + lapp:frontendProxy ; + lapp:public true . 
+ + a sd:Service ; + dct:title "LinkedDataHub service" ; + sd:supportedLanguage sd:SPARQL11Query, sd:SPARQL11Update ; + sd:endpoint ; + a:graphStore ; + a:quadStore ; + lapp:backendProxy . + +# test admin + + a lapp:Application, lapp:AdminApplication ; + dct:title "Test admin" ; + ldh:origin ; + ldt:ontology ; + ldt:service ; + ac:stylesheet ; + lapp:endUserApplication ; + lapp:frontendProxy . + + a sd:Service ; + dct:title "Test admin service" ; + sd:supportedLanguage sd:SPARQL11Query, sd:SPARQL11Update ; + sd:endpoint ; + a:graphStore ; + a:quadStore ; + lapp:backendProxy . + +# test end-user + + a lapp:Application, lapp:EndUserApplication ; + dct:title "Test" ; + ldh:origin ; + ldt:ontology ; + ldt:service ; + lapp:adminApplication ; + lapp:frontendProxy ; + lapp:public true . + + a sd:Service ; + dct:title "Test service" ; + sd:supportedLanguage sd:SPARQL11Query, sd:SPARQL11Update ; + sd:endpoint ; + a:graphStore ; + a:quadStore ; + lapp:backendProxy . diff --git a/http-tests/dataspaces/non-existent-dataspace.sh b/http-tests/dataspaces/non-existent-dataspace.sh new file mode 100755 index 000000000..ae443f7d3 --- /dev/null +++ b/http-tests/dataspaces/non-existent-dataspace.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Test that accessing a non-configured dataspace returns 404, not 500 + +# Try to access admin on non-existent test.localhost dataspace +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -H "Accept: application/n-triples" \ + "https://admin.non-existing.localhost:4443/" \ +| grep -q "$STATUS_NOT_FOUND" + +# Try to access end-user on non-existent test.localhost dataspace +curl -k -w "%{http_code}\n" -o /dev/null -s \ + -H "Accept: application/n-triples" \ + "https://non-existing.localhost:4443/" \ +| grep -q "$STATUS_NOT_FOUND" diff --git a/http-tests/docker-compose.http-tests.yml b/http-tests/docker-compose.http-tests.yml index 0d8e28d3a..158c2e29c 100644 --- a/http-tests/docker-compose.http-tests.yml +++ 
b/http-tests/docker-compose.http-tests.yml @@ -11,8 +11,10 @@ services: environment: - JPDA_ADDRESS=*:8000 # debugger host - performance hit when enabled volumes: - - ./http-tests/datasets/owner:/var/linkeddatahub/datasets/owner - - ./http-tests/datasets/secretary:/var/linkeddatahub/datasets/secretary + - ./http-tests/config/system.trig:/var/linkeddatahub/datasets/system.trig:ro + - ./http-tests/root-owner.trig.template:/var/linkeddatahub/root-owner.trig.template:ro + - ./datasets/owner:/var/linkeddatahub/datasets/owner + - ./datasets/secretary:/var/linkeddatahub/datasets/secretary - ./http-tests/uploads:/var/www/linkeddatahub/uploads - ./http-tests/ssl/server:/var/linkeddatahub/ssl/server - ./http-tests/ssl/owner:/var/linkeddatahub/ssl/owner diff --git a/http-tests/document-hierarchy/GET-children.sh b/http-tests/document-hierarchy/GET-children.sh new file mode 100755 index 000000000..97cdd69ea --- /dev/null +++ b/http-tests/document-hierarchy/GET-children.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash +set -euo pipefail + +initialize_dataset "$END_USER_BASE_URL" "$TMP_END_USER_DATASET" "$END_USER_ENDPOINT_URL" +initialize_dataset "$ADMIN_BASE_URL" "$TMP_ADMIN_DATASET" "$ADMIN_ENDPOINT_URL" +purge_cache "$END_USER_VARNISH_SERVICE" +purge_cache "$ADMIN_VARNISH_SERVICE" +purge_cache "$FRONTEND_VARNISH_SERVICE" + +# add agent to the writers group + +add-agent-to-group.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + --agent "$AGENT_URI" \ + "${ADMIN_BASE_URL}acl/groups/writers/" + +# execute SPARQL query to retrieve children of the end-user base URL to prime the Varnish cache + +query="DESCRIBE * WHERE { SELECT DISTINCT ?child ?thing WHERE { GRAPH ?childGraph { { ?child <${END_USER_BASE_URL}>. } UNION { ?child <${END_USER_BASE_URL}>. } ?child ?Type. OPTIONAL { ?child ?title. } OPTIONAL { ?child ?thing. 
} } } ORDER BY (?title) LIMIT 20 }" + +# URL-encode query with uppercase hex digits (matching Java's UriComponent.encode()) +# Note: We must construct the URL manually instead of using curl's -G --data-urlencode because curl normalizes percent-encoding to lowercase, +# which won't match the uppercase percent-encoding that Java produces in cache invalidation BAN requests +encoded_query=$(python -c "import urllib.parse; print(urllib.parse.quote('''$query''', safe=''))") + +curl -k -f -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + "${END_USER_BASE_URL}sparql?query=${encoded_query}" \ + > /dev/null + +# create container + +slug="test-children-query" + +container=$(create-container.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "Test Children Query" \ + --slug "$slug" \ + --parent "$END_USER_BASE_URL") + +# execute SPARQL query again - the new container should appear (verifies cache invalidation) + +curl -k -f -s \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + "${END_USER_BASE_URL}sparql?query=${encoded_query}" \ +| grep -q "<${container}>" diff --git a/http-tests/misc/cors-jaxrs.sh b/http-tests/misc/cors-jaxrs.sh new file mode 100755 index 000000000..875f96339 --- /dev/null +++ b/http-tests/misc/cors-jaxrs.sh @@ -0,0 +1,53 @@ +#!/usr/bin/env bash +set -euo pipefail + +initialize_dataset "$END_USER_BASE_URL" "$TMP_END_USER_DATASET" "$END_USER_ENDPOINT_URL" +initialize_dataset "$ADMIN_BASE_URL" "$TMP_ADMIN_DATASET" "$ADMIN_ENDPOINT_URL" +purge_cache "$END_USER_VARNISH_SERVICE" +purge_cache "$ADMIN_VARNISH_SERVICE" +purge_cache "$FRONTEND_VARNISH_SERVICE" + +# Test JAX-RS CORSFilter on dynamic content (GET request) + +response=$(curl -i -k -s \ + -H "Origin: https://example.com" \ + -H "Accept: text/turtle" \ + "$END_USER_BASE_URL") + +# Verify Access-Control-Allow-Origin header is present +if ! 
echo "$response" | grep -q "Access-Control-Allow-Origin: \*"; then + echo "CORS header 'Access-Control-Allow-Origin' not found in GET response" + exit 1 +fi + +# Verify Access-Control-Allow-Methods header is present +if ! echo "$response" | grep -q "Access-Control-Allow-Methods:"; then + echo "CORS header 'Access-Control-Allow-Methods' not found in GET response" + exit 1 +fi + +# Test OPTIONS preflight request + +preflight=$(curl -i -k -s \ + -X OPTIONS \ + -H "Origin: https://example.com" \ + -H "Access-Control-Request-Method: POST" \ + "$END_USER_BASE_URL") + +# Verify preflight response has CORS headers +if ! echo "$preflight" | grep -q "Access-Control-Allow-Origin: \*"; then + echo "CORS header 'Access-Control-Allow-Origin' not found in OPTIONS response" + exit 1 +fi + +# Verify preflight response has Access-Control-Max-Age +if ! echo "$preflight" | grep -q "Access-Control-Max-Age:"; then + echo "CORS header 'Access-Control-Max-Age' not found in OPTIONS response" + exit 1 +fi + +# Verify OPTIONS request returns 204 No Content +if ! echo "$preflight" | grep -q "HTTP/.* 204"; then + echo "OPTIONS preflight did not return 204 No Content" + exit 1 +fi diff --git a/http-tests/misc/cors-static.sh b/http-tests/misc/cors-static.sh new file mode 100755 index 000000000..ae39023f7 --- /dev/null +++ b/http-tests/misc/cors-static.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Test Tomcat CorsFilter on static files +# The Tomcat filter only adds CORS headers when Origin header is present + +response=$(curl -i -k -s \ + -H "Origin: https://example.com" \ + "${END_USER_BASE_URL}static/com/atomgraph/linkeddatahub/css/bootstrap.css") + +# Verify Access-Control-Allow-Origin header is present +if ! echo "$response" | grep -q "Access-Control-Allow-Origin: \*"; then + echo "CORS header 'Access-Control-Allow-Origin' not found on static file" + exit 1 +fi + +# Verify the static file was served successfully +if ! 
echo "$response" | grep -q "HTTP/.* 200"; then + echo "Static file request did not return 200 OK" + exit 1 +fi + +# Test OPTIONS request on static files + +preflight=$(curl -i -k -s \ + -X OPTIONS \ + -H "Origin: https://example.com" \ + -H "Access-Control-Request-Method: GET" \ + "${END_USER_BASE_URL}static/com/atomgraph/linkeddatahub/css/bootstrap.css") + +# Verify preflight response has CORS headers +if ! echo "$preflight" | grep -q "Access-Control-Allow-Origin: \*"; then + echo "CORS header 'Access-Control-Allow-Origin' not found in OPTIONS response for static file" + exit 1 +fi diff --git a/http-tests/proxy/GET-proxied-internal-403.sh b/http-tests/proxy/GET-proxied-internal-403.sh new file mode 100755 index 000000000..1a1b88632 --- /dev/null +++ b/http-tests/proxy/GET-proxied-internal-403.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +set -euo pipefail + +initialize_dataset "$END_USER_BASE_URL" "$TMP_END_USER_DATASET" "$END_USER_ENDPOINT_URL" +initialize_dataset "$ADMIN_BASE_URL" "$TMP_ADMIN_DATASET" "$ADMIN_ENDPOINT_URL" +purge_cache "$END_USER_VARNISH_SERVICE" +purge_cache "$ADMIN_VARNISH_SERVICE" +purge_cache "$FRONTEND_VARNISH_SERVICE" + +# add agent to the readers group to be able to read documents + +add-agent-to-group.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + --agent "$AGENT_URI" \ + "${ADMIN_BASE_URL}acl/groups/readers/" + +# LNK-009: Test that internal Docker services are blocked via SSRF protection +# Attempt to access the internal fuseki-admin SPARQL endpoint via the proxy +# This should be blocked and return 403 Forbidden + +http_status=$(curl -k -s -o /dev/null -w "%{http_code}" \ + -G \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H 'Accept: application/n-triples' \ + --data-urlencode "uri=http://fuseki-admin:3030/ds" \ + "$END_USER_BASE_URL" || true) + +# Verify that access was forbidden (403) +if [ "$http_status" != "403" ]; then + echo "Expected HTTP 403 Forbidden for internal service access, got: $http_status" + exit 1 +fi diff 
--git a/http-tests/proxy/GET-proxied-rfc1918-403.sh b/http-tests/proxy/GET-proxied-rfc1918-403.sh new file mode 100755 index 000000000..a5726f58e --- /dev/null +++ b/http-tests/proxy/GET-proxied-rfc1918-403.sh @@ -0,0 +1,59 @@ +#!/usr/bin/env bash +set -euo pipefail + +initialize_dataset "$END_USER_BASE_URL" "$TMP_END_USER_DATASET" "$END_USER_ENDPOINT_URL" +initialize_dataset "$ADMIN_BASE_URL" "$TMP_ADMIN_DATASET" "$ADMIN_ENDPOINT_URL" +purge_cache "$END_USER_VARNISH_SERVICE" +purge_cache "$ADMIN_VARNISH_SERVICE" +purge_cache "$FRONTEND_VARNISH_SERVICE" + +# add agent to the readers group to be able to read documents + +add-agent-to-group.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + --agent "$AGENT_URI" \ + "${ADMIN_BASE_URL}acl/groups/readers/" + +# LNK-009: Test that RFC 1918 private addresses are blocked via SSRF protection +# Test Class A (10.0.0.0/8) + +http_status=$(curl -k -s -o /dev/null -w "%{http_code}" \ + -G \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H 'Accept: application/n-triples' \ + --data-urlencode "uri=http://10.0.0.1:8080/test" \ + "$END_USER_BASE_URL" || true) + +if [ "$http_status" != "403" ]; then + echo "Expected HTTP 403 Forbidden for 10.0.0.1 access, got: $http_status" + exit 1 +fi + +# Test Class B (172.16.0.0/12) + +http_status=$(curl -k -s -o /dev/null -w "%{http_code}" \ + -G \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H 'Accept: application/n-triples' \ + --data-urlencode "uri=http://172.16.0.1:8080/test" \ + "$END_USER_BASE_URL" || true) + +if [ "$http_status" != "403" ]; then + echo "Expected HTTP 403 Forbidden for 172.16.0.1 access, got: $http_status" + exit 1 +fi + +# Test Class C (192.168.0.0/16) + +http_status=$(curl -k -s -o /dev/null -w "%{http_code}" \ + -G \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H 'Accept: application/n-triples' \ + --data-urlencode "uri=http://192.168.1.1:8080/test" \ + "$END_USER_BASE_URL" || true) + +if [ "$http_status" != "403" ]; then + echo "Expected HTTP 403 Forbidden 
for 192.168.1.1 access, got: $http_status" + exit 1 +fi diff --git a/http-tests/proxy/PATCH-proxied-update.sh b/http-tests/proxy/PATCH-proxied-update.sh new file mode 100755 index 000000000..e0d389457 --- /dev/null +++ b/http-tests/proxy/PATCH-proxied-update.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env bash +set -euo pipefail + +initialize_dataset "$END_USER_BASE_URL" "$TMP_END_USER_DATASET" "$END_USER_ENDPOINT_URL" +initialize_dataset "$ADMIN_BASE_URL" "$TMP_ADMIN_DATASET" "$ADMIN_ENDPOINT_URL" +purge_cache "$END_USER_VARNISH_SERVICE" +purge_cache "$ADMIN_VARNISH_SERVICE" +purge_cache "$FRONTEND_VARNISH_SERVICE" + +# add agent to the writers group + +add-agent-to-group.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + --agent "$AGENT_URI" \ + "${ADMIN_BASE_URL}acl/groups/writers/" + +# create item document to PATCH + +item=$(create-item.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "Test Item" \ + --slug "test-patch-$(date +%s)" \ + --container "$END_USER_BASE_URL") + +# execute SPARQL UPDATE on the item using LDH as a proxy + +update=$(cat < + +INSERT +{ + <${item}> dcterms:description "Updated via proxy" . 
+} +WHERE {} +EOF +) + +curl -k -w "%{http_code}\n" -o /dev/null -f -s \ + -X PATCH \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H 'Content-Type: application/sparql-update' \ + --url-query "uri=${item}" \ + --data-binary "$update" \ + "$END_USER_BASE_URL" \ +| grep -q "$STATUS_NO_CONTENT" + +# check that the data was inserted + +curl -k -f -s -G \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H "Accept: application/n-triples" \ + --url-query "uri=${item}" \ + "$END_USER_BASE_URL" \ +| grep "Updated via proxy" > /dev/null diff --git a/http-tests/proxy/POST-proxied-form.sh b/http-tests/proxy/POST-proxied-form.sh new file mode 100755 index 000000000..9bfec095d --- /dev/null +++ b/http-tests/proxy/POST-proxied-form.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +set -euo pipefail + +initialize_dataset "$END_USER_BASE_URL" "$TMP_END_USER_DATASET" "$END_USER_ENDPOINT_URL" +initialize_dataset "$ADMIN_BASE_URL" "$TMP_ADMIN_DATASET" "$ADMIN_ENDPOINT_URL" +purge_cache "$END_USER_VARNISH_SERVICE" +purge_cache "$ADMIN_VARNISH_SERVICE" +purge_cache "$FRONTEND_VARNISH_SERVICE" + +# add agent to the writers group + +add-agent-to-group.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + --agent "$AGENT_URI" \ + "${ADMIN_BASE_URL}acl/groups/writers/" + +# POST form data to admin clear endpoint via proxy + +curl -k -w "%{http_code}\n" -o /dev/null -f -s \ + -X POST \ + -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ + -H 'Content-Type: application/x-www-form-urlencoded' \ + -H 'Accept: application/rdf+xml' \ + --url-query "uri=${ADMIN_BASE_URL}clear" \ + --data-urlencode "uri=${END_USER_BASE_URL}ns#" \ + "$END_USER_BASE_URL" \ +| grep -q "$STATUS_OK" diff --git a/http-tests/root-owner.trig.template b/http-tests/root-owner.trig.template new file mode 100644 index 000000000..1b78aad03 --- /dev/null +++ b/http-tests/root-owner.trig.template @@ -0,0 +1,88 @@ +@prefix rdfs: . +@prefix xsd: . +@prefix acl: . +@prefix cert: . +@prefix dh: . +@prefix sioc: . +@prefix foaf: . +@prefix dct: . 
+ +# AGENT + +<${OWNER_DOC_URI}> +{ + + <${OWNER_DOC_URI}> a dh:Item ; + foaf:primaryTopic <${OWNER_URI}> ; + sioc:has_container ; + dct:title "${OWNER_COMMON_NAME}" . + + <${OWNER_URI}> a foaf:Agent ; + foaf:name "${OWNER_COMMON_NAME}" ; + foaf:mbox ; + cert:key . + + # secretary delegates the owner agent + + <${SECRETARY_URI}> acl:delegates <${OWNER_URI}> . + +} + +# PUBLIC KEY + + +{ + + a dh:Item ; + foaf:primaryTopic ; + sioc:has_container ; + dct:title "${OWNER_COMMON_NAME}" . + + a cert:PublicKey ; + rdfs:label "${OWNER_COMMON_NAME}" ; + cert:modulus "${OWNER_PUBLIC_KEY_MODULUS}"^^xsd:hexBinary; + cert:exponent 65537 . + +} + +# AUTHORIZATIONS + +# root owner is a member of the owners group + + +{ + + foaf:member <${OWNER_URI}> . +} + + # TO-DO: use $OWNER_AUTH_UUID +{ + + a dh:Item ; + foaf:primaryTopic ; + sioc:has_container ; + dct:title "Public owner's WebID" . + + a acl:Authorization ; + acl:accessTo <${OWNER_DOC_URI}>, ; + acl:mode acl:Read ; + acl:agentClass foaf:Agent, acl:AuthenticatedAgent . + +} + +# test.localhost owner authorization (for HTTP tests) + + +{ + + a dh:Item ; + foaf:primaryTopic ; + dct:title "Test owner Control authorization" . + + a acl:Authorization ; + acl:accessTo ; + acl:accessToClass ; + acl:mode acl:Control ; + acl:agent <${OWNER_URI}> . + +} diff --git a/http-tests/run.sh b/http-tests/run.sh index 6360c315d..49e8ca193 100755 --- a/http-tests/run.sh +++ b/http-tests/run.sh @@ -110,7 +110,7 @@ export HTTP_TEST_ROOT="$PWD" export END_USER_ENDPOINT_URL="http://localhost:3031/ds/" export ADMIN_ENDPOINT_URL="http://localhost:3030/ds/" export END_USER_BASE_URL="https://localhost:4443/" -export ADMIN_BASE_URL="https://localhost:4443/admin/" +export ADMIN_BASE_URL="https://admin.localhost:4443/" export END_USER_VARNISH_SERVICE="varnish-end-user" export ADMIN_VARNISH_SERVICE="varnish-admin" export FRONTEND_VARNISH_SERVICE="varnish-frontend" @@ -142,6 +142,8 @@ run_tests $(find ./add/ -type f -name '*.sh') (( error_count += $? 
)) run_tests $(find ./admin/ -type f -name '*.sh') (( error_count += $? )) +run_tests $(find ./dataspaces/ -type f -name '*.sh') +(( error_count += $? )) run_tests $(find ./access/ -type f -name '*.sh') (( error_count += $? )) run_tests $(find ./imports/ -type f -name '*.sh') diff --git a/platform/context.xsl b/platform/context.xsl index 34cd4026c..1c4b4bd78 100644 --- a/platform/context.xsl +++ b/platform/context.xsl @@ -4,6 +4,7 @@ + ]> @@ -48,6 +50,8 @@ xmlns:google="&google;" + + @@ -158,6 +162,12 @@ xmlns:google="&google;" + + + + + + diff --git a/platform/datasets/admin.trig b/platform/datasets/admin.trig index 720af6949..111123be6 100644 --- a/platform/datasets/admin.trig +++ b/platform/datasets/admin.trig @@ -1,11 +1,11 @@ @prefix def: . @prefix ldh: . -@prefix ac: . +@prefix ac: . @prefix rdf: . @prefix xsd: . -@prefix dh: . +@prefix dh: . @prefix sd: . -@prefix sp: . +@prefix sp: . @prefix sioc: . @prefix foaf: . @prefix dct: . @@ -58,22 +58,6 @@ } - -{ - - a foaf:Document ; - dct:title "Access endpoint" . - -} - - -{ - - a foaf:Document ; - dct:title "Access request endpoint" . - -} - # CONTAINERS @@ -355,43 +339,8 @@ WHERE } - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "DBPedia" ; - foaf:primaryTopic . - - - a sd:Service ; - dct:title "DBPedia" ; - dct:description "Public SPARQL endpoint providing access to the DBpedia knowledge base, a crowd-sourced community effort to extract structured information from Wikipedia and make this information available on the Web." ; - sd:endpoint ; - sd:supportedLanguage sd:SPARQL11Query . - -} - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "Wikidata" ; - foaf:primaryTopic . - - - a sd:Service ; - dct:title "Wikidata" ; - dct:description "Public SPARQL endpoint providing access to Wikidata, a free and open knowledge base that acts as central storage for the structured data of Wikimedia projects including Wikipedia, Wikivoyage, Wiktionary, Wikisource, and others." 
; - sd:endpoint ; - sd:supportedLanguage sd:SPARQL11Query . - -} - ### ADMIN-SPECIFIC -@prefix ns: <../ns#> . @prefix lacl: . @prefix adm: . @prefix rdfs: . @@ -438,22 +387,6 @@ WHERE } - -{ - - a foaf:Document ; - dct:title "OAuth 2.0 login" . - -} - - -{ - - a foaf:Document ; - dct:title "Google OAuth2.0 authorization" . - -} - { @@ -637,44 +570,6 @@ WHERE } -# public namespace - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "Public namespace access" ; - foaf:primaryTopic . - - a acl:Authorization ; - rdfs:label "Public namespace access" ; - rdfs:comment "Allows non-authenticated access" ; - acl:accessTo <../ns> ; # end-user ontologies are public - acl:mode acl:Read, acl:Append ; # allow queries over GET as well as POST - acl:agentClass foaf:Agent, acl:AuthenticatedAgent . - -} - -# SPARQL endpoint - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "SPARQL endpoint access" ; - foaf:primaryTopic . - - a acl:Authorization ; - rdfs:label "SPARQL endpoint access" ; - rdfs:comment "Allows only authenticated access" ; - acl:accessTo <../sparql> ; - acl:mode acl:Read, acl:Append ; # allow queries over GET as well as POST - acl:agentClass acl:AuthenticatedAgent . - -} - # access endpoint @@ -688,7 +583,7 @@ WHERE a acl:Authorization ; rdfs:label "Access description access" ; rdfs:comment "Allows non-authenticated access" ; - acl:accessTo ; + acl:accessToClass ldh:Access ; acl:mode acl:Read ; acl:agentClass foaf:Agent, acl:AuthenticatedAgent . @@ -707,7 +602,7 @@ WHERE a acl:Authorization ; rdfs:label "Access request access" ; rdfs:comment "Allows non-authenticated access" ; - acl:accessTo ; + acl:accessToClass ldh:AccessRequest ; acl:mode acl:Append ; acl:agentClass foaf:Agent, acl:AuthenticatedAgent . @@ -746,26 +641,26 @@ WHERE a acl:Authorization ; rdfs:label "OAuth2 login access" ; rdfs:comment "Required to enable public OAuth2 login" ; - acl:accessTo ; + acl:accessToClass ldh:OAuthLogin ; acl:mode acl:Read ; acl:agentClass foaf:Agent . 
} -# Google authorization +# OAuth2 authorization - + { - a dh:Item ; + a dh:Item ; sioc:has_container ; - dct:title "Google's OAuth2 authorization" ; - foaf:primaryTopic . + dct:title "OAuth2 authorization" ; + foaf:primaryTopic . - a acl:Authorization ; - rdfs:label "Google's OAuth2 authorization" ; + a acl:Authorization ; + rdfs:label "OAuth2 authorization" ; rdfs:comment "Required to enable public OAuth2 login" ; - acl:accessTo ; + acl:accessToClass ldh:OAuthAuthorize ; acl:mode acl:Read ; acl:agentClass foaf:Agent . @@ -804,52 +699,12 @@ WHERE rdfs:label "Full control" ; rdfs:comment "Allows full read/write access to all application resources" ; acl:accessToClass dh:Item, dh:Container, def:Root ; - acl:accessTo <../sparql>, <../importer>, <../add>, <../generate>, <../ns>, , ; + acl:accessTo , ; acl:mode acl:Read, acl:Append, acl:Write, acl:Control ; acl:agentGroup . } -# write/append access - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "Write/append access" ; - foaf:primaryTopic . - - a acl:Authorization ; - rdfs:label "Write/append access" ; - rdfs:comment "Allows write access to all documents and containers" ; - acl:accessToClass dh:Item, dh:Container, def:Root ; - acl:accessTo <../sparql>, <../importer>, <../add>, <../generate>, <../ns> ; - acl:mode acl:Write, acl:Append ; - acl:agentGroup , . - -} - -# read access - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "Read access" ; - foaf:primaryTopic . - - a acl:Authorization ; - rdfs:label "Read access" ; - rdfs:comment "Allows read access to all resources" ; - acl:accessToClass dh:Item, dh:Container, def:Root, ; - acl:accessTo <../sparql> ; - acl:mode acl:Read ; - acl:agentGroup , , . - -} - # GROUPS # owners @@ -917,24 +772,3 @@ WHERE rdf:value ldh:ChildrenView . } - -# ONTOLOGIES - -# namespace - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "Namespace" ; - foaf:primaryTopic ns: . 
- - ns: a owl:Ontology ; - rdfs:label "Namespace" ; - rdfs:comment "Namespace of the application" ; - foaf:isPrimaryTopicOf <../ns> ; - owl:imports ; - owl:versionInfo "1.0-SNAPSHOT" . - -} \ No newline at end of file diff --git a/platform/datasets/end-user.trig b/platform/datasets/end-user.trig index 95030df64..4c3574b08 100644 --- a/platform/datasets/end-user.trig +++ b/platform/datasets/end-user.trig @@ -1,11 +1,11 @@ @prefix def: . @prefix ldh: . -@prefix ac: . +@prefix ac: . @prefix rdf: . @prefix xsd: . -@prefix dh: . +@prefix dh: . @prefix sd: . -@prefix sp: . +@prefix sp: . @prefix sioc: . @prefix foaf: . @prefix dct: . @@ -58,22 +58,6 @@ } - -{ - - a foaf:Document ; - dct:title "Access endpoint" . - -} - - -{ - - a foaf:Document ; - dct:title "Access request endpoint" . - -} - # CONTAINERS @@ -355,6 +339,56 @@ WHERE } +### END-USER-SPECIFIC + + +{ + + a ldh:Access ; + dct:title "Access endpoint" . + +} + + +{ + + a ldh:AccessRequest ; + dct:title "Access request endpoint" . + +} + + +{ + + a ldh:OAuthLogin ; + dct:title "OAuth 2.0 login" . + +} + + +{ + + a ldh:OAuthAuthorize ; + dct:title "Google OAuth2.0 authorization" . + +} + + +{ + + a ldh:OAuthLogin ; + dct:title "ORCID OAuth2.0 login" . + +} + + +{ + + a ldh:OAuthAuthorize ; + dct:title "ORCID OAuth2.0 authorization" . 
+ +} + { diff --git a/platform/entrypoint.sh b/platform/entrypoint.sh index d6a50090d..6a4bb2e6d 100755 --- a/platform/entrypoint.sh +++ b/platform/entrypoint.sh @@ -13,40 +13,62 @@ fi # change server configuration if [ -n "$HTTP" ]; then - HTTP_PARAM="--stringparam http $HTTP " + HTTP_PARAM="--stringparam Connector.http $HTTP " fi if [ -n "$HTTP_SCHEME" ]; then - HTTP_SCHEME_PARAM="--stringparam http.scheme $HTTP_SCHEME " + HTTP_SCHEME_PARAM="--stringparam Connector.scheme.http $HTTP_SCHEME " fi if [ -n "$HTTP_PORT" ]; then - HTTP_PORT_PARAM="--stringparam http.port $HTTP_PORT " + HTTP_PORT_PARAM="--stringparam Connector.port.http $HTTP_PORT " fi if [ -n "$HTTP_PROXY_NAME" ]; then lc_proxy_name=$(echo "$HTTP_PROXY_NAME" | tr '[:upper:]' '[:lower:]') # make sure it's lower-case - HTTP_PROXY_NAME_PARAM="--stringparam http.proxyName $lc_proxy_name " + HTTP_PROXY_NAME_PARAM="--stringparam Connector.proxyName.http $lc_proxy_name " fi if [ -n "$HTTP_PROXY_PORT" ]; then - HTTP_PROXY_PORT_PARAM="--stringparam http.proxyPort $HTTP_PROXY_PORT " + HTTP_PROXY_PORT_PARAM="--stringparam Connector.proxyPort.http $HTTP_PROXY_PORT " fi if [ -n "$HTTP_REDIRECT_PORT" ]; then - HTTP_REDIRECT_PORT_PARAM="--stringparam http.redirectPort $HTTP_REDIRECT_PORT " + HTTP_REDIRECT_PORT_PARAM="--stringparam Connector.redirectPort.http $HTTP_REDIRECT_PORT " fi if [ -n "$HTTP_CONNECTION_TIMEOUT" ]; then - HTTP_CONNECTION_TIMEOUT_PARAM="--stringparam http.connectionTimeout $HTTP_CONNECTION_TIMEOUT " + HTTP_CONNECTION_TIMEOUT_PARAM="--stringparam Connector.connectionTimeout.http $HTTP_CONNECTION_TIMEOUT " fi if [ -n "$HTTP_COMPRESSION" ]; then - HTTP_COMPRESSION_PARAM="--stringparam http.compression $HTTP_COMPRESSION " + HTTP_COMPRESSION_PARAM="--stringparam Connector.compression.http $HTTP_COMPRESSION " fi if [ -n "$HTTPS" ]; then - HTTPS_PARAM="--stringparam https $HTTPS " + HTTPS_PARAM="--stringparam Connector.https $HTTPS " +fi + +# RemoteIpValve configuration takes precedence over Connector 
proxy settings + +if [ -n "$REMOTE_IP_VALVE" ]; then + REMOTE_IP_VALVE_PARAM="--stringparam RemoteIpValve $REMOTE_IP_VALVE " +fi + +if [ -n "$REMOTE_IP_VALVE_PROTOCOL_HEADER" ]; then + REMOTE_IP_VALVE_PROTOCOL_HEADER_PARAM="--stringparam RemoteIpValve.protocolHeader $REMOTE_IP_VALVE_PROTOCOL_HEADER " +fi + +if [ -n "$REMOTE_IP_VALVE_PORT_HEADER" ]; then + REMOTE_IP_VALVE_PORT_HEADER_PARAM="--stringparam RemoteIpValve.portHeader $REMOTE_IP_VALVE_PORT_HEADER " +fi + +if [ -n "$REMOTE_IP_VALVE_REMOTE_IP_HEADER" ]; then + REMOTE_IP_VALVE_REMOTE_IP_HEADER_PARAM="--stringparam RemoteIpValve.remoteIpHeader $REMOTE_IP_VALVE_REMOTE_IP_HEADER " +fi + +if [ -n "$REMOTE_IP_VALVE_HOST_HEADER" ]; then + REMOTE_IP_VALVE_HOST_HEADER_PARAM="--stringparam RemoteIpValve.hostHeader $REMOTE_IP_VALVE_HOST_HEADER " fi transform="xsltproc \ @@ -60,6 +82,11 @@ transform="xsltproc \ $HTTP_CONNECTION_TIMEOUT_PARAM \ $HTTP_COMPRESSION_PARAM \ $HTTPS_PARAM \ + $REMOTE_IP_VALVE_PARAM \ + $REMOTE_IP_VALVE_PROTOCOL_HEADER_PARAM \ + $REMOTE_IP_VALVE_PORT_HEADER_PARAM \ + $REMOTE_IP_VALVE_REMOTE_IP_HEADER_PARAM \ + $REMOTE_IP_VALVE_HOST_HEADER_PARAM \ conf/letsencrypt-tomcat.xsl \ conf/server.xml" @@ -184,25 +211,37 @@ if [ -z "$MAIL_USER" ]; then exit 1 fi -# construct base URI (ignore default HTTP and HTTPS ports) +# construct base URI and origins (ignore default HTTP and HTTPS ports for URI, but always include port for origins) if [ "$PROTOCOL" = "https" ]; then if [ "$HTTPS_PROXY_PORT" = 443 ]; then export BASE_URI="${PROTOCOL}://${HOST}${ABS_PATH}" + export ADMIN_BASE_URI="${PROTOCOL}://admin.${HOST}${ABS_PATH}" + export ORIGIN="${PROTOCOL}://${HOST}" else export BASE_URI="${PROTOCOL}://${HOST}:${HTTPS_PROXY_PORT}${ABS_PATH}" + export ADMIN_BASE_URI="${PROTOCOL}://admin.${HOST}:${HTTPS_PROXY_PORT}${ABS_PATH}" + export ORIGIN="${PROTOCOL}://${HOST}:${HTTPS_PROXY_PORT}" fi else if [ "$HTTP_PROXY_PORT" = 80 ]; then export BASE_URI="${PROTOCOL}://${HOST}${ABS_PATH}" + export 
ADMIN_BASE_URI="${PROTOCOL}://admin.${HOST}${ABS_PATH}" + export ORIGIN="${PROTOCOL}://${HOST}" else export BASE_URI="${PROTOCOL}://${HOST}:${HTTP_PROXY_PORT}${ABS_PATH}" + export ADMIN_BASE_URI="${PROTOCOL}://admin.${HOST}:${HTTP_PROXY_PORT}${ABS_PATH}" + export ORIGIN="${PROTOCOL}://${HOST}:${HTTP_PROXY_PORT}" fi fi BASE_URI=$(echo "$BASE_URI" | tr '[:upper:]' '[:lower:]') # make sure it's lower-case +ADMIN_BASE_URI=$(echo "$ADMIN_BASE_URI" | tr '[:upper:]' '[:lower:]') # make sure it's lower-case +ORIGIN=$(echo "$ORIGIN" | tr '[:upper:]' '[:lower:]') # make sure it's lower-case printf "\n### Base URI: %s\n" "$BASE_URI" +printf "\n### Admin Base URI: %s\n" "$ADMIN_BASE_URI" +printf "\n### Origin: %s\n" "$ORIGIN" # functions that wait for other services to start @@ -308,7 +347,6 @@ generate_cert() local keystore_password="${11}" local cert_output="${12}" local public_key_output="${13}" - local private_key_output="${14}" # Build the Distinguished Name (DN) string, only including components if they're non-empty dname="CN=${common_name}" @@ -358,11 +396,11 @@ get_modulus() } OWNER_UUID=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase -OWNER_URI="${OWNER_URI:-${BASE_URI}admin/acl/agents/${OWNER_UUID}/#this}" # WebID URI. Can be external! +OWNER_URI="${OWNER_URI:-${ADMIN_BASE_URI}acl/agents/${OWNER_UUID}/#this}" # WebID URI. Can be external! OWNER_COMMON_NAME="$OWNER_GIVEN_NAME $OWNER_FAMILY_NAME" # those are required SECRETARY_UUID=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase -SECRETARY_URI="${SECRETARY_URI:-${BASE_URI}admin/acl/agents/${SECRETARY_UUID}/#this}" # WebID URI. Can be external! +SECRETARY_URI="${SECRETARY_URI:-${ADMIN_BASE_URI}acl/agents/${SECRETARY_UUID}/#this}" # WebID URI. Can be external! OWNER_DATASET_PATH="/var/linkeddatahub/datasets/owner/${OWNER_CERT_ALIAS}.trig" @@ -385,19 +423,21 @@ if [ ! 
-f "$OWNER_PUBLIC_KEY" ]; then "$OWNER_ORG_UNIT" "$OWNER_ORGANIZATION" \ "$OWNER_LOCALITY" "$OWNER_STATE_OR_PROVINCE" "$OWNER_COUNTRY_NAME" \ "$CERT_VALIDITY" "$OWNER_KEYSTORE" "$OWNER_CERT_PASSWORD" \ - "$OWNER_CERT" "$OWNER_PUBLIC_KEY" "$OWNER_PRIVATE_KEY" + "$OWNER_CERT" "$OWNER_PUBLIC_KEY" # write owner's metadata to a file mkdir -p "$(dirname "$OWNER_DATASET_PATH")" - OWNER_DOC_URI="${BASE_URI}admin/acl/agents/${OWNER_UUID}/" + OWNER_DOC_URI="${ADMIN_BASE_URI}acl/agents/${OWNER_UUID}/" OWNER_KEY_UUID=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase + OWNER_KEY_DOC_URI="${ADMIN_BASE_URI}acl/public-keys/${OWNER_KEY_UUID}/" + OWNER_KEY_URI="${OWNER_KEY_DOC_URI}#this" OWNER_PUBLIC_KEY_MODULUS=$(get_modulus "$OWNER_PUBLIC_KEY") printf "\n### Root owner WebID public key modulus: %s\n" "$OWNER_PUBLIC_KEY_MODULUS" - export OWNER_COMMON_NAME OWNER_URI OWNER_DOC_URI OWNER_PUBLIC_KEY_MODULUS OWNER_KEY_UUID SECRETARY_URI + export OWNER_COMMON_NAME OWNER_URI OWNER_DOC_URI OWNER_KEY_DOC_URI OWNER_KEY_URI OWNER_PUBLIC_KEY_MODULUS SECRETARY_URI envsubst < root-owner.trig.template > "$OWNER_DATASET_PATH" fi @@ -422,29 +462,52 @@ if [ ! 
-f "$SECRETARY_PUBLIC_KEY" ]; then "" "" \ "" "" "" \ "$CERT_VALIDITY" "$SECRETARY_KEYSTORE" "$SECRETARY_CERT_PASSWORD" \ - "$SECRETARY_CERT" "$SECRETARY_PUBLIC_KEY" "$SECRETARY_PRIVATE_KEY" + "$SECRETARY_CERT" "$SECRETARY_PUBLIC_KEY" # write secretary's metadata to a file mkdir -p "$(dirname "$SECRETARY_DATASET_PATH")" - SECRETARY_DOC_URI="${BASE_URI}admin/acl/agents/${SECRETARY_UUID}/" + SECRETARY_DOC_URI="${ADMIN_BASE_URI}acl/agents/${SECRETARY_UUID}/" SECRETARY_KEY_UUID=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase + SECRETARY_KEY_DOC_URI="${ADMIN_BASE_URI}acl/public-keys/${SECRETARY_KEY_UUID}/" + SECRETARY_KEY_URI="${SECRETARY_KEY_DOC_URI}#this" SECRETARY_PUBLIC_KEY_MODULUS=$(get_modulus "$SECRETARY_PUBLIC_KEY") printf "\n### Secretary WebID public key modulus: %s\n" "$SECRETARY_PUBLIC_KEY_MODULUS" - export SECRETARY_URI SECRETARY_DOC_URI SECRETARY_PUBLIC_KEY_MODULUS SECRETARY_KEY_UUID + export SECRETARY_URI SECRETARY_DOC_URI SECRETARY_KEY_DOC_URI SECRETARY_KEY_URI SECRETARY_PUBLIC_KEY_MODULUS envsubst < root-secretary.trig.template > "$SECRETARY_DATASET_PATH" fi -if [ -z "$LOAD_DATASETS" ]; then - if [ ! 
-d /var/linkeddatahub/based-datasets ]; then - LOAD_DATASETS=true - else - LOAD_DATASETS=false - fi -fi +mkdir -p /var/linkeddatahub/based-datasets + +# If certs already exist, extract metadata from existing .trig files using SPARQL and create .nq files +printf "\n### Reading owner metadata from existing file: %s\n" /var/linkeddatahub/based-datasets/root-owner.nq + +trig --base="$ADMIN_BASE_URI" --output=nq "$OWNER_DATASET_PATH" > /var/linkeddatahub/based-datasets/root-owner.nq + +owner_metadata=$(sparql --data=/var/linkeddatahub/based-datasets/root-owner.nq --query=select-agent-metadata.rq --results=XML) + +OWNER_URI=$(echo "$owner_metadata" | xmlstarlet sel -N srx="http://www.w3.org/2005/sparql-results#" -T -t -v "/srx:sparql/srx:results/srx:result/srx:binding[@name='agent']/srx:uri") +OWNER_DOC_URI=$(echo "$owner_metadata" | xmlstarlet sel -N srx="http://www.w3.org/2005/sparql-results#" -T -t -v "/srx:sparql/srx:results/srx:result/srx:binding[@name='doc']/srx:uri") +OWNER_KEY_URI=$(echo "$owner_metadata" | xmlstarlet sel -N srx="http://www.w3.org/2005/sparql-results#" -T -t -v "/srx:sparql/srx:results/srx:result/srx:binding[@name='key']/srx:uri") +OWNER_KEY_DOC_URI=$(echo "$OWNER_KEY_URI" | sed 's|#this$||') +OWNER_KEY_URI="${OWNER_KEY_DOC_URI}#this" + +printf "\n### Reading secretary metadata from existing file: %s\n" /var/linkeddatahub/based-datasets/root-secretary.nq + +trig --base="$ADMIN_BASE_URI" --output=nq "$SECRETARY_DATASET_PATH" > /var/linkeddatahub/based-datasets/root-secretary.nq + +secretary_metadata=$(sparql --data=/var/linkeddatahub/based-datasets/root-secretary.nq --query=select-agent-metadata.rq --results=XML) + +SECRETARY_URI=$(echo "$secretary_metadata" | xmlstarlet sel -N srx="http://www.w3.org/2005/sparql-results#" -T -t -v "/srx:sparql/srx:results/srx:result/srx:binding[@name='agent']/srx:uri") +SECRETARY_DOC_URI=$(echo "$secretary_metadata" | xmlstarlet sel -N srx="http://www.w3.org/2005/sparql-results#" -T -t -v 
"/srx:sparql/srx:results/srx:result/srx:binding[@name='doc']/srx:uri") +SECRETARY_KEY_URI=$(echo "$secretary_metadata" | xmlstarlet sel -N srx="http://www.w3.org/2005/sparql-results#" -T -t -v "/srx:sparql/srx:results/srx:result/srx:binding[@name='key']/srx:uri") +SECRETARY_KEY_DOC_URI=$(echo "$SECRETARY_KEY_URI" | sed 's|#this$||') +SECRETARY_KEY_URI="${SECRETARY_KEY_DOC_URI}#this" + +# Note: LOAD_DATASETS check is now done per-app inside the loop # base the $CONTEXT_DATASET @@ -476,7 +539,7 @@ readarray apps < <(xmlstarlet sel -B \ -o "\"" \ -v "srx:binding[@name = 'endUserApp']" \ -o "\" \"" \ - -v "srx:binding[@name = 'endUserBase']" \ + -v "srx:binding[@name = 'endUserOrigin']" \ -o "\" \"" \ -v "srx:binding[@name = 'endUserQuadStore']" \ -o "\" \"" \ @@ -490,7 +553,7 @@ readarray apps < <(xmlstarlet sel -B \ -o "\" \"" \ -v "srx:binding[@name = 'adminApp']" \ -o "\" \"" \ - -v "srx:binding[@name = 'adminBase']" \ + -v "srx:binding[@name = 'adminOrigin']" \ -o "\" \"" \ -v "srx:binding[@name = 'adminQuadStore']" \ -o "\" \"" \ @@ -508,21 +571,21 @@ readarray apps < <(xmlstarlet sel -B \ for app in "${apps[@]}"; do app_array=(${app}) end_user_app="${app_array[0]//\"/}" - end_user_base_uri="${app_array[1]//\"/}" + end_user_origin="${app_array[1]//\"/}" end_user_quad_store_url="${app_array[2]//\"/}" end_user_endpoint_url="${app_array[3]//\"/}" end_user_service_auth_user="${app_array[4]//\"/}" end_user_service_auth_pwd="${app_array[5]//\"/}" end_user_owner="${app_array[6]//\"/}" admin_app="${app_array[7]//\"/}" - admin_base_uri="${app_array[8]//\"/}" + admin_origin="${app_array[8]//\"/}" admin_quad_store_url="${app_array[9]//\"/}" admin_endpoint_url="${app_array[10]//\"/}" admin_service_auth_user="${app_array[11]//\"/}" admin_service_auth_pwd="${app_array[12]//\"/}" admin_owner="${app_array[13]//\"/}" - printf "\n### Processing dataspace. End-user app: %s Admin app: %s\n" "$end_user_app" "$admin_app" + printf "\n### Processing dataspace. 
End-user app: %s (origin: %s) Admin app: %s (origin: %s)\n" "$end_user_app" "$end_user_origin" "$admin_app" "$admin_origin" if [ -z "$end_user_app" ]; then printf "\nEnd-user app URI could not be extracted from %s. Exiting...\n" "$CONTEXT_DATASET" @@ -536,8 +599,8 @@ for app in "${apps[@]}"; do printf "\nAdmin app URI could not be extracted for the <%s> app. Exiting...\n" "$end_user_app" exit 1 fi - if [ -z "$admin_base_uri" ]; then - printf "\nAdmin base URI extracted for the <%s> app. Exiting...\n" "$end_user_app" + if [ -z "$admin_origin" ]; then + printf "\nAdmin origin could not be extracted for the <%s> app. Exiting...\n" "$end_user_app" exit 1 fi if [ -z "$admin_quad_store_url" ]; then @@ -545,13 +608,15 @@ for app in "${apps[@]}"; do exit 1 fi - # check if this app is the root app - if [ "$end_user_base_uri" = "$BASE_URI" ]; then + # check if this app is the root app by comparing origins + if [ "$end_user_origin" = "$ORIGIN" ]; then root_end_user_app="$end_user_app" + #root_end_user_origin="$end_user_origin" root_end_user_quad_store_url="$end_user_quad_store_url" root_end_user_service_auth_user="$end_user_service_auth_user" root_end_user_service_auth_pwd="$end_user_service_auth_pwd" root_admin_app="$admin_app" + #root_admin_origin="$admin_origin" root_admin_quad_store_url="$admin_quad_store_url" root_admin_service_auth_user="$admin_service_auth_user" root_admin_service_auth_pwd="$admin_service_auth_pwd" @@ -569,9 +634,23 @@ for app in "${apps[@]}"; do printf "\n### Quad store URL of the root end-user service: %s\n" "$end_user_quad_store_url" printf "\n### Quad store URL of the root admin service: %s\n" "$admin_quad_store_url" - # load default admin/end-user datasets if we haven't yet created a folder with re-based versions of them (and then create it) - if [ "$LOAD_DATASETS" = "true" ]; then - mkdir -p /var/linkeddatahub/based-datasets + # Create app-specific subfolder based on end-user origin + app_folder=$(echo "$end_user_origin" | sed 's|https://||' | 
sed 's|http://||' | sed 's|[:/]|-|g') + + # Determine whether to load datasets for this app + load_datasets_for_app="$LOAD_DATASETS" + if [ -z "$load_datasets_for_app" ]; then + if [ ! -d "/var/linkeddatahub/based-datasets/${app_folder}" ]; then + load_datasets_for_app=true + else + load_datasets_for_app=false + fi + fi + + # Check if this specific app's datasets should be loaded + if [ "$load_datasets_for_app" = true ]; then + printf "\n### Loading datasets for app: %s\n" "$app_folder" + mkdir -p "/var/linkeddatahub/based-datasets/${app_folder}" # create query file by injecting environmental variables into the template @@ -580,7 +659,7 @@ for app in "${apps[@]}"; do END_USER_DATASET=$(echo "$END_USER_DATASET_URL" | cut -c 8-) # strip leading file:// printf "\n### Reading end-user dataset from a local file: %s\n" "$END_USER_DATASET" ;; - *) + *) END_USER_DATASET=$(mktemp) printf "\n### Downloading end-user dataset from a URL: %s\n" "$END_USER_DATASET_URL" @@ -593,7 +672,7 @@ for app in "${apps[@]}"; do ADMIN_DATASET=$(echo "$ADMIN_DATASET_URL" | cut -c 8-) # strip leading file:// printf "\n### Reading admin dataset from a local file: %s\n" "$ADMIN_DATASET" ;; - *) + *) ADMIN_DATASET=$(mktemp) printf "\n### Downloading admin dataset from a URL: %s\n" "$ADMIN_DATASET_URL" @@ -601,42 +680,83 @@ for app in "${apps[@]}"; do curl "$ADMIN_DATASET_URL" > "$ADMIN_DATASET" ;; esac - trig --base="$end_user_base_uri" "$END_USER_DATASET" > /var/linkeddatahub/based-datasets/end-user.nq + trig --base="${end_user_origin}/" "$END_USER_DATASET" > "/var/linkeddatahub/based-datasets/${app_folder}/end-user.nq" printf "\n### Waiting for %s...\n" "$end_user_quad_store_url" wait_for_url "$end_user_quad_store_url" "$end_user_service_auth_user" "$end_user_service_auth_pwd" "$TIMEOUT" "application/n-quads" printf "\n### Loading end-user dataset into the triplestore...\n" - append_quads "$end_user_quad_store_url" "$end_user_service_auth_user" "$end_user_service_auth_pwd" 
/var/linkeddatahub/based-datasets/end-user.nq "application/n-quads" + append_quads "$end_user_quad_store_url" "$end_user_service_auth_user" "$end_user_service_auth_pwd" "/var/linkeddatahub/based-datasets/${app_folder}/end-user.nq" "application/n-quads" - trig --base="$admin_base_uri" "$ADMIN_DATASET" > /var/linkeddatahub/based-datasets/admin.nq + trig --base="${admin_origin}/" "$ADMIN_DATASET" > "/var/linkeddatahub/based-datasets/${app_folder}/admin.nq" printf "\n### Waiting for %s...\n" "$admin_quad_store_url" wait_for_url "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" "$TIMEOUT" "application/n-quads" printf "\n### Loading admin dataset into the triplestore...\n" - append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" /var/linkeddatahub/based-datasets/admin.nq "application/n-quads" + append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" "/var/linkeddatahub/based-datasets/${app_folder}/admin.nq" "application/n-quads" + + namespace_ontology_dataset_path="/var/linkeddatahub/datasets/${app_folder}/namespace-ontology.trig" + mkdir -p "$(dirname "$namespace_ontology_dataset_path")" + export end_user_origin admin_origin + envsubst < namespace-ontology.trig.template > "$namespace_ontology_dataset_path" + + trig --base="${admin_origin}/" --output=nq "$namespace_ontology_dataset_path" > "/var/linkeddatahub/based-datasets/${app_folder}/namespace-ontology.nq" + + printf "\n### Loading namespace ontology into the admin triplestore...\n" + append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" "/var/linkeddatahub/based-datasets/${app_folder}/namespace-ontology.nq" "application/n-quads" + + # Load full owner/secretary metadata (agent + key) only for root app + if [ "$end_user_origin" = "$ORIGIN" ]; then + printf "\n### Uploading the metadata of the owner agent...\n\n" + append_quads "$admin_quad_store_url" "$admin_service_auth_user" 
"$admin_service_auth_pwd" /var/linkeddatahub/based-datasets/root-owner.nq "application/n-quads" + + printf "\n### Uploading the metadata of the secretary agent...\n\n" + append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" /var/linkeddatahub/based-datasets/root-secretary.nq "application/n-quads" + fi + + # Load owner/secretary authorizations for this app (with app-specific UUIDs) + # Note: OWNER_URI and SECRETARY_URI reference the root admin URIs + owner_auth_dataset_path="/var/linkeddatahub/datasets/${app_folder}/owner-authorization.trig" + mkdir -p "$(dirname "$owner_auth_dataset_path")" + + OWNER_AUTH_UUID=$(uuidgen | tr '[:upper:]' '[:lower:]') + OWNER_AUTH_DOC_URI="${admin_origin}/acl/authorizations/${OWNER_AUTH_UUID}/" + OWNER_AUTH_URI="${OWNER_AUTH_DOC_URI}#auth" + + export OWNER_URI OWNER_DOC_URI OWNER_KEY_DOC_URI OWNER_AUTH_DOC_URI OWNER_AUTH_URI + envsubst < root-owner-authorization.trig.template > "$owner_auth_dataset_path" + + trig --base="${admin_origin}/" --output=nq "$owner_auth_dataset_path" > "/var/linkeddatahub/based-datasets/${app_folder}/owner-authorization.nq" + + printf "\n### Uploading owner authorizations for this app...\n\n" + append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" "/var/linkeddatahub/based-datasets/${app_folder}/owner-authorization.nq" "application/n-quads" - trig --base="$admin_base_uri" --output=nq "$OWNER_DATASET_PATH" > /var/linkeddatahub/based-datasets/root-owner.nq + secretary_auth_dataset_path="/var/linkeddatahub/datasets/${app_folder}/secretary-authorization.trig" + mkdir -p "$(dirname "$secretary_auth_dataset_path")" - printf "\n### Uploading the metadata of the owner agent...\n\n" - append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" /var/linkeddatahub/based-datasets/root-owner.nq "application/n-quads" + SECRETARY_AUTH_UUID=$(uuidgen | tr '[:upper:]' '[:lower:]') + 
SECRETARY_AUTH_DOC_URI="${admin_origin}/acl/authorizations/${SECRETARY_AUTH_UUID}/" + SECRETARY_AUTH_URI="${SECRETARY_AUTH_DOC_URI}#auth" - trig --base="$admin_base_uri" --output=nq "$SECRETARY_DATASET_PATH" > /var/linkeddatahub/based-datasets/root-secretary.nq + export SECRETARY_URI SECRETARY_DOC_URI SECRETARY_KEY_DOC_URI SECRETARY_AUTH_DOC_URI SECRETARY_AUTH_URI + envsubst < root-secretary-authorization.trig.template > "$secretary_auth_dataset_path" - printf "\n### Uploading the metadata of the secretary agent...\n\n" - append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" /var/linkeddatahub/based-datasets/root-secretary.nq "application/n-quads" + trig --base="${admin_origin}/" --output=nq "$secretary_auth_dataset_path" > "/var/linkeddatahub/based-datasets/${app_folder}/secretary-authorization.nq" + + printf "\n### Uploading secretary authorizations for this app...\n\n" + append_quads "$admin_quad_store_url" "$admin_service_auth_user" "$admin_service_auth_pwd" "/var/linkeddatahub/based-datasets/${app_folder}/secretary-authorization.nq" "application/n-quads" fi done rm -f root_service_metadata.xml if [ -z "$root_end_user_app" ]; then - printf "\nRoot end-user app with base URI <%s> not found. Exiting...\n" "$BASE_URI" + printf "\nRoot end-user app with origin <%s> not found. Exiting...\n" "$ORIGIN" exit 1 fi if [ -z "$root_admin_app" ]; then - printf "\nRoot admin app (for end-user app with base URI <%s>) not found. Exiting...\n" "$BASE_URI" + printf "\nRoot admin app (for end-user app with origin <%s>) not found. 
Exiting...\n" "$ORIGIN" exit 1 fi @@ -820,6 +940,16 @@ if [ -f "/run/secrets/google_client_secret" ]; then GOOGLE_CLIENT_SECRET_PARAM="--stringparam google:clientSecret '$GOOGLE_CLIENT_SECRET' " fi +if [ -f "/run/secrets/orcid_client_id" ]; then + ORCID_CLIENT_ID=$(cat /run/secrets/orcid_client_id) + ORCID_CLIENT_ID_PARAM="--stringparam orcid:clientID '$ORCID_CLIENT_ID' " +fi + +if [ -f "/run/secrets/orcid_client_secret" ]; then + ORCID_CLIENT_SECRET=$(cat /run/secrets/orcid_client_secret) + ORCID_CLIENT_SECRET_PARAM="--stringparam orcid:clientSecret '$ORCID_CLIENT_SECRET' " +fi + transform="xsltproc \ --output conf/Catalina/localhost/ROOT.xml \ $CACHE_MODEL_LOADS_PARAM \ @@ -855,6 +985,8 @@ transform="xsltproc \ $MAIL_PASSWORD_PARAM \ $GOOGLE_CLIENT_ID_PARAM \ $GOOGLE_CLIENT_SECRET_PARAM \ + $ORCID_CLIENT_ID_PARAM \ + $ORCID_CLIENT_SECRET_PARAM \ /var/linkeddatahub/xsl/context.xsl \ conf/Catalina/localhost/ROOT.xml" diff --git a/platform/namespace-ontology.trig.template b/platform/namespace-ontology.trig.template new file mode 100644 index 000000000..c62472479 --- /dev/null +++ b/platform/namespace-ontology.trig.template @@ -0,0 +1,134 @@ +@prefix def: . +@prefix ldh: . +@prefix ac: . +@prefix rdf: . +@prefix xsd: . +@prefix dh: . +@prefix sd: . +@prefix sp: . +@prefix sioc: . +@prefix foaf: . +@prefix dct: . +@prefix spin: . +@prefix lacl: . +@prefix adm: . +@prefix rdfs: . +@prefix owl: . +@prefix acl: . +@prefix cert: . +@prefix spin: . + +# namespace ontology + +<${admin_origin}/ontologies/namespace/> +{ + <${admin_origin}/ontologies/namespace/> a dh:Item ; + sioc:has_container <${admin_origin}/ontologies/> ; + dct:title "Namespace" ; + foaf:primaryTopic <${end_user_origin}/ns#> . + + <${end_user_origin}/ns#> a owl:Ontology ; + rdfs:label "Namespace" ; + rdfs:comment "Namespace of the application" ; + foaf:isPrimaryTopicOf <${end_user_origin}/ns> ; + owl:imports ; + owl:versionInfo "1.0-SNAPSHOT" . 
+} + +# public namespace authorization + +<${admin_origin}/acl/authorizations/public-namespace/> +{ + + <${admin_origin}/acl/authorizations/public-namespace/> a dh:Item ; + sioc:has_container <${admin_origin}/acl/authorizations/> ; + dct:title "Public namespace access" ; + foaf:primaryTopic <${admin_origin}/acl/authorizations/public-namespace/#this> . + + <${admin_origin}/acl/authorizations/public-namespace/#this> a acl:Authorization ; + rdfs:label "Public namespace access" ; + rdfs:comment "Allows non-authenticated access" ; + acl:accessTo <${end_user_origin}/ns> ; # end-user ontologies are public + acl:mode acl:Read, acl:Append ; # allow queries over GET as well as POST + acl:agentClass foaf:Agent, acl:AuthenticatedAgent . + +} + +# SPARQL endpoint authorization + +<${admin_origin}/acl/authorizations/sparql-endpoint/> +{ + + <${admin_origin}/acl/authorizations/sparql-endpoint/> a dh:Item ; + sioc:has_container <${admin_origin}/acl/authorizations/> ; + dct:title "SPARQL endpoint access" ; + foaf:primaryTopic <${admin_origin}/acl/authorizations/sparql-endpoint/#this> . + + <${admin_origin}/acl/authorizations/sparql-endpoint/#this> a acl:Authorization ; + rdfs:label "SPARQL endpoint access" ; + rdfs:comment "Allows only authenticated access" ; + acl:accessTo <${end_user_origin}/sparql> ; + acl:mode acl:Read, acl:Append ; # allow queries over GET as well as POST + acl:agentClass acl:AuthenticatedAgent . + +} + +# write/append authorization + +<${admin_origin}/acl/authorizations/write-append/> +{ + + <${admin_origin}/acl/authorizations/write-append/> a dh:Item ; + sioc:has_container <${admin_origin}/acl/authorizations/> ; + dct:title "Write/append access" ; + foaf:primaryTopic <${admin_origin}/acl/authorizations/write-append/#this> . 
+ + <${admin_origin}/acl/authorizations/write-append/#this> a acl:Authorization ; + rdfs:label "Write/append access" ; + rdfs:comment "Allows write access to all documents and containers" ; + acl:accessToClass dh:Item, dh:Container, def:Root ; + acl:accessTo <${end_user_origin}/sparql>, <${end_user_origin}/importer>, <${end_user_origin}/add>, <${end_user_origin}/generate>, <${end_user_origin}/ns> ; + acl:mode acl:Write, acl:Append ; + acl:agentGroup <${admin_origin}/acl/groups/owners/#this>, <${admin_origin}/acl/groups/writers/#this> . + +} + +# full access authorization + +<${admin_origin}/acl/authorizations/full-control/> +{ + + <${admin_origin}/acl/authorizations/full-control/> a dh:Item ; + sioc:has_container <${admin_origin}/acl/authorizations/> ; + dct:title "Full control" ; + foaf:primaryTopic <${admin_origin}/acl/authorizations/full-control/#this> . + + <${admin_origin}/acl/authorizations/full-control/#this> a acl:Authorization ; + rdfs:label "Full control" ; + rdfs:comment "Allows full read/write access to all application resources" ; + acl:accessToClass dh:Item, dh:Container, def:Root ; + acl:accessTo <${end_user_origin}/sparql>, <${end_user_origin}/importer>, <${end_user_origin}/add>, <${end_user_origin}/generate>, <${end_user_origin}/ns> ; + acl:mode acl:Read, acl:Append, acl:Write, acl:Control ; + acl:agentGroup <${admin_origin}/acl/groups/owners/#this> . + +} + +# read access + +<${admin_origin}/acl/authorizations/read/> +{ + + <${admin_origin}/acl/authorizations/read/> a dh:Item ; + sioc:has_container <${admin_origin}/acl/authorizations/> ; + dct:title "Read access" ; + foaf:primaryTopic <${admin_origin}/acl/authorizations/read/#this> . 
+ + <${admin_origin}/acl/authorizations/read/#this> a acl:Authorization ; + rdfs:label "Read access" ; + rdfs:comment "Allows read access to all resources" ; + acl:accessToClass dh:Item, dh:Container, def:Root, ; + acl:accessTo <${end_user_origin}/sparql> ; + acl:mode acl:Read ; + acl:agentGroup <${admin_origin}/acl/groups/owners/#this>, <${admin_origin}/acl/groups/writers/#this>, <${admin_origin}/acl/groups/readers/#this> . + +} diff --git a/platform/root-owner-authorization.trig.template b/platform/root-owner-authorization.trig.template new file mode 100644 index 000000000..35357cd70 --- /dev/null +++ b/platform/root-owner-authorization.trig.template @@ -0,0 +1,33 @@ +@prefix rdfs: . +@prefix xsd: . +@prefix acl: . +@prefix cert: . +@prefix dh: . +@prefix sioc: . +@prefix foaf: . +@prefix dct: . + +# AUTHORIZATIONS + +# root owner is a member of the owners group + + +{ + + foaf:member <${OWNER_URI}> . +} + +<${OWNER_AUTH_DOC_URI}> +{ + + <${OWNER_AUTH_DOC_URI}> a dh:Item ; + foaf:primaryTopic <${OWNER_AUTH_URI}> ; + sioc:has_container ; + dct:title "Public owner's WebID" . + + <${OWNER_AUTH_URI}> a acl:Authorization ; + acl:accessTo <${OWNER_DOC_URI}>, <${OWNER_KEY_DOC_URI}> ; + acl:mode acl:Read ; + acl:agentClass foaf:Agent, acl:AuthenticatedAgent . + +} diff --git a/platform/root-owner.trig.template b/platform/root-owner.trig.template index 5a0196568..64567bd60 100644 --- a/platform/root-owner.trig.template +++ b/platform/root-owner.trig.template @@ -20,7 +20,7 @@ <${OWNER_URI}> a foaf:Agent ; foaf:name "${OWNER_COMMON_NAME}" ; foaf:mbox ; - cert:key . + cert:key <${OWNER_KEY_URI}> . # secretary delegates the owner agent @@ -30,42 +30,17 @@ # PUBLIC KEY - +<${OWNER_KEY_DOC_URI}> { - a dh:Item ; - foaf:primaryTopic ; + <${OWNER_KEY_DOC_URI}> a dh:Item ; + foaf:primaryTopic <${OWNER_KEY_URI}> ; sioc:has_container ; dct:title "${OWNER_COMMON_NAME}" . 
- a cert:PublicKey ; + <${OWNER_KEY_URI}> a cert:PublicKey ; rdfs:label "${OWNER_COMMON_NAME}" ; cert:modulus "${OWNER_PUBLIC_KEY_MODULUS}"^^xsd:hexBinary; cert:exponent 65537 . -} - -# AUTHORIZATIONS - -# root owner is a member of the owners group - - -{ - - foaf:member <${OWNER_URI}> . -} - - # TO-DO: use $OWNER_AUTH_UUID -{ - - a dh:Item ; - foaf:primaryTopic ; - sioc:has_container ; - dct:title "Public owner's WebID" . - - a acl:Authorization ; - acl:accessTo <${OWNER_DOC_URI}>, ; - acl:mode acl:Read ; - acl:agentClass foaf:Agent, acl:AuthenticatedAgent . - } \ No newline at end of file diff --git a/platform/root-secretary-authorization.trig.template b/platform/root-secretary-authorization.trig.template new file mode 100644 index 000000000..4bedeb5cb --- /dev/null +++ b/platform/root-secretary-authorization.trig.template @@ -0,0 +1,34 @@ +@prefix rdfs: . +@prefix xsd: . +@prefix acl: . +@prefix cert: . +@prefix dh: . +@prefix sioc: . +@prefix foaf: . +@prefix dct: . + +# AUTHORIZATION + +# secretary is a member of the writers group + + +{ + + foaf:member <${SECRETARY_URI}> . + +} + +<${SECRETARY_AUTH_DOC_URI}> +{ + + <${SECRETARY_AUTH_DOC_URI}> a dh:Item ; + foaf:primaryTopic <${SECRETARY_AUTH_URI}> ; + sioc:has_container ; + dct:title "Public secretary's WebID" . + + <${SECRETARY_AUTH_URI}> a acl:Authorization ; + acl:accessTo <${SECRETARY_DOC_URI}>, <${SECRETARY_KEY_DOC_URI}> ; + acl:mode acl:Read ; + acl:agentClass foaf:Agent, acl:AuthenticatedAgent . + +} diff --git a/platform/root-secretary.trig.template b/platform/root-secretary.trig.template index a6579251c..4aa9a333b 100644 --- a/platform/root-secretary.trig.template +++ b/platform/root-secretary.trig.template @@ -19,49 +19,23 @@ <${SECRETARY_URI}> a foaf:Agent ; foaf:name "LinkedDataHub" ; - cert:key . + cert:key <${SECRETARY_KEY_URI}> . 
} # PUBLIC KEY - +<${SECRETARY_KEY_DOC_URI}> { - a dh:Item ; - foaf:primaryTopic ; + <${SECRETARY_KEY_DOC_URI}> a dh:Item ; + foaf:primaryTopic <${SECRETARY_KEY_URI}> ; sioc:has_container ; dct:title "LinkedDataHub" . - a cert:PublicKey ; + <${SECRETARY_KEY_URI}> a cert:PublicKey ; rdfs:label "LinkedDataHub" ; cert:modulus "${SECRETARY_PUBLIC_KEY_MODULUS}"^^xsd:hexBinary; cert:exponent 65537 . -} - -# AUTHORIZATION - -# secretary is a member of the writers group - - -{ - - foaf:member <${SECRETARY_URI}> . - -} - - # TO-DO: use $SECRETARY_AUTH_UUID -{ - - a dh:Item ; - foaf:primaryTopic ; - sioc:has_container ; - dct:title "Public secretary's WebID" . - - a acl:Authorization ; - acl:accessTo <${SECRETARY_DOC_URI}>, ; - acl:mode acl:Read ; - acl:agentClass foaf:Agent, acl:AuthenticatedAgent . - } \ No newline at end of file diff --git a/platform/select-agent-metadata.rq b/platform/select-agent-metadata.rq new file mode 100644 index 000000000..bb01ebe55 --- /dev/null +++ b/platform/select-agent-metadata.rq @@ -0,0 +1,13 @@ +PREFIX foaf: +PREFIX cert: +SELECT ?agent ?doc ?key +WHERE { +GRAPH ?g1 { + ?agent a foaf:Agent . + ?agent cert:key ?key . +} +GRAPH ?g2 { + ?doc foaf:primaryTopic ?agent . 
+} +} +LIMIT 1 diff --git a/platform/select-root-services.rq b/platform/select-root-services.rq index 658fa4d61..2a307e4e1 100644 --- a/platform/select-root-services.rq +++ b/platform/select-root-services.rq @@ -2,15 +2,16 @@ PREFIX ldt: PREFIX sd: PREFIX a: PREFIX lapp: +PREFIX ldh: PREFIX foaf: -SELECT ?endUserApp ?endUserBase ?endUserQuadStore ?endUserEndpoint ?endUserAuthUser ?endUserAuthPwd ?endUserMaker ?adminApp ?adminBase ?adminQuadStore ?adminEndpoint ?adminAuthUser ?adminAuthPwd ?adminMaker +SELECT ?endUserApp ?endUserOrigin ?endUserQuadStore ?endUserEndpoint ?endUserAuthUser ?endUserAuthPwd ?endUserMaker ?adminApp ?adminOrigin ?adminQuadStore ?adminEndpoint ?adminAuthUser ?adminAuthPwd ?adminMaker { - ?endUserApp ldt:base ?endUserBase ; + ?endUserApp ldh:origin ?endUserOrigin ; ldt:service ?endUserService ; lapp:adminApplication ?adminApp . ?adminApp ldt:service ?adminService ; - ldt:base ?adminBase . + ldh:origin ?adminOrigin . ?endUserService a:quadStore ?endUserQuadStore ; sd:endpoint ?endUserEndpoint . 
?adminService a:quadStore ?adminQuadStore ; diff --git a/pom.xml b/pom.xml index 0f9b5d06e..809d1463d 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ com.atomgraph linkeddatahub - 5.0.23 + 5.1.0-SNAPSHOT ${packaging.type} AtomGraph LinkedDataHub @@ -46,7 +46,7 @@ https://github.com/AtomGraph/LinkedDataHub scm:git:git://github.com/AtomGraph/LinkedDataHub.git scm:git:git@github.com:AtomGraph/LinkedDataHub.git - linkeddatahub-5.0.23 + linkeddatahub-2.1.1 @@ -104,11 +104,6 @@ jersey-media-json-processing 3.1.0 - - xom - xom - 1.3.8 - com.sun.mail jakarta.mail @@ -177,6 +172,12 @@ 10.1.2 jar + + junit + junit + 4.13.2 + test + diff --git a/release.sh b/release.sh index 9aa35e951..f45cf3fe3 100755 --- a/release.sh +++ b/release.sh @@ -21,6 +21,40 @@ print_error() { echo -e "${RED}[ERROR]${NC} $1" } +# Track if release completed successfully +RELEASE_SUCCESSFUL=false + +# Trap handler for automatic cleanup on failure +cleanup_on_failure() { + local exit_code=$? + + # Only clean up if release failed (non-zero exit) and wasn't successful + if [ $exit_code -ne 0 ] && [ "$RELEASE_SUCCESSFUL" = false ]; then + print_error "Release failed! Rolling back changes..." + + # Clean up Maven release artifacts + mvn release:clean 2>/dev/null || true + + # Delete release tag if it exists + if [ -n "$RELEASE_TAG" ]; then + git tag -d "$RELEASE_TAG" 2>/dev/null || true + fi + + # Switch back to develop branch + git checkout develop 2>/dev/null || true + + # Delete release branch if it exists + if [ -n "$RELEASE_BRANCH" ]; then + git branch -D "$RELEASE_BRANCH" 2>/dev/null || true + fi + + print_status "Rollback complete. You're back on develop branch." + exit $exit_code + fi +} + +trap cleanup_on_failure EXIT + # Check if we're on develop branch CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD) if [ "$CURRENT_BRANCH" != "develop" ]; then @@ -40,6 +74,22 @@ if ! git diff-index --quiet HEAD --; then exit 1 fi +# Check if GPG is installed and configured +if ! 
command -v gpg &> /dev/null; then + print_error "GPG is not installed. Maven release requires GPG to sign artifacts." + print_error "Install GPG with: brew install gnupg" + exit 1 +fi + +# Check if GPG has at least one secret key +if ! gpg --list-secret-keys --keyid-format=long 2>/dev/null | grep -q "sec"; then + print_error "No GPG secret key found. You need a GPG key to sign Maven artifacts." + print_error "Generate one with: gpg --gen-key" + exit 1 +fi + +print_status "GPG check passed" + # Get current version from pom.xml CURRENT_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) print_status "Current version: $CURRENT_VERSION" @@ -123,6 +173,9 @@ if git ls-remote --heads origin "$RELEASE_BRANCH" | grep -q "$RELEASE_BRANCH"; t fi fi +# Mark release as successful to prevent rollback +RELEASE_SUCCESSFUL=true + print_status "Release $RELEASE_VERSION completed successfully!" print_status "- Master branch contains release version $RELEASE_VERSION" print_status "- Develop branch contains next development version" diff --git a/src/main/java/com/atomgraph/linkeddatahub/Application.java b/src/main/java/com/atomgraph/linkeddatahub/Application.java index 49192395b..eeebe124a 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/Application.java +++ b/src/main/java/com/atomgraph/linkeddatahub/Application.java @@ -23,6 +23,7 @@ import com.atomgraph.linkeddatahub.server.mapper.auth.webid.InvalidWebIDURIExceptionMapper; import com.atomgraph.linkeddatahub.server.mapper.auth.AuthorizationExceptionMapper; import com.atomgraph.linkeddatahub.server.mapper.auth.AuthenticationExceptionMapper; +import com.atomgraph.linkeddatahub.server.mapper.ForbiddenExceptionMapper; import com.atomgraph.linkeddatahub.server.mapper.auth.webid.WebIDCertificateExceptionMapper; import com.atomgraph.client.MediaTypes; import com.atomgraph.client.locator.PrefixMapper; @@ -99,11 +100,11 @@ import com.atomgraph.linkeddatahub.server.factory.ServiceFactory; import 
com.atomgraph.linkeddatahub.server.filter.request.OntologyFilter; import com.atomgraph.linkeddatahub.server.filter.request.AuthorizationFilter; -import com.atomgraph.linkeddatahub.server.filter.request.auth.IDTokenFilter; import com.atomgraph.linkeddatahub.server.filter.request.ContentLengthLimitFilter; import com.atomgraph.linkeddatahub.server.filter.request.auth.ProxiedWebIDFilter; +import com.atomgraph.linkeddatahub.server.filter.response.CORSFilter; import com.atomgraph.linkeddatahub.server.filter.response.ResponseHeadersFilter; -import com.atomgraph.linkeddatahub.server.filter.response.BackendInvalidationFilter; +import com.atomgraph.linkeddatahub.server.filter.response.CacheInvalidationFilter; import com.atomgraph.linkeddatahub.server.filter.response.XsltExecutableFilter; import com.atomgraph.linkeddatahub.server.interceptor.RDFPostMediaTypeInterceptor; import com.atomgraph.linkeddatahub.server.mapper.auth.oauth2.TokenExpiredExceptionMapper; @@ -116,6 +117,7 @@ import com.atomgraph.linkeddatahub.vocabulary.LDH; import com.atomgraph.linkeddatahub.vocabulary.LDHC; import com.atomgraph.linkeddatahub.vocabulary.Google; +import com.atomgraph.linkeddatahub.vocabulary.ORCID; import com.atomgraph.linkeddatahub.vocabulary.LAPP; import com.atomgraph.linkeddatahub.writer.Mode; import com.atomgraph.linkeddatahub.writer.ResultSetXSLTWriter; @@ -123,7 +125,6 @@ import com.atomgraph.linkeddatahub.writer.factory.ModeFactory; import com.atomgraph.linkeddatahub.writer.function.DecodeURI; import com.atomgraph.server.mapper.NotAcceptableExceptionMapper; -import com.atomgraph.server.vocabulary.LDT; import com.atomgraph.server.mapper.OntologyExceptionMapper; import com.atomgraph.server.mapper.jena.DatatypeFormatExceptionMapper; import com.atomgraph.server.mapper.jena.QueryParseExceptionMapper; @@ -194,6 +195,7 @@ import jakarta.ws.rs.client.ClientBuilder; import jakarta.ws.rs.client.ClientRequestFilter; import java.util.concurrent.ConcurrentHashMap; +import 
javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.stream.StreamSource; import net.jodah.expiringmap.ExpiringMap; @@ -204,7 +206,6 @@ import net.sf.saxon.s9api.XdmAtomicValue; import net.sf.saxon.s9api.XsltCompiler; import net.sf.saxon.s9api.XsltExecutable; -import nu.xom.XPathException; import org.apache.http.HttpClientConnection; import org.apache.http.HttpHost; import org.apache.http.client.HttpRequestRetryHandler; @@ -240,6 +241,7 @@ import org.glassfish.jersey.process.internal.RequestScoped; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.server.filter.HttpMethodOverrideFilter; +import org.xml.sax.SAXException; /** * JAX-RS application subclass. @@ -269,7 +271,7 @@ public class Application extends ResourceConfig private final OntModelSpec ontModelSpec; private final boolean cacheStylesheet; private final boolean resolvingUncached; - private final URI baseURI, uploadRoot; // TO-DO: replace baseURI with ServletContext URI? + private final URI baseURI, uploadRoot; private final boolean invalidateCache; private final Integer cookieMaxAge; private final boolean enableLinkedDataProxy; @@ -282,6 +284,7 @@ public class Application extends ResourceConfig private final List supportedLanguages; private final ExpiringMap webIDmodelCache = ExpiringMap.builder().expiration(1, TimeUnit.DAYS).build(); // TO-DO: config for the expiration period? 
private final ExpiringMap oidcModelCache = ExpiringMap.builder().variableExpiration().build(); + private final ExpiringMap jwksCache = ExpiringMap.builder().expiration(1, TimeUnit.DAYS).build(); // Cache JWKS responses private final Map xsltExecutableCache = new ConcurrentHashMap<>(); private final MessageDigest messageDigest; private final boolean enableWebIDSignUp; @@ -344,7 +347,9 @@ public Application(@Context ServletConfig servletConfig) throws URISyntaxExcepti servletConfig.getServletContext().getInitParameter("mail.smtp.host") != null ? servletConfig.getServletContext().getInitParameter("mail.smtp.host") : null, servletConfig.getServletContext().getInitParameter("mail.smtp.port") != null ? servletConfig.getServletContext().getInitParameter("mail.smtp.port") : null, servletConfig.getServletContext().getInitParameter(Google.clientID.getURI()) != null ? servletConfig.getServletContext().getInitParameter(Google.clientID.getURI()) : null, - servletConfig.getServletContext().getInitParameter(Google.clientSecret.getURI()) != null ? servletConfig.getServletContext().getInitParameter(Google.clientSecret.getURI()) : null + servletConfig.getServletContext().getInitParameter(Google.clientSecret.getURI()) != null ? servletConfig.getServletContext().getInitParameter(Google.clientSecret.getURI()) : null, + servletConfig.getServletContext().getInitParameter(ORCID.clientID.getURI()) != null ? servletConfig.getServletContext().getInitParameter(ORCID.clientID.getURI()) : null, + servletConfig.getServletContext().getInitParameter(ORCID.clientSecret.getURI()) != null ? servletConfig.getServletContext().getInitParameter(ORCID.clientSecret.getURI()) : null ); URI contextDatasetURI = servletConfig.getServletContext().getInitParameter(LDHC.contextDataset.getURI()) != null ? 
new URI(servletConfig.getServletContext().getInitParameter(LDHC.contextDataset.getURI())) : null; @@ -404,6 +409,8 @@ public Application(@Context ServletConfig servletConfig) throws URISyntaxExcepti * @param smtpPort port of the SMTP email server * @param googleClientID client ID for Google's OAuth * @param googleClientSecret client secret for Google's OAuth + * @param orcidClientID client ID for ORCID's OAuth + * @param orcidClientSecret client secret for ORCID's OAuth */ public Application(final ServletConfig servletConfig, final MediaTypes mediaTypes, final Integer maxGetRequestSize, final boolean cacheModelLoads, final boolean preemptiveAuth, @@ -419,7 +426,8 @@ public Application(final ServletConfig servletConfig, final MediaTypes mediaType final Integer maxConnPerRoute, final Integer maxTotalConn, final Integer maxRequestRetries, final Integer maxImportThreads, final String notificationAddressString, final String supportedLanguageCodes, final boolean enableWebIDSignUp, final String oidcRefreshTokensPropertiesPath, final String mailUser, final String mailPassword, final String smtpHost, final String smtpPort, - final String googleClientID, final String googleClientSecret) + final String googleClientID, final String googleClientSecret, + final String orcidClientID, final String orcidClientSecret) { if (clientKeyStoreURIString == null) { @@ -544,7 +552,9 @@ public Application(final ServletConfig servletConfig, final MediaTypes mediaType this.oidcRefreshTokens = new Properties(); if (googleClientID != null) this.property(Google.clientID.getURI(), googleClientID); if (googleClientSecret != null) this.property(Google.clientSecret.getURI(), googleClientSecret); - + if (orcidClientID != null) this.property(ORCID.clientID.getURI(), orcidClientID); + if (orcidClientSecret != null) this.property(ORCID.clientSecret.getURI(), orcidClientSecret); + try { this.uploadRoot = new URI(uploadRootString); @@ -594,6 +604,19 @@ public Application(final ServletConfig servletConfig, 
final MediaTypes mediaType } else notificationAddress = null; + try + { + javax.xml.parsers.SAXParserFactory factory = javax.xml.parsers.SAXParserFactory.newInstance(); + javax.xml.parsers.SAXParser parser = factory.newSAXParser(); + org.xml.sax.XMLReader reader = parser.getXMLReader(); + if (log.isDebugEnabled()) log.debug("SAXParserFactory class: {}", factory.getClass().getName()); + if (log.isDebugEnabled()) log.debug("XMLReader class: {}", reader.getClass().getName()); + } + catch (ParserConfigurationException | SAXException e) + { + if (log.isErrorEnabled()) log.error("Failed to get XML parser info", e); + } + // add RDF/POST reader RDFLanguages.register(RDFLanguages.RDFPOST); RDFParserRegistry.registerLangTriples(RDFLanguages.RDFPOST, new RDFPostReaderFactory()); @@ -664,8 +687,8 @@ public Application(final ServletConfig servletConfig, final MediaTypes mediaType if (proxyHostname != null) { - ClientRequestFilter rewriteFilter = new ClientUriRewriteFilter(baseURI, proxyScheme, proxyHostname, proxyPort); // proxyPort can be null - + ClientRequestFilter rewriteFilter = new ClientUriRewriteFilter(baseURI.getHost(), proxyScheme, proxyHostname, proxyPort); // proxyPort can be null + client.register(rewriteFilter); externalClient.register(rewriteFilter); importClient.register(rewriteFilter); @@ -771,7 +794,7 @@ protected PasswordAuthentication getPasswordAuthentication() xsltProc.getUnderlyingConfiguration().getGlobalDocumentPool().add(doc, baseURI.resolve(XSLTWriterBase.TRANSLATIONS_PATH).toString()); } } - catch (XPathException | TransformerException ex) + catch (TransformerException ex) { if (log.isErrorEnabled()) log.error("Error reading mapped RDF document: {}", ex); throw new IllegalStateException(ex); @@ -902,7 +925,16 @@ protected void configure() @Override protected void configure() { - bindFactory(ApplicationFactory.class).to(com.atomgraph.linkeddatahub.apps.model.Application.class). + bindFactory(ApplicationFactory.class).to(new TypeLiteral>() {}). 
+ in(RequestScoped.class); + } + }); + register(new AbstractBinder() + { + @Override + protected void configure() + { + bindFactory(com.atomgraph.linkeddatahub.server.factory.UnwrappedApplicationFactory.class).to(com.atomgraph.linkeddatahub.apps.model.Application.class). in(RequestScoped.class); } }); @@ -969,6 +1001,24 @@ protected void configure() protected void registerResourceClasses() { register(Dispatcher.class); + + // Conditionally register Google OAuth endpoints if configured + if (getProperty(com.atomgraph.linkeddatahub.vocabulary.Google.clientID.getURI()) != null && + getProperty(com.atomgraph.linkeddatahub.vocabulary.Google.clientSecret.getURI()) != null) + { + register(com.atomgraph.linkeddatahub.resource.oauth2.google.Authorize.class); + register(com.atomgraph.linkeddatahub.resource.oauth2.google.Login.class); + if (log.isDebugEnabled()) log.debug("Google OAuth endpoints registered"); + } + + // Conditionally register ORCID OAuth endpoints if configured + if (getProperty(com.atomgraph.linkeddatahub.vocabulary.ORCID.clientID.getURI()) != null && + getProperty(com.atomgraph.linkeddatahub.vocabulary.ORCID.clientSecret.getURI()) != null) + { + register(com.atomgraph.linkeddatahub.resource.oauth2.orcid.Authorize.class); + register(com.atomgraph.linkeddatahub.resource.oauth2.orcid.Login.class); + if (log.isDebugEnabled()) log.debug("ORCID OAuth endpoints registered"); + } } /** @@ -980,10 +1030,25 @@ protected void registerContainerRequestFilters() register(ApplicationFilter.class); register(OntologyFilter.class); register(ProxiedWebIDFilter.class); - register(IDTokenFilter.class); register(AuthorizationFilter.class); if (getMaxContentLength() != null) register(new ContentLengthLimitFilter(getMaxContentLength())); register(new RDFPostMediaTypeInterceptor()); // for application/x-www-form-urlencoded + + // Conditionally register Google OAuth filter if configured + if (getProperty(com.atomgraph.linkeddatahub.vocabulary.Google.clientID.getURI()) != null && + 
getProperty(com.atomgraph.linkeddatahub.vocabulary.Google.clientSecret.getURI()) != null) + { + register(com.atomgraph.linkeddatahub.server.filter.request.auth.google.IDTokenFilter.class); + if (log.isDebugEnabled()) log.debug("Google OAuth filter registered"); + } + + // Conditionally register ORCID OAuth filter if configured + if (getProperty(com.atomgraph.linkeddatahub.vocabulary.ORCID.clientID.getURI()) != null && + getProperty(com.atomgraph.linkeddatahub.vocabulary.ORCID.clientSecret.getURI()) != null) + { + register(com.atomgraph.linkeddatahub.server.filter.request.auth.orcid.IDTokenFilter.class); + if (log.isDebugEnabled()) log.debug("ORCID OAuth filter registered"); + } } /** @@ -991,9 +1056,10 @@ protected void registerContainerRequestFilters() */ protected void registerContainerResponseFilters() { + register(new CORSFilter()); register(new ResponseHeadersFilter()); register(new XsltExecutableFilter()); - if (isInvalidateCache()) register(new BackendInvalidationFilter()); + if (isInvalidateCache()) register(new CacheInvalidationFilter()); // register(new ProvenanceFilter()); } @@ -1026,6 +1092,7 @@ protected void registerExceptionMappers() register(ResourceExistsExceptionMapper.class); register(QueryParseExceptionMapper.class); register(AuthenticationExceptionMapper.class); + register(ForbiddenExceptionMapper.class); register(AuthorizationExceptionMapper.class); register(MessagingExceptionMapper.class); } @@ -1168,27 +1235,12 @@ public void handleAuthorizationCreated(AuthorizationCreated event) throws Messag /** * Matches application by type and request URL. * - * @param type app type - * @param absolutePath request URL without the query string - * @return app resource or null, if none matched - */ - public Resource matchApp(Resource type, URI absolutePath) - { - return matchApp(getContextModel(), type, absolutePath); // make sure we return an immutable model - } - - /** - * Matches application by type and request URL in a given application model. 
- * It finds the apps where request URL is relative to the app base URI, and returns the one with the longest match. - * - * @param appModel application model - * @param type application type * @param absolutePath request URL without the query string * @return app resource or null, if none matched */ - public Resource matchApp(Model appModel, Resource type, URI absolutePath) + public Resource matchApp(URI absolutePath) { - return getLongestURIResource(getLengthMap(getRelativeBaseApps(appModel, type, absolutePath))); + return getAppByOrigin(getContextModel(), LAPP.Application, absolutePath); // make sure we return an immutable model } /** @@ -1207,35 +1259,63 @@ public Resource getLongestURIResource(Map lengthMap) } /** - * Builds a base URI to application resource map from the application model. + * Normalizes a URI origin by adding explicit default ports (80 for HTTP, 443 for HTTPS). + * An origin consists of scheme, hostname, and port. + * This allows comparing origins with implicit and explicit default ports. + * + * @param uri the URI to normalize + * @return normalized origin string in format "scheme://host:port" + * @see Origin - MDN Web Docs + */ + public static String normalizeOrigin(URI uri) + { + if (uri == null) throw new IllegalArgumentException("URI cannot be null"); + + String scheme = uri.getScheme(); + String host = uri.getHost(); + int port = uri.getPort(); + + if (port == -1) + { + if ("https".equals(scheme)) port = 443; + else if ("http".equals(scheme)) port = 80; + } + + return scheme + "://" + host + ":" + port; + } + + /** + * Finds application by origin matching from the application model. * Applications are filtered by type first. 
- * + * * @param model application model * @param type application type * @param absolutePath request URL (without the query string) - * @return URI to app map + * @return app resource or null if no match found */ - public Map getRelativeBaseApps(Model model, Resource type, URI absolutePath) + public Resource getAppByOrigin(Model model, Resource type, URI absolutePath) { if (model == null) throw new IllegalArgumentException("Model cannot be null"); if (type == null) throw new IllegalArgumentException("Resource cannot be null"); if (absolutePath == null) throw new IllegalArgumentException("URI cannot be null"); - Map apps = new HashMap<>(); - + String requestOrigin = normalizeOrigin(absolutePath); + ResIterator it = model.listSubjectsWithProperty(RDF.type, type); try { while (it.hasNext()) { Resource app = it.next(); - - if (!app.hasProperty(LDT.base)) - throw new InternalServerErrorException(new IllegalStateException("Application resource <" + app.getURI() + "> has no ldt:base value")); - - URI base = URI.create(app.getPropertyResourceValue(LDT.base).getURI()); - URI relative = base.relativize(absolutePath); - if (!relative.isAbsolute()) apps.put(base, app); + + // Use origin-based matching - return immediately on match since origins are unique + if (app.hasProperty(LDH.origin)) + { + URI appOriginURI = URI.create(app.getPropertyResourceValue(LDH.origin).getURI()); + String normalizedAppOrigin = normalizeOrigin(appOriginURI); + + if (requestOrigin.equals(normalizedAppOrigin)) return app; + } } } finally @@ -1243,7 +1323,7 @@ public Map getRelativeBaseApps(Model model, Resource type, URI ab it.close(); } - return apps; + return null; } /** @@ -1996,18 +2076,29 @@ public ExpiringMap getWebIDModelCache() /** * A map of cached OpenID connect agent graphs. * User ID (ID token subject) is the cache key. Entries expire after the configured period of time. 
- * + * * @return URI to model map */ public ExpiringMap getOIDCModelCache() { return oidcModelCache; } - + + /** + * A map of cached JWKS (JSON Web Key Set) responses for JWT verification. + * JWKS endpoint URI is the cache key. Entries expire after 1 day. + * + * @return JWKS endpoint to JsonObject map + */ + public ExpiringMap getJWKSCache() + { + return jwksCache; + } + /** * A map of cached (compiled) XSLT stylesheets. * Stylesheet URI is the cache key. - * + * * @return URI to stylesheet map */ public Map getXsltExecutableCache() diff --git a/src/main/java/com/atomgraph/linkeddatahub/apps/model/AdminApplication.java b/src/main/java/com/atomgraph/linkeddatahub/apps/model/AdminApplication.java index e265ec730..075c579a3 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/apps/model/AdminApplication.java +++ b/src/main/java/com/atomgraph/linkeddatahub/apps/model/AdminApplication.java @@ -28,15 +28,15 @@ public interface AdminApplication extends Application /** * The relative path at which the request access endpoint is located. * TO-DO: extract from JAX-RS @Path annotation? - * - * @see com.atomgraph.linkeddatahub.resource.admin.RequestAccess + * + * @see com.atomgraph.linkeddatahub.resource.acl.AccessRequest */ public static final String REQUEST_ACCESS_PATH = "access/request"; - + /** * The relative path of the authorization request container. 
- * - * @see com.atomgraph.linkeddatahub.resource.admin.RequestAccess + * + * @see com.atomgraph.linkeddatahub.resource.acl.AccessRequest */ public static final String AUTHORIZATION_REQUEST_PATH = "acl/authorization-requests/"; diff --git a/src/main/java/com/atomgraph/linkeddatahub/apps/model/Application.java b/src/main/java/com/atomgraph/linkeddatahub/apps/model/Application.java index eeb505f5d..699066916 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/apps/model/Application.java +++ b/src/main/java/com/atomgraph/linkeddatahub/apps/model/Application.java @@ -17,6 +17,7 @@ package com.atomgraph.linkeddatahub.apps.model; import com.atomgraph.linkeddatahub.model.Service; +import jakarta.ws.rs.core.UriBuilder; import java.net.URI; import org.apache.jena.rdf.model.Resource; @@ -57,14 +58,35 @@ public interface Application extends Resource, com.atomgraph.core.model.Applicat /** * Returns the application's base URI. - * + * * @return URI of the base resource */ URI getBaseURI(); + + /** + * Returns the application's origin resource. + * + * @return origin resource + */ + Resource getOrigin(); + + /** + * Returns URI builder initialized with the applications origin URI. + * + * @return URI builder + */ + UriBuilder getUriBuilder(); + /** + * Returns the application's origin URI. + * + * @return URI of the origin resource + */ + URI getOriginURI(); + /** * Returns applications service. 
- * + * * @return service resource */ @Override diff --git a/src/main/java/com/atomgraph/linkeddatahub/apps/model/impl/ApplicationImpl.java b/src/main/java/com/atomgraph/linkeddatahub/apps/model/impl/ApplicationImpl.java index 7c2bbfc66..4a0c956f8 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/apps/model/impl/ApplicationImpl.java +++ b/src/main/java/com/atomgraph/linkeddatahub/apps/model/impl/ApplicationImpl.java @@ -21,7 +21,9 @@ import com.atomgraph.linkeddatahub.model.Service; import com.atomgraph.linkeddatahub.vocabulary.FOAF; import com.atomgraph.linkeddatahub.vocabulary.LAPP; +import com.atomgraph.linkeddatahub.vocabulary.LDH; import com.atomgraph.server.vocabulary.LDT; +import jakarta.ws.rs.core.UriBuilder; import org.apache.jena.enhanced.EnhGraph; import org.apache.jena.graph.Node; import org.apache.jena.rdf.model.Resource; @@ -55,14 +57,26 @@ public ApplicationImpl(Node n, EnhGraph g) @Override public Resource getBase() { - return getPropertyResourceValue(LDT.base); + return getModel().createResource(getOriginURI().resolve("/").toString()); } @Override public URI getBaseURI() { - if (getBase() != null) return URI.create(getBase().getURI()); - + return getOriginURI().resolve("/"); + } + + @Override + public Resource getOrigin() + { + return getPropertyResourceValue(LDH.origin); + } + + @Override + public URI getOriginURI() + { + if (getOrigin() != null) return URI.create(getOrigin().getURI()); + return null; } @@ -110,4 +124,9 @@ public boolean isReadAllowed() return false; } + @Override + public UriBuilder getUriBuilder() + { + return UriBuilder.fromUri(getOriginURI()); + } } diff --git a/src/main/java/com/atomgraph/linkeddatahub/client/filter/ClientUriRewriteFilter.java b/src/main/java/com/atomgraph/linkeddatahub/client/filter/ClientUriRewriteFilter.java index db62d4dea..9bd4f71ee 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/client/filter/ClientUriRewriteFilter.java +++ 
b/src/main/java/com/atomgraph/linkeddatahub/client/filter/ClientUriRewriteFilter.java @@ -20,13 +20,15 @@ import java.net.URI; import jakarta.ws.rs.client.ClientRequestContext; import jakarta.ws.rs.client.ClientRequestFilter; +import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.UriBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * Client request filter that rewrites the target URL using a proxy URL. - * + * Client request filter that rewrites target URLs matching the configured host to internal proxy URLs. + * This improves performance by routing internal requests through the Docker network instead of external network. + * * @author {@literal Martynas Jusevičius } */ public class ClientUriRewriteFilter implements ClientRequestFilter @@ -34,79 +36,85 @@ public class ClientUriRewriteFilter implements ClientRequestFilter private static final Logger log = LoggerFactory.getLogger(ClientUriRewriteFilter.class); - private final URI baseURI; - private final String scheme, hostname; - private final Integer port; + private final String host; + private final String proxyScheme, proxyHost; + private final Integer proxyPort; /** * Constructs filter from URI components. 
- * - * @param baseURI base URI - * @param scheme new scheme - * @param hostname new hostname - * @param port new port number + * + * @param host external hostname to match, including subdomains (e.g., "localhost", "linkeddatahub.com") + * @param proxyScheme proxy scheme to rewrite to (e.g., "http") + * @param proxyHost proxy hostname to rewrite to (e.g., "nginx") + * @param proxyPort proxy port to rewrite to (e.g., 9443) */ - public ClientUriRewriteFilter(URI baseURI, String scheme, String hostname, Integer port) + public ClientUriRewriteFilter(String host, String proxyScheme, String proxyHost, Integer proxyPort) { - this.baseURI = baseURI; - this.scheme = scheme; - this.hostname = hostname; - this.port = port; + this.host = host; + this.proxyScheme = proxyScheme; + this.proxyHost = proxyHost; + this.proxyPort = proxyPort; } @Override public void filter(ClientRequestContext cr) throws IOException { - if (getBaseURI().relativize(cr.getUri()).isAbsolute()) return; // don't rewrite URIs that are not relative to the base URI (e.g. SPARQL Protocol URLs) + // Only rewrite requests to our own host (or subdomains), not external URLs + if (!cr.getUri().getHost().equals(getHost()) && !cr.getUri().getHost().endsWith("." 
+ getHost())) return; + + // Preserve original host for nginx routing + String originalHost = cr.getUri().getHost(); + if (cr.getUri().getPort() != -1) originalHost += ":" + cr.getUri().getPort(); + cr.getHeaders().putSingle(HttpHeaders.HOST, originalHost); String newScheme = cr.getUri().getScheme(); - if (getScheme() != null) newScheme = getScheme(); + if (getProxyScheme() != null) newScheme = getProxyScheme(); // cannot use the URI class because query string with special chars such as '+' gets decoded - URI newUri = UriBuilder.fromUri(cr.getUri()).scheme(newScheme).host(getHostname()).port(getPort()).build(); + URI newUri = UriBuilder.fromUri(cr.getUri()).scheme(newScheme).host(getProxyHost()).port(getProxyPort()).build(); if (log.isDebugEnabled()) log.debug("Rewriting client request URI from '{}' to '{}'", cr.getUri(), newUri); cr.setUri(newUri); } - + /** - * Base URI of the application - * - * @return base URI + * External hostname to match (including subdomains). + * + * @return hostname string (e.g., "localhost", "linkeddatahub.com") */ - public URI getBaseURI() + public String getHost() { - return baseURI; + return host; } - + /** - * Scheme component of the new (rewritten) URI. - * - * @return scheme string or null + * Proxy scheme to rewrite to. + * + * @return scheme string or null (e.g., "http") */ - public String getScheme() + public String getProxyScheme() { - return scheme; + return proxyScheme; } - + /** - * Hostname component of the new (rewritten) URI. - * - * @return hostname string + * Proxy hostname to rewrite to. + * + * @return hostname string (e.g., "nginx") */ - public String getHostname() + public String getProxyHost() { - return hostname; + return proxyHost; } /** - * Port component of the new (rewritten) URI. - * - * @return port number + * Proxy port to rewrite to. 
+ * + * @return port number (e.g., 9443) */ - public Integer getPort() + public Integer getProxyPort() { - return port; + return proxyPort; } } diff --git a/src/main/java/com/atomgraph/linkeddatahub/client/filter/JSONGRDDLFilter.java b/src/main/java/com/atomgraph/linkeddatahub/client/filter/JSONGRDDLFilter.java index c8dcf46bc..5dfe2e790 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/client/filter/JSONGRDDLFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/client/filter/JSONGRDDLFilter.java @@ -145,7 +145,7 @@ public void filter(ClientRequestContext requestContext, ClientResponseContext re * @param requestURI original request URI for context * @return RDF/XML as string * @throws SaxonApiException if Saxon processing fails - * @throws java.io.IOException + * @throws java.io.IOException if I/O error occurs */ protected String transformJSONToRDF(String jsonContent, URI requestURI) throws SaxonApiException, IOException { diff --git a/src/main/java/com/atomgraph/linkeddatahub/client/filter/auth/IDTokenDelegationFilter.java b/src/main/java/com/atomgraph/linkeddatahub/client/filter/auth/IDTokenDelegationFilter.java index 42c69bc0e..b7428fda2 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/client/filter/auth/IDTokenDelegationFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/client/filter/auth/IDTokenDelegationFilter.java @@ -21,7 +21,7 @@ import jakarta.ws.rs.client.ClientRequestFilter; import jakarta.ws.rs.core.Cookie; import jakarta.ws.rs.core.HttpHeaders; -import com.atomgraph.linkeddatahub.server.filter.request.auth.IDTokenFilter; +import com.atomgraph.linkeddatahub.server.filter.request.auth.google.IDTokenFilter; import com.atomgraph.linkeddatahub.server.filter.request.auth.WebIDFilter; import org.apache.jena.rdf.model.Resource; diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java index d4692f6fe..0d7704308 100644 --- 
a/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java @@ -21,7 +21,7 @@ import com.atomgraph.linkeddatahub.client.LinkedDataClient; import com.atomgraph.linkeddatahub.imports.QueryLoader; import com.atomgraph.linkeddatahub.model.Service; -import com.atomgraph.linkeddatahub.server.filter.response.BackendInvalidationFilter; +import com.atomgraph.linkeddatahub.server.filter.response.CacheInvalidationFilter; import com.atomgraph.linkeddatahub.server.model.impl.GraphStoreImpl; import com.atomgraph.linkeddatahub.server.security.AgentContext; import com.atomgraph.linkeddatahub.server.util.Skolemizer; @@ -237,7 +237,7 @@ public Response ban(Resource proxy, String url) if (url == null) throw new IllegalArgumentException("Resource cannot be null"); return getSystem().getClient().target(proxy.getURI()).request(). - header(BackendInvalidationFilter.HEADER_NAME, UriComponent.encode(url, UriComponent.Type.UNRESERVED)). // the value has to be URL-encoded in order to match request URLs in Varnish + header(CacheInvalidationFilter.HEADER_NAME, UriComponent.encode(url, UriComponent.Type.UNRESERVED)). // the value has to be URL-encoded in order to match request URLs in Varnish method("BAN", Response.class); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Namespace.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Namespace.java index 1efa29e00..7f86014e9 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/Namespace.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Namespace.java @@ -76,7 +76,7 @@ public class Namespace extends com.atomgraph.core.model.impl.SPARQLEndpointImpl /** * Constructs endpoint from the in-memory ontology model. 
- * + * * @param request current request * @param uriInfo current request's URI info * @param application current end-user application @@ -86,7 +86,7 @@ public class Namespace extends com.atomgraph.core.model.impl.SPARQLEndpointImpl * @param system system application */ @Inject - public Namespace(@Context Request request, @Context UriInfo uriInfo, + public Namespace(@Context Request request, @Context UriInfo uriInfo, Application application, Optional ontology, MediaTypes mediaTypes, @Context SecurityContext securityContext, com.atomgraph.linkeddatahub.Application system) { @@ -98,6 +98,22 @@ public Namespace(@Context Request request, @Context UriInfo uriInfo, this.system = system; } + /** + * If SPARQL query is provided, returns its result over the in-memory namespace ontology graph. + * If query is not provided + *
    + * <ul>
    + *     <li>returns constructed instance if <code>forClass</code> URL param value (ontology class URI) is provided</li>
    + *     <li>otherwise, returns the namespace ontology graph (which is standalone, i.e. not the full ontology imports closure)</li>
    + * </ul>
+ * + * @param query SPARQL query string (optional) + * @param defaultGraphUris default graph URI (ignored) + * @param namedGraphUris named graph URIs (ignored) + * + * {@link com.atomgraph.linkeddatahub.server.model.impl.Dispatcher#getNamespace()} + * + * @return response + */ @Override @GET public Response get(@QueryParam(QUERY) Query query, @@ -122,11 +138,11 @@ public Response get(@QueryParam(QUERY) Query query, if (getApplication().canAs(EndUserApplication.class)) { - String ontologyURI = getURI().toString() + "#"; // TO-DO: hard-coding "#" is not great. Replace with RDF property lookup. + // the application ontology MUST use a URI! This is the URI this ontology endpoint is deployed on by the Dispatcher class + String ontologyURI = getApplication().getOntology().getURI(); if (log.isDebugEnabled()) log.debug("Returning namespace ontology from OntDocumentManager: {}", ontologyURI); // not returning the injected in-memory ontology because it has inferences applied to it - OntologyModelGetter modelGetter = new OntologyModelGetter(getApplication().as(EndUserApplication.class), - getSystem().getOntModelSpec(), getSystem().getOntologyQuery(), getSystem().getClient(), getSystem().getMediaTypes()); + OntologyModelGetter modelGetter = new OntologyModelGetter(getApplication().as(EndUserApplication.class), getSystem().getOntModelSpec(), getSystem().getOntologyQuery()); return getResponseBuilder(modelGetter.getModel(ontologyURI)).build(); } else throw new BadRequestException("SPARQL query string not provided"); diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Transform.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Transform.java index 22e121f18..b49f85d87 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/Transform.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Transform.java @@ -27,8 +27,10 @@ import com.atomgraph.linkeddatahub.server.security.AgentContext; import com.atomgraph.linkeddatahub.vocabulary.NFO; import 
com.atomgraph.spinrdf.vocabulary.SPIN; +import java.net.InetAddress; import java.net.URI; import java.net.URISyntaxException; +import java.net.UnknownHostException; import java.util.Map; import java.util.Optional; import jakarta.inject.Inject; @@ -114,6 +116,10 @@ public Response post(Model model, @QueryParam("default") @DefaultValue("false") Resource queryRes = arg.getPropertyResourceValue(SPIN.query); if (queryRes == null) throw new BadRequestException("Transformation query string (spin:query) not provided"); + // LNK-002: Validate URIs to prevent SSRF attacks + validateNotInternalURL(URI.create(queryRes.getURI())); + validateNotInternalURL(URI.create(source.getURI())); + LinkedDataClient ldc = LinkedDataClient.create(getSystem().getClient(), getSystem().getMediaTypes()). delegation(getUriInfo().getBaseUri(), getAgentContext().orElse(null)); QueryLoader queryLoader = new QueryLoader(URI.create(queryRes.getURI()), getApplication().getBase().getURI(), Syntax.syntaxARQ, ldc); @@ -201,7 +207,10 @@ public Response postFileBodyPart(Model model, Map file Resource queryRes = file.getPropertyResourceValue(SPIN.query); if (queryRes == null) throw new BadRequestException("Transformation query string (spin:query) not provided"); - + + // LNK-002: Validate query URI to prevent SSRF attacks + validateNotInternalURL(URI.create(queryRes.getURI())); + LinkedDataClient ldc = LinkedDataClient.create(getSystem().getClient(), getSystem().getMediaTypes()). delegation(getUriInfo().getBaseUri(), getAgentContext().orElse(null)); QueryLoader queryLoader = new QueryLoader(URI.create(queryRes.getURI()), getApplication().getBase().getURI(), Syntax.syntaxARQ, ldc); @@ -224,7 +233,7 @@ public Response postFileBodyPart(Model model, Map file /** * Forwards POST request to a graph. 
- * + * * @param entity request entity * @param graphURI the graph URI * @return JAX-RS response @@ -241,5 +250,41 @@ protected Response forwardPost(Entity entity, String graphURI) build(); } } - + + /** + * Validates that the given URI does not point to an internal/private network address. + * Prevents SSRF attacks by blocking access to RFC 1918 private addresses and link-local addresses. + * + * @param uri the URI to validate + * @throws IllegalArgumentException if URI or host is null + * @throws BadRequestException if the URI resolves to an internal address + * @see LNK-002: SSRF primitives in admin endpoint + */ + protected static void validateNotInternalURL(URI uri) + { + if (uri == null) throw new IllegalArgumentException("URI cannot be null"); + + String host = uri.getHost(); + if (host == null) throw new IllegalArgumentException("URI host cannot be null"); + + // Resolve hostname to IP and check if it's private/internal + try + { + InetAddress address = InetAddress.getByName(host); + + // Note: We don't block loopback addresses (127.0.0.1, localhost) because transformation queries + // and data sources may legitimately reference resources on the same server + + if (address.isLinkLocalAddress()) + throw new BadRequestException("URI cannot resolve to link-local addresses: " + address.getHostAddress()); + if (address.isSiteLocalAddress()) + throw new BadRequestException("URI cannot resolve to private addresses (RFC 1918): " + address.getHostAddress()); + } + catch (UnknownHostException e) + { + if (log.isWarnEnabled()) log.warn("Could not resolve hostname for SSRF validation: {}", host); + // Allow request to proceed - will fail later with better error message + } + } + } diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/acl/Access.java b/src/main/java/com/atomgraph/linkeddatahub/resource/acl/Access.java index f72a85376..021523308 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/acl/Access.java +++ 
b/src/main/java/com/atomgraph/linkeddatahub/resource/acl/Access.java @@ -16,18 +16,21 @@ */ package com.atomgraph.linkeddatahub.resource.acl; +import com.atomgraph.client.util.HTMLMediaTypePredicate; import com.atomgraph.core.MediaTypes; import static com.atomgraph.core.model.SPARQLEndpoint.DEFAULT_GRAPH_URI; import static com.atomgraph.core.model.SPARQLEndpoint.NAMED_GRAPH_URI; import static com.atomgraph.core.model.SPARQLEndpoint.QUERY; +import com.atomgraph.core.util.ModelUtils; import com.atomgraph.linkeddatahub.apps.model.AdminApplication; import com.atomgraph.linkeddatahub.apps.model.Application; +import com.atomgraph.linkeddatahub.apps.model.EndUserApplication; import com.atomgraph.linkeddatahub.model.Service; import com.atomgraph.linkeddatahub.model.auth.Agent; import com.atomgraph.linkeddatahub.server.security.AgentContext; import com.atomgraph.linkeddatahub.server.util.AuthorizationParams; -import com.atomgraph.linkeddatahub.server.util.SetResultSetValues; import com.atomgraph.linkeddatahub.vocabulary.ACL; +import com.atomgraph.linkeddatahub.vocabulary.FOAF; import com.atomgraph.linkeddatahub.vocabulary.LACL; import com.atomgraph.spinrdf.vocabulary.SPIN; import jakarta.inject.Inject; @@ -35,21 +38,21 @@ import jakarta.ws.rs.GET; import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.EntityTag; import jakarta.ws.rs.core.Request; import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.SecurityContext; import jakarta.ws.rs.core.UriInfo; import java.net.URI; import java.net.URISyntaxException; import java.util.List; import java.util.Optional; -import org.apache.jena.ontology.Ontology; import org.apache.jena.query.ParameterizedSparqlString; import org.apache.jena.query.Query; import org.apache.jena.query.QuerySolutionMap; import org.apache.jena.query.ResultSetRewindable; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.ModelFactory; +import org.apache.jena.rdf.model.ResIterator; import 
org.apache.jena.rdf.model.Resource; import org.apache.jena.rdf.model.ResourceFactory; import org.apache.jena.vocabulary.RDF; @@ -61,13 +64,15 @@ * * @author Martynas Jusevičius {@literal } */ -public class Access extends com.atomgraph.core.model.impl.SPARQLEndpointImpl +public class Access { private static final Logger log = LoggerFactory.getLogger(Access.class); + private final Request request; private final UriInfo uriInfo; - private final Application application; + private final MediaTypes mediaTypes; + private final EndUserApplication application; private final Optional agentContext; private final ParameterizedSparqlString documentTypeQuery, documentOwnerQuery, aclQuery, ownerAclQuery; @@ -76,43 +81,42 @@ public class Access extends com.atomgraph.core.model.impl.SPARQLEndpointImpl * * @param request current request * @param uriInfo current request's URI info + * @param mediaTypes supported media types * @param application current end-user application - * @param ontology application's ontology - * @param mediaTypes registry of readable/writable media types - * @param securityContext JAX-RS security context * @param agentContext agent context * @param system system application */ @Inject - public Access(@Context Request request, @Context UriInfo uriInfo, - Application application, Optional ontology, MediaTypes mediaTypes, - @Context SecurityContext securityContext, Optional agentContext, + public Access(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, + Application application, Optional agentContext, com.atomgraph.linkeddatahub.Application system) { - super(request, application.getService(), mediaTypes); - if (!application.canAs(AdminApplication.class)) throw new IllegalStateException("The " + getClass() + " endpoint is only available on admin applications"); - + if (!application.canAs(EndUserApplication.class)) throw new IllegalStateException("The " + getClass() + " endpoint is only available on end-user applications"); + this.request = 
request; this.uriInfo = uriInfo; - this.application = application; + this.mediaTypes = mediaTypes; + this.application = application.as(EndUserApplication.class); this.agentContext = agentContext; documentTypeQuery = new ParameterizedSparqlString(system.getDocumentTypeQuery().toString()); documentOwnerQuery = new ParameterizedSparqlString(system.getDocumentOwnerQuery().toString()); aclQuery = new ParameterizedSparqlString(system.getACLQuery().toString()); ownerAclQuery = new ParameterizedSparqlString(system.getOwnerACLQuery().toString()); } - - @Override + + /** + * Implements the HTTP GET method for retrieving access control information. + * + * @param unused SPARQL query parameter (unused) + * @param defaultGraphUris default graph URIs + * @param namedGraphUris named graph URIs + * @return response with access control data + */ @GET public Response get(@QueryParam(QUERY) Query unused, @QueryParam(DEFAULT_GRAPH_URI) List defaultGraphUris, @QueryParam(NAMED_GRAPH_URI) List namedGraphUris) { final Agent agent = getAgentContext().map(AgentContext::getAgent).orElse(null); -// final Agent agent = ModelFactory.createDefaultModel(). -// createResource(getUriInfo().getQueryParameters().getFirst("agent")). -// addProperty(RDF.type, FOAF.Agent). -// as(Agent.class); - - //final ParameterizedSparqlString pss = getApplication().canAs(EndUserApplication.class) ? 
getACLQuery() : getOwnerACLQuery(); + try { if (!getUriInfo().getQueryParameters().containsKey(SPIN.THIS_VAR_NAME)) throw new BadRequestException("?this query param is not provided"); @@ -129,18 +133,28 @@ public Response get(@QueryParam(QUERY) Query unused, try { final ParameterizedSparqlString authPss = getACLQuery(); - authPss.setParams(new AuthorizationParams(getApplication().getBase(), accessTo, agent).get()); - Query authQuery = new SetResultSetValues().apply(authPss.asQuery(), docTypesResult); - assert authQuery.toString().contains("VALUES"); + authPss.setParams(new AuthorizationParams(getApplication().getAdminApplication().getBase(), accessTo, agent).get()); + + Model authModel = getApplication().getAdminApplication().getService().getSPARQLClient().loadModel(authPss.asQuery()); + + // filter out authorizations with acl:accessToClass foaf:Agent - all agents already have that access + ResIterator agentClassIter = authModel.listSubjectsWithProperty(ACL.agentClass, FOAF.Agent); + try + { + agentClassIter.toList().forEach((auth) -> authModel.removeAll(auth, null, null)); + } + finally + { + agentClassIter.close(); + } - Model authModel = getEndpointAccessor().loadModel(authQuery, List.of(), List.of()); // special case where the agent is the owner of the requested document - automatically grant acl:Read/acl:Append/acl:Write access if (isOwner(accessTo, agent)) { log.debug("Agent <{}> is the owner of <{}>, granting acl:Read/acl:Append/acl:Write access", agent, accessTo); authModel.add(createOwnerAuthorization(accessTo, agent).getModel()); } - + return getResponseBuilder(authModel).build(); } finally @@ -216,12 +230,41 @@ protected Service getEndUserService() getApplication().getService(); } + /** + * Returns response builder for the given RDF model. 
+ * + * @param model RDF model + * @return response builder + */ + public Response.ResponseBuilder getResponseBuilder(Model model) + { + return new com.atomgraph.core.model.impl.Response(getRequest(), + model, + null, + new EntityTag(Long.toHexString(ModelUtils.hashModel(model))), + getMediaTypes().getWritable(Model.class), + null, + null, + new HTMLMediaTypePredicate()). + getResponseBuilder(); + } + + /** + * Returns the current request. + * + * @return request object + */ + public Request getRequest() + { + return request; + } + /** * Returns the current application. * * @return application resource */ - public Application getApplication() + public EndUserApplication getApplication() { return application; } @@ -235,6 +278,16 @@ public UriInfo getUriInfo() { return uriInfo; } + + /** + * Returns media types registry. + * + * @return media types + */ + public MediaTypes getMediaTypes() + { + return mediaTypes; + } /** * Returns agent context. diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/acl/AccessRequest.java b/src/main/java/com/atomgraph/linkeddatahub/resource/acl/AccessRequest.java index e983ed17c..8e2a60699 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/acl/AccessRequest.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/acl/AccessRequest.java @@ -16,13 +16,10 @@ */ package com.atomgraph.linkeddatahub.resource.acl; -import com.atomgraph.core.MediaTypes; import com.atomgraph.core.exception.ConfigurationException; -import com.atomgraph.linkeddatahub.apps.model.AdminApplication; import static com.atomgraph.linkeddatahub.apps.model.AdminApplication.AUTHORIZATION_REQUEST_PATH; -import com.atomgraph.linkeddatahub.model.Service; +import com.atomgraph.linkeddatahub.apps.model.EndUserApplication; import com.atomgraph.linkeddatahub.model.auth.Agent; -import com.atomgraph.linkeddatahub.server.model.impl.GraphStoreImpl; import com.atomgraph.linkeddatahub.server.security.AgentContext; import 
com.atomgraph.linkeddatahub.server.util.Skolemizer; import com.atomgraph.linkeddatahub.vocabulary.ACL; @@ -40,17 +37,12 @@ import jakarta.ws.rs.POST; import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.core.Context; -import jakarta.ws.rs.core.Request; import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.SecurityContext; import jakarta.ws.rs.core.UriBuilder; -import jakarta.ws.rs.core.UriInfo; -import jakarta.ws.rs.ext.Providers; import java.net.URI; import java.util.GregorianCalendar; import java.util.Optional; import java.util.UUID; -import org.apache.jena.ontology.Ontology; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.ModelFactory; import org.apache.jena.rdf.model.ResIterator; @@ -67,11 +59,13 @@ * * @author Martynas Jusevičius {@literal } */ -public class AccessRequest extends GraphStoreImpl +public class AccessRequest { private static final Logger log = LoggerFactory.getLogger(AccessRequest.class); + private final EndUserApplication application; + private final Optional agentContext; private final String emailSubject; private final String emailText; private final UriBuilder authRequestContainerUriBuilder; @@ -79,30 +73,21 @@ public class AccessRequest extends GraphStoreImpl /** * Constructs an AccessRequest resource handler. 
* - * @param request HTTP request context - * @param uriInfo URI information context - * @param mediaTypes supported media types * @param application current application - * @param ontology optional application ontology - * @param service optional SPARQL service - * @param securityContext security context * @param agentContext optional agent context - * @param providers JAX-RS providers * @param system system application * @param servletConfig servlet configuration */ @Inject - public AccessRequest(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, - com.atomgraph.linkeddatahub.apps.model.Application application, Optional ontology, Optional service, - @Context SecurityContext securityContext, Optional agentContext, - @Context Providers providers, com.atomgraph.linkeddatahub.Application system, @Context ServletConfig servletConfig) + public AccessRequest(com.atomgraph.linkeddatahub.apps.model.Application application, Optional agentContext, + com.atomgraph.linkeddatahub.Application system, @Context ServletConfig servletConfig) { - super(request, uriInfo, mediaTypes, application, ontology, service, securityContext, agentContext, providers, system); if (log.isDebugEnabled()) log.debug("Constructing {}", getClass()); - if (!application.canAs(AdminApplication.class)) throw new IllegalStateException("The " + getClass() + " endpoint is only available on admin applications"); - if (securityContext == null || !(securityContext.getUserPrincipal() instanceof Agent)) throw new IllegalStateException("Agent is not authenticated"); - - authRequestContainerUriBuilder = UriBuilder.fromUri(URI.create(application.getBase().toString())).path(AUTHORIZATION_REQUEST_PATH); + if (!application.canAs(EndUserApplication.class)) throw new IllegalStateException("The " + getClass() + " endpoint is only available on end-user applications"); + this.application = application.as(EndUserApplication.class); + this.agentContext = agentContext; + + authRequestContainerUriBuilder 
= this.application.getAdminApplication().getUriBuilder().path(AUTHORIZATION_REQUEST_PATH); emailSubject = servletConfig.getServletContext().getInitParameter(LDHC.requestAccessEMailSubject.getURI()); if (emailSubject == null) throw new InternalServerErrorException(new ConfigurationException(LDHC.requestAccessEMailSubject)); @@ -111,16 +96,29 @@ public AccessRequest(@Context Request request, @Context UriInfo uriInfo, MediaTy if (emailText == null) throw new InternalServerErrorException(new ConfigurationException(LDHC.requestAccessEMailText)); } - + + /** + * Implements the HTTP GET method. + * + * @param defaultGraph default graph flag + * @param graphUri graph URI + * @return response object + */ @GET - @Override public Response get(@QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) { throw new NotAllowedException("GET is not allowed on this endpoint"); } - + + /** + * Implements the HTTP POST method for submitting access requests. + * + * @param model RDF model with access request data + * @param defaultGraph default graph flag + * @param graphUri graph URI + * @return response object + */ @POST - @Override public Response post(Model model, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) { ResIterator it = model.listResourcesWithProperty(RDF.type, ACL.Authorization); @@ -134,7 +132,7 @@ public Response post(Model model, @QueryParam("default") @DefaultValue("false") Model requestModel = ModelFactory.createDefaultModel(); Resource agent = authorization.getPropertyResourceValue(ACL.agent); - if (!agent.equals(getAgentContext().get().getAgent())) throw new IllegalStateException("Agent requesting access must be authenticated"); + if (getAgentContext().isEmpty() || !agent.equals(getAgentContext().get().getAgent())) throw new IllegalStateException("Agent requesting access must be authenticated"); String humanReadableName = 
getAgentsHumanReadableName(getAgentContext().get().getAgent()); String accessRequestLabel = humanReadableName != null ? "Access request by " + humanReadableName : null; // TO-DO: localize the string @@ -182,11 +180,7 @@ public Response post(Model model, @QueryParam("default") @DefaultValue("false") new Skolemizer(graphUri.toString()).apply(requestModel); // store access request in the admin service - //getApplication().as(EndUserApplication.class).getAdminApplication().getService().getGraphStoreClient().add(graphUri.toASCIIString(), requestModel); - try (Response resp = super.post(requestModel, false, graphUri)) - { - resp.close(); - } // don't wrap into try-with-resources because that will close the Response + getApplication().getAdminApplication().getService().getGraphStoreClient().add(graphUri.toString(), requestModel); } return Response.ok().build(); @@ -212,6 +206,16 @@ public String getAgentsHumanReadableName(Agent agent) return null; } + + /** + * Returns the current application. + * + * @return end-user application + */ + public EndUserApplication getApplication() + { + return application; + } /** * Returns the URI builder for authorization requests. @@ -222,5 +226,15 @@ public UriBuilder getAuthRequestContainerUriBuilder() { return authRequestContainerUriBuilder.clone(); } + + /** + * Returns the agent context of the current request. 
+ * + * @return optional agent context + */ + public Optional getAgentContext() + { + return agentContext; + } } diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/Clear.java b/src/main/java/com/atomgraph/linkeddatahub/resource/admin/Clear.java index afe779b8e..01a8b19c9 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/Clear.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/admin/Clear.java @@ -19,7 +19,7 @@ import com.atomgraph.linkeddatahub.apps.model.AdminApplication; import com.atomgraph.linkeddatahub.apps.model.EndUserApplication; import static com.atomgraph.linkeddatahub.server.filter.request.OntologyFilter.addDocumentModel; -import com.atomgraph.linkeddatahub.server.filter.response.BackendInvalidationFilter; +import com.atomgraph.linkeddatahub.server.filter.response.CacheInvalidationFilter; import com.atomgraph.linkeddatahub.server.util.OntologyModelGetter; import java.net.URI; import jakarta.inject.Inject; @@ -101,8 +101,7 @@ public Response post(@FormParam("uri") String ontologyURI, @HeaderParam("Referer // !!! we need to reload the ontology model before returning a response, to make sure the next request already gets the new version !!! // same logic as in OntologyFilter. TO-DO: encapsulate? - OntologyModelGetter modelGetter = new OntologyModelGetter(app, - ontModelSpec, getSystem().getOntologyQuery(), getSystem().getNoCertClient(), getSystem().getMediaTypes()); + OntologyModelGetter modelGetter = new OntologyModelGetter(app, ontModelSpec, getSystem().getOntologyQuery()); ontModelSpec.setImportModelGetter(modelGetter); if (log.isDebugEnabled()) log.debug("Started loading ontology with URI '{}' from the admin dataset", ontologyURI); Model baseModel = modelGetter.getModel(ontologyURI); @@ -132,7 +131,7 @@ public Response ban(Resource proxy, String url) if (url == null) throw new IllegalArgumentException("Resource cannot be null"); return getSystem().getClient().target(proxy.getURI()).request(). 
- header(BackendInvalidationFilter.HEADER_NAME, UriComponent.encode(url, UriComponent.Type.UNRESERVED)). // the value has to be URL-encoded in order to match request URLs in Varnish + header(CacheInvalidationFilter.HEADER_NAME, UriComponent.encode(url, UriComponent.Type.UNRESERVED)). // the value has to be URL-encoded in order to match request URLs in Varnish method("BAN", Response.class); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/RequestAccess.java b/src/main/java/com/atomgraph/linkeddatahub/resource/admin/RequestAccess.java deleted file mode 100644 index e468485f3..000000000 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/RequestAccess.java +++ /dev/null @@ -1,255 +0,0 @@ -/** - * Copyright 2019 Martynas Jusevičius - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ -package com.atomgraph.linkeddatahub.resource.admin; - -import com.atomgraph.core.MediaTypes; -import com.atomgraph.core.exception.ConfigurationException; -import static com.atomgraph.linkeddatahub.apps.model.AdminApplication.AUTHORIZATION_REQUEST_PATH; -import com.atomgraph.linkeddatahub.client.LinkedDataClient; -import com.atomgraph.linkeddatahub.model.Service; -import com.atomgraph.linkeddatahub.listener.EMailListener; -import com.atomgraph.linkeddatahub.model.auth.Agent; -import com.atomgraph.linkeddatahub.server.model.impl.GraphStoreImpl; -import com.atomgraph.linkeddatahub.server.security.AgentContext; -import com.atomgraph.linkeddatahub.server.util.MessageBuilder; -import com.atomgraph.linkeddatahub.server.util.Skolemizer; -import com.atomgraph.linkeddatahub.vocabulary.LDHC; -import com.atomgraph.linkeddatahub.vocabulary.FOAF; -import com.atomgraph.linkeddatahub.vocabulary.LACL; -import java.io.UnsupportedEncodingException; -import java.net.URI; -import java.util.GregorianCalendar; -import java.util.Optional; -import java.util.UUID; -import jakarta.inject.Inject; -import jakarta.mail.MessagingException; -import jakarta.servlet.ServletConfig; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.QueryParam; -import jakarta.ws.rs.InternalServerErrorException; -import jakarta.ws.rs.core.Context; -import jakarta.ws.rs.core.Request; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.SecurityContext; -import jakarta.ws.rs.core.UriBuilder; -import jakarta.ws.rs.core.UriInfo; -import jakarta.ws.rs.ext.Providers; -import org.apache.jena.ontology.Ontology; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ResIterator; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.vocabulary.DCTerms; -import org.apache.jena.vocabulary.RDF; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * JAX-RS endpoint that handles requests for access. 
- * Creates an authorization request and sends a notification email to the maker of the application. - * - * @author Martynas Jusevičius {@literal } - */ -@Deprecated -public class RequestAccess extends GraphStoreImpl -{ - - private static final Logger log = LoggerFactory.getLogger(RequestAccess.class); - - private final String emailSubject; - private final String emailText; - private final UriBuilder authRequestContainerUriBuilder; - - /** - * Constructs access request resource. - * - * @param request current request - * @param uriInfo request URI information - * @param mediaTypes registry of readable/writable media types - * @param application current application - * @param ontology current application's ontology - * @param service current application's service - * @param securityContext JAX-RS security service - * @param providers registry of JAX-RS providers - * @param system system application - * @param servletConfig servlet config - * @param agentContext optional agent context - */ - @Inject - public RequestAccess(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, - com.atomgraph.linkeddatahub.apps.model.Application application, Optional ontology, Optional service, - @Context SecurityContext securityContext, Optional agentContext, - @Context Providers providers, com.atomgraph.linkeddatahub.Application system, @Context ServletConfig servletConfig) - { - super(request, uriInfo, mediaTypes, application, ontology, service, securityContext, agentContext, providers, system); - if (log.isDebugEnabled()) log.debug("Constructing {}", getClass()); - if (securityContext == null || !(securityContext.getUserPrincipal() instanceof Agent)) throw new IllegalStateException("Agent is not authenticated"); - - authRequestContainerUriBuilder = uriInfo.getBaseUriBuilder().path(AUTHORIZATION_REQUEST_PATH); - - emailSubject = servletConfig.getServletContext().getInitParameter(LDHC.requestAccessEMailSubject.getURI()); - if (emailSubject == null) throw new 
InternalServerErrorException(new ConfigurationException(LDHC.requestAccessEMailSubject)); - - emailText = servletConfig.getServletContext().getInitParameter(LDHC.requestAccessEMailText.getURI()); - if (emailText == null) throw new InternalServerErrorException(new ConfigurationException(LDHC.requestAccessEMailText)); - } - - @GET - @Override - public Response get(@QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) - { - return super.get(false, getURI()); - } - - @POST - @Override - public Response post(Model requestModel, @QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) - { - graphUri = getAuthRequestContainerUriBuilder().path(UUID.randomUUID().toString() + "/").build(); - new Skolemizer(graphUri.toString()).apply(requestModel); - - ResIterator it = requestModel.listResourcesWithProperty(RDF.type, LACL.AuthorizationRequest); - try - { - Resource accessRequest = it.next(); - Resource requestAgent = accessRequest.getPropertyResourceValue(LACL.requestAgent); - if (!requestAgent.equals(getAgentContext().get().getAgent())) throw new IllegalStateException("Agent requesting access must be authenticated"); - - Resource owner = getApplication().getMaker(); - if (owner == null) throw new IllegalStateException("Application <" + getApplication().getURI() + "> does not have a maker (foaf:maker)"); - String ownerURI = owner.getURI(); - - accessRequest.addLiteral(DCTerms.created, GregorianCalendar.getInstance()); - - LinkedDataClient ldc = LinkedDataClient.create(getSystem().getClient(), getSystem().getMediaTypes()). 
- delegation(getUriInfo().getBaseUri(), getAgentContext().orElse(null)); - Model agentModel = ldc.getModel(ownerURI); - owner = agentModel.getResource(ownerURI); - if (!agentModel.containsResource(owner)) throw new IllegalStateException("Could not load agent's <" + ownerURI + "> description from admin service"); - - Response response = super.post(requestModel, false, graphUri); // don't wrap into try-with-resources because that will close the Response - - try - { - sendEmail(owner, accessRequest); - } - catch (MessagingException | UnsupportedEncodingException ex) - { - if (log.isErrorEnabled()) log.error("Could not send access request email to Agent: {}", getAgentContext().get().getAgent().getURI()); - } - - return response; // 201 Created - } - finally - { - it.close(); - } - } - - /** - * Sends access request notification email to applications owner. - * - * @param owner application's owner - * @param accessRequest access request resource - * @throws MessagingException error sending email - * @throws UnsupportedEncodingException encoding error - */ - public void sendEmail(Resource owner, Resource accessRequest) throws MessagingException, UnsupportedEncodingException - { - // TO-DO: trim values? 
- final String name; - if (owner.hasProperty(FOAF.givenName) && owner.hasProperty(FOAF.familyName)) - { - String givenName = owner.getProperty(FOAF.givenName).getString(); - String familyName = owner.getProperty(FOAF.familyName).getString(); - name = givenName + " " + familyName; - } - else - { - if (owner.hasProperty(FOAF.name)) name = owner.getProperty(FOAF.name).getString(); - else throw new IllegalStateException("Owner Agent '" + owner + "' does not have either foaf:givenName/foaf:familyName or foaf:name"); - } - - // we expect foaf:mbox value as mailto: URI (it gets converted from literal in Model provider) - String mbox = owner.getRequiredProperty(FOAF.mbox).getResource().getURI().substring("mailto:".length()); - - Resource requestAgent = accessRequest.getPropertyResourceValue(LACL.requestAgent); - Resource accessTo = accessRequest.getPropertyResourceValue(LACL.requestAccessTo); - - MessageBuilder builder = getSystem().getMessageBuilder(). - subject(String.format(getEmailSubject(), - getApplication().getProperty(DCTerms.title).getString())). - to(mbox, name). - textBodyPart(String.format(getEmailText(), requestAgent.getURI(), accessTo.getURI(), accessRequest.getURI())); - - if (getSystem().getNotificationAddress() != null) builder = builder.from(getSystem().getNotificationAddress()); - - EMailListener.submit(builder.build()); - } - - /** - * Returns the SPARQL service from which agent data is retrieved. - * - * @return SPARQL service - */ - protected Service getAgentService() - { - return getApplication().getService(); - } - - /** - * Returns URI of this resource. - * - * @return resource URI - */ - public URI getURI() - { - return getUriInfo().getAbsolutePath(); - } - - /** - * Returns the subject of the notification email. - * - * @return subject - */ - public String getEmailSubject() - { - return emailSubject; - } - - /** - * Returns the text of the notification email. 
- * - * @return text - */ - public String getEmailText() - { - return emailText; - } - - /** - * Returns the URI builder for authorization requests. - * - * @return URI builder - */ - public UriBuilder getAuthRequestContainerUriBuilder() - { - return authRequestContainerUriBuilder.clone(); - } - -} \ No newline at end of file diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/SignUp.java b/src/main/java/com/atomgraph/linkeddatahub/resource/admin/SignUp.java index 91c587942..95b22305b 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/SignUp.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/admin/SignUp.java @@ -25,6 +25,7 @@ import com.atomgraph.linkeddatahub.apps.model.EndUserApplication; import com.atomgraph.linkeddatahub.model.Service; import com.atomgraph.linkeddatahub.listener.EMailListener; +import com.atomgraph.linkeddatahub.server.filter.response.CacheInvalidationFilter; import com.atomgraph.linkeddatahub.server.model.impl.GraphStoreImpl; import com.atomgraph.linkeddatahub.server.security.AgentContext; import com.atomgraph.linkeddatahub.server.util.MessageBuilder; @@ -77,6 +78,7 @@ import org.apache.jena.ontology.Ontology; import org.apache.jena.query.ParameterizedSparqlString; import org.apache.jena.query.Query; +import org.apache.jena.query.ResultSet; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.ModelFactory; import org.apache.jena.rdf.model.Property; @@ -89,6 +91,7 @@ import org.apache.jena.vocabulary.DCTerms; import org.apache.jena.vocabulary.RDF; import org.glassfish.jersey.server.internal.process.MappableException; +import org.glassfish.jersey.uri.UriComponent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -198,10 +201,12 @@ public Response post(Model agentModel, @QueryParam("default") @DefaultValue("fal String password = validateAndRemovePassword(agent); // TO-DO: trim values Resource mbox = agent.getRequiredProperty(FOAF.mbox).getResource(); - + 
ParameterizedSparqlString pss = new ParameterizedSparqlString(getAgentQuery().toString()); pss.setParam(FOAF.mbox.getLocalName(), mbox); - boolean agentExists = !getAgentService().getSPARQLClient().loadModel(pss.asQuery()).isEmpty(); + ResultSet rs = getAgentService().getSPARQLClient().select(pss.asQuery()); + boolean agentExists = rs.hasNext(); + rs.close(); if (agentExists) throw createSPINConstraintViolationException(agent, FOAF.mbox, "Agent with this mailbox already exists"); String givenName = agent.getRequiredProperty(FOAF.givenName).getString(); @@ -282,6 +287,9 @@ public Response post(Model agentModel, @QueryParam("default") @DefaultValue("fal throw new InternalServerErrorException("Cannot create Authorization"); } + // purge agent lookup from proxy cache + if (getAgentService().getBackendProxy() != null) ban(getAgentService().getBackendProxy(), mbox.getURI()); + // remove secretary WebID from cache getSystem().getEventBus().post(new com.atomgraph.linkeddatahub.server.event.SignUp(getSystem().getSecretaryWebIDURI())); @@ -565,5 +573,21 @@ public Query getAgentQuery() { return getSystem().getAgentQuery(); } - + + /** + * Bans URL from the backend proxy cache. + * + * @param proxy proxy server URL + * @param url banned URL + * @return proxy server response + */ + public Response ban(Resource proxy, String url) + { + if (url == null) throw new IllegalArgumentException("Resource cannot be null"); + + return getSystem().getClient().target(proxy.getURI()).request(). + header(CacheInvalidationFilter.HEADER_NAME, UriComponent.encode(url, UriComponent.Type.UNRESERVED)). 
// the value has to be URL-encoded in order to match request URLs in Varnish + method("BAN", Response.class); + } + } \ No newline at end of file diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/oauth2/google/Authorize.java b/src/main/java/com/atomgraph/linkeddatahub/resource/admin/oauth2/google/Authorize.java deleted file mode 100644 index 3df2b9c23..000000000 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/oauth2/google/Authorize.java +++ /dev/null @@ -1,179 +0,0 @@ -/** - * Copyright 2019 Martynas Jusevičius - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ -package com.atomgraph.linkeddatahub.resource.admin.oauth2.google; - -import com.atomgraph.linkeddatahub.model.Service; -import com.atomgraph.core.exception.ConfigurationException; -import com.atomgraph.linkeddatahub.apps.model.AdminApplication; -import com.atomgraph.linkeddatahub.apps.model.Application; -import com.atomgraph.linkeddatahub.apps.model.EndUserApplication; -import com.atomgraph.linkeddatahub.resource.admin.oauth2.Login; -import com.atomgraph.linkeddatahub.vocabulary.Google; -import java.math.BigInteger; -import java.net.URI; -import java.security.SecureRandom; -import java.util.Base64; -import java.util.Optional; -import java.util.UUID; -import jakarta.inject.Inject; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.core.Context; -import jakarta.ws.rs.core.NewCookie; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.UriBuilder; -import jakarta.ws.rs.core.UriInfo; -import org.apache.jena.ontology.Ontology; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * JAX-RS resource that handles Google authorization requests. - * - * @author Martynas Jusevičius {@literal } - */ -@Path("oauth2/authorize/google") -public class Authorize -{ - private static final Logger log = LoggerFactory.getLogger(Authorize.class); - - /** Google's OAuth endpoint URL */ - public static final String ENDPOINT_URI = "https://accounts.google.com/o/oauth2/v2/auth"; - /** OAuth authorization scope */ - public static final String SCOPE = "openid email profile"; - /** JWT cookie name */ - public static final String COOKIE_NAME = "LinkedDataHub.state"; - /** URL parameter name */ - public static final String REFERER_PARAM_NAME = "referer"; - - private final UriInfo uriInfo; - private final Application application; - private final Ontology ontology; - private final String clientID; - - /** - * Constructs resource from current request info. 
- * - * @param uriInfo URI info - * @param application application - * @param ontology application's ontology - * @param service application's SPARQL service - * @param system JAX-RS application - */ - @Inject - public Authorize(@Context UriInfo uriInfo, - Optional service, com.atomgraph.linkeddatahub.apps.model.Application application, Optional ontology, - com.atomgraph.linkeddatahub.Application system) - { - this.uriInfo = uriInfo; - this.application = application; - this.ontology = ontology.get(); - if (log.isDebugEnabled()) log.debug("Constructing {}", getClass()); - clientID = (String)system.getProperty(Google.clientID.getURI()); - } - - /** - * Implements the HTTP GET method. - * - * @return response object - */ - @GET - public Response get() - { - if (getClientID() == null) throw new ConfigurationException(Google.clientID); - - final String originUri; - //if (getHttpHeaders().getHeaderString("Referer") != null) originUri = getHttpHeaders().getHeaderString("Referer"); // Referer value missing after redirect - if (getUriInfo().getQueryParameters().containsKey(REFERER_PARAM_NAME)) originUri = getUriInfo().getQueryParameters().getFirst(REFERER_PARAM_NAME); - else originUri = getEndUserApplication().getBase().getURI(); - - URI redirectUri = getUriInfo().getBaseUriBuilder(). - path(Login.class). - build(); - - String state = new BigInteger(130, new SecureRandom()).toString(32); - String stateValue = Base64.getEncoder().encodeToString((state + ";" + originUri).getBytes()); - NewCookie stateCookie = new NewCookie(COOKIE_NAME, stateValue, getEndUserApplication().getBaseURI().getPath(), null, NewCookie.DEFAULT_VERSION, null, NewCookie.DEFAULT_MAX_AGE, false); - - UriBuilder authUriBuilder = UriBuilder.fromUri(ENDPOINT_URI). - queryParam("response_type", "code"). - queryParam("access_type", "offline"). - queryParam("client_id", getClientID()). - queryParam("redirect_uri", redirectUri). - queryParam("scope", SCOPE). - queryParam("state", stateValue). 
- queryParam("nonce", UUID.randomUUID().toString()); - - return Response.seeOther(authUriBuilder.build()). - cookie(stateCookie). - build(); - } - - /** - * Returns the end-user application of the current dataspace. - * - * @return application resource - */ - public EndUserApplication getEndUserApplication() - { - if (getApplication().canAs(EndUserApplication.class)) - return getApplication().as(EndUserApplication.class); - else - return getApplication().as(AdminApplication.class).getEndUserApplication(); - } - - /** - * Returns URI information for the current request. - * - * @return URI info - */ - public UriInfo getUriInfo() - { - return uriInfo; - } - - /** - * Returns matched application. - * - * @return application resource - */ - public Application getApplication() - { - return application; - } - - /** - * Returns application's ontology. - * - * @return ontology resource - */ - public Ontology getOntology() - { - return ontology; - } - - /** - * Returns Google OAuth client ID. - * - * @return client ID - */ - private String getClientID() - { - return clientID; - } - -} diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/AuthorizeBase.java b/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/AuthorizeBase.java new file mode 100644 index 000000000..2a9abafcd --- /dev/null +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/AuthorizeBase.java @@ -0,0 +1,201 @@ +/** + * Copyright 2025 Martynas Jusevičius + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.atomgraph.linkeddatahub.resource.oauth2; + +import com.atomgraph.linkeddatahub.apps.model.AdminApplication; +import com.atomgraph.linkeddatahub.apps.model.Application; +import com.atomgraph.linkeddatahub.apps.model.EndUserApplication; +import java.math.BigInteger; +import java.net.URI; +import java.security.SecureRandom; +import java.util.Base64; +import java.util.UUID; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.NewCookie; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.UriBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Abstract base class for OAuth 2.0 authorization endpoints. + * + * @author Martynas Jusevičius {@literal } + */ +public abstract class AuthorizeBase +{ + private static final Logger log = LoggerFactory.getLogger(AuthorizeBase.class); + + /** JWT cookie name */ + public static final String COOKIE_NAME = "LinkedDataHub.state"; + /** URL parameter name */ + public static final String REFERER_PARAM_NAME = "referer"; + + private final HttpServletRequest httpServletRequest; + private final Application application; + private final com.atomgraph.linkeddatahub.Application system; + private final String clientID; + + /** + * Constructs resource from current request info. 
+ * + * @param httpServletRequest servlet request + * @param application application + * @param system JAX-RS application + * @param clientID OAuth client ID + */ + public AuthorizeBase(HttpServletRequest httpServletRequest, Application application, com.atomgraph.linkeddatahub.Application system, String clientID) + { + if (!application.canAs(EndUserApplication.class)) + throw new IllegalStateException("The " + getClass() + " endpoint is only available on end-user applications"); + + this.httpServletRequest = httpServletRequest; + this.application = application; + this.system = system; + this.clientID = clientID; + if (log.isDebugEnabled()) log.debug("Constructing {}", getClass()); + } + + /** + * Implements the HTTP GET method. + * + * @param originUri URI to redirect back to + * @return response object + */ + @GET + public Response get(@QueryParam(REFERER_PARAM_NAME) String originUri) + { + // the redirect URI must be on the domain, not sub-domains (i.e. on the root dataspace) + URI redirectUri = UriBuilder.fromUri(getSystem().getBaseURI()). + path(getLoginClass()). + build(); + + String state = new BigInteger(130, new SecureRandom()).toString(32); + String stateValue = Base64.getEncoder().encodeToString((state + ";" + originUri).getBytes()); + // Cookie path is "/" to make it accessible across all dataspaces + NewCookie stateCookie = new NewCookie.Builder(COOKIE_NAME). + value(stateValue). + path("/"). + build(); + + UriBuilder authUriBuilder = getAuthorizeUriBuilder(getAuthorizeEndpoint(), getClientID(), redirectUri.toString(), getScope(), stateValue, UUID.randomUUID().toString()); + + return Response.seeOther(authUriBuilder.build()). + cookie(stateCookie). + build(); + } + + /** + * Returns the OAuth authorization endpoint URI. + * + * @return authorization endpoint URI + */ + protected abstract URI getAuthorizeEndpoint(); + + /** + * Returns the OAuth scope string. 
+ * + * @return scope string + */ + protected abstract String getScope(); + + /** + * Returns the Login class for building the redirect URI. + * + * @return Login class + */ + protected abstract Class getLoginClass(); + + /** + * Builds a URI for the OAuth 2.0 / OpenID Connect authorization request. + * Constructs the authorization endpoint URL with standard OAuth parameters. + * + * @param endpoint OAuth authorization endpoint URI + * @param clientID OAuth client ID + * @param redirectURI redirect URI for the authorization response + * @param scope OAuth scope string + * @param stateValue state parameter for CSRF protection + * @param nonce nonce parameter for replay attack prevention + * @return URI builder with authorization request parameters + */ + public UriBuilder getAuthorizeUriBuilder(URI endpoint, String clientID, String redirectURI, String scope, String stateValue, String nonce) + { + return UriBuilder.fromUri(endpoint). + queryParam("response_type", "code"). + queryParam("client_id", clientID). + queryParam("redirect_uri", redirectURI). + queryParam("scope", scope). + queryParam("state", stateValue). + queryParam("nonce", nonce); + } + + /** + * Returns the end-user application of the current dataspace. + * + * @return application resource + */ + public EndUserApplication getEndUserApplication() + { + if (getApplication().canAs(EndUserApplication.class)) + return getApplication().as(EndUserApplication.class); + else + return getApplication().as(AdminApplication.class).getEndUserApplication(); + } + + /** + * Returns servlet request. + * + * @return servlet request + */ + public HttpServletRequest getHttpServletRequest() + { + return httpServletRequest; + } + + /** + * Returns matched application. + * + * @return application resource + */ + public Application getApplication() + { + return application; + } + + /** + * Returns system application. 
+ * + * @return JAX-RS application + */ + public com.atomgraph.linkeddatahub.Application getSystem() + { + return system; + } + + /** + * Returns OAuth client ID. + * + * @return client ID + */ + protected String getClientID() + { + return clientID; + } + +} diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/oauth2/Login.java b/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/LoginBase.java similarity index 52% rename from src/main/java/com/atomgraph/linkeddatahub/resource/admin/oauth2/Login.java rename to src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/LoginBase.java index d7d36e9d6..c04c04e08 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/admin/oauth2/Login.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/LoginBase.java @@ -1,5 +1,5 @@ /** - * Copyright 2019 Martynas Jusevičius + * Copyright 2025 Martynas Jusevičius * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,69 +13,57 @@ * See the License for the specific language governing permissions and * limitations under the License. 
* - */ -package com.atomgraph.linkeddatahub.resource.admin.oauth2; + */package com.atomgraph.linkeddatahub.resource.oauth2; -import com.atomgraph.core.MediaTypes; import com.atomgraph.core.exception.ConfigurationException; import com.atomgraph.linkeddatahub.apps.model.AdminApplication; +import com.atomgraph.linkeddatahub.apps.model.Application; import com.atomgraph.linkeddatahub.apps.model.EndUserApplication; import com.atomgraph.linkeddatahub.listener.EMailListener; import com.atomgraph.linkeddatahub.model.Service; import static com.atomgraph.linkeddatahub.resource.admin.SignUp.AGENT_PATH; import static com.atomgraph.linkeddatahub.resource.admin.SignUp.AUTHORIZATION_PATH; -import com.atomgraph.linkeddatahub.resource.admin.oauth2.google.Authorize; -import com.atomgraph.linkeddatahub.server.filter.request.auth.IDTokenFilter; -import com.atomgraph.linkeddatahub.server.filter.response.BackendInvalidationFilter; -import com.atomgraph.linkeddatahub.server.model.impl.GraphStoreImpl; -import com.atomgraph.linkeddatahub.server.security.AgentContext; +import com.atomgraph.linkeddatahub.server.filter.response.CacheInvalidationFilter; import com.atomgraph.linkeddatahub.server.util.MessageBuilder; import com.atomgraph.linkeddatahub.server.util.Skolemizer; import com.atomgraph.linkeddatahub.vocabulary.ACL; -import com.atomgraph.linkeddatahub.vocabulary.LDHC; +import com.atomgraph.linkeddatahub.vocabulary.DH; import com.atomgraph.linkeddatahub.vocabulary.FOAF; -import com.atomgraph.linkeddatahub.vocabulary.Google; import com.atomgraph.linkeddatahub.vocabulary.LACL; -import com.atomgraph.linkeddatahub.vocabulary.DH; +import com.atomgraph.linkeddatahub.vocabulary.LDHC; import com.atomgraph.linkeddatahub.vocabulary.SIOC; import com.auth0.jwt.JWT; import com.auth0.jwt.interfaces.DecodedJWT; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.Base64; -import 
java.util.GregorianCalendar; -import java.util.Optional; -import java.util.UUID; -import java.util.regex.Pattern; -import jakarta.inject.Inject; import jakarta.json.JsonObject; import jakarta.mail.MessagingException; import jakarta.servlet.ServletConfig; import jakarta.ws.rs.BadRequestException; -import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.InternalServerErrorException; +import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.client.Entity; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.Cookie; import jakarta.ws.rs.core.Form; import jakarta.ws.rs.core.HttpHeaders; -import jakarta.ws.rs.core.NewCookie; import jakarta.ws.rs.core.Request; import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.SecurityContext; import jakarta.ws.rs.core.UriInfo; -import jakarta.ws.rs.ext.Providers; -import org.apache.jena.ontology.Ontology; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URI; +import java.util.Base64; +import java.util.GregorianCalendar; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import java.util.regex.Pattern; import org.apache.jena.query.ParameterizedSparqlString; import org.apache.jena.query.Query; +import org.apache.jena.query.QuerySolution; +import org.apache.jena.query.ResultSet; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.rdf.model.ResIterator; import org.apache.jena.rdf.model.Resource; import org.apache.jena.rdf.model.ResourceFactory; import org.apache.jena.vocabulary.DCTerms; @@ -86,82 +74,83 @@ import org.slf4j.LoggerFactory; /** - * JAX-RS resource that handles OAuth login. - * + * Abstract base class for OAuth 2.0 login endpoints. 
+ * * @author Martynas Jusevičius {@literal } */ -@Path("oauth2/login") -public class Login extends GraphStoreImpl +public abstract class LoginBase { + + private static final Logger log = LoggerFactory.getLogger(LoginBase.class); - private static final Logger log = LoggerFactory.getLogger(Login.class); - - /** OAuth token endpoint URL */ - public static final String TOKEN_ENDPOINT = "https://oauth2.googleapis.com/token"; - /** User info endpoint URL */ - public static final String USER_INFO_ENDPOINT = "https://openidconnect.googleapis.com/v1/userinfo"; /** Relative path to the user container */ public static final String ACCOUNT_PATH = "acl/users/"; + private final UriInfo uriInfo; private final HttpHeaders httpHeaders; + private final com.atomgraph.linkeddatahub.apps.model.Application application; + private final com.atomgraph.linkeddatahub.Application system; private final String emailSubject; private final String emailText; private final String clientID, clientSecret; + /** * Constructs endpoint. 
* * @param request current request * @param uriInfo URI information of the current request - * @param mediaTypes a registry of readable/writable media types * @param httpHeaders HTTP headers * @param application current application - * @param ontology ontology of the current application - * @param service SPARQL service of the current application - * @param securityContext JAX-RS security context - * @param agentContext authenticated agent's context - * @param providers JAX-RS provider registry * @param system system application * @param servletConfig servlet config + * @param clientID OAuth client ID + * @param clientSecret OAuth client secret */ - @Inject - public Login(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes, @Context HttpHeaders httpHeaders, - com.atomgraph.linkeddatahub.apps.model.Application application, Optional ontology, Optional service, - @Context SecurityContext securityContext, Optional agentContext, - @Context Providers providers, com.atomgraph.linkeddatahub.Application system, @Context ServletConfig servletConfig) + public LoginBase(@Context Request request, @Context UriInfo uriInfo, @Context HttpHeaders httpHeaders, + com.atomgraph.linkeddatahub.apps.model.Application application, + com.atomgraph.linkeddatahub.Application system, @Context ServletConfig servletConfig, + String clientID, String clientSecret) { - super(request, uriInfo, mediaTypes, application, ontology, service, securityContext, agentContext, providers, system); + if (!application.canAs(EndUserApplication.class)) + throw new IllegalStateException("The " + getClass() + " endpoint is only available on end-user applications"); + + this.uriInfo = uriInfo; this.httpHeaders = httpHeaders; + this.application = application; + this.system = system; + this.clientID = clientID; + this.clientSecret = clientSecret; emailSubject = servletConfig.getServletContext().getInitParameter(LDHC.signUpEMailSubject.getURI()); if (emailSubject == null) throw new 
InternalServerErrorException(new ConfigurationException(LDHC.signUpEMailSubject)); emailText = servletConfig.getServletContext().getInitParameter(LDHC.oAuthSignUpEMailText.getURI()); if (emailText == null) throw new InternalServerErrorException(new ConfigurationException(LDHC.oAuthSignUpEMailText)); - - clientID = (String)system.getProperty(Google.clientID.getURI()); - clientSecret = (String)system.getProperty(Google.clientSecret.getURI()); } - + + /** + * Handles OAuth2 callback from the authorization server. + * Exchanges authorization code for access and ID tokens, creates or reuses user agent and account, + * and redirects back to the original referer with the ID token. + * + * @param code authorization code from OAuth provider + * @param state state parameter for CSRF protection + * @param error error code if authorization failed + * @return redirect response to original referer with ID token in URL fragment + */ @GET - @Override - public Response get(@QueryParam("default") @DefaultValue("false") Boolean defaultGraph, @QueryParam("graph") URI graphUri) + public Response get(@QueryParam("code") String code, @QueryParam("state") String state, @QueryParam("error") String error) // TO-DO: verify state by matching against state generated in Authorize { - if (getClientID() == null) throw new ConfigurationException(Google.clientID); - if (getClientSecret() == null) throw new ConfigurationException(Google.clientSecret); - - String error = getUriInfo().getQueryParameters().getFirst("error"); if (error != null) { if (log.isErrorEnabled()) log.error("OAuth callback error: {}", error); throw new InternalServerErrorException(error); } - - String code = getUriInfo().getQueryParameters().getFirst("code"); - String state = getUriInfo().getQueryParameters().getFirst("state"); // TO-DO: verify by matching against state generated in Authorize if (state == null) throw new BadRequestException("OAuth 'state' parameter not set"); - Cookie stateCookie = 
getHttpHeaders().getCookies().get(Authorize.COOKIE_NAME); - if (stateCookie == null) throw new BadRequestException("OAuth '" + Authorize.COOKIE_NAME + "' cookie not set"); + + Cookie stateCookie = getHttpHeaders().getCookies().get(AuthorizeBase.COOKIE_NAME); + if (stateCookie == null) throw new BadRequestException("OAuth '" + AuthorizeBase.COOKIE_NAME + "' cookie not set"); if (!state.equals(stateCookie.getValue())) throw new BadRequestException("OAuth 'state' parameter failed to validate"); Form form = new Form(). @@ -171,7 +160,7 @@ public Response get(@QueryParam("default") @DefaultValue("false") Boolean defaul param("client_secret", getClientSecret()). param("code", code); - try (Response cr = getSystem().getClient().target(TOKEN_ENDPOINT). + try (Response cr = getSystem().getClient().target(getTokenEndpoint()). request().post(Entity.form(form))) { JsonObject response = cr.readEntity(JsonObject.class); @@ -182,7 +171,16 @@ public Response get(@QueryParam("default") @DefaultValue("false") Boolean defaul } String idToken = response.getString("id_token"); + String accessToken = response.getString("access_token"); DecodedJWT jwt = JWT.decode(idToken); + + // Verify the ID token + if (!verify(jwt)) + { + if (log.isErrorEnabled()) log.error("Failed to verify ID token for subject '{}'", jwt.getSubject()); + throw new InternalServerErrorException("ID token verification failed"); + } + if (response.containsKey("refresh_token")) { String refreshToken = response.getString("refresh_token"); @@ -197,145 +195,174 @@ public Response get(@QueryParam("default") @DefaultValue("false") Boolean defaul } } - ParameterizedSparqlString accountPss = new ParameterizedSparqlString(getUserAccountQuery().toString()); - accountPss.setLiteral(SIOC.ID.getLocalName(), jwt.getSubject()); - accountPss.setLiteral(LACL.issuer.getLocalName(), jwt.getIssuer()); - final boolean accountExists = !getAgentService().getSPARQLClient().loadModel(accountPss.asQuery()).isEmpty(); - - if (!accountExists) // 
UserAccount with this ID does not exist yet + if (!userAccountExists(jwt.getSubject(), jwt.getIssuer())) // UserAccount with this ID does not exist yet { - String email = jwt.getClaim("email").asString(); - Resource mbox = ResourceFactory.createResource("mailto:" + email); - - ParameterizedSparqlString agentPss = new ParameterizedSparqlString(getAgentQuery().toString()); - agentPss.setParam(FOAF.mbox.getLocalName(), mbox); - final Model agentModel = getAgentService().getSPARQLClient().loadModel(agentPss.asQuery()); + Map userInfo = getUserInfo(jwt, accessToken); + Optional email = Optional.ofNullable(userInfo.get("email")); + Optional mbox = email.map(e -> "mailto:" + e).map(ResourceFactory::createResource); + + Model accountModel = ModelFactory.createDefaultModel(); + URI userAccountGraphUri = getAdminApplication().getUriBuilder().path(ACCOUNT_PATH).path("{slug}/").build(UUID.randomUUID().toString()); + + createUserAccount(accountModel, + userAccountGraphUri, + accountModel.createResource(getAdminApplication().getBaseURI().resolve(ACCOUNT_PATH).toString()), + jwt.getSubject(), + jwt.getIssuer(), + Optional.ofNullable(userInfo.get("name")), + email); - final boolean agentExists; - // if Agent with this foaf:mbox does not exist (lookup model is empty), create it; otherwise, reuse it - if (agentModel.isEmpty()) + new Skolemizer(userAccountGraphUri.toString()).apply(accountModel); + // lookup UserAccount resource after its URI has been skolemized + Resource userAccount = accountModel.createResource(userAccountGraphUri.toString()).getPropertyResourceValue(FOAF.primaryTopic); + + Resource agent; + Optional existingAgent = mbox.flatMap(this::findAgentByEmail); + + if (existingAgent.isEmpty()) { - agentExists = false; - URI agentGraphUri = getUriInfo().getBaseUriBuilder().path(AGENT_PATH).path("{slug}/").build(UUID.randomUUID().toString()); + Model agentModel = ModelFactory.createDefaultModel(); + URI agentGraphUri = 
getAdminApplication().getUriBuilder().path(AGENT_PATH).path("{slug}/").build(UUID.randomUUID().toString()); - createAgent(agentModel, + agent = createAgent(agentModel, agentGraphUri, - agentModel.createResource(getUriInfo().getBaseUri().resolve(AGENT_PATH).toString()), - jwt.getClaim("given_name").asString(), - jwt.getClaim("family_name").asString(), + agentModel.createResource(getAdminApplication().getBaseURI().resolve(AGENT_PATH).toString()), + Optional.ofNullable(userInfo.get("name")), + Optional.ofNullable(userInfo.get("given_name")), + Optional.ofNullable(userInfo.get("family_name")), email, - jwt.getClaim("picture") != null ? jwt.getClaim("picture").asString() : null); - - // skolemize here because this Model will not go through SkolemizingModelProvider - new Skolemizer(agentGraphUri.toString()).apply(agentModel); - } - else - agentExists = true; - - // lookup Agent resource after its URI has been skolemized - ResIterator it = agentModel.listResourcesWithProperty(FOAF.mbox); - try - { - // we need to retrieve resources again because they've changed from bnodes to URIs - final Resource agent = it.next(); - - Model accountModel = ModelFactory.createDefaultModel(); - URI userAccountGraphUri = getUriInfo().getBaseUriBuilder().path(ACCOUNT_PATH).path("{slug}/").build(UUID.randomUUID().toString()); - Resource userAccount = createUserAccount(accountModel, - userAccountGraphUri, - accountModel.createResource(getUriInfo().getBaseUri().resolve(ACCOUNT_PATH).toString()), - jwt.getSubject(), - jwt.getIssuer(), - jwt.getClaim("name").asString(), - email); - userAccount.addProperty(SIOC.ACCOUNT_OF, agent); - new Skolemizer(userAccountGraphUri.toString()).apply(accountModel); - - Response userAccountResponse = super.put(accountModel, false, userAccountGraphUri); - if (userAccountResponse.getStatus() != Response.Status.CREATED.getStatusCode()) - { - if (log.isErrorEnabled()) log.error("Cannot create UserAccount"); - throw new InternalServerErrorException("Cannot create 
UserAccount"); - } - if (log.isDebugEnabled()) log.debug("Created UserAccount for user ID: {}", jwt.getSubject()); + Optional.ofNullable(userInfo.get("picture"))); - // lookup UserAccount resource after its URI has been skolemized - userAccount = accountModel.createResource(userAccountGraphUri.toString()).getPropertyResourceValue(FOAF.primaryTopic); agent.addProperty(FOAF.account, userAccount); agentModel.add(agentModel.createResource(getSystem().getSecretaryWebIDURI().toString()), ACL.delegates, agent); // make secretary delegate whis agent - URI agentUri = URI.create(agent.getURI()); - // get Agent's document URI by stripping the fragment identifier from the Agent's URI - URI agentGraphUri = new URI(agentUri.getScheme(), agentUri.getSchemeSpecificPart(), null).normalize(); - Response agentResponse = super.put(agentModel, false, agentGraphUri); - if ((!agentExists && agentResponse.getStatus() != Response.Status.CREATED.getStatusCode()) || - (agentExists && agentResponse.getStatus() != Response.Status.OK.getStatusCode())) - { - if (log.isErrorEnabled()) log.error("Cannot create Agent or append metadata to it"); - throw new InternalServerErrorException("Cannot create Agent or append metadata to it"); - } + // skolemize here because this Model will not go through SkolemizingModelProvider + new Skolemizer(agentGraphUri.toString()).apply(agentModel); + // lookup Agent resource after its URI has been skolemized + agent = agentModel.createResource(agentGraphUri.toString()).getPropertyResourceValue(FOAF.primaryTopic); + + getAgentService().getGraphStoreClient().putModel(agentGraphUri.toString(), agentModel); + + // purge agent lookup from proxy cache (if email is present) + if (mbox.isPresent() && getAgentService().getBackendProxy() != null) + ban(getAgentService().getBackendProxy(), mbox.get().getURI()); Model authModel = ModelFactory.createDefaultModel(); - URI authGraphUri = 
getUriInfo().getBaseUriBuilder().path(AUTHORIZATION_PATH).path("{slug}/").build(UUID.randomUUID().toString()); - // creating authorization for the Agent documents + URI authGraphUri = getAdminApplication().getUriBuilder().path(AUTHORIZATION_PATH).path("{slug}/").build(UUID.randomUUID().toString()); + + // creating authorization for the Agent document createAuthorization(authModel, authGraphUri, - accountModel.createResource(getUriInfo().getBaseUri().resolve(AUTHORIZATION_PATH).toString()), + accountModel.createResource(getAdminApplication().getBaseURI().resolve(AUTHORIZATION_PATH).toString()), agentGraphUri, userAccountGraphUri); new Skolemizer(authGraphUri.toString()).apply(authModel); - Response authResponse = super.put(authModel, false, authGraphUri); - if (authResponse.getStatus() != Response.Status.CREATED.getStatusCode()) - { - if (log.isErrorEnabled()) log.error("Cannot create Authorization"); - throw new InternalServerErrorException("Cannot create Authorization"); - } + getAgentService().getGraphStoreClient().putModel(authGraphUri.toString(), authModel); - // purge agent lookup from proxy cache - if (getApplication().getService().getBackendProxy() != null) ban(getApplication().getService().getBackendProxy(), jwt.getSubject()); + try + { + // purge agent lookup from proxy cache + if (getApplication().getService().getBackendProxy() != null) ban(getAdminApplication().getService().getBackendProxy(), jwt.getSubject()); - // remove secretary WebID from cache - getSystem().getEventBus().post(new com.atomgraph.linkeddatahub.server.event.SignUp(getSystem().getSecretaryWebIDURI())); + // remove secretary WebID from cache + getSystem().getEventBus().post(new com.atomgraph.linkeddatahub.server.event.SignUp(getSystem().getSecretaryWebIDURI())); - if (log.isDebugEnabled()) log.debug("Created Agent for user ID: {}", jwt.getSubject()); - sendEmail(agent); - } - catch (UnsupportedEncodingException | MessagingException | URISyntaxException | InternalServerErrorException ex) 
- { - throw new MappableException(ex); + if (log.isDebugEnabled()) log.debug("Created Agent for user ID: {}", jwt.getSubject()); + if (agent.hasProperty(FOAF.mbox)) sendEmail(agent); + } + catch (UnsupportedEncodingException | MessagingException | InternalServerErrorException ex) + { + throw new MappableException(ex); + } } - finally + else { - it.close(); + QuerySolution qs = existingAgent.get(); + Resource agentGraph = qs.getResource("agentGraph"); + + Model agentModel = ModelFactory.createDefaultModel(); + agent = qs.getResource(FOAF.Agent.getLocalName()).inModel(agentModel); + agent.addProperty(FOAF.account, userAccount); + agentModel.add(agentModel.createResource(getSystem().getSecretaryWebIDURI().toString()), ACL.delegates, agent); // make secretary delegate whis agent + + getAgentService().getGraphStoreClient().add(agentGraph.getURI(), agentModel); } + + userAccount.addProperty(SIOC.ACCOUNT_OF, agent); + getAgentService().getGraphStoreClient().putModel(userAccountGraphUri.toString(), accountModel); + + // purge user account lookup from proxy cache + if (getAgentService().getBackendProxy() != null) ban(getAgentService().getBackendProxy(), jwt.getSubject()); } - - String path = getApplication().as(AdminApplication.class).getEndUserApplication().getBaseURI().getPath(); - NewCookie jwtCookie = new NewCookie(IDTokenFilter.COOKIE_NAME, idToken, path, null, NewCookie.DEFAULT_VERSION, null, NewCookie.DEFAULT_MAX_AGE, false); + URI originalReferer = URI.create(new String(Base64.getDecoder().decode(stateCookie.getValue())).split(Pattern.quote(";"))[1]); // fails if referer param was not specified - - return Response.seeOther(originalReferer). // redirect to where the user started authentication - cookie(jwtCookie). 
- build(); + + // Pass ID token in URL fragment for client-side cookie setting (works uniformly across all domains) + URI redirectUri = URI.create(originalReferer + "#id_token=" + idToken); + return Response.seeOther(redirectUri).build(); } } - + /** - * Verifies decoded JWT token. - * - * @param jwt decoded JWT token - * @return true if verified + * Checks if a UserAccount with the given subject ID and issuer already exists. + * + * @param subjectId the OAuth subject ID (e.g., ORCID iD or Google user ID) + * @param issuer the OAuth issuer URI + * @return true if UserAccount exists, false otherwise */ - public boolean verify(DecodedJWT jwt) + protected boolean userAccountExists(String subjectId, String issuer) { -// Algorithm algorithm = Algorithm.RSA256(null); -// JWTVerifier verifier = JWT.require(algorithm). -// withIssuer("auth0"). -// build(); -// DecodedJWT jwt = verifier.verify(idToken); - return true; // TO-DO: complete - //throw new JWTVerificationException(); + ParameterizedSparqlString pss = new ParameterizedSparqlString(getUserAccountQuery().toString()); + pss.setLiteral(SIOC.ID.getLocalName(), subjectId); + pss.setLiteral(LACL.issuer.getLocalName(), issuer); + + return !getAgentService().getSPARQLClient().loadModel(pss.asQuery()).isEmpty(); + } + + /** + * Finds an existing agent by email address. + * Queries the agent store for an agent with the specified foaf:mbox property. 
+ * + * @param mbox the email address as a mailto: URI resource + * @return Optional containing the QuerySolution with ?Agent and ?agentGraph bindings if found, empty otherwise + */ + protected Optional findAgentByEmail(Resource mbox) + { + if (mbox == null) return Optional.empty(); + + ParameterizedSparqlString pss = new ParameterizedSparqlString(getAgentQuery().toString()); + pss.setParam(FOAF.mbox.getLocalName(), mbox); + + ResultSet rs = getAgentService().getSPARQLClient().select(pss.asQuery()); + try + { + if (!rs.hasNext()) return Optional.empty(); + return Optional.of(rs.next()); + } + finally + { + rs.close(); + } + } + + /** + * Verifies the decoded JWT ID token using JWKS-based signature verification. + * + * @param jwt decoded JWT ID token to verify + * @return true if verification succeeds, false otherwise + * @see com.atomgraph.linkeddatahub.server.util.JWTVerifier#verify + */ + protected boolean verify(DecodedJWT jwt) + { + return com.atomgraph.linkeddatahub.server.util.JWTVerifier.verify( + jwt, + getJWKSEndpoint(), + getIssuers(), + getClientID(), + getSystem().getClient(), + getSystem().getJWKSCache() + ); } /** @@ -344,13 +371,14 @@ public boolean verify(DecodedJWT jwt) * @param model RDF model * @param graphURI graph URI * @param container container resource + * @param name name * @param givenName given name * @param familyName family name * @param email email address * @param imgUrl image URL * @return agent resource */ - public Resource createAgent(Model model, URI graphURI, Resource container, String givenName, String familyName, String email, String imgUrl) + public Resource createAgent(Model model, URI graphURI, Resource container, Optional name, Optional givenName, Optional familyName, Optional email, Optional imgUrl) { Resource item = model.createResource(graphURI.toString()). addProperty(RDF.type, DH.Item). 
@@ -358,11 +386,13 @@ public Resource createAgent(Model model, URI graphURI, Resource container, Strin addLiteral(DH.slug, UUID.randomUUID().toString()); Resource agent = model.createResource(). - addProperty(RDF.type, FOAF.Agent). - addLiteral(FOAF.givenName, givenName). - addLiteral(FOAF.familyName, familyName). - addProperty(FOAF.mbox, model.createResource("mailto:" + email)); - if (imgUrl != null) agent.addProperty(FOAF.img, model.createResource(imgUrl)); + addProperty(RDF.type, FOAF.Agent); + + if (name.isPresent()) agent.addLiteral(FOAF.name, name.get()); + if (givenName.isPresent()) agent.addLiteral(FOAF.givenName, givenName.get()); + if (familyName.isPresent()) agent.addLiteral(FOAF.familyName, familyName.get()); + if (email.isPresent()) agent.addProperty(FOAF.mbox, model.createResource("mailto:" + email.get())); + if (imgUrl.isPresent()) agent.addProperty(FOAF.img, model.createResource(imgUrl.get())); item.addProperty(FOAF.primaryTopic, agent); @@ -371,33 +401,34 @@ public Resource createAgent(Model model, URI graphURI, Resource container, Strin /** * Creates new user account resource. - * + * * @param model RDF model * @param graphURI graph URI * @param container container resource * @param id user ID * @param issuer OIDC issuer - * @param name username - * @param email email address + * @param name optional username + * @param email optional email address * @return user account resource */ - public Resource createUserAccount(Model model, URI graphURI, Resource container, String id, String issuer, String name, String email) + public Resource createUserAccount(Model model, URI graphURI, Resource container, String id, String issuer, Optional name, Optional email) { Resource item = model.createResource(graphURI.toString()). addProperty(RDF.type, DH.Item). addProperty(SIOC.HAS_CONTAINER, container). addLiteral(DH.slug, UUID.randomUUID().toString()); - + Resource account = model.createResource(). addLiteral(DCTerms.created, GregorianCalendar.getInstance()). 
addProperty(RDF.type, SIOC.USER_ACCOUNT). addLiteral(SIOC.ID, id). - addLiteral(LACL.issuer, issuer). - addLiteral(SIOC.NAME, name). - addProperty(SIOC.EMAIL, model.createResource("mailto:" + email)); - + addLiteral(LACL.issuer, issuer); + + if (name.isPresent()) account.addLiteral(SIOC.NAME, name.get()); + if (email.isPresent()) account.addProperty(SIOC.EMAIL, model.createResource("mailto:" + email.get())); + item.addProperty(FOAF.primaryTopic, account); - + return account; } @@ -481,10 +512,42 @@ public Response ban(Resource proxy, String url) if (url == null) throw new IllegalArgumentException("Resource cannot be null"); return getSystem().getClient().target(proxy.getURI()).request(). - header(BackendInvalidationFilter.HEADER_NAME, UriComponent.encode(url, UriComponent.Type.UNRESERVED)). // the value has to be URL-encoded in order to match request URLs in Varnish + header(CacheInvalidationFilter.HEADER_NAME, UriComponent.encode(url, UriComponent.Type.UNRESERVED)). // the value has to be URL-encoded in order to match request URLs in Varnish method("BAN", Response.class); } + /** + * Retrieves additional user information from the OAuth provider. + * Some providers (like Google) include all user data in the ID token JWT claims. + * Others (like ORCID) require a separate UserInfo endpoint call. + * + * @param jwt the decoded JWT ID token + * @param accessToken the OAuth access token + * @return map of user information claims (email, name, given_name, family_name, picture, etc.) + */ + protected abstract Map getUserInfo(DecodedJWT jwt, String accessToken); + + /** + * Returns the JWKS (JSON Web Key Set) endpoint URL for retrieving public keys to verify JWT signatures. + * + * @return JWKS endpoint URI + */ + protected abstract URI getJWKSEndpoint(); + + /** + * Returns the list of valid JWT issuers for this OAuth provider. 
+ * + * @return list of valid issuer URLs + */ + protected abstract java.util.List getIssuers(); + + /** + * Returns the OAuth token endpoint URL for this provider. + * + * @return token endpoint URI + */ + public abstract URI getTokenEndpoint(); + /** * Returns the end-user application of the current dataspace. * @@ -498,6 +561,49 @@ public EndUserApplication getEndUserApplication() return getApplication().as(AdminApplication.class).getEndUserApplication(); } + /** + * Returns the admin application of the current dataspace. + * + * @return admin application resource + */ + public AdminApplication getAdminApplication() + { + if (getApplication().canAs(AdminApplication.class)) + return getApplication().as(AdminApplication.class); + else + return getApplication().as(EndUserApplication.class).getAdminApplication(); + } + + /** + * Returns URI information for the current request. + * + * @return URI info + */ + public UriInfo getUriInfo() + { + return uriInfo; + } + + /** + * Returns the system application. + * + * @return JAX-RS application + */ + public com.atomgraph.linkeddatahub.Application getSystem() + { + return system; + } + + /** + * Returns the current application. + * + * @return application resource + */ + public Application getApplication() + { + return application; + } + /** * Returns HTTP headers of the current request. 
* @@ -515,7 +621,7 @@ public HttpHeaders getHttpHeaders() */ public Service getAgentService() { - return getApplication().getService(); + return getApplication().as(EndUserApplication.class).getAdminApplication().getService(); } /** @@ -563,7 +669,7 @@ public Query getAgentQuery() * * @return client ID */ - private String getClientID() + protected String getClientID() { return clientID; } @@ -573,7 +679,7 @@ private String getClientID() * * @return client secret */ - private String getClientSecret() + protected String getClientSecret() { return clientSecret; } diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/google/Authorize.java b/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/google/Authorize.java new file mode 100644 index 000000000..5d3aecc51 --- /dev/null +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/google/Authorize.java @@ -0,0 +1,75 @@ +/** + * Copyright 2019 Martynas Jusevičius + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.atomgraph.linkeddatahub.resource.oauth2.google; + +import com.atomgraph.linkeddatahub.resource.oauth2.AuthorizeBase; +import com.atomgraph.linkeddatahub.vocabulary.Google; +import java.net.URI; +import jakarta.inject.Inject; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.UriBuilder; + +/** + * JAX-RS resource that handles Google authorization requests. 
+ * + * @author Martynas Jusevičius {@literal } + */ +@Path("oauth2/authorize/google") +public class Authorize extends AuthorizeBase +{ + /** + * Constructs resource from current request info. + * + * @param httpServletRequest servlet request + * @param application application + * @param system JAX-RS application + */ + @Inject + public Authorize(@Context HttpServletRequest httpServletRequest, com.atomgraph.linkeddatahub.apps.model.Application application, com.atomgraph.linkeddatahub.Application system) + { + super(httpServletRequest, application, system, (String)system.getProperty(Google.clientID.getURI())); + } + + @Override + protected URI getAuthorizeEndpoint() + { + return URI.create("https://accounts.google.com/o/oauth2/v2/auth"); + } + + @Override + protected String getScope() + { + return "openid email profile"; + } + + @Override + protected Class getLoginClass() + { + return Login.class; + } + + @Override + public UriBuilder getAuthorizeUriBuilder(URI endpoint, String clientID, String redirectURI, String scope, String stateValue, String nonce) + { + // Google requires access_type=offline for refresh tokens + return super.getAuthorizeUriBuilder(endpoint, clientID, redirectURI, scope, stateValue, nonce). + queryParam("access_type", "offline"); + } + +} diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/google/Login.java b/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/google/Login.java new file mode 100644 index 000000000..cf497e9a5 --- /dev/null +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/google/Login.java @@ -0,0 +1,127 @@ +/** + * Copyright 2019 Martynas Jusevičius + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.atomgraph.linkeddatahub.resource.oauth2.google; + +import com.atomgraph.linkeddatahub.resource.oauth2.LoginBase; +import com.atomgraph.linkeddatahub.vocabulary.Google; +import com.auth0.jwt.interfaces.DecodedJWT; +import java.net.URI; +import java.util.Map; +import jakarta.inject.Inject; +import jakarta.servlet.ServletConfig; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.Request; +import jakarta.ws.rs.core.UriInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * JAX-RS resource that handles OAuth2 login. + * + * @author Martynas Jusevičius {@literal } + */ +@Path("oauth2/login/google") +public class Login extends LoginBase +{ + + private static final Logger log = LoggerFactory.getLogger(Login.class); + + /** OAuth token endpoint URL */ + public static final URI TOKEN_ENDPOINT = URI.create("https://oauth2.googleapis.com/token"); + + /** JWKS endpoint URL for JWT signature verification */ + public static final URI JWKS_ENDPOINT = URI.create("https://www.googleapis.com/oauth2/v3/certs"); + + /** Valid Google issuers */ + private static final java.util.List ISSUERS = java.util.Arrays.asList("https://accounts.google.com", "accounts.google.com"); + + /** + * Constructs endpoint. 
+ * + * @param request current request + * @param uriInfo URI information of the current request + * @param httpHeaders HTTP headers + * @param application current application + * @param system system application + * @param servletConfig servlet config + */ + @Inject + public Login(@Context Request request, @Context UriInfo uriInfo, @Context HttpHeaders httpHeaders, + com.atomgraph.linkeddatahub.apps.model.Application application, + com.atomgraph.linkeddatahub.Application system, @Context ServletConfig servletConfig) + { + super(request, uriInfo, httpHeaders, application, system, servletConfig, + (String)system.getProperty(Google.clientID.getURI()), (String)system.getProperty(Google.clientSecret.getURI())); + } + + /** + * Returns Google's OAuth token endpoint URL. + * + * @return Google token endpoint URI + */ + @Override + public URI getTokenEndpoint() + { + return TOKEN_ENDPOINT; + } + + /** + * Returns Google's JWKS endpoint URL for fetching public keys. + * + * @return Google JWKS endpoint URI + */ + @Override + protected URI getJWKSEndpoint() + { + return JWKS_ENDPOINT; + } + + /** + * Returns the list of valid Google issuers. + * + * @return list of valid issuer URLs + */ + @Override + protected java.util.List getIssuers() + { + return ISSUERS; + } + + /** + * Retrieves user information from Google ID token JWT claims. + * Google includes all user data (email, name, given_name, family_name, picture) directly in the ID token, + * so no additional API call is needed. + * + * @param jwt the decoded JWT ID token + * @param accessToken the OAuth access token (not used for Google) + * @return map of user information claims + */ + @Override + protected Map getUserInfo(DecodedJWT jwt, String accessToken) + { + // Google includes all user information in the ID token JWT claims + return jwt.getClaims().entrySet().stream(). + filter(e -> e.getValue().asString() != null). 
+ collect(java.util.stream.Collectors.toMap( + java.util.Map.Entry::getKey, + e -> e.getValue().asString() + )); + } + +} diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/orcid/Authorize.java b/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/orcid/Authorize.java new file mode 100644 index 000000000..838ec8240 --- /dev/null +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/orcid/Authorize.java @@ -0,0 +1,66 @@ +/** + * Copyright 2019 Martynas Jusevičius + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.atomgraph.linkeddatahub.resource.oauth2.orcid; + +import com.atomgraph.linkeddatahub.resource.oauth2.AuthorizeBase; +import com.atomgraph.linkeddatahub.vocabulary.ORCID; +import java.net.URI; +import jakarta.inject.Inject; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.Context; + +/** + * JAX-RS resource that handles ORCID authorization requests. + * + * @author Martynas Jusevičius {@literal } + */ +@Path("oauth2/authorize/orcid") +public class Authorize extends AuthorizeBase +{ + /** + * Constructs resource from current request info. 
+ * + * @param httpServletRequest servlet request + * @param application application + * @param system JAX-RS application + */ + @Inject + public Authorize(@Context HttpServletRequest httpServletRequest, com.atomgraph.linkeddatahub.apps.model.Application application, com.atomgraph.linkeddatahub.Application system) + { + super(httpServletRequest, application, system, (String)system.getProperty(ORCID.clientID.getURI())); + } + + @Override + protected URI getAuthorizeEndpoint() + { + return URI.create("https://sandbox.orcid.org/oauth/authorize"); // "https://orcid.org/oauth/authorize" + } + + @Override + protected String getScope() + { + return "openid"; + } + + @Override + protected Class getLoginClass() + { + return Login.class; + } + +} diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/orcid/Login.java b/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/orcid/Login.java new file mode 100644 index 000000000..d41d92681 --- /dev/null +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/oauth2/orcid/Login.java @@ -0,0 +1,145 @@ +/** + * Copyright 2025 Martynas Jusevičius + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +package com.atomgraph.linkeddatahub.resource.oauth2.orcid; + +import com.atomgraph.linkeddatahub.resource.oauth2.LoginBase; +import com.atomgraph.linkeddatahub.vocabulary.ORCID; +import com.auth0.jwt.interfaces.DecodedJWT; +import java.net.URI; +import java.util.HashMap; +import java.util.Map; +import jakarta.inject.Inject; +import jakarta.json.JsonObject; +import jakarta.servlet.ServletConfig; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.Request; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.UriInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * JAX-RS resource that handles ORCID OAuth2 login. + * + * @author Martynas Jusevičius {@literal } + */ +@Path("oauth2/login/orcid") +public class Login extends LoginBase +{ + + private static final Logger log = LoggerFactory.getLogger(Login.class); + + /** OAuth token endpoint URL */ + public static final URI TOKEN_ENDPOINT = URI.create("https://sandbox.orcid.org/oauth/token"); // URI.create("https://orcid.org/oauth/token"); + + /** User info endpoint URL */ + public static final URI USER_INFO_ENDPOINT = URI.create("https://sandbox.orcid.org/oauth/userinfo"); // URI.create("https://orcid.org/oauth/userinfo"); + + /** JWKS endpoint URL for JWT signature verification */ + public static final URI JWKS_ENDPOINT = URI.create("https://sandbox.orcid.org/oauth/jwks"); // URI.create("https://orcid.org/oauth/jwks"); + + /** Valid ORCID issuers (supports both production and sandbox) */ + private static final java.util.List ISSUERS = java.util.Arrays.asList("https://orcid.org", "https://sandbox.orcid.org"); + + /** + * Constructs endpoint. 
+ * + * @param request current request + * @param uriInfo URI information of the current request + * @param httpHeaders HTTP headers + * @param application current application + * @param system system application + * @param servletConfig servlet config + */ + @Inject + public Login(@Context Request request, @Context UriInfo uriInfo, @Context HttpHeaders httpHeaders, + com.atomgraph.linkeddatahub.apps.model.Application application, + com.atomgraph.linkeddatahub.Application system, @Context ServletConfig servletConfig) + { + super(request, uriInfo, httpHeaders, application, system, servletConfig, + (String)system.getProperty(ORCID.clientID.getURI()), (String)system.getProperty(ORCID.clientSecret.getURI())); + } + + /** + * Returns ORCID's OAuth token endpoint URL. + * + * @return ORCID token endpoint URI + */ + @Override + public URI getTokenEndpoint() + { + return TOKEN_ENDPOINT; + } + + /** + * Returns ORCID's JWKS endpoint URL for fetching public keys. + * + * @return ORCID JWKS endpoint URI + */ + @Override + protected URI getJWKSEndpoint() + { + return JWKS_ENDPOINT; + } + + /** + * Returns the list of valid ORCID issuers. + * + * @return list of valid issuer URLs + */ + @Override + protected java.util.List getIssuers() + { + return ISSUERS; + } + + /** + * Retrieves user information from ORCID UserInfo endpoint. + * ORCID requires a separate API call to the UserInfo endpoint to get user details, + * as they are not included in the ID token JWT claims. 
+ * + * @param jwt the decoded JWT ID token (not used for user info retrieval) + * @param accessToken the OAuth access token used to authenticate the UserInfo endpoint call + * @return map of user information from the UserInfo endpoint (email, name, given_name, family_name) + */ + @Override + protected Map getUserInfo(DecodedJWT jwt, String accessToken) + { + // ORCID requires a separate UserInfo endpoint call to get user details + try (Response userInfoResponse = getSystem().getClient().target(USER_INFO_ENDPOINT). + request(). + header(HttpHeaders.AUTHORIZATION, "Bearer " + accessToken). + get()) + { + JsonObject json = userInfoResponse.readEntity(JsonObject.class); + Map userInfo = new HashMap<>(); + json.forEach((key, value) -> { + switch (value.getValueType()) + { + case STRING -> userInfo.put(key, json.getString(key)); + case NUMBER, TRUE, FALSE -> userInfo.put(key, value.toString()); + // Skip NULL, ARRAY, OBJECT + } + }); + + return userInfo; + } + } + +} diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java b/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java index 3b8101dda..41876ac2d 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/upload/Item.java @@ -169,7 +169,8 @@ public ResponseBuilder getResponseBuilder(Model model, URI graphUri) lastModified(getLastModified(file)). header(HttpHeaders.CONTENT_LENGTH, rangeOutput.getLength()). // should override Transfer-Encoding: chunked header(ACCEPT_RANGES, BYTES_RANGE). - header(CONTENT_RANGE, contentRangeValue); + header(CONTENT_RANGE, contentRangeValue). + header("Content-Security-Policy", "default-src 'none'; sandbox"); // LNK-011 fix: prevent XSS in uploaded HTML files } } @@ -178,7 +179,8 @@ public ResponseBuilder getResponseBuilder(Model model, URI graphUri) type(variant.getMediaType()). lastModified(getLastModified(file)). header(HttpHeaders.CONTENT_LENGTH, file.length()). 
// should override Transfer-Encoding: chunked - header(ACCEPT_RANGES, BYTES_RANGE); + header(ACCEPT_RANGES, BYTES_RANGE). + header("Content-Security-Policy", "default-src 'none'; sandbox"); // LNK-011 fix: prevent XSS in uploaded HTML files //header("Content-Disposition", "attachment; filename=\"" + getRequiredProperty(NFO.fileName).getString() + "\""). } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/exception/auth/AuthorizationException.java b/src/main/java/com/atomgraph/linkeddatahub/server/exception/auth/AuthorizationException.java index 9cc99c08d..a72f060f1 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/exception/auth/AuthorizationException.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/exception/auth/AuthorizationException.java @@ -17,6 +17,7 @@ package com.atomgraph.linkeddatahub.server.exception.auth; import com.atomgraph.linkeddatahub.model.auth.Agent; +import jakarta.ws.rs.ForbiddenException; import org.apache.jena.rdf.model.Resource; import java.net.URI; @@ -26,7 +27,7 @@ * * @author Martynas Jusevičius {@literal } */ -public class AuthorizationException extends RuntimeException +public class AuthorizationException extends ForbiddenException { /** URL of the current request (without the query string) */ diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/factory/ApplicationFactory.java b/src/main/java/com/atomgraph/linkeddatahub/server/factory/ApplicationFactory.java index cc9dd4fea..3f112a08e 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/factory/ApplicationFactory.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/factory/ApplicationFactory.java @@ -20,6 +20,7 @@ import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.ext.Provider; +import java.util.Optional; import org.glassfish.hk2.api.Factory; import org.glassfish.hk2.api.ServiceLocator; import org.slf4j.Logger; @@ -32,32 +33,32 @@ * @see 
com.atomgraph.linkeddatahub.server.model.impl.Dispatcher */ @Provider -public class ApplicationFactory implements Factory +public class ApplicationFactory implements Factory> { private static final Logger log = LoggerFactory.getLogger(ApplicationFactory.class); - + @Context private ServiceLocator serviceLocator; - + @Override - public com.atomgraph.linkeddatahub.apps.model.Application provide() + public Optional provide() { return getApplication(); } @Override - public void dispose(com.atomgraph.linkeddatahub.apps.model.Application t) + public void dispose(Optional t) { } /** * Retrieves application from the request context. - * - * @return application resource + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { - return (com.atomgraph.linkeddatahub.apps.model.Application)getContainerRequestContext().getProperty(LAPP.Application.getURI()); + return (Optional)getContainerRequestContext().getProperty(LAPP.Application.getURI()); } /** diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/factory/ServiceFactory.java b/src/main/java/com/atomgraph/linkeddatahub/server/factory/ServiceFactory.java index 52da8c31b..d3e510bb6 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/factory/ServiceFactory.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/factory/ServiceFactory.java @@ -54,13 +54,16 @@ public void dispose(Optional t) /** * Retrieves (optional) service from container request context. 
- * + * * @return optional service */ public Optional getService() { - Application app = (Application)getContainerRequestContext().getProperty(LAPP.Application.getURI()); - Service service = app.getService(); + Optional appOpt = (Optional)getContainerRequestContext().getProperty(LAPP.Application.getURI()); + + if (!appOpt.isPresent()) return Optional.empty(); + + Service service = appOpt.get().getService(); return Optional.of(service); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/factory/UnwrappedApplicationFactory.java b/src/main/java/com/atomgraph/linkeddatahub/server/factory/UnwrappedApplicationFactory.java new file mode 100644 index 000000000..179550fc7 --- /dev/null +++ b/src/main/java/com/atomgraph/linkeddatahub/server/factory/UnwrappedApplicationFactory.java @@ -0,0 +1,62 @@ +/** + * Copyright 2025 Martynas Jusevičius + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.atomgraph.linkeddatahub.server.factory; + +import com.atomgraph.linkeddatahub.apps.model.Application; +import jakarta.inject.Inject; +import jakarta.ws.rs.ext.Provider; +import java.util.Optional; +import org.glassfish.hk2.api.Factory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * JAX-RS factory that unwraps Optional<Application> for direct injection. + * This allows resource constructors to inject Application directly while + * filters and providers can inject Optional<Application>. 
+ * + * @author Martynas Jusevičius {@literal } + * @see ApplicationFactory + */ +@Provider +public class UnwrappedApplicationFactory implements Factory +{ + + private static final Logger log = LoggerFactory.getLogger(UnwrappedApplicationFactory.class); + + @Inject jakarta.inject.Provider> optionalApp; + + @Override + public Application provide() + { + Optional appOpt = optionalApp.get(); + + if (!appOpt.isPresent()) + { + if (log.isErrorEnabled()) log.error("Application not present when unwrapping in UnwrappedApplicationFactory"); + return null; // This should only happen if ApplicationFilter threw NotFoundException + } + + return appOpt.get(); + } + + @Override + public void dispose(Application t) + { + } + +} diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/ApplicationFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/ApplicationFilter.java index 8bd3f2737..358e1491d 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/ApplicationFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/ApplicationFilter.java @@ -27,6 +27,7 @@ import jakarta.annotation.Priority; import jakarta.inject.Inject; import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.NotFoundException; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.container.ContainerRequestFilter; import jakarta.ws.rs.container.PreMatching; @@ -58,9 +59,23 @@ public class ApplicationFilter implements ContainerRequestFilter @Override public void filter(ContainerRequestContext request) throws IOException { + // used by ModeFactory and ModelXSLTWriterBase - set early so it's available even if app matching fails + if (request.getUriInfo().getQueryParameters().containsKey(AC.mode.getLocalName())) + { + List modeUris = request.getUriInfo().getQueryParameters().get(AC.mode.getLocalName()); + List modes = modeUris.stream().map(Mode::new).collect(Collectors.toList()); + 
request.setProperty(AC.mode.getURI(), modes); + } + else request.setProperty(AC.mode.getURI(), Collections.emptyList()); + // there always have to be an app - Resource appResource = getSystem().matchApp(LAPP.Application, request.getUriInfo().getAbsolutePath()); - if (appResource == null) throw new IllegalStateException("Request URI '" + request.getUriInfo().getAbsolutePath() + "' has not matched any lapp:Application"); + Resource appResource = getSystem().matchApp(request.getUriInfo().getAbsolutePath()); + if (appResource == null) + { + // Set empty Optional so response filters can safely check + request.setProperty(LAPP.Application.getURI(), Optional.empty()); + throw new NotFoundException("Request URI '" + request.getUriInfo().getAbsolutePath() + "' has not matched any lapp:Application"); + } // instead of InfModel, do faster explicit checks for subclasses and add rdf:type if (!appResource.canAs(com.atomgraph.linkeddatahub.apps.model.Application.class) && @@ -69,7 +84,7 @@ public void filter(ContainerRequestContext request) throws IOException throw new IllegalStateException("Resource <" + appResource + "> cannot be cast to lapp:Application"); com.atomgraph.linkeddatahub.apps.model.Application app = appResource.as(com.atomgraph.linkeddatahub.apps.model.Application.class); - request.setProperty(LAPP.Application.getURI(), app); // wrap into a helper class so it doesn't interfere with injection of Application + request.setProperty(LAPP.Application.getURI(), Optional.of(app)); // wrap in Optional so response filters can handle missing applications // use the ?uri URL parameter to override the effective request URI if its URI value is relative to the app's base URI final URI requestURI; @@ -107,15 +122,6 @@ public void filter(ContainerRequestContext request) throws IOException if (request.getUriInfo().getQueryParameters().containsKey(AC.accept.getLocalName())) request.getHeaders().putSingle(HttpHeaders.ACCEPT, 
request.getUriInfo().getQueryParameters().getFirst(AC.accept.getLocalName())); - // used by ModeFactory and ModelXSLTWriterBase - if (request.getUriInfo().getQueryParameters().containsKey(AC.mode.getLocalName())) - { - List modeUris = request.getUriInfo().getQueryParameters().get(AC.mode.getLocalName()); - List modes = modeUris.stream().map(Mode::new).collect(Collectors.toList()); - request.setProperty(AC.mode.getURI(), modes); - } - else request.setProperty(AC.mode.getURI(), Collections.emptyList()); - // TO-DO: move Dataset logic to a separate ContainerRequestFilter? Resource datasetResource = getSystem().matchDataset(LAPP.Dataset, request.getUriInfo().getAbsolutePath()); if (datasetResource != null) diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthenticationFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthenticationFilter.java index eeac51513..a556d2a07 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthenticationFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthenticationFilter.java @@ -54,7 +54,7 @@ public abstract class AuthenticationFilter implements ContainerRequestFilter public static final String ON_BEHALF_OF = "On-Behalf-Of"; @Inject com.atomgraph.linkeddatahub.Application system; - @Inject jakarta.inject.Provider app; + @Inject jakarta.inject.Provider> app; @Inject jakarta.inject.Provider> dataset; /** @@ -111,14 +111,14 @@ public void filter(ContainerRequestContext request) throws IOException /** * Returns the SPARQL service for agent data. - * + * * @return service resource */ protected Service getAgentService() { - return getApplication().canAs(EndUserApplication.class) ? - getApplication().as(EndUserApplication.class).getAdminApplication().getService() : - getApplication().getService(); + return getApplication().get().canAs(EndUserApplication.class) ? 
+ getApplication().get().as(EndUserApplication.class).getAdminApplication().getService() : + getApplication().get().getService(); } /** @@ -183,10 +183,10 @@ protected Resource getResourceByPropertyValue(Model model, Property property, RD /** * Returns currently matched application. - * - * @return application resource + * + * @return optional application resource */ - public Application getApplication() + public Optional getApplication() { return app.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthorizationFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthorizationFilter.java index 430887a45..2ddbda545 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthorizationFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/AuthorizationFilter.java @@ -83,7 +83,7 @@ public class AuthorizationFilter implements ContainerRequestFilter ); @Inject com.atomgraph.linkeddatahub.Application system; - @Inject jakarta.inject.Provider app; + @Inject jakarta.inject.Provider> app; @Inject jakarta.inject.Provider> dataset; private ParameterizedSparqlString documentTypeQuery, documentOwnerQuery, aclQuery, ownerAclQuery; @@ -120,8 +120,8 @@ public void filter(ContainerRequestContext request) throws IOException if (log.isWarnEnabled()) log.warn("Skipping authentication/authorization, request method not recognized: {}", request.getMethod()); return; } - - if (getApplication().isReadAllowed()) + + if (getApplication().isPresent() && getApplication().get().isReadAllowed()) { if (request.getMethod().equals(HttpMethod.GET) || request.getMethod().equals(HttpMethod.HEAD)) // allow read-only methods { @@ -169,7 +169,7 @@ public Model authorize(ContainerRequestContext request, Resource agent, Resource createOwnerAuthorization(authorizations, accessTo, agent); } - ResultSetRewindable docTypesResult = loadResultSet(getApplication().getService(), getDocumentTypeQuery(), 
thisQsm); + ResultSetRewindable docTypesResult = loadResultSet(getApplication().get().getService(), getDocumentTypeQuery(), thisQsm); try { if (!docTypesResult.hasNext()) // if the document resource has no types, we assume the document does not exist @@ -185,7 +185,7 @@ public Model authorize(ContainerRequestContext request, Resource agent, Resource thisQsm.add(SPIN.THIS_VAR_NAME, accessTo); docTypesResult.close(); - docTypesResult = loadResultSet(getApplication().getService(), getDocumentTypeQuery(), thisQsm); + docTypesResult = loadResultSet(getApplication().get().getService(), getDocumentTypeQuery(), thisQsm); Set parentTypes = new HashSet<>(); docTypesResult.forEachRemaining(qs -> parentTypes.add(qs.getResource("Type"))); @@ -205,13 +205,13 @@ public Model authorize(ContainerRequestContext request, Resource agent, Resource else return null; } - ParameterizedSparqlString pss = getApplication().canAs(EndUserApplication.class) ? getACLQuery() : getOwnerACLQuery(); + ParameterizedSparqlString pss = getApplication().get().canAs(EndUserApplication.class) ? 
getACLQuery() : getOwnerACLQuery(); Query query = new SetResultSetValues().apply(pss.asQuery(), docTypesResult); pss = new ParameterizedSparqlString(query.toString()); // make sure VALUES are now part of the query string assert pss.toString().contains("VALUES"); // note we're not setting the $mode value on the ACL queries as we want to provide the AuthorizationContext with all of the agent's authorizations - authorizations.add(loadModel(getAdminService(), pss, new AuthorizationParams(getApplication().getBase(), accessTo, agent).get())); + authorizations.add(loadModel(getAdminService(), pss, new AuthorizationParams(getAdminBase(), accessTo, agent).get())); // access denied if the agent has no authorization to the requested document with the requested ACL mode if (getAuthorizationByMode(authorizations, accessMode) == null) return null; @@ -256,7 +256,7 @@ protected boolean isOwner(Resource accessTo, Resource agent) ParameterizedSparqlString pss = getDocumentOwnerQuery(); pss.setParams(qsm); - ResultSetRewindable ownerResult = loadResultSet(getApplication().getService(), getDocumentOwnerQuery(), qsm); // could use ASK query in principle + ResultSetRewindable ownerResult = loadResultSet(getApplication().get().getService(), getDocumentOwnerQuery(), qsm); // could use ASK query in principle try { return ownerResult.hasNext() && agent.equals(ownerResult.next().getResource("owner")); @@ -356,22 +356,35 @@ public Resource createOwnerAuthorization(Model model, Resource accessTo, Resourc /** * Returns the SPARQL service for agent data. - * + * * @return service resource */ protected Service getAdminService() { - return getApplication().canAs(EndUserApplication.class) ? - getApplication().as(EndUserApplication.class).getAdminApplication().getService() : - getApplication().getService(); + return getApplication().get().canAs(EndUserApplication.class) ? 
+ getApplication().get().as(EndUserApplication.class).getAdminApplication().getService() : + getApplication().get().getService(); + } + + /** + * Returns the base URI of the admin application. + * Authorization data is always stored in the admin application's dataspace. + * + * @return admin application's base URI + */ + protected Resource getAdminBase() + { + return getApplication().get().canAs(EndUserApplication.class) ? + getApplication().get().as(EndUserApplication.class).getAdminApplication().getBase() : + getApplication().get().getBase(); } /** * Returns currently matched application. - * - * @return application resource + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { return app.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/OntologyFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/OntologyFilter.java index cf002de2a..c996d5214 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/OntologyFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/OntologyFilter.java @@ -65,17 +65,19 @@ public void filter(ContainerRequestContext crc) throws IOException /** * Retrieves (optional) ontology from the container request context. 
- * + * * @param crc request context * @return optional ontology */ public Optional getOntology(ContainerRequestContext crc) { - Application app = getApplication(crc); - + Optional appOpt = getApplication(crc); + + if (!appOpt.isPresent()) return Optional.empty(); + try { - return Optional.ofNullable(getOntology(app)); + return Optional.ofNullable(getOntology(appOpt.get())); } catch (OntologyException ex) { @@ -115,8 +117,7 @@ public Ontology getOntology(Application app, String uri) // only create InfModel if ontology is not already cached if (!ontModelSpec.getDocumentManager().getFileManager().hasCachedModel(uri)) { - OntologyModelGetter modelGetter = new OntologyModelGetter(app.as(EndUserApplication.class), - ontModelSpec, getSystem().getOntologyQuery(), getSystem().getNoCertClient(), getSystem().getMediaTypes()); + OntologyModelGetter modelGetter = new OntologyModelGetter(app.as(EndUserApplication.class), ontModelSpec, getSystem().getOntologyQuery()); ontModelSpec.setImportModelGetter(modelGetter); if (log.isDebugEnabled()) log.debug("Started loading ontology with URI '{}' from the admin dataset", uri); Model baseModel = modelGetter.getModel(uri); @@ -185,13 +186,13 @@ public static void addDocumentModel(OntDocumentManager odm, String importURI) /** * Retrieves application from the container request context. 
- * + * * @param crc request context - * @return application resource + * @return optional application resource */ - public Application getApplication(ContainerRequestContext crc) + public Optional getApplication(ContainerRequestContext crc) { - return ((Application)crc.getProperty(LAPP.Application.getURI())); + return ((Optional)crc.getProperty(LAPP.Application.getURI())); } /** diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/IDTokenFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/IDTokenFilterBase.java similarity index 70% rename from src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/IDTokenFilter.java rename to src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/IDTokenFilterBase.java index 078c6f7dd..d3ed486c8 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/IDTokenFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/IDTokenFilterBase.java @@ -1,5 +1,5 @@ /** - * Copyright 2019 Martynas Jusevičius + * Copyright 2025 Martynas Jusevičius * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,14 +17,12 @@ package com.atomgraph.linkeddatahub.server.filter.request.auth; import com.atomgraph.linkeddatahub.apps.model.AdminApplication; -import com.atomgraph.linkeddatahub.server.filter.request.AuthenticationFilter; import com.atomgraph.linkeddatahub.apps.model.Application; import com.atomgraph.linkeddatahub.apps.model.EndUserApplication; import com.atomgraph.linkeddatahub.model.auth.Agent; -import static com.atomgraph.linkeddatahub.resource.admin.oauth2.Login.TOKEN_ENDPOINT; +import com.atomgraph.linkeddatahub.server.filter.request.AuthenticationFilter; import com.atomgraph.linkeddatahub.server.security.IDTokenSecurityContext; import com.atomgraph.linkeddatahub.vocabulary.FOAF; -import com.atomgraph.linkeddatahub.vocabulary.Google; import com.atomgraph.linkeddatahub.vocabulary.LACL; import com.atomgraph.linkeddatahub.vocabulary.SIOC; import com.auth0.jwt.JWT; @@ -34,13 +32,13 @@ import java.net.URI; import java.time.Instant; import java.time.temporal.ChronoUnit; -import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.concurrent.TimeUnit; import jakarta.annotation.PostConstruct; import jakarta.annotation.Priority; import jakarta.json.JsonObject; +import jakarta.servlet.http.HttpServletRequest; import jakarta.ws.rs.InternalServerErrorException; import jakarta.ws.rs.NotAuthorizedException; import jakarta.ws.rs.Priorities; @@ -48,9 +46,9 @@ import jakarta.ws.rs.client.Entity; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.container.PreMatching; +import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.Cookie; import jakarta.ws.rs.core.Form; -import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.NewCookie; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.SecurityContext; @@ -65,25 +63,24 @@ import org.slf4j.LoggerFactory; /** - * Authentication filter that matches OIDC JWT tokens against application's user accounts. 
- * + * Abstract base class for OAuth 2.0 / OpenID Connect ID token authentication filters. + * * @author Martynas Jusevičius {@literal } */ @PreMatching @Priority(Priorities.USER + 10) // has to execute after WebIDFilter -public class IDTokenFilter extends AuthenticationFilter +public abstract class IDTokenFilterBase extends AuthenticationFilter { - - private static final Logger log = LoggerFactory.getLogger(IDTokenFilter.class); + private static final Logger log = LoggerFactory.getLogger(IDTokenFilterBase.class); /** ID of the JWT authentication scheme */ public static final String AUTH_SCHEME = "JWT"; - /** White-list of OIDC issuers */ - public static final List ISSUERS = Arrays.asList("https://accounts.google.com"); /** Name of the cookie that stores the ID token */ public static final String COOKIE_NAME = "LinkedDataHub.id_token"; + + @Context HttpServletRequest httpServletRequest; + private String clientID, clientSecret; - private ParameterizedSparqlString userAccountQuery; /** @@ -93,40 +90,100 @@ public class IDTokenFilter extends AuthenticationFilter public void init() { userAccountQuery = new ParameterizedSparqlString(getSystem().getUserAccountQuery().toString()); - clientID = (String)getSystem().getProperty(Google.clientID.getURI()); - clientSecret = (String)getSystem().getProperty(Google.clientSecret.getURI()); + initClientCredentials(); + } + + /** + * Initializes provider-specific client credentials. + * Subclasses should load their OAuth client ID and secret here. + */ + protected abstract void initClientCredentials(); + + /** + * Returns the list of trusted OIDC issuers for this provider. + * + * @return list of issuer URIs + */ + protected abstract List getIssuers(); + + /** + * Returns the JWKS endpoint URI for fetching public keys. + * + * @return JWKS endpoint URI + */ + protected abstract URI getJWKSEndpoint(); + + /** + * Verifies the validity of the specified JWT ID token using JWKS-based signature verification. 
+ * + * @param idToken ID token + * @return true if valid + * @see com.atomgraph.linkeddatahub.server.util.JWTVerifier#verify + */ + protected boolean verify(DecodedJWT idToken) + { + return com.atomgraph.linkeddatahub.server.util.JWTVerifier.verify( + idToken, + getJWKSEndpoint(), + getIssuers(), + getClientID(), + getSystem().getClient(), + getSystem().getJWKSCache() + ); } - + + /** + * Returns the OAuth token endpoint URI for token refresh. + * + * @return token endpoint URI + */ + protected abstract URI getTokenEndpoint(); + + /** + * Returns the URL of the OAuth login endpoint. + * + * @return endpoint URI + */ + protected abstract URI getLoginURL(); + + /** + * Returns the URL of the OAuth authorization endpoint. + * + * @return endpoint URI + */ + protected abstract URI getAuthorizeURL(); + @Override public String getScheme() { return AUTH_SCHEME; } - + @Override public void filter(ContainerRequestContext request) throws IOException { if (request.getSecurityContext().getUserPrincipal() != null) return; // skip filter if agent already authorized - if (!getApplication().canAs(EndUserApplication.class) && !getApplication().canAs(AdminApplication.class)) return; // skip "primitive" apps + if (!getApplication().isPresent()) return; // skip if no application matched + if (!getApplication().get().canAs(EndUserApplication.class) && !getApplication().get().canAs(AdminApplication.class)) return; // skip "primitive" apps // do not verify token for auth endpoints as that will lead to redirect loops if (request.getUriInfo().getAbsolutePath().equals(getLoginURL())) return; - if (request.getUriInfo().getAbsolutePath().equals(getAuthorizeGoogleURL())) return; - + if (request.getUriInfo().getAbsolutePath().equals(getAuthorizeURL())) return; + super.filter(request); } - + @Override public SecurityContext authenticate(ContainerRequestContext request) { ParameterizedSparqlString pss = getUserAccountQuery(); - + String jwtString = getJWTToken(request); if (jwtString == null) 
return null; - + DecodedJWT jwt = JWT.decode(jwtString); - if (!jwt.getAudience().contains(getClientID()) || !ISSUERS.contains(jwt.getIssuer())) return null; // in Google's JWT tokens, "aud" is the client ID - + if (!jwt.getAudience().contains(getClientID()) || !getIssuers().contains(jwt.getIssuer())) return null; + if (jwt.getExpiresAt().before(new Date())) { String refreshToken = getSystem().getRefreshToken(jwt.getSubject()); @@ -138,11 +195,11 @@ public SecurityContext authenticate(ContainerRequestContext request) else { if (log.isDebugEnabled()) log.debug("ID token for subject '{}' has expired at {}, refresh token not found", jwt.getSubject(), jwt.getExpiresAt()); - throw new TokenExpiredException("ID token for subject '" + jwt.getSubject() + "' has expired at " + jwt.getExpiresAt()); + throw new TokenExpiredException("ID token for subject '%s' has expired at %s".formatted(jwt.getSubject(), jwt.getExpiresAt())); } } if (!verify(jwt)) return null; - + String cacheKey = jwt.getIssuer() + jwt.getSubject(); final Model agentModel; Literal userId = ResourceFactory.createStringLiteral(jwt.getSubject()); @@ -155,25 +212,25 @@ public SecurityContext authenticate(ContainerRequestContext request) agentModel = loadModel(pss, qsm, getAgentService()); } - + Resource account = getResourceByPropertyValue(agentModel, SIOC.ID, userId); if (account == null) return null; // UserAccount not found // we add token value to the UserAccount. This will allow SecurityContext to carry the token as well as DataManager to delegate it. 
Resource agent = account.getRequiredProperty(SIOC.ACCOUNT_OF).getResource(); if (agent == null) throw new IllegalStateException("UserAccount is not attached to an agent (sioc:account_of property is missing)"); - + // calculate ID token expiration in seconds and use it in the cache long expiration = ChronoUnit.SECONDS.between(Instant.now(), jwt.getExpiresAt().toInstant()); getSystem().getOIDCModelCache().put(cacheKey, agentModel, expiration, TimeUnit.SECONDS); - + // imitate type inference, otherwise we'll get Jena's polymorphism exception return new IDTokenSecurityContext(getScheme(), agent.addProperty(RDF.type, FOAF.Agent).as(Agent.class), jwtString); } - + /** * Retrieves JWT token from the request context. - * + * * @param request request context * @return token content */ @@ -186,44 +243,11 @@ protected String getJWTToken(ContainerRequestContext request) return null; } - - /** - * Verifies the validity of the specified JWT ID token. - * - * @param idToken ID token - * @return true if valid - */ - protected boolean verify(DecodedJWT idToken) - { - // TO-DO: use keys, this is for debugging purposes only: https://developers.google.com/identity/protocols/oauth2/openid-connect#validatinganidtoken - try (Response cr = getSystem().getNoCertClient(). - target("https://oauth2.googleapis.com/tokeninfo"). - queryParam("id_token", idToken.getToken()). - request(MediaType.APPLICATION_JSON_TYPE). 
- get()) - { - if (!cr.getStatusInfo().getFamily().equals(Response.Status.Family.SUCCESSFUL)) - { - if (log.isDebugEnabled()) log.debug("Could not verify JWT token for subject '{}'", idToken.getSubject()); -// throw new JWTVerificationException("Could not verify JWT token for subject '" + idToken.getSubject() + "'"); - return false; - } - - JsonObject verifiedIdToken = cr.readEntity(JsonObject.class); - if (idToken.getIssuer().equals(verifiedIdToken.getString("iss")) && - idToken.getSubject().equals(verifiedIdToken.getString("sub")) && - idToken.getKeyId().equals(verifiedIdToken.getString("kid"))) - return true; - } -// throw new JWTVerificationException("Could not verify JWT token for subject '" + idToken.getSubject() + "'"); - return false; - } - @Override public void login(Application app, ContainerRequestContext request) { - Response response = Response.seeOther(getAuthorizeGoogleURL()).build(); + Response response = Response.seeOther(getAuthorizeURL()).build(); throw new WebApplicationException(response); } @@ -235,10 +259,14 @@ public void logout(Application app, ContainerRequestContext request) { // Chrome does not seem to store permanent cookies (with Expires) from Domain=localhost // https://stackoverflow.com/questions/7346919/chrome-localhost-cookie-not-being-set - NewCookie deleteCookie = new NewCookie(cookie.getName(), null, - app.getBase().getURI(), null, - NewCookie.DEFAULT_VERSION, null, NewCookie.DEFAULT_MAX_AGE, new Date(0), true, true); - + NewCookie deleteCookie = new NewCookie.Builder(cookie.getName()). + value(null). + path(app.getBase().getURI()). + expiry(new Date(0)). + secure(true). + httpOnly(true). + build(); + Response response = Response.seeOther(request.getUriInfo().getAbsolutePath()). cookie(deleteCookie). build(); @@ -248,7 +276,7 @@ public void logout(Application app, ContainerRequestContext request) /** * Gets new ID token using a refresh token. 
- * + * * @param refreshToken refresh token * @return ID token */ @@ -259,8 +287,8 @@ public DecodedJWT refreshIDToken(String refreshToken) param("client_id", getClientID()). param("client_secret", getClientSecret()). param("refresh_token", refreshToken); - - try (Response cr = getSystem().getClient().target(TOKEN_ENDPOINT). + + try (Response cr = getSystem().getClient().target(getTokenEndpoint()). request().post(Entity.form(form))) { JsonObject response = cr.readEntity(JsonObject.class); @@ -274,70 +302,89 @@ public DecodedJWT refreshIDToken(String refreshToken) return JWT.decode(idToken); } } - + /** - * Returns the URL of the OAuth login endpoint. - * - * @return endpoint URI - * @see com.atomgraph.linkeddatahub.resource.admin.oauth2.Login + * Returns the admin application of the current dataspace. + * + * @return admin application resource */ - public URI getLoginURL() + public AdminApplication getAdminApplication() { - return getAdminApplication().getBaseURI().resolve("oauth2/login"); // TO-DO: extract from Login class + if (getApplication().get().canAs(EndUserApplication.class)) + return getApplication().get().as(EndUserApplication.class).getAdminApplication(); + else + return getApplication().get().as(AdminApplication.class); } - + /** - * Returns the URL of the Google authorization endpoint. - * - * @return endpoint URI - * @see com.atomgraph.linkeddatahub.resource.admin.oauth2.google.Authorize + * Returns servlet request. + * + * @return servlet request */ - public URI getAuthorizeGoogleURL() + public HttpServletRequest getHttpServletRequest() { - return getAdminApplication().getBaseURI().resolve("oauth2/authorize/google"); // TO-DO: extract from ontology Template + return httpServletRequest; } - + /** - * Returns the admin application of the current dataspace. - * - * @return admin application resource + * Returns the base URI of this LinkedDataHub instance. + * It equals to the base URI of the root dataspace. 
+ * + * @return root context URI */ - public AdminApplication getAdminApplication() + public URI getContextURI() { - if (getApplication().canAs(EndUserApplication.class)) - return getApplication().as(EndUserApplication.class).getAdminApplication(); - else - return getApplication().as(AdminApplication.class); + return getSystem().getBaseURI(); } - + /** * Returns the user account lookup query. - * + * * @return SPARQL string */ public ParameterizedSparqlString getUserAccountQuery() { return userAccountQuery.copy(); } - + /** - * Returns the configured Google client ID for this application. - * + * Returns the configured OAuth client ID for this application. + * * @return client ID */ - private String getClientID() + protected String getClientID() { return clientID; } - + + /** + * Sets the OAuth client ID. + * + * @param clientID client ID + */ + protected void setClientID(String clientID) + { + this.clientID = clientID; + } + /** - * Returns the configured Google client secret for this application. - * + * Returns the configured OAuth client secret for this application. + * * @return client secret */ - private String getClientSecret() + protected String getClientSecret() { return clientSecret; } - + + /** + * Sets the OAuth client secret. 
+ * + * @param clientSecret client secret + */ + protected void setClientSecret(String clientSecret) + { + this.clientSecret = clientSecret; + } + } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/WebIDFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/WebIDFilter.java index 58955a73f..1fdd78a12 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/WebIDFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/WebIDFilter.java @@ -28,8 +28,10 @@ import com.atomgraph.linkeddatahub.vocabulary.ACL; import com.atomgraph.linkeddatahub.vocabulary.Cert; import com.atomgraph.linkeddatahub.vocabulary.FOAF; +import java.net.InetAddress; import java.net.URI; import java.net.URISyntaxException; +import java.net.UnknownHostException; import java.security.cert.CertificateException; import java.security.cert.CertificateParsingException; import java.security.cert.X509Certificate; @@ -39,6 +41,7 @@ import jakarta.annotation.PostConstruct; import jakarta.annotation.Priority; import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.BadRequestException; import jakarta.ws.rs.Priorities; import jakarta.ws.rs.ProcessingException; import jakarta.ws.rs.client.Client; @@ -126,7 +129,8 @@ public SecurityContext authenticate(ContainerRequestContext request) return null; } if (log.isTraceEnabled()) log.trace("Client WebID: {}", webID); - + + validateNotInternalURL(webID); // LNK-004: Prevent SSRF via WebID URI Resource agent = authenticate(loadWebID(webID), webID, publicKey); if (agent == null) { @@ -139,6 +143,7 @@ public SecurityContext authenticate(ContainerRequestContext request) if (onBehalfOf != null) { URI principalWebID = new URI(onBehalfOf); + validateNotInternalURL(principalWebID); // LNK-004: Prevent SSRF via On-Behalf-Of header Model principalWebIDModel = loadWebID(principalWebID); Resource principal = 
principalWebIDModel.createResource(onBehalfOf); // if we verify that the current agent is a secretary of the principal, that principal becomes current agent. Else throw error @@ -296,6 +301,7 @@ public Model loadWebIDFromURI(URI webID) if (certKeyRes != null && certKeyRes.isURIResource()) { URI certKey = URI.create(certKeyRes.getURI()); + validateNotInternalURL(certKey); // LNK-004: Prevent SSRF via cert:key reference in WebID document // remove fragment identifier to get document URI URI certKeyDoc = new URI(certKey.getScheme(), certKey.getSchemeSpecificPart(), null).normalize(); @@ -376,5 +382,41 @@ public void logout(Application app, ContainerRequestContext request) { throw new UnsupportedOperationException("Not supported yet."); // logout not really possible with HTTP certificates } - + + /** + * Validates that the given URI does not point to an internal/private network address. + * Prevents SSRF attacks by blocking access to RFC 1918 private addresses and link-local addresses. + * + * @param uri the URI to validate + * @throws IllegalArgumentException if URI or host is null + * @throws BadRequestException if the URI resolves to an internal address + * @see LNK-004: SSRF primitive via On-Behalf-Of header + */ + protected static void validateNotInternalURL(URI uri) + { + if (uri == null) throw new IllegalArgumentException("URI cannot be null"); + + String host = uri.getHost(); + if (host == null) throw new IllegalArgumentException("URI host cannot be null"); + + // Resolve hostname to IP and check if it's private/internal + try + { + InetAddress address = InetAddress.getByName(host); + + // Note: We don't block loopback addresses (127.0.0.1, localhost) because WebID documents + // may legitimately be hosted on the same server during development/testing + + if (address.isLinkLocalAddress()) + throw new BadRequestException("WebID URI cannot resolve to link-local addresses: " + address.getHostAddress()); + if (address.isSiteLocalAddress()) + throw new 
BadRequestException("WebID URI cannot resolve to private addresses (RFC 1918): " + address.getHostAddress()); + } + catch (UnknownHostException e) + { + if (log.isWarnEnabled()) log.warn("Could not resolve hostname for SSRF validation: {}", host); + // Allow request to proceed - will fail later with better error message + } + } + } \ No newline at end of file diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/google/IDTokenFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/google/IDTokenFilter.java new file mode 100644 index 000000000..863722806 --- /dev/null +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/google/IDTokenFilter.java @@ -0,0 +1,85 @@ +/** + * Copyright 2019 Martynas Jusevičius + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +package com.atomgraph.linkeddatahub.server.filter.request.auth.google; + +import com.atomgraph.linkeddatahub.resource.oauth2.google.Authorize; +import com.atomgraph.linkeddatahub.resource.oauth2.google.Login; +import static com.atomgraph.linkeddatahub.resource.oauth2.google.Login.TOKEN_ENDPOINT; +import com.atomgraph.linkeddatahub.server.filter.request.auth.IDTokenFilterBase; +import com.atomgraph.linkeddatahub.vocabulary.Google; +import java.net.URI; +import java.util.Arrays; +import java.util.List; +import jakarta.annotation.Priority; +import jakarta.ws.rs.Priorities; +import jakarta.ws.rs.container.PreMatching; +import jakarta.ws.rs.core.UriBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Google-specific authentication filter that matches OIDC JWT tokens against application's user accounts. + * + * @author Martynas Jusevičius {@literal } + */ +@PreMatching +@Priority(Priorities.USER + 10) // has to execute after WebIDFilter +public class IDTokenFilter extends IDTokenFilterBase +{ + private static final Logger log = LoggerFactory.getLogger(IDTokenFilter.class); + + /** White-list of OIDC issuers */ + private static final List ISSUERS = Arrays.asList("https://accounts.google.com", "accounts.google.com"); + + @Override + protected void initClientCredentials() + { + setClientID((String)getSystem().getProperty(Google.clientID.getURI())); + setClientSecret((String)getSystem().getProperty(Google.clientSecret.getURI())); + } + + @Override + protected List getIssuers() + { + return ISSUERS; + } + + @Override + protected URI getJWKSEndpoint() + { + return Login.JWKS_ENDPOINT; + } + + @Override + protected URI getTokenEndpoint() + { + return TOKEN_ENDPOINT; + } + + @Override + protected URI getLoginURL() + { + return UriBuilder.fromUri(getContextURI()).path(Login.class).build(); + } + + @Override + protected URI getAuthorizeURL() + { + return UriBuilder.fromUri(getContextURI()).path(Authorize.class).build(); + } + +} diff --git 
a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/orcid/IDTokenFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/orcid/IDTokenFilter.java new file mode 100644 index 000000000..a6a1d37e9 --- /dev/null +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/auth/orcid/IDTokenFilter.java @@ -0,0 +1,85 @@ +/** + * Copyright 2025 Martynas Jusevičius + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.atomgraph.linkeddatahub.server.filter.request.auth.orcid; + +import com.atomgraph.linkeddatahub.resource.oauth2.orcid.Authorize; +import com.atomgraph.linkeddatahub.resource.oauth2.orcid.Login; +import static com.atomgraph.linkeddatahub.resource.oauth2.orcid.Login.TOKEN_ENDPOINT; +import com.atomgraph.linkeddatahub.server.filter.request.auth.IDTokenFilterBase; +import com.atomgraph.linkeddatahub.vocabulary.ORCID; +import java.net.URI; +import java.util.Arrays; +import java.util.List; +import jakarta.annotation.Priority; +import jakarta.ws.rs.Priorities; +import jakarta.ws.rs.container.PreMatching; +import jakarta.ws.rs.core.UriBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * ORCID-specific authentication filter that matches OIDC JWT tokens against application's user accounts. 
+ * + * @author Martynas Jusevičius {@literal } + */ +@PreMatching +@Priority(Priorities.USER + 10) // has to execute after WebIDFilter +public class IDTokenFilter extends IDTokenFilterBase +{ + private static final Logger log = LoggerFactory.getLogger(IDTokenFilter.class); + + /** White-list of OIDC issuers (supports both production and sandbox) */ + private static final List ISSUERS = Arrays.asList("https://orcid.org", "https://sandbox.orcid.org"); + + @Override + protected void initClientCredentials() + { + setClientID((String)getSystem().getProperty(ORCID.clientID.getURI())); + setClientSecret((String)getSystem().getProperty(ORCID.clientSecret.getURI())); + } + + @Override + protected List getIssuers() + { + return ISSUERS; + } + + @Override + protected URI getJWKSEndpoint() + { + return Login.JWKS_ENDPOINT; + } + + @Override + protected URI getTokenEndpoint() + { + return TOKEN_ENDPOINT; + } + + @Override + protected URI getLoginURL() + { + return UriBuilder.fromUri(getContextURI()).path(Login.class).build(); + } + + @Override + protected URI getAuthorizeURL() + { + return UriBuilder.fromUri(getContextURI()).path(Authorize.class).build(); + } + +} diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/CORSFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/CORSFilter.java new file mode 100644 index 000000000..5ed23c5ad --- /dev/null +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/CORSFilter.java @@ -0,0 +1,77 @@ +/** + * Copyright 2025 Martynas Jusevičius + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.atomgraph.linkeddatahub.server.filter.response; + +import jakarta.annotation.Priority; +import jakarta.ws.rs.HttpMethod; +import jakarta.ws.rs.Priorities; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.container.ContainerResponseContext; +import jakarta.ws.rs.container.ContainerResponseFilter; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.ext.Provider; +import java.io.IOException; + +/** + * Response filter that adds CORS (Cross-Origin Resource Sharing) headers to allow cross-origin access. + * Runs at HEADER_DECORATOR priority to ensure CORS headers are present on all responses including errors. 
+ * + * @author {@literal Martynas Jusevičius } + */ +@Provider +@Priority(Priorities.HEADER_DECORATOR) +public class CORSFilter implements ContainerResponseFilter +{ + private static final String ALLOWED_METHODS = String.join(", ", + HttpMethod.GET, + HttpMethod.POST, + HttpMethod.PUT, + HttpMethod.DELETE, + HttpMethod.PATCH, + HttpMethod.HEAD, + HttpMethod.OPTIONS + ); + + @Override + public void filter(ContainerRequestContext request, ContainerResponseContext response) throws IOException + { + if (request.getHeaderString("Origin") != null) + { + response.getHeaders().add("Access-Control-Allow-Origin", "*"); + response.getHeaders().add("Access-Control-Allow-Methods", ALLOWED_METHODS); + response.getHeaders().add("Access-Control-Allow-Headers", "Accept, Content-Type, Authorization"); + response.getHeaders().add("Access-Control-Expose-Headers", "Link, Content-Location, Location"); + + // Handle preflight OPTIONS requests + if (HttpMethod.OPTIONS.equalsIgnoreCase(request.getMethod())) + { + response.setStatus(Response.Status.NO_CONTENT.getStatusCode()); + response.getHeaders().add("Access-Control-Max-Age", String.valueOf(getMaxAge())); + } + } + } + + /** + * Returns the maximum age (in seconds) for which browsers should cache the preflight response. 
+ * + * @return max-age value in seconds (default: 1728000 = 20 days) + */ + public int getMaxAge() + { + return 1728000; // 20 days + } +} diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/BackendInvalidationFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/CacheInvalidationFilter.java similarity index 54% rename from src/main/java/com/atomgraph/linkeddatahub/server/filter/response/BackendInvalidationFilter.java rename to src/main/java/com/atomgraph/linkeddatahub/server/filter/response/CacheInvalidationFilter.java index 4d95fda1e..25f5789b9 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/BackendInvalidationFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/CacheInvalidationFilter.java @@ -31,6 +31,8 @@ import jakarta.ws.rs.container.ContainerResponseFilter; import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; +import java.util.Optional; +import java.util.Set; import org.apache.jena.rdf.model.Resource; import org.glassfish.jersey.uri.UriComponent; @@ -41,7 +43,7 @@ * @author Martynas Jusevičius {@literal } */ @Priority(Priorities.USER + 400) -public class BackendInvalidationFilter implements ContainerResponseFilter +public class CacheInvalidationFilter implements ContainerResponseFilter { /** @@ -50,65 +52,97 @@ public class BackendInvalidationFilter implements ContainerResponseFilter public static final String HEADER_NAME = "X-Escaped-Request-URI"; @Inject com.atomgraph.linkeddatahub.Application system; - @Inject jakarta.inject.Provider app; - + @Inject jakarta.inject.Provider> app; + @Override public void filter(ContainerRequestContext req, ContainerResponseContext resp) throws IOException { - if (getAdminApplication().getService().getBackendProxy() == null) return; - + // If no application was matched (e.g., non-existent dataspace), skip cache invalidation + if (!getApplication().isPresent()) return; + if 
(req.getMethod().equals(HttpMethod.POST) && resp.getHeaderString(HttpHeaders.LOCATION) != null) { - URI location = (URI)resp.getHeaders().get(HttpHeaders.LOCATION).get(0); + URI location = URI.create(resp.getHeaderString(HttpHeaders.LOCATION)); URI parentURI = location.resolve("..").normalize(); - - ban(getApplication().getService().getBackendProxy(), location.toString()).close(); + URI relativeParentURI = getApplication().get().getBaseURI().relativize(parentURI); + + banIfNotNull(getApplication().get().getFrontendProxy(), location.toString()); + banIfNotNull(getApplication().get().getService().getBackendProxy(), location.toString()); // ban URI from authorization query results - ban(getAdminApplication().getService().getBackendProxy(), location.toString()).close(); + banIfNotNull(getAdminApplication().getService().getBackendProxy(), location.toString()); + // ban parent resource URI in order to avoid stale children data in containers - ban(getApplication().getService().getBackendProxy(), parentURI.toString()).close(); - ban(getApplication().getService().getBackendProxy(), getApplication().getBaseURI().relativize(parentURI).toString()).close(); // URIs can be relative in queries + banIfNotNull(getApplication().get().getFrontendProxy(), parentURI.toString()); + banIfNotNull(getApplication().get().getService().getBackendProxy(), parentURI.toString()); + + if (!relativeParentURI.toString().isEmpty()) // URIs can be relative in queries + { + banIfNotNull(getApplication().get().getFrontendProxy(), relativeParentURI.toString()); + banIfNotNull(getApplication().get().getService().getBackendProxy(), relativeParentURI.toString()); + } + // ban all results of queries that use forClass type if (req.getUriInfo().getQueryParameters().containsKey(AC.forClass.getLocalName())) { String forClass = req.getUriInfo().getQueryParameters().getFirst(AC.forClass.getLocalName()); - ban(getApplication().getService().getBackendProxy(), forClass).close(); + 
banIfNotNull(getApplication().get().getFrontendProxy(), forClass); + banIfNotNull(getApplication().get().getService().getBackendProxy(), forClass); } } - if (req.getMethod().equals(HttpMethod.POST) || req.getMethod().equals(HttpMethod.PUT) || req.getMethod().equals(HttpMethod.DELETE) || req.getMethod().equals(HttpMethod.PATCH)) + if (Set.of(HttpMethod.POST, HttpMethod.PUT, HttpMethod.DELETE, HttpMethod.PATCH).contains(req.getMethod())) { - // ban all admin/ entries when the admin dataset is changed - not perfect, but works + // ban all admin. entries when the admin dataset is changed - not perfect, but works if (!getAdminApplication().getBaseURI().relativize(req.getUriInfo().getAbsolutePath()).isAbsolute()) // URL is relative to the admin app's base URI { - ban(getAdminApplication().getService().getBackendProxy(), getAdminApplication().getBaseURI().toString()).close(); -// ban(getAdminApplication().getService().getBackendProxy(), FOAF.Agent.getURI()).close(); - ban(getAdminApplication().getService().getBackendProxy(), "foaf:Agent").close(); // queries use prefixed names instead of absolute URIs -// ban(getAdminApplication().getService().getBackendProxy(), ACL.AuthenticatedAgent.getURI()).close(); - ban(getAdminApplication().getService().getBackendProxy(), "acl:AuthenticatedAgent").close(); + banIfNotNull(getAdminApplication().getService().getBackendProxy(), getAdminApplication().getBaseURI().toString()); + banIfNotNull(getAdminApplication().getService().getBackendProxy(), "foaf:Agent"); // queries use prefixed names instead of absolute URIs + banIfNotNull(getAdminApplication().getService().getBackendProxy(), "acl:AuthenticatedAgent"); } - + if (req.getUriInfo().getAbsolutePath().toString().endsWith("/")) { - ban(getApplication().getService().getBackendProxy(), req.getUriInfo().getAbsolutePath().toString()).close(); + banIfNotNull(getApplication().get().getFrontendProxy(), req.getUriInfo().getAbsolutePath().toString()); + 
banIfNotNull(getApplication().get().getService().getBackendProxy(), req.getUriInfo().getAbsolutePath().toString()); // ban URI from authorization query results - ban(getAdminApplication().getService().getBackendProxy(), req.getUriInfo().getAbsolutePath().toString()).close(); - + banIfNotNull(getAdminApplication().getService().getBackendProxy(), req.getUriInfo().getAbsolutePath().toString()); + // ban parent document URIs (those that have a trailing slash) in order to avoid stale children data in containers - if (!req.getUriInfo().getAbsolutePath().equals(getApplication().getBaseURI())) + if (!req.getUriInfo().getAbsolutePath().equals(getApplication().get().getBaseURI())) { URI parentURI = req.getUriInfo().getAbsolutePath().resolve("..").normalize(); + URI relativeParentURI = getApplication().get().getBaseURI().relativize(parentURI); - ban(getApplication().getService().getBackendProxy(), parentURI.toString()).close(); - ban(getApplication().getService().getBackendProxy(), getApplication().getBaseURI().relativize(parentURI).toString()).close(); // URIs can be relative in queries + // ban parent resource URI in order to avoid stale children data in containers + banIfNotNull(getApplication().get().getFrontendProxy(), parentURI.toString()); + banIfNotNull(getApplication().get().getService().getBackendProxy(), parentURI.toString()); + + if (!relativeParentURI.toString().isEmpty()) // URIs can be relative in queries + { + banIfNotNull(getApplication().get().getFrontendProxy(), relativeParentURI.toString()); + banIfNotNull(getApplication().get().getService().getBackendProxy(), relativeParentURI.toString()); + } } } } } + /** + * Bans URL from proxy cache if proxy is not null. + * Null-safe wrapper that handles the common pattern of banning and closing the response. 
+ * + * @param proxy proxy resource (can be null) + * @param url URL to be banned + */ + public void banIfNotNull(Resource proxy, String url) + { + if (proxy != null) + ban(proxy, url).close(); + } + /** * Bans URL from proxy cache. - * + * * @param proxy proxy resource * @param url URL to be banned * @return response from proxy @@ -117,7 +151,7 @@ public Response ban(Resource proxy, String url) { if (proxy == null) throw new IllegalArgumentException("Proxy resource cannot be null"); if (url == null) throw new IllegalArgumentException("Resource cannot be null"); - + return getClient().target(proxy.getURI()).request(). header(HEADER_NAME, UriComponent.encode(url, UriComponent.Type.UNRESERVED)). // the value has to be URL-encoded in order to match request URLs in Varnish method("BAN", Response.class); @@ -125,23 +159,24 @@ public Response ban(Resource proxy, String url) /** * Returns admin application of the current dataspace. - * + * * @return admin application resource */ public AdminApplication getAdminApplication() { - if (getApplication().canAs(EndUserApplication.class)) - return getApplication().as(EndUserApplication.class).getAdminApplication(); + com.atomgraph.linkeddatahub.apps.model.Application application = getApplication().get(); + if (application.canAs(EndUserApplication.class)) + return application.as(EndUserApplication.class).getAdminApplication(); else - return getApplication().as(AdminApplication.class); + return application.as(AdminApplication.class); } /** * Returns the current application. 
- * - * @return application resource + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { return app.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/ResponseHeadersFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/ResponseHeadersFilter.java index 5d954a016..f444ae697 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/ResponseHeadersFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/ResponseHeadersFilter.java @@ -56,7 +56,7 @@ public class ResponseHeadersFilter implements ContainerResponseFilter private static final Logger log = LoggerFactory.getLogger(ResponseHeadersFilter.class); private static final Pattern LINK_SPLITTER = Pattern.compile(",(?=\\s*<)"); // split on commas before next '<' - @Inject jakarta.inject.Provider app; + @Inject jakarta.inject.Provider> app; @Inject jakarta.inject.Provider> dataset; @Inject jakarta.inject.Provider> authorizationContext; @@ -65,31 +65,36 @@ public void filter(ContainerRequestContext request, ContainerResponseContext res { if (response.getStatusInfo().equals(Response.Status.NO_CONTENT)) response.getHeaders().remove(HttpHeaders.CONTENT_TYPE); // needs to be explicitly unset for some reason - + if (request.getSecurityContext().getUserPrincipal() instanceof Agent) { Agent agent = ((Agent)(request.getSecurityContext().getUserPrincipal())); response.getHeaders().add(HttpHeaders.LINK, new Link(URI.create(agent.getURI()), ACL.agent.getURI(), null)); } - + if (getAuthorizationContext().isPresent()) getAuthorizationContext().get().getModeURIs().forEach(mode -> response.getHeaders().add(HttpHeaders.LINK, new Link(mode, ACL.mode.getURI(), null))); - + List linkValues = response.getHeaders().get(HttpHeaders.LINK); List links = parseLinkHeaderValues(linkValues); - + if (getLinksByRel(links, 
SD.endpoint.getURI()).isEmpty()) // add Link rel=sd:endpoint. // TO-DO: The external SPARQL endpoint URL is different from the internal one currently specified as sd:endpoint in the context dataset response.getHeaders().add(HttpHeaders.LINK, new Link(request.getUriInfo().getBaseUriBuilder().path(Dispatcher.class, "getSPARQLEndpoint").build(), SD.endpoint.getURI(), null)); - // add Link rel=ldt:ontology, if the ontology URI is specified - if (getApplication().getOntology() != null) - response.getHeaders().add(HttpHeaders.LINK, new Link(URI.create(getApplication().getOntology().getURI()), LDT.ontology.getURI(), null)); - // add Link rel=ac:stylesheet, if the stylesheet URI is specified - if (getApplication().getStylesheet() != null) - response.getHeaders().add(HttpHeaders.LINK, new Link(URI.create(getApplication().getStylesheet().getURI()), AC.stylesheet.getURI(), null)); - + // Only add application-specific links if application is present + if (getApplication().isPresent()) + { + Application application = getApplication().get(); + // add Link rel=ldt:ontology, if the ontology URI is specified + if (application.getOntology() != null) + response.getHeaders().add(HttpHeaders.LINK, new Link(URI.create(application.getOntology().getURI()), LDT.ontology.getURI(), null)); + // add Link rel=ac:stylesheet, if the stylesheet URI is specified + if (application.getStylesheet() != null) + response.getHeaders().add(HttpHeaders.LINK, new Link(URI.create(application.getStylesheet().getURI()), AC.stylesheet.getURI(), null)); + } + if (response.getHeaders().get(HttpHeaders.LINK) != null) { // combine Link header values into a single value because Saxon-JS 2.x is not able to deal with duplicate header names: https://saxonica.plan.io/issues/5199 @@ -149,10 +154,10 @@ protected List getLinksByRel(List links, String rel) /** * Returns the current application. - * - * @return application resource. 
+ * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { return app.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/XsltExecutableFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/XsltExecutableFilter.java index 9d7b66560..ee3b9d288 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/XsltExecutableFilter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/response/XsltExecutableFilter.java @@ -39,6 +39,7 @@ import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.UriInfo; import java.net.URISyntaxException; +import java.util.Optional; import javax.xml.transform.Source; import javax.xml.transform.stream.StreamSource; import net.sf.saxon.s9api.SaxonApiException; @@ -60,8 +61,8 @@ public class XsltExecutableFilter implements ContainerResponseFilter private static final Logger log = LoggerFactory.getLogger(XsltExecutableFilter.class); @Inject com.atomgraph.linkeddatahub.Application system; - @Inject jakarta.inject.Provider application; - + @Inject jakarta.inject.Provider> application; + @Context UriInfo uriInfo; @Override @@ -71,7 +72,9 @@ public void filter(ContainerRequestContext req, ContainerResponseContext resp) t if (resp.getMediaType() != null && (resp.getMediaType().isCompatible(MediaType.TEXT_HTML_TYPE) || resp.getMediaType().isCompatible(MediaType.APPLICATION_XHTML_XML_TYPE))) { - URI stylesheet = getApplication().getStylesheet() != null ? 
URI.create(getApplication().getStylesheet().getURI()) : null; + URI stylesheet = null; + if (getApplication().isPresent() && getApplication().get().getStylesheet() != null) + stylesheet = URI.create(getApplication().get().getStylesheet().getURI()); if (stylesheet != null) req.setProperty(AC.stylesheet.getURI(), getXsltExecutable(stylesheet)); else req.setProperty(AC.stylesheet.getURI(), getSystem().getXsltExecutable()); @@ -282,10 +285,10 @@ public com.atomgraph.linkeddatahub.Application getSystem() /** * Returns current application. - * - * @return application resource + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { return application.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/io/ValidatingModelProvider.java b/src/main/java/com/atomgraph/linkeddatahub/server/io/ValidatingModelProvider.java index 2db8f677b..db5ac8908 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/io/ValidatingModelProvider.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/io/ValidatingModelProvider.java @@ -82,7 +82,7 @@ public class ValidatingModelProvider extends com.atomgraph.server.io.ValidatingM @Context UriInfo uriInfo; @Context SecurityContext securityContext; - @Inject jakarta.inject.Provider application; + @Inject jakarta.inject.Provider> application; @Inject com.atomgraph.linkeddatahub.Application system; @Inject jakarta.inject.Provider> agentContextProvider; @@ -236,31 +236,34 @@ public Resource processRead(Resource resource) // this logic really belongs in a throw new SPINConstraintViolationException(cvs, resource.getModel()); } } - - if (getApplication().canAs(AdminApplication.class) && resource.hasProperty(RDF.type, OWL.Ontology)) + + if (getApplication().isPresent() && getApplication().get().canAs(AdminApplication.class) && resource.hasProperty(RDF.type, OWL.Ontology)) { // clear cached OntModel if ontology is updated. 
TO-DO: send event instead getSystem().getOntModelSpec().getDocumentManager().getFileManager().removeCacheModel(resource.getURI()); } - - if (resource.hasProperty(RDF.type, ACL.Authorization)) + + if (getApplication().isPresent() && resource.hasProperty(RDF.type, ACL.Authorization)) { LinkedDataClient ldc = LinkedDataClient.create(getSystem().getClient(), getSystem().getMediaTypes()). delegation(getUriInfo().getBaseUri(), getAgentContextProvider().get().orElse(null)); getSystem().getEventBus().post(new com.atomgraph.linkeddatahub.server.event.AuthorizationCreated(getEndUserApplication(), ldc, resource)); } - + return resource; } @Override public Model processWrite(Model model) { + // If no application (e.g., error responses), skip mbox processing + if (!getApplication().isPresent()) return super.processWrite(model); + // show foaf:mbox in end-user apps - if (getApplication().canAs(EndUserApplication.class)) return model; + if (getApplication().get().canAs(EndUserApplication.class)) return super.processWrite(model); // show foaf:mbox for authenticated agents - if (getSecurityContext() != null && getSecurityContext().getUserPrincipal() instanceof Agent) return model; + if (getSecurityContext() != null && getSecurityContext().getUserPrincipal() instanceof Agent) return super.processWrite(model); // show foaf:mbox_sha1sum for all other agents (in admin apps) return super.processWrite(hashMboxes(getMessageDigest()).apply(model)); // apply processing from superclasses @@ -317,15 +320,15 @@ public static Statement mboxHashStmt(Statement stmt, MessageDigest messageDigest /** * Returns the end-user application of the current dataspace. 
- * + * * @return end-user application resource */ public EndUserApplication getEndUserApplication() { - if (getApplication().canAs(EndUserApplication.class)) - return getApplication().as(EndUserApplication.class); + if (getApplication().get().canAs(EndUserApplication.class)) + return getApplication().get().as(EndUserApplication.class); else - return getApplication().as(AdminApplication.class).getEndUserApplication(); + return getApplication().get().as(AdminApplication.class).getEndUserApplication(); } @Override @@ -336,10 +339,10 @@ public UriInfo getUriInfo() /** * Returns current application. - * - * @return application resource + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { return application.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/mapper/ForbiddenExceptionMapper.java b/src/main/java/com/atomgraph/linkeddatahub/server/mapper/ForbiddenExceptionMapper.java new file mode 100644 index 000000000..9dcc3b3aa --- /dev/null +++ b/src/main/java/com/atomgraph/linkeddatahub/server/mapper/ForbiddenExceptionMapper.java @@ -0,0 +1,65 @@ +/** + * Copyright 2025 Martynas Jusevičius + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +package com.atomgraph.linkeddatahub.server.mapper; + +import com.atomgraph.core.MediaTypes; +import com.atomgraph.server.mapper.ExceptionMapperBase; +import com.atomgraph.server.vocabulary.HTTP; +import jakarta.inject.Inject; +import jakarta.ws.rs.ForbiddenException; +import jakarta.ws.rs.core.EntityTag; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.ext.ExceptionMapper; +import jakarta.ws.rs.ext.Provider; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.ResourceFactory; + +/** + * JAX-RS mapper for generic forbidden exceptions. + * Handles ForbiddenException that are not AuthorizationException (which has its own mapper). + * + * @author Martynas Jusevičius {@literal } + */ +@Provider +public class ForbiddenExceptionMapper extends ExceptionMapperBase implements ExceptionMapper +{ + + /** + * Constructs mapper from media types. + * + * @param mediaTypes registry of readable/writable media types + */ + @Inject + public ForbiddenExceptionMapper(MediaTypes mediaTypes) + { + super(mediaTypes); + } + + @Override + public Response toResponse(ForbiddenException ex) + { + Resource exRes = toResource(ex, Response.Status.FORBIDDEN, + ResourceFactory.createResource("http://www.w3.org/2011/http-statusCodes#Forbidden")). + addLiteral(HTTP.sc, ResourceFactory.createResource("http://www.w3.org/2011/http-statusCodes#Forbidden")); + + return getResponseBuilder(exRes.getModel()). + status(Response.Status.FORBIDDEN). + tag((EntityTag)null). 
// unset EntityTag to prevent caching of 403 responses + build(); + } + +} diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/AuthorizationExceptionMapper.java b/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/AuthorizationExceptionMapper.java index 6c6e02cff..a709a168b 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/AuthorizationExceptionMapper.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/AuthorizationExceptionMapper.java @@ -29,6 +29,7 @@ import com.atomgraph.server.mapper.ExceptionMapperBase; import com.atomgraph.server.vocabulary.HTTP; import java.net.URI; +import java.util.Optional; import jakarta.inject.Inject; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.EntityTag; @@ -46,7 +47,7 @@ public class AuthorizationExceptionMapper extends ExceptionMapperBase implements { @Context SecurityContext securityContext; - @Inject jakarta.inject.Provider application; + @Inject jakarta.inject.Provider> application; /** * Constructs mapper from media types. @@ -67,11 +68,11 @@ public Response toResponse(AuthorizationException ex) addLiteral(HTTP.absoluteURI, ex.getAbsolutePath().toString()); // add link to the endpoint for access requests. TO-DO: make the URIs configurable or best - retrieve from sitemap/dataset - if (getSecurityContext().getUserPrincipal() != null) + if (getSecurityContext().getUserPrincipal() != null && getApplication().isPresent()) { - if (getApplication().canAs(EndUserApplication.class)) + if (getApplication().get().canAs(EndUserApplication.class)) { - Resource adminBase = getApplication().as(EndUserApplication.class).getAdminApplication().getBase(); + Resource adminBase = getApplication().get().as(EndUserApplication.class).getAdminApplication().getBase(); URI requestAccessURI = UriBuilder.fromUri(adminBase.getURI()). path(REQUEST_ACCESS_PATH). @@ -99,10 +100,10 @@ public SecurityContext getSecurityContext() /** * Returns associated application. 
- * - * @return application resource + * + * @return optional application resource */ - public Application getApplication() + public Optional getApplication() { return application.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/oauth2/TokenExpiredExceptionMapper.java b/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/oauth2/TokenExpiredExceptionMapper.java index 2f67e5324..3a1a8c3ec 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/oauth2/TokenExpiredExceptionMapper.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/mapper/auth/oauth2/TokenExpiredExceptionMapper.java @@ -19,11 +19,12 @@ import com.atomgraph.core.MediaTypes; import com.atomgraph.linkeddatahub.apps.model.AdminApplication; import com.atomgraph.linkeddatahub.apps.model.EndUserApplication; -import static com.atomgraph.linkeddatahub.resource.admin.oauth2.google.Authorize.REFERER_PARAM_NAME; -import com.atomgraph.linkeddatahub.server.filter.request.auth.IDTokenFilter; +import static com.atomgraph.linkeddatahub.resource.oauth2.google.Authorize.REFERER_PARAM_NAME; +import com.atomgraph.linkeddatahub.server.filter.request.auth.google.IDTokenFilter; import com.atomgraph.server.mapper.ExceptionMapperBase; import com.auth0.jwt.exceptions.TokenExpiredException; import java.net.URI; +import java.util.Optional; import jakarta.inject.Inject; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.NewCookie; @@ -44,7 +45,7 @@ public class TokenExpiredExceptionMapper extends ExceptionMapperBase implements { @Context UriInfo uriInfo; - @Inject jakarta.inject.Provider application; + @Inject jakarta.inject.Provider> application; /** * Constructs mapper from media types. 
@@ -60,45 +61,58 @@ public TokenExpiredExceptionMapper(MediaTypes mediaTypes) @Override public Response toResponse(TokenExpiredException ex) { - String path = getApplication().getBaseURI().getPath(); - NewCookie expiredCookie = new NewCookie(IDTokenFilter.COOKIE_NAME, "", path, null, NewCookie.DEFAULT_VERSION, null, 0, false); + if (!getApplication().isPresent()) + { + // If no application is present, just return a BAD_REQUEST response without redirect + return getResponseBuilder(toResource(ex, Response.Status.BAD_REQUEST, + ResourceFactory.createResource("http://www.w3.org/2011/http-statusCodes#BadRequest")). + getModel()). + build(); + } + + String path = getApplication().get().getBaseURI().getPath(); + NewCookie expiredCookie = new NewCookie.Builder(IDTokenFilter.COOKIE_NAME). + value(""). + path(path). + maxAge(0). + build(); ResponseBuilder builder = getResponseBuilder(toResource(ex, Response.Status.BAD_REQUEST, ResourceFactory.createResource("http://www.w3.org/2011/http-statusCodes#BadRequest")). getModel()). cookie(expiredCookie); - + URI redirectUri = UriBuilder.fromUri(getAdminApplication().getBaseURI()). path("/oauth2/authorize/google"). // TO-DO: move to config? queryParam(REFERER_PARAM_NAME, getUriInfo().getRequestUri()). // we need to retain URL query parameters build(); - + if (!getUriInfo().getAbsolutePath().equals(redirectUri)) // prevent a perpetual redirect loop builder.status(Status.SEE_OTHER). location(redirectUri); // TO-DO: extract - + return builder.build(); } /** * Returns admin application of the current dataspace. 
- * + * * @return admin application resource */ public AdminApplication getAdminApplication() { - if (getApplication().canAs(EndUserApplication.class)) - return getApplication().as(EndUserApplication.class).getAdminApplication(); + if (getApplication().get().canAs(EndUserApplication.class)) + return getApplication().get().as(EndUserApplication.class).getAdminApplication(); else - return getApplication().as(AdminApplication.class); + return getApplication().get().as(AdminApplication.class); } - + /** * Returns current application. - * - * @return application resource + * + * @return optional application resource */ - public com.atomgraph.linkeddatahub.apps.model.Application getApplication() + public Optional getApplication() { return application.get(); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java index c3cb7bc7e..91667c801 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/Dispatcher.java @@ -224,32 +224,10 @@ public Class getClearEndpoint() { return getProxyClass().orElse(Clear.class); } - - /** - * Returns Google OAuth endpoint. - * - * @return endpoint resource - */ - @Path("oauth2/authorize/google") - public Class getAuthorizeGoogle() - { - return getProxyClass().orElse(com.atomgraph.linkeddatahub.resource.admin.oauth2.google.Authorize.class); - } - /** - * Returns OAuth login endpoint. - * - * @return endpoint resource - */ - @Path("oauth2/login") - public Class getOAuth2Login() - { - return getProxyClass().orElse(com.atomgraph.linkeddatahub.resource.admin.oauth2.Login.class); - } - /** * Returns the default JAX-RS resource class. 
- * + * * @return resource class */ public Class getDocumentClass() diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBase.java b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBase.java index 5e14266ac..420a78a95 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBase.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/model/impl/ProxyResourceBase.java @@ -28,17 +28,21 @@ import com.atomgraph.linkeddatahub.server.security.AgentContext; import com.atomgraph.linkeddatahub.server.security.IDTokenSecurityContext; import com.atomgraph.linkeddatahub.server.security.WebIDSecurityContext; +import java.net.InetAddress; import java.net.URI; +import java.net.UnknownHostException; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Optional; import jakarta.inject.Inject; import jakarta.servlet.http.HttpServletRequest; +import jakarta.ws.rs.ForbiddenException; import jakarta.ws.rs.NotAllowedException; import jakarta.ws.rs.Consumes; import jakarta.ws.rs.NotAcceptableException; import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.PATCH; import jakarta.ws.rs.POST; import jakarta.ws.rs.PUT; import jakarta.ws.rs.ProcessingException; @@ -151,6 +155,10 @@ protected ProxyResourceBase(@Context UriInfo uriInfo, @Context Request request, @Context Providers providers) { super(uriInfo, request, httpHeaders, mediaTypes, uri, endpoint, query, accept, mode, system.getExternalClient(), httpServletRequest); + + // LNK-009: Validate that proxied URI is not internal/private (SSRF protection) + if (uri != null) validateNotInternalURL(uri); + this.uriInfo = uriInfo; this.application = application; this.service = service.get(); @@ -164,12 +172,12 @@ protected ProxyResourceBase(@Context UriInfo uriInfo, @Context Request request, readableMediaTypesList.addAll(mediaTypes.getReadable(Model.class)); 
readableMediaTypesList.addAll(mediaTypes.getReadable(ResultSet.class)); this.readableMediaTypes = readableMediaTypesList.toArray(MediaType[]::new); - + if (agentContext.isPresent()) { if (agentContext.get() instanceof WebIDSecurityContext) super.getWebTarget().register(new WebIDDelegationFilter(agentContext.get().getAgent())); - + if (agentContext.get() instanceof IDTokenSecurityContext iDTokenSecurityContext) super.getWebTarget().register(new IDTokenDelegationFilter(agentContext.get().getAgent(), iDTokenSecurityContext.getJWTToken(), uriInfo.getBaseUri().getPath(), null)); @@ -178,7 +186,7 @@ protected ProxyResourceBase(@Context UriInfo uriInfo, @Context Request request, /** * Gets a request invocation builder for the given target. - * + * * @param target web target * @return invocation builder */ @@ -188,6 +196,21 @@ public Invocation.Builder getBuilder(WebTarget target) return target.request(getReadableMediaTypes()). header(HttpHeaders.USER_AGENT, getUserAgentHeaderValue()); } + + /** + * Returns response for the given client response. + * Handles responses without media type (e.g., 204 No Content). + * + * @param clientResponse client response + * @return response + */ + @Override + public Response getResponse(Response clientResponse) + { + if (clientResponse.getMediaType() == null) return Response.status(clientResponse.getStatus()).build(); + + return super.getResponse(clientResponse); + } /** * Forwards GET request and returns response from remote resource. @@ -215,7 +238,7 @@ public Response get(WebTarget target, Invocation.Builder builder) /** * Forwards POST request with SPARQL query body and returns response from remote resource. 
- * + * * @param sparqlQuery SPARQL query string * @return response */ @@ -224,9 +247,9 @@ public Response get(WebTarget target, Invocation.Builder builder) public Response post(String sparqlQuery) { if (getWebTarget() == null) throw new NotFoundException("Resource URI not supplied"); - + if (log.isDebugEnabled()) log.debug("POSTing SPARQL query to URI: {}", getWebTarget().getUri()); - + try (Response cr = getWebTarget().request() .accept(getReadableMediaTypes()) .post(Entity.entity(sparqlQuery, com.atomgraph.core.MediaType.APPLICATION_SPARQL_QUERY_TYPE))) @@ -244,7 +267,71 @@ public Response post(String sparqlQuery) throw new BadGatewayException(ex); } } - + + /** + * Forwards POST request with form data and returns response from remote resource. + * + * @param formData form data string + * @return response + */ + @POST + @Consumes(MediaType.APPLICATION_FORM_URLENCODED) + public Response postForm(String formData) + { + if (getWebTarget() == null) throw new NotFoundException("Resource URI not supplied"); + + if (log.isDebugEnabled()) log.debug("POSTing form data to URI: {}", getWebTarget().getUri()); + + try (Response cr = getWebTarget().request() + .accept(getReadableMediaTypes()) + .post(Entity.entity(formData, MediaType.APPLICATION_FORM_URLENCODED_TYPE))) + { + return getResponse(cr); + } + catch (MessageBodyProviderNotFoundException ex) + { + if (log.isWarnEnabled()) log.debug("Dereferenced URI {} returned non-RDF media type", getWebTarget().getUri()); + throw new NotAcceptableException(ex); + } + catch (ProcessingException ex) + { + if (log.isWarnEnabled()) log.debug("Could not dereference URI: {}", getWebTarget().getUri()); + throw new BadGatewayException(ex); + } + } + + /** + * Forwards PATCH request with SPARQL update body and returns response from remote resource. 
+ * + * @param sparqlUpdate SPARQL update string + * @return response + */ + @PATCH + @Consumes(com.atomgraph.core.MediaType.APPLICATION_SPARQL_UPDATE) + public Response patch(String sparqlUpdate) + { + if (getWebTarget() == null) throw new NotFoundException("Resource URI not supplied"); + + if (log.isDebugEnabled()) log.debug("PATCHing SPARQL update to URI: {}", getWebTarget().getUri()); + + try (Response cr = getWebTarget().request() + .accept(getReadableMediaTypes()) + .method("PATCH", Entity.entity(sparqlUpdate, com.atomgraph.core.MediaType.APPLICATION_SPARQL_UPDATE_TYPE))) + { + return getResponse(cr); + } + catch (MessageBodyProviderNotFoundException ex) + { + if (log.isWarnEnabled()) log.debug("Dereferenced URI {} returned non-RDF media type", getWebTarget().getUri()); + throw new NotAcceptableException(ex); + } + catch (ProcessingException ex) + { + if (log.isWarnEnabled()) log.debug("Could not dereference URI: {}", getWebTarget().getUri()); + throw new BadGatewayException(ex); + } + } + /** * Forwards a multipart POST request returns RDF response from remote resource. * @@ -394,12 +481,53 @@ public com.atomgraph.linkeddatahub.Application getSystem() /** * Returns the value of the User-Agent request header. - * + * * @return header value */ public String getUserAgentHeaderValue() { return LinkedDataClient.USER_AGENT; } - + + /** + * Validates that a URI does not point to an internal/private network resource. + * This prevents SSRF (Server-Side Request Forgery) attacks by resolving the hostname + * and checking if the IP address is in a private/internal range. 
+ * + * Blocks access to: + * - RFC 1918 private addresses (10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16) + * - Link-local addresses (169.254.0.0/16, fe80::/10) + * Loopback addresses (127.0.0.0/8, ::1) are intentionally allowed, as the application proxies its own endpoints. + * + * @param uri the URI to validate + * @throws IllegalArgumentException if the URI or host is null + * @throws ForbiddenException if the URI resolves to an internal IP address + */ + protected static void validateNotInternalURL(URI uri) + { + if (uri == null) throw new IllegalArgumentException("URI cannot be null"); + + String host = uri.getHost(); + if (host == null) throw new IllegalArgumentException("URI host cannot be null"); + + // Resolve hostname to IP and check if it's private/internal + try + { + InetAddress address = InetAddress.getByName(host); + + // Note: We don't block loopback addresses (127.0.0.1, localhost) because the application + // legitimately proxies its own endpoints (e.g., /clear, admin operations) + + if (address.isLinkLocalAddress()) + throw new ForbiddenException("Access to link-local addresses is not allowed: " + address.getHostAddress()); + if (address.isSiteLocalAddress()) + throw new ForbiddenException("Access to private addresses (RFC 1918) is not allowed: " + address.getHostAddress()); + } + catch (UnknownHostException e) + { + if (log.isWarnEnabled()) log.warn("Could not resolve hostname for SSRF validation: {}", host); + // Allow request to proceed - will fail later with better error message + } + } + } diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/util/JWTVerifier.java b/src/main/java/com/atomgraph/linkeddatahub/server/util/JWTVerifier.java new file mode 100644 index 000000000..e4fb9f1ce --- /dev/null +++ b/src/main/java/com/atomgraph/linkeddatahub/server/util/JWTVerifier.java @@ -0,0 +1,178 @@ +/** + * Copyright 2025 Martynas Jusevičius + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package com.atomgraph.linkeddatahub.server.util; + +import com.auth0.jwt.JWT; +import com.auth0.jwt.interfaces.DecodedJWT; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.ws.rs.client.Client; +import jakarta.ws.rs.core.Response; +import java.math.BigInteger; +import java.net.URI; +import java.security.KeyFactory; +import java.security.NoSuchAlgorithmException; +import java.security.interfaces.RSAPublicKey; +import java.security.spec.InvalidKeySpecException; +import java.security.spec.RSAPublicKeySpec; +import java.util.Base64; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Utility class for verifying JWT ID tokens using JWKS (JSON Web Key Set). + * Provides JWKS-based signature verification for OAuth 2.0 / OpenID Connect ID tokens. + * + * @author Martynas Jusevičius {@literal } + */ +public class JWTVerifier +{ + private static final Logger log = LoggerFactory.getLogger(JWTVerifier.class); + + /** + * Verifies a JWT ID token using JWKS-based signature verification. + * Performs the following validations: + * 1. Fetches public keys from the JWKS endpoint (or uses cached JWKS) + * 2. Verifies the JWT signature using RSA256 algorithm + * 3. Validates the issuer is in the allowed list + * 4. Validates the audience matches the client ID + * 5. 
Validates the token has not expired + * + * @param jwt decoded JWT ID token to verify + * @param jwksEndpoint JWKS endpoint URI + * @param allowedIssuers list of allowed issuer URIs + * @param clientID OAuth client ID + * @param client JAX-RS client for HTTP requests + * @param jwksCache optional cache for JWKS responses (key: jwksEndpoint.toString(), value: JsonObject) + * @return true if verification succeeds, false otherwise + */ + public static boolean verify(DecodedJWT jwt, URI jwksEndpoint, List allowedIssuers, String clientID, + Client client, Map jwksCache) + { + try + { + // Verify issuer first (before fetching JWKS) + if (!allowedIssuers.contains(jwt.getIssuer())) + { + if (log.isErrorEnabled()) log.error("JWT issuer '{}' not in allowed list: {}", jwt.getIssuer(), allowedIssuers); + return false; + } + + // Get JWKS (from cache or fetch) + JsonObject jwks; + String cacheKey = jwksEndpoint.toString(); + + if (jwksCache != null && jwksCache.containsKey(cacheKey)) + { + jwks = jwksCache.get(cacheKey); + if (log.isDebugEnabled()) log.debug("Using cached JWKS for endpoint: {}", jwksEndpoint); + } + else + { + // Fetch JWKS from the provider + try (Response jwksResponse = client.target(jwksEndpoint).request().get()) + { + if (!jwksResponse.getStatusInfo().getFamily().equals(Response.Status.Family.SUCCESSFUL)) + { + if (log.isErrorEnabled()) log.error("Failed to fetch JWKS from {}", jwksEndpoint); + return false; + } + + jwks = jwksResponse.readEntity(JsonObject.class); + + // Cache the JWKS if cache is provided + if (jwksCache != null) + { + jwksCache.put(cacheKey, jwks); + if (log.isDebugEnabled()) log.debug("Cached JWKS for endpoint: {}", jwksEndpoint); + } + } + } + + // Find the key that matches the JWT's key ID + String kid = jwt.getKeyId(); + if (kid == null) + { + if (log.isErrorEnabled()) log.error("JWT does not contain 'kid' (key ID) header"); + return false; + } + + JsonArray keys = jwks.getJsonArray("keys"); + if (keys == null) + { + if 
(log.isErrorEnabled()) log.error("JWKS does not contain 'keys' array"); + return false; + } + + // Find matching key + JsonObject matchingKey = null; + for (int i = 0; i < keys.size(); i++) + { + JsonObject key = keys.getJsonObject(i); + if (kid.equals(key.getString("kid", null))) + { + matchingKey = key; + break; + } + } + + if (matchingKey == null) + { + if (log.isErrorEnabled()) log.error("No matching key found in JWKS for kid: {}", kid); + return false; + } + + // Extract RSA public key components + String n = matchingKey.getString("n"); // modulus + String e = matchingKey.getString("e"); // exponent + + // Create RSA public key + BigInteger modulus = new BigInteger(1, Base64.getUrlDecoder().decode(n)); + BigInteger exponent = new BigInteger(1, Base64.getUrlDecoder().decode(e)); + + RSAPublicKeySpec spec = new RSAPublicKeySpec(modulus, exponent); + KeyFactory factory = KeyFactory.getInstance("RSA"); + RSAPublicKey publicKey = (RSAPublicKey) factory.generatePublic(spec); + + // Create algorithm and verifier + com.auth0.jwt.algorithms.Algorithm algorithm = com.auth0.jwt.algorithms.Algorithm.RSA256(publicKey, null); + com.auth0.jwt.JWTVerifier verifier = JWT.require(algorithm). + withIssuer(jwt.getIssuer()). + withAudience(clientID). 
+ build(); + + // Verify the token (this will throw if verification fails) + verifier.verify(jwt.getToken()); + + if (log.isDebugEnabled()) log.debug("Successfully verified JWT for subject '{}'", jwt.getSubject()); + return true; + } + catch (com.auth0.jwt.exceptions.JWTVerificationException ex) + { + if (log.isErrorEnabled()) log.error("JWT verification failed: {}", ex.getMessage()); + return false; + } + catch (IllegalArgumentException | NoSuchAlgorithmException | InvalidKeySpecException ex) + { + if (log.isErrorEnabled()) log.error("Error during JWT verification", ex); + return false; + } + } + +} diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/util/OntologyModelGetter.java b/src/main/java/com/atomgraph/linkeddatahub/server/util/OntologyModelGetter.java index 8aa7caadc..1ea63a98e 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/server/util/OntologyModelGetter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/server/util/OntologyModelGetter.java @@ -17,10 +17,8 @@ package com.atomgraph.linkeddatahub.server.util; import com.atomgraph.client.vocabulary.LDT; -import com.atomgraph.core.MediaTypes; import com.atomgraph.linkeddatahub.apps.model.EndUserApplication; import com.atomgraph.server.exception.OntologyException; -import jakarta.ws.rs.client.Client; import org.apache.jena.ontology.OntModelSpec; import org.apache.jena.query.ParameterizedSparqlString; import org.apache.jena.query.Query; @@ -44,21 +42,6 @@ public class OntologyModelGetter implements org.apache.jena.rdf.model.ModelGette private final EndUserApplication app; private final OntModelSpec ontModelSpec; private final Query ontologyQuery; - - - /** - * Constructs ontology getter for application. 
- * - * @param app end-user application resource - * @param ontModelSpec ontology specification - * @param ontologyQuery SPARQL query that loads ontology terms - * @param client HTTP client - * @param mediaTypes registry of readable/writable media types - */ - public OntologyModelGetter(EndUserApplication app, OntModelSpec ontModelSpec, Query ontologyQuery, Client client, MediaTypes mediaTypes) - { - this(app, ontModelSpec, ontologyQuery); - } /** * Constructs ontology getter for application. diff --git a/src/main/java/com/atomgraph/linkeddatahub/vocabulary/LDH.java b/src/main/java/com/atomgraph/linkeddatahub/vocabulary/LDH.java index 555ffdce5..cea639a6a 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/vocabulary/LDH.java +++ b/src/main/java/com/atomgraph/linkeddatahub/vocabulary/LDH.java @@ -101,6 +101,9 @@ public static String getURI() /** Service property */ public static final ObjectProperty service = m_model.createObjectProperty( NS + "service" ); + /** Origin property for subdomain-based application matching */ + public static final ObjectProperty origin = m_model.createObjectProperty( NS + "origin" ); + /** * For shape property */ public static final ObjectProperty forShape = m_model.createObjectProperty( NS + "forShape" ); diff --git a/src/main/java/com/atomgraph/linkeddatahub/vocabulary/LSMT.java b/src/main/java/com/atomgraph/linkeddatahub/vocabulary/ORCID.java similarity index 50% rename from src/main/java/com/atomgraph/linkeddatahub/vocabulary/LSMT.java rename to src/main/java/com/atomgraph/linkeddatahub/vocabulary/ORCID.java index 0f7847716..7998fc907 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/vocabulary/LSMT.java +++ b/src/main/java/com/atomgraph/linkeddatahub/vocabulary/ORCID.java @@ -1,19 +1,3 @@ -/** - * Copyright 2019 Martynas Jusevičius - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ package com.atomgraph.linkeddatahub.vocabulary; import org.apache.jena.ontology.DatatypeProperty; @@ -23,35 +7,37 @@ import org.apache.jena.rdf.model.Resource; /** - * LSMT vocabulary. - * + * ORCID-specific vocabulary. + * * @author Martynas Jusevičius {@literal } */ -@Deprecated -public class LSMT +public class ORCID { /** The RDF model that holds the vocabulary terms */ private static OntModel m_model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, null); - + /** The namespace of the vocabulary as a string */ - public static final String NS = "https://w3id.org/atomgraph/linkeddatahub/admin/sitemap/templates#"; - + public static final String NS = "https://w3id.org/atomgraph/linkeddatahub/services/orcid#"; + /** * The namespace of the vocabulary as a string - * - * @return namespace URI + * + * @return URI string * @see #NS */ public static String getURI() { return NS; } - + /** The namespace of the vocabulary as a resource */ public static final Resource NAMESPACE = m_model.createResource( NS ); - - /** Clear property */ - public static final DatatypeProperty clear = m_model.createDatatypeProperty( NS + "clear"); + + /** Client ID property */ + public static final DatatypeProperty clientID = m_model.createDatatypeProperty( NS + "clientID" ); + + /** Client secret property */ + public static final DatatypeProperty clientSecret = m_model.createDatatypeProperty( NS + "clientSecret" ); } diff --git a/src/main/java/com/atomgraph/linkeddatahub/writer/ModelXSLTWriter.java 
b/src/main/java/com/atomgraph/linkeddatahub/writer/ModelXSLTWriter.java index 2eb22962a..bdf1ebb5c 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/writer/ModelXSLTWriter.java +++ b/src/main/java/com/atomgraph/linkeddatahub/writer/ModelXSLTWriter.java @@ -124,14 +124,14 @@ public void writeTo(Model model, Class type, Type genericType, Annotation[] a /** * Hook for RDF model processing before write. - * + * * @param model RDF model * @return RDF model */ public Model processWrite(Model model) { // show foaf:mbox in end-user apps - if (getApplication().get().canAs(EndUserApplication.class)) return model; + if (getApplication().get().isPresent() && getApplication().get().get().canAs(EndUserApplication.class)) return model; // show foaf:mbox for authenticated agents if (getSecurityContext() != null && getSecurityContext().getUserPrincipal() instanceof Agent) return model; diff --git a/src/main/java/com/atomgraph/linkeddatahub/writer/XSLTWriterBase.java b/src/main/java/com/atomgraph/linkeddatahub/writer/XSLTWriterBase.java index 2bdb6ac97..89b574dfb 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/writer/XSLTWriterBase.java +++ b/src/main/java/com/atomgraph/linkeddatahub/writer/XSLTWriterBase.java @@ -26,6 +26,7 @@ import com.atomgraph.linkeddatahub.vocabulary.LDH; import com.atomgraph.linkeddatahub.vocabulary.LDHT; import com.atomgraph.linkeddatahub.vocabulary.Google; +import com.atomgraph.linkeddatahub.vocabulary.ORCID; import com.atomgraph.linkeddatahub.vocabulary.LAPP; import com.atomgraph.client.vocabulary.LDT; import com.atomgraph.core.util.Link; @@ -93,7 +94,7 @@ public abstract class XSLTWriterBase extends com.atomgraph.client.writer.XSLTWri @Context SecurityContext securityContext; @Inject com.atomgraph.linkeddatahub.Application system; - @Inject jakarta.inject.Provider application; + @Inject jakarta.inject.Provider> application; @Inject jakarta.inject.Provider dataManager; @Inject jakarta.inject.Provider xsltExecSupplier; @Inject 
jakarta.inject.Provider> modes; @@ -126,10 +127,18 @@ public Map getParameters(MultivaluedMap appOpt = getApplication().get(); + if (!appOpt.isPresent()) + { + if (log.isWarnEnabled()) log.warn("Application not present in XSLTWriterBase.getParameters()"); + return params; // return early if no application + } + + com.atomgraph.linkeddatahub.apps.model.Application app = appOpt.get(); if (log.isDebugEnabled()) log.debug("Passing $lapp:Application to XSLT: <{}>", app); params.put(new QName("ldt", LDT.base.getNameSpace(), LDT.base.getLocalName()), new XdmAtomicValue(app.getBaseURI())); + params.put(new QName("ldh", LDH.origin.getNameSpace(), LDH.origin.getLocalName()), new XdmAtomicValue(app.getOriginURI())); params.put(new QName("ldt", LDT.ontology.getNameSpace(), LDT.ontology.getLocalName()), new XdmAtomicValue(URI.create(app.getOntology().getURI()))); params.put(new QName("lapp", LAPP.Application.getNameSpace(), LAPP.Application.getLocalName()), getXsltExecutable().getProcessor().newDocumentBuilder().build(getSource(getAppModel(app, true)))); @@ -174,7 +183,9 @@ public Map getParameters(MultivaluedMap getDataManagerProvider() @@ -370,10 +381,10 @@ public Set getSupportedNamespaces() /** * Returns a JAX-RS provider for the current application. - * + * * @return provider */ - public jakarta.inject.Provider getApplication() + public jakarta.inject.Provider> getApplication() { return application; } @@ -408,4 +419,16 @@ public ContainerRequestContext getContainerRequestContext() return crc.get(); } + /** + * Returns the base URI of this LinkedDataHub instance. + * It equals to the base URI of the root dataspace. 
+ * + * @return root context URI + */ + @Override + public URI getContextURI() + { + return getSystem().getBaseURI(); + } + } \ No newline at end of file diff --git a/src/main/java/com/atomgraph/linkeddatahub/writer/factory/DataManagerFactory.java b/src/main/java/com/atomgraph/linkeddatahub/writer/factory/DataManagerFactory.java index 5b9bc9cae..f242f421a 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/writer/factory/DataManagerFactory.java +++ b/src/main/java/com/atomgraph/linkeddatahub/writer/factory/DataManagerFactory.java @@ -28,6 +28,7 @@ import com.atomgraph.linkeddatahub.writer.impl.DataManagerImpl; import java.net.URI; import java.util.HashMap; +import java.util.Optional; import jakarta.inject.Inject; import jakarta.servlet.http.HttpServletRequest; import jakarta.ws.rs.container.ContainerRequestContext; @@ -53,12 +54,13 @@ public class DataManagerFactory implements Factory @Context HttpServletRequest httpServletRequest; @Context Providers providers; @Context ServiceLocator serviceLocator; - + @Inject com.atomgraph.linkeddatahub.Application system; - + @Override public DataManager provide() { + // Always return DataManager, falling back to system DataManager if no Application (e.g., for error responses) return getDataManager(getApplication()); } @@ -66,32 +68,32 @@ public DataManager provide() public void dispose(DataManager t) { } - + /** * Returns RDF data manager. 
- * - * @param app end-user application + * + * @param appOpt optional end-user application (if empty, system DataManager is used) * @return data manager */ - public DataManager getDataManager(Application app) + public DataManager getDataManager(Optional appOpt) { final com.atomgraph.core.util.jena.DataManager baseManager; - - if (app.canAs(EndUserApplication.class)) - baseManager = (com.atomgraph.core.util.jena.DataManager)getSystem().getOntModelSpec(app.as(EndUserApplication.class)).getDocumentManager().getFileManager(); + + if (appOpt.isPresent() && appOpt.get().canAs(EndUserApplication.class)) + baseManager = (com.atomgraph.core.util.jena.DataManager)getSystem().getOntModelSpec(appOpt.get().as(EndUserApplication.class)).getDocumentManager().getFileManager(); else baseManager = getSystem().getDataManager(); - + LinkedDataClient ldc = LinkedDataClient.create(getSystem().getClient(), getSystem().getMediaTypes()). delegation(getUriInfo().getBaseUri(), getAgentContext()); - + // copy cached models over from the app's FileManager return new DataManagerImpl(LocationMapper.get(), new HashMap<>(baseManager.getModelCache()), ldc, true, getSystem().isPreemptiveAuth(), getSystem().isResolvingUncached(), - URI.create(getHttpServletRequest().getRequestURL().toString()).resolve(getHttpServletRequest().getContextPath() + "/"), + getSystem().getBaseURI(), getAgentContext()); } - + /** * Returns system application. * @@ -155,12 +157,12 @@ public ContainerRequestContext getContainerRequestContext() /** * Retrieves LDT application from the request context. 
- * - * @return LDT application + * + * @return optional LDT application */ - public Application getApplication() + public Optional getApplication() { - return (Application)getContainerRequestContext().getProperty(LAPP.Application.getURI()); + return (Optional)getContainerRequestContext().getProperty(LAPP.Application.getURI()); } } \ No newline at end of file diff --git a/src/main/java/com/atomgraph/linkeddatahub/writer/factory/ModeFactory.java b/src/main/java/com/atomgraph/linkeddatahub/writer/factory/ModeFactory.java index 303020eb5..e7016b906 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/writer/factory/ModeFactory.java +++ b/src/main/java/com/atomgraph/linkeddatahub/writer/factory/ModeFactory.java @@ -18,11 +18,13 @@ import com.atomgraph.client.vocabulary.AC; import com.atomgraph.linkeddatahub.writer.Mode; +import java.util.Collections; import java.util.List; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.core.Context; import org.glassfish.hk2.api.Factory; import org.glassfish.hk2.api.ServiceLocator; +import jakarta.ws.rs.ext.Provider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,6 +33,7 @@ * * @author {@literal Martynas Jusevičius } */ +@Provider public class ModeFactory implements Factory> { diff --git a/src/main/java/com/atomgraph/linkeddatahub/writer/function/URLDecode.java b/src/main/java/com/atomgraph/linkeddatahub/writer/function/URLDecode.java index 41eb586ae..6326ab956 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/writer/function/URLDecode.java +++ b/src/main/java/com/atomgraph/linkeddatahub/writer/function/URLDecode.java @@ -34,7 +34,8 @@ */ public class URLDecode extends ExtensionFunctionDefinition { - + + /** The local name of the XSLT function */ public static final String LOCAL_NAME = "url-decode"; @Override diff --git a/src/main/java/com/atomgraph/linkeddatahub/writer/impl/DataManagerImpl.java b/src/main/java/com/atomgraph/linkeddatahub/writer/impl/DataManagerImpl.java index 
e6001e72e..46208c1b4 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/writer/impl/DataManagerImpl.java +++ b/src/main/java/com/atomgraph/linkeddatahub/writer/impl/DataManagerImpl.java @@ -20,6 +20,7 @@ import java.net.URI; import com.atomgraph.core.client.LinkedDataClient; import com.atomgraph.linkeddatahub.server.security.AgentContext; +import com.google.common.net.InternetDomainName; import java.util.Map; import org.apache.jena.rdf.model.Model; import org.slf4j.Logger; @@ -66,14 +67,59 @@ public boolean resolvingUncached(String filenameOrURI) { if (super.resolvingUncached(filenameOrURI) && !isMapped(filenameOrURI)) { - // always resolve URIs relative to the root Context base URI - boolean relative = !getRootContextURI().relativize(URI.create(filenameOrURI)).isAbsolute(); - return relative; + // Allow resolving URIs from the same site (e.g., localhost:4443/static/..., admin.localhost:4443/ns) + return isSameSite(getRootContextURI(), URI.create(filenameOrURI)); } - + return false; // super.resolvingUncached(filenameOrURI); // configured in web.xml } + /** + * Checks if two URIs are from the same site (schemeful same-site). + * This allows subdomains like admin.localhost and localhost to be considered part of the same LinkedDataHub instance. + * Ports are ignored per the same-site definition. 
+ * + * @param uri1 first URI + * @param uri2 second URI + * @return true if both URIs are from the same site + */ + private boolean isSameSite(URI uri1, URI uri2) + { + if (uri1 == null || uri2 == null) return false; + if (!uri1.getScheme().equals(uri2.getScheme())) return false; + + String host1 = uri1.getHost(); + String host2 = uri2.getHost(); + + if (host1 == null || host2 == null) return false; + if (host1.equals(host2)) return true; + + try + { + InternetDomainName domain1 = InternetDomainName.from(host1); + InternetDomainName domain2 = InternetDomainName.from(host2); + + // For localhost domains, compare the full host (localhost == localhost, admin.localhost != localhost at domain level) + // But we want to treat them as same root domain, so just check if both end with "localhost" + if (host1.equals("localhost") || host1.endsWith(".localhost")) + return host2.equals("localhost") || host2.endsWith(".localhost"); + + // For regular domains, compare top private domains + if (domain1.isTopPrivateDomain() && domain2.isTopPrivateDomain()) + return domain1.equals(domain2); + if (domain1.hasPublicSuffix() && domain2.hasPublicSuffix()) + return domain1.topPrivateDomain().equals(domain2.topPrivateDomain()); + + return false; + } + catch (IllegalArgumentException ex) + { + // Invalid domain name, fall back to simple equality check + if (log.isDebugEnabled()) log.debug("Could not parse domain names for comparison: {} and {}", host1, host2); + return false; + } + } + /** * Returns the root URI of the JAX-RS application. 
* diff --git a/src/main/resources/com/atomgraph/linkeddatahub/lapp.ttl b/src/main/resources/com/atomgraph/linkeddatahub/lapp.ttl index fee16d648..81402fb3d 100644 --- a/src/main/resources/com/atomgraph/linkeddatahub/lapp.ttl +++ b/src/main/resources/com/atomgraph/linkeddatahub/lapp.ttl @@ -17,8 +17,8 @@ : a owl:Ontology ; owl:imports ldh:, ldt:, sp:, spin: ; - rdfs:label "AtomGraph Application ontology" ; - rdfs:comment "Ontology of AtomGraph applications" ; + rdfs:label "LinkedDataHub application ontology" ; + rdfs:comment "Ontology of LinkedDataHub applications" ; owl:versionInfo "1.1.4" . # PROPERTIES diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml index 606e76265..a31768cc5 100644 --- a/src/main/webapp/WEB-INF/web.xml +++ b/src/main/webapp/WEB-INF/web.xml @@ -57,29 +57,39 @@ PREFIX acl: PREFIX foaf: DESCRIBE ?auth -FROM WHERE { - { ?auth acl:agent $agent } - UNION - { ?auth acl:agentGroup ?Group . - ?Group foaf:member $agent - } - UNION - { ?auth acl:agentClass foaf:Agent } - UNION - { ?auth acl:agentClass $AuthenticatedAgentClass } + GRAPH ?g { + { ?auth acl:agent $agent } + UNION + { ?auth acl:agentGroup ?Group - ?auth acl:mode ?Mode . - - { ?auth acl:accessTo $this } - UNION - { { ?auth acl:accessToClass $Type } - UNION - { ?auth acl:accessToClass ?Class . - $Type (rdfs:subClassOf)* ?Class + GRAPH ?groupG { + ?Group foaf:member $agent } - } + FILTER(strstarts(str(?groupG), str($base))) + + } + UNION + { ?auth acl:agentClass foaf:Agent } + UNION + { ?auth acl:agentClass $AuthenticatedAgentClass } + + ?auth acl:mode ?Mode . + + { ?auth acl:accessTo $this } + UNION + { { ?auth acl:accessToClass $Type } + UNION + { ?auth acl:accessToClass ?Class . + GRAPH { + $Type (rdfs:subClassOf)* ?Class + } + } + } + } + + FILTER(strstarts(str(?g), str($base))) } ]]> @@ -91,41 +101,53 @@ PREFIX acl: PREFIX foaf: DESCRIBE ?auth -FROM WHERE - { { ?auth acl:mode acl:Control . - { ?auth acl:agent $agent } - UNION - { ?auth acl:agentGroup ?Group . 
- ?Group foaf:member $agent - } - } - UNION - { ?auth acl:agentClass $AuthenticatedAgentClass ; - acl:mode ?Mode - { ?auth acl:accessTo $this + { GRAPH ?g + { { ?auth acl:mode acl:Control . + { ?auth acl:agent $agent } + UNION + { ?auth acl:agentGroup ?Group + + GRAPH ?groupG { + ?Group foaf:member $agent + } + FILTER(strstarts(str(?groupG), str($base))) + + } } UNION - { ?auth acl:accessToClass $Type } - UNION - { ?auth acl:accessToClass ?Class . - $Type (rdfs:subClassOf)* ?Class + { ?auth acl:agentClass $AuthenticatedAgentClass ; + acl:mode ?Mode + { ?auth acl:accessTo $this + } + UNION + { ?auth acl:accessToClass $Type } + UNION + { ?auth acl:accessToClass ?Class . + GRAPH { + $Type (rdfs:subClassOf)* ?Class + } + } } - } - UNION - { ?auth acl:agentClass foaf:Agent ; - acl:mode acl:Read - { ?auth acl:accessTo $this - } - UNION - { ?auth acl:accessToClass $Type } UNION - { ?auth acl:accessToClass ?Class . - $Type (rdfs:subClassOf)* ?Class + { ?auth acl:agentClass foaf:Agent ; + acl:mode acl:Read + { ?auth acl:accessTo $this + } + UNION + { ?auth acl:accessToClass $Type } + UNION + { ?auth acl:accessToClass ?Class . 
+ GRAPH { + $Type (rdfs:subClassOf)* ?Class + } + } + # only namespace, signup, OAuth2 login and WebID profiles can be public in admin app, nothing else + FILTER ( $this IN (uri(concat(str($base), "ns")), uri(concat(str($base), "sign%20up")), uri(concat(str($base), "transform"))) || strstarts(str($this), concat(str($base), "acl/agents/")) || strstarts(str($this), concat(str($base), "acl/public-keys/"))) } - # only namespace, signup, OAuth2 login and WebID profiles can be public in admin app, nothing else - FILTER ( $this IN (uri(concat(str($base), "ns")), uri(concat(str($base), "sign%20up")), uri(concat(str($base), "oauth2/login")), uri(concat(str($base), "oauth2/authorize/google")), uri(concat(str($base), "transform"))) || strstarts(str($this), concat(str($base), "acl/agents/")) || strstarts(str($this), concat(str($base), "acl/public-keys/"))) } + + FILTER(strstarts(str(?g), str($base))) } ]]> @@ -134,10 +156,11 @@ WHERE -DESCRIBE ?Agent +SELECT ?Agent ?agentGraph WHERE - { GRAPH ?g - { ?Agent foaf:mbox $mbox + { + { GRAPH ?agentGraph + { ?Agent foaf:mbox $mbox } } } ]]> @@ -152,7 +175,7 @@ DESCRIBE ?account ?agent WHERE { GRAPH ?g { ?account sioc:id $id ; - lacl:issuer ?issuer ; + lacl:issuer $issuer ; sioc:account_of ?agent } } @@ -350,6 +373,22 @@ support@atomgraph.com]]> com.atomgraph.linkeddatahub.Application /* + + CORS filter + org.apache.catalina.filters.CorsFilter + + cors.allowed.origins + * + + + cors.allowed.methods + GET,HEAD,OPTIONS + + + + CORS filter + /static/* + HSTS filter org.apache.catalina.filters.HttpHeaderSecurityFilter diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/css/bootstrap.css b/src/main/webapp/static/com/atomgraph/linkeddatahub/css/bootstrap.css index 6dd359dd9..24ee6b806 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/css/bootstrap.css +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/css/bootstrap.css @@ -85,6 +85,7 @@ li button.btn-edit-constructors, li button.btn-add-data, li 
button.btn-add-ontol .btn.btn-expanded-tree { height: 24px; width: 24px; background-image: url('../icons/chevron_right_black_24dp.svg'); } .btn.btn-expanded-tree:hover, .btn.btn-expanded-tree:focus { background-position: 0 0; } .left-nav .nav, .right-nav .nav { max-height: 20em; overflow: auto; } +.left-nav, .right-nav { position: sticky; top: 0; } .btn.btn-order-by { font-size: 0; color: transparent; background-image: url('../icons/arrow_downward-black-24dp.svg'); background-position: 12px center; background-repeat: no-repeat; padding: 5px 5px 5px 40px; } .btn.btn-order-by.btn-order-by-desc { font-size: 0; color: transparent; background-image: url('../icons/arrow_upward-black-24dp.svg'); background-position: 12px center; background-repeat: no-repeat; padding: 5px 5px 5px 40px; } .caret.caret-reversed { border-bottom: 4px solid #000000; border-top-width: 0; } diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/admin/signup.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/admin/signup.xsl index fc566fdd3..6e56358fb 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/admin/signup.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/admin/signup.xsl @@ -10,7 +10,6 @@ - diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block.xsl index acf3589bf..1cb4c8f1b 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block.xsl @@ -345,60 +345,80 @@ exclude-result-prefixes="#all" - + - - + + + + + + + - + - + + + + + + - + + + + + - - - + + + + - + - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + diff --git 
a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/chart.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/chart.xsl index 174cf2a81..5541a334b 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/chart.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/chart.xsl @@ -115,7 +115,7 @@ exclude-result-prefixes="#all" - + @@ -635,24 +635,24 @@ exclude-result-prefixes="#all" - + - + - + - + - + diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/object.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/object.xsl index b56d84580..fa77ecdbe 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/object.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/object.xsl @@ -180,8 +180,8 @@ exclude-result-prefixes="#all" - ldh:block-object-value-response $resource-uri: - + + - @@ -813,7 +812,7 @@ exclude-result-prefixes="#all" - + - + - + - + - + @@ -117,7 +117,7 @@ exclude-result-prefixes="#all"

- +

@@ -195,7 +195,7 @@ exclude-result-prefixes="#all"