diff --git a/bin/imports/create-query.sh b/bin/add-construct.sh
similarity index 72%
rename from bin/imports/create-query.sh
rename to bin/add-construct.sh
index f9d793498..5549b25ab 100755
--- a/bin/imports/create-query.sh
+++ b/bin/add-construct.sh
@@ -5,7 +5,7 @@ print_usage()
 {
     printf "Creates a SPARQL CONSTRUCT query.\n"
     printf "\n"
-    printf "Usage: %s options\n" "$0"
+    printf "Usage: %s options TARGET_URI\n" "$0"
     printf "\n"
     printf "Options:\n"
     printf "  -f, --cert-pem-file CERT_FILE        .pem file with the WebID certificate of the agent\n"
@@ -13,20 +13,16 @@ print_usage()
     printf "  -b, --base BASE_URI                  Base URI of the application\n"
     printf "  --proxy PROXY_URL                    The host this request will be proxied through (optional)\n"
     printf "\n"
-    printf "  --title TITLE                        Title of the chart\n"
-    printf "  --description DESCRIPTION            Description of the chart (optional)\n"
-    printf "  --slug STRING                        String that will be used as URI path segment (optional)\n"
+    printf "  --title TITLE                        Title of the query\n"
+    printf "  --description DESCRIPTION            Description of the query (optional)\n"
+    printf "  --uri URI                            URI of the query (optional)\n"
     printf "\n"
     printf "  --query-file ABS_PATH                Absolute path to the text file with the SPARQL query string\n"
+    printf "  --service SERVICE_URI                URI of the SPARQL service specific to this query (optional)\n"
 }
 
 hash turtle 2>/dev/null || { echo >&2 "turtle not on \$PATH. Aborting."; exit 1; }
 
-urlencode() {
-  python -c 'import urllib.parse, sys; print(urllib.parse.quote(sys.argv[1], sys.argv[2]))' \
-    "$1" "$urlencode_safe"
-}
-
 args=()
 while [[ $# -gt 0 ]]
 do
@@ -63,8 +59,8 @@ do
         shift # past argument
         shift # past value
         ;;
-        --slug)
-        slug="$2"
+        --uri)
+        uri="$2"
         shift # past argument
         shift # past value
         ;;
@@ -73,6 +69,11 @@ do
         shift # past argument
         shift # past value
         ;;
+        --service)
+        service="$2"
+        shift # past argument
+        shift # past value
+        ;;
         *)    # unknown arguments
         args+=("$1") # save it in an array for later
         shift # past argument
@@ -81,6 +82,8 @@ do
 done
 set -- "${args[@]}" # restore args
 
+target="$1"
+
 if [ -z "$cert_pem_file" ] ; then
     print_usage
     exit 1
@@ -102,43 +105,38 @@ if [ -z "$query_file" ] ; then
     exit 1
 fi
 
-if [ -z "$slug" ] ; then
-    slug=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase
-fi
-encoded_slug=$(urlencode "$slug")
-
-container="${base}queries/"
 query=$(<"$query_file") # read query string from file
 
-target="${container}${encoded_slug}/"
-
 args+=("-f")
 args+=("$cert_pem_file")
 args+=("-p")
 args+=("$cert_password")
 args+=("-t")
 args+=("text/turtle") # content type
-args+=("$target")
 
 if [ -n "$proxy" ]; then
     args+=("--proxy")
     args+=("$proxy")
 fi
 
+if [ -n "$uri" ] ; then
+    subject="<${uri}>"
+else
+    subject="_:subject"
+fi
+
 turtle+="@prefix ldh: <https://w3id.org/atomgraph/linkeddatahub#> .\n"
-turtle+="@prefix dh: <https://www.w3.org/ns/ldt/document-hierarchy#> .\n"
 turtle+="@prefix dct: <http://purl.org/dc/terms/> .\n"
-turtle+="@prefix foaf: <http://xmlns.com/foaf/0.1/> .\n"
 turtle+="@prefix sp: <http://spinrdf.org/sp#> .\n"
-turtle+="_:query a sp:Construct .\n"
-turtle+="_:query dct:title \"${title}\" .\n"
-turtle+="_:query sp:text \"\"\"${query}\"\"\" .\n"
-turtle+="<${target}> a dh:Item .\n"
-turtle+="<${target}> foaf:primaryTopic _:query .\n"
-turtle+="<${target}> dct:title \"${title}\" .\n"
+turtle+="${subject} a sp:Construct .\n"
+turtle+="${subject} dct:title \"${title}\" .\n"
+turtle+="${subject} sp:text \"\"\"${query}\"\"\" .\n"
+if [ -n "$service" ] ; then
+    turtle+="${subject} ldh:service <${service}> .\n"
+fi
 if [ -n "$description" ] ; then
-    turtle+="_:query dct:description \"${description}\" .\n"
+    turtle+="${subject} dct:description \"${description}\" .\n"
 fi
 
 # submit Turtle doc to the server
-echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}"
\ No newline at end of file
+echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}"
\ No newline at end of file
\"ou=http://www.semanticdesktop.org/ontologies/2007/03/22/nfo#FileDataObject\"\n" -rdf_post+="-F \"su=${target}\"\n" -rdf_post+="-F \"pu=http://purl.org/dc/terms/title\"\n" -rdf_post+="-F \"ol=${title}\"\n" -rdf_post+="-F \"pu=http://www.w3.org/1999/02/22-rdf-syntax-ns#type\"\n" -rdf_post+="-F \"ou=https://www.w3.org/ns/ldt/document-hierarchy#Item\"\n" -rdf_post+="-F \"pu=http://xmlns.com/foaf/0.1/primaryTopic\"\n" -rdf_post+="-F \"ob=file\"\n" -rdf_post+="-F \"pu=http://rdfs.org/sioc/ns#has_container\"\n" -rdf_post+="-F \"ou=${container}\"\n" if [ -n "$description" ] ; then - rdf_post+="-F \"sb=file\"\n" rdf_post+="-F \"pu=http://purl.org/dc/terms/description\"\n" rdf_post+="-F \"ol=${description}\"\n" fi @@ -176,14 +128,5 @@ if [ -n "$proxy" ]; then target="${target/$target_host/$proxy_host}" fi -# POST RDF/POST multipart form and capture the effective URL -effective_url=$(echo -e "$rdf_post" | curl -w '%{url_effective}' -f -v -s -k -X PUT -H "Accept: text/turtle" -E "$cert_pem_file":"$cert_password" -o /dev/null --config - "$target") - -# If using proxy, rewrite the effective URL back to original hostname -if [ -n "$proxy" ]; then - # Replace proxy host with original host in the effective URL - rewritten_url="${effective_url/$proxy_host/$target_host}" - echo "$rewritten_url" -else - echo "$effective_url" -fi +# POST RDF/POST multipart form +echo -e "$rdf_post" | curl -f -v -s -k -X POST -H "Accept: text/turtle" -E "$cert_pem_file":"$cert_password" -o /dev/null --config - "$target" diff --git a/bin/delete.sh b/bin/delete.sh new file mode 100755 index 000000000..1099bb488 --- /dev/null +++ b/bin/delete.sh @@ -0,0 +1,71 @@ +#!/usr/bin/env bash + +print_usage() +{ + printf "Deletes an RDF document.\n" + printf "\n" + printf "Usage: %s options TARGET_URI\n" "$0" + printf "\n" + printf "Options:\n" + printf " -f, --cert-pem-file CERT_FILE .pem file with the WebID certificate of the agent\n" + printf " -p, --cert-password CERT_PASSWORD Password of the WebID certificate\n" + printf " --proxy PROXY_URL The host this request will be proxied through (optional)\n" +} + +hash curl 2>/dev/null || { echo >&2 "curl not on \$PATH. 
Aborting."; exit 1; } + +unknown=() +while [[ $# -gt 0 ]] +do + key="$1" + + case $key in + -f|--cert-pem-file) + cert_pem_file="$2" + shift # past argument + shift # past value + ;; + -p|--cert-password) + cert_password="$2" + shift # past argument + shift # past value + ;; + --proxy) + proxy="$2" + shift # past argument + shift # past value + ;; + *) # unknown option + unknown+=("$1") # save it in an array for later + shift # past argument + ;; + esac +done +set -- "${unknown[@]}" # restore args + +if [ -z "$cert_pem_file" ] ; then + print_usage + exit 1 +fi +if [ -z "$cert_password" ] ; then + print_usage + exit 1 +fi +if [ "$#" -ne 1 ]; then + print_usage + exit 1 +fi + +url="$1" + +if [ -n "$proxy" ]; then + # rewrite target hostname to proxy hostname + url_host=$(echo "$url" | cut -d '/' -f 1,2,3) + proxy_host=$(echo "$proxy" | cut -d '/' -f 1,2,3) + final_url="${url/$url_host/$proxy_host}" +else + final_url="$url" +fi + +# DELETE the document +curl -f -v -k -E "$cert_pem_file":"$cert_password" -X DELETE -o /dev/null "$final_url" diff --git a/bin/imports/create-csv-import.sh b/bin/imports/add-csv-import.sh similarity index 73% rename from bin/imports/create-csv-import.sh rename to bin/imports/add-csv-import.sh index f7edac6cd..5b01392b1 100755 --- a/bin/imports/create-csv-import.sh +++ b/bin/imports/add-csv-import.sh @@ -5,7 +5,7 @@ print_usage() { printf "Transforms CSV data into RDF using a SPARQL query and imports it.\n" printf "\n" - printf "Usage: %s options\n" "$0" + printf "Usage: %s options TARGET_URI\n" "$0" printf "\n" printf "Options:\n" printf " -f, --cert-pem-file CERT_FILE .pem file with the WebID certificate of the agent\n" @@ -13,9 +13,9 @@ print_usage() printf " -b, --base BASE_URI Base URI of the application\n" printf " --proxy PROXY_URL The host this request will be proxied through (optional)\n" printf "\n" - printf " --title TITLE Title of the container\n" - printf " --description DESCRIPTION Description of the container (optional)\n" - printf " --slug STRING String that will be used as URI path segment (optional)\n" + printf " --title TITLE Title of the import\n" + printf " --description DESCRIPTION Description of the import (optional)\n" + printf " --uri URI URI of the import resource (optional)\n" printf "\n" printf " --query QUERY_URI URI of the CONSTRUCT mapping query\n" printf " --file FILE_URI URI of the CSV file\n" @@ -24,11 +24,6 @@ print_usage() hash turtle 2>/dev/null || { echo >&2 "turtle not on \$PATH. 
Aborting."; exit 1; } -urlencode() { - python -c 'import urllib.parse, sys; print(urllib.parse.quote(sys.argv[1], sys.argv[2]))' \ - "$1" "$urlencode_safe" -} - args=() while [[ $# -gt 0 ]] do @@ -65,8 +60,8 @@ do shift # past argument shift # past value ;; - --slug) - slug="$2" + --uri) + uri="$2" shift # past argument shift # past value ;; @@ -93,6 +88,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -122,14 +119,11 @@ if [ -z "$delimiter" ] ; then exit 1 fi -if [ -z "$slug" ] ; then - slug=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase +if [ -n "$uri" ] ; then + subject="<${uri}>" +else + subject="_:import" fi -encoded_slug=$(urlencode "$slug") - -container="${base}imports/" - -target="${container}${encoded_slug}/" args+=("-f") args+=("$cert_pem_file") @@ -137,29 +131,23 @@ args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type -args+=("$target") if [ -n "$proxy" ]; then args+=("--proxy") args+=("$proxy") fi turtle+="@prefix ldh: .\n" -turtle+="@prefix dh: .\n" turtle+="@prefix dct: .\n" -turtle+="@prefix foaf: .\n" turtle+="@prefix spin: .\n" -turtle+="_:import a ldh:CSVImport .\n" -turtle+="_:import dct:title \"${title}\" .\n" -turtle+="_:import spin:query <${query}> .\n" -turtle+="_:import ldh:file <${file}> .\n" -turtle+="_:import ldh:delimiter \"${delimiter}\" .\n" -turtle+="<${target}> a dh:Item .\n" -turtle+="<${target}> foaf:primaryTopic _:import .\n" -turtle+="<${target}> dct:title \"${title}\" .\n" +turtle+="${subject} a ldh:CSVImport .\n" +turtle+="${subject} dct:title \"${title}\" .\n" +turtle+="${subject} spin:query <${query}> .\n" +turtle+="${subject} ldh:file <${file}> .\n" +turtle+="${subject} ldh:delimiter \"${delimiter}\" .\n" if [ -n "$description" ] ; then - turtle+="_:import dct:description \"${description}\" .\n" + turtle+="${subject} dct:description \"${description}\" .\n" fi # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" \ No newline at end of file diff --git a/bin/imports/create-rdf-import.sh b/bin/imports/add-rdf-import.sh similarity index 73% rename from bin/imports/create-rdf-import.sh rename to bin/imports/add-rdf-import.sh index 8d76b5e48..c47e68011 100755 --- a/bin/imports/create-rdf-import.sh +++ b/bin/imports/add-rdf-import.sh @@ -5,7 +5,7 @@ print_usage() { printf "Imports RDF data.\n" printf "\n" - printf "Usage: %s options\n" "$0" + printf "Usage: %s options TARGET_URI\n" "$0" printf "\n" printf "Options:\n" printf " -f, --cert-pem-file CERT_FILE .pem file with the WebID certificate of the agent\n" @@ -13,9 +13,9 @@ print_usage() printf " -b, --base BASE_URI Base URI of the application\n" printf " --proxy PROXY_URL The host this request will be proxied through (optional)\n" printf "\n" - printf " --title TITLE Title of the container\n" - printf " --description DESCRIPTION Description of the container (optional)\n" - printf " --slug STRING String that will be used as URI path segment (optional)\n" + printf " --title TITLE Title of the import\n" + printf " --description DESCRIPTION Description of the import (optional)\n" + printf " --uri URI URI of the import resource (optional)\n" printf "\n" printf " --query QUERY_URI URI of the CONSTRUCT mapping query (optional)\n" printf " --graph GRAPH_URI URI of the graph (optional)\n" @@ -24,11 +24,6 @@ print_usage() hash turtle 2>/dev/null || { echo >&2 
"turtle not on \$PATH. Aborting."; exit 1; } -urlencode() { - python -c 'import urllib.parse, sys; print(urllib.parse.quote(sys.argv[1], sys.argv[2]))' \ - "$1" "$urlencode_safe" -} - args=() while [[ $# -gt 0 ]] do @@ -65,8 +60,8 @@ do shift # past argument shift # past value ;; - --slug) - slug="$2" + --uri) + uri="$2" shift # past argument shift # past value ;; @@ -93,6 +88,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -114,14 +111,11 @@ if [ -z "$file" ] ; then exit 1 fi -if [ -z "$slug" ] ; then - slug=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase +if [ -n "$uri" ] ; then + subject="<${uri}>" +else + subject="_:import" fi -encoded_slug=$(urlencode "$slug") - -container="${base}imports/" - -target="${container}${encoded_slug}/" args+=("-f") args+=("$cert_pem_file") @@ -129,34 +123,28 @@ args+=("-p") args+=("$cert_password") args+=("-t") args+=("text/turtle") # content type -args+=("$target") if [ -n "$proxy" ]; then args+=("--proxy") args+=("$proxy") fi turtle+="@prefix ldh: .\n" -turtle+="@prefix dh: .\n" turtle+="@prefix dct: .\n" -turtle+="@prefix foaf: .\n" -turtle+="_:import a ldh:RDFImport .\n" -turtle+="_:import dct:title \"${title}\" .\n" -turtle+="_:import ldh:file <${file}> .\n" -turtle+="<${target}> a dh:Item .\n" -turtle+="<${target}> foaf:primaryTopic _:import .\n" -turtle+="<${target}> dct:title \"${title}\" .\n" +turtle+="${subject} a ldh:RDFImport .\n" +turtle+="${subject} dct:title \"${title}\" .\n" +turtle+="${subject} ldh:file <${file}> .\n" if [ -n "$graph" ] ; then turtle+="@prefix sd: .\n" - turtle+="_:import sd:name <${graph}> .\n" + turtle+="${subject} sd:name <${graph}> .\n" fi if [ -n "$query" ] ; then turtle+="@prefix spin: .\n" - turtle+="_:import spin:query <${query}> .\n" + turtle+="${subject} spin:query <${query}> .\n" fi if [ -n "$description" ] ; then - turtle+="_:import dct:description \"${description}\" .\n" + turtle+="${subject} dct:description \"${description}\" .\n" fi # submit Turtle doc to the server -echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" \ No newline at end of file diff --git a/bin/imports/import-csv.sh b/bin/imports/import-csv.sh index d7c55dd38..55838d7ff 100755 --- a/bin/imports/import-csv.sh +++ b/bin/imports/import-csv.sh @@ -12,7 +12,7 @@ print_usage() { printf "Transforms CSV data into RDF using a SPARQL query and imports it.\n" printf "\n" - printf "Usage: %s options\n" "$0" + printf "Usage: %s options TARGET_URI\n" "$0" printf "\n" printf "Options:\n" printf " -f, --cert-pem-file CERT_FILE .pem file with the WebID certificate of the agent\n" @@ -25,12 +25,8 @@ print_usage() printf " --slug STRING String that will be used as URI path segment (optional)\n" printf "\n" printf " --query-file ABS_PATH Absolute path to the text file with the SPARQL query string\n" - printf " --query-doc-slug STRING String that will be used as the query's URI path segment (optional)\n" - printf " --file ABS_PATH Absolute path to the CSV file\n" - printf " --file-slug STRING String that will be used as the file's URI path segment (optional)\n" - printf " --file-doc-slug STRING String that will be used as the file document's URI path segment (optional)\n" + printf " --csv-file ABS_PATH Absolute path to the CSV file\n" printf " --delimiter CHAR CSV delimiter char (default: ',')\n" - printf " --import-slug STRING String that will be used as the import's URI path 
diff --git a/bin/imports/import-csv.sh b/bin/imports/import-csv.sh
index d7c55dd38..55838d7ff 100755
--- a/bin/imports/import-csv.sh
+++ b/bin/imports/import-csv.sh
@@ -12,7 +12,7 @@ print_usage()
 {
     printf "Transforms CSV data into RDF using a SPARQL query and imports it.\n"
     printf "\n"
-    printf "Usage: %s options\n" "$0"
+    printf "Usage: %s options TARGET_URI\n" "$0"
     printf "\n"
     printf "Options:\n"
     printf "  -f, --cert-pem-file CERT_FILE        .pem file with the WebID certificate of the agent\n"
@@ -25,12 +25,8 @@ print_usage()
     printf "  --slug STRING                        String that will be used as URI path segment (optional)\n"
     printf "\n"
     printf "  --query-file ABS_PATH                Absolute path to the text file with the SPARQL query string\n"
-    printf "  --query-doc-slug STRING              String that will be used as the query's URI path segment (optional)\n"
-    printf "  --file ABS_PATH                      Absolute path to the CSV file\n"
-    printf "  --file-slug STRING                   String that will be used as the file's URI path segment (optional)\n"
-    printf "  --file-doc-slug STRING               String that will be used as the file document's URI path segment (optional)\n"
+    printf "  --csv-file ABS_PATH                  Absolute path to the CSV file\n"
     printf "  --delimiter CHAR                     CSV delimiter char (default: ',')\n"
-    printf "  --import-slug STRING                 String that will be used as the import's URI path segment (optional)\n"
 }
 
 args=()
@@ -69,23 +65,8 @@ do
         shift # past argument
         shift # past value
         ;;
-        --query-doc-slug)
-        query_doc_slug="$2"
-        shift # past argument
-        shift # past value
-        ;;
-        --file)
-        file="$2"
-        shift # past argument
-        shift # past value
-        ;;
-        --file-slug)
-        file_slug="$2"
-        shift # past argument
-        shift # past value
-        ;;
-        --file-doc-slug)
-        file_doc_slug="$2"
+        --csv-file)
+        csv_file="$2"
         shift # past argument
         shift # past value
         ;;
@@ -94,11 +75,6 @@ do
         shift # past argument
         shift # past value
         ;;
-        --import-slug)
-        import_slug="$2"
-        shift # past argument
-        shift # past value
-        ;;
         *)    # unknown arguments
         args+=("$1") # save it in an array for later
         shift # past argument
@@ -107,6 +83,8 @@ do
 done
 set -- "${args[@]}" # restore args
 
+target="$1"
+
 if [ -z "$cert_pem_file" ] ; then
     print_usage
     exit 1
@@ -127,7 +105,7 @@ if [ -z "$query_file" ] ; then
     print_usage
     exit 1
 fi
-if [ -z "$file" ] ; then
+if [ -z "$csv_file" ] ; then
     print_usage
     exit 1
 fi
@@ -139,55 +117,52 @@ if [ -z "$proxy" ] ; then
     proxy="$base"
 fi
 
-query_doc=$(create-query.sh \
+# Generate query ID for fragment identifier
+query_id=$(uuidgen | tr '[:upper:]' '[:lower:]')
+
+# Add the CONSTRUCT query to the item using fragment identifier
+# TO-DO: fix ambiguous add-construct.sh script names
+"$(dirname "$0")/../add-construct.sh" \
     -b "$base" \
     -f "$cert_pem_file" \
     -p "$cert_password" \
     --proxy "$proxy" \
     --title "$title" \
-    --slug "$query_doc_slug" \
-    --query-file "$query_file"
-)
-
-query_ntriples=$(get.sh \
-    -f "$cert_pem_file" \
-    -p "$cert_password" \
-    --proxy "$proxy" \
-    --accept 'application/n-triples' \
-    "$query_doc"
-)
+    --uri "#${query_id}" \
+    --query-file "$query_file" \
+    "$target"
 
-query=$(echo "$query_ntriples" | sed -rn "s/<${query_doc//\//\\/}> <(.*)> \./\1/p" | head -1)
+# The query URI is the document with fragment
+query="${target}#${query_id}"
 
-file_doc=$(create-file.sh \
+# Add the file to the import item
+add-file.sh \
     -b "$base" \
     -f "$cert_pem_file" \
     -p "$cert_password" \
     --proxy "$proxy" \
     --title "$title" \
-    --slug "$file_doc_slug" \
-    --file-slug "$file_slug" \
-    --file "$file" \
-    --file-content-type "text/csv"
-)
+    --file "$csv_file" \
+    --content-type "text/csv" \
+    "$target"
 
-file_ntriples=$(get.sh \
-    -f "$cert_pem_file" \
-    -p "$cert_password" \
-    --proxy "$proxy" \
-    --accept 'application/n-triples' \
-    "$file_doc")
+# Calculate file URI from SHA1 hash
+sha1sum=$(shasum -a 1 "$csv_file" | awk '{print $1}')
+file_uri="${base}uploads/${sha1sum}"
 
-file=$(echo "$file_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p" | head -1)
+# Generate import ID for fragment identifier
+import_id=$(uuidgen | tr '[:upper:]' '[:lower:]')
 
-create-csv-import.sh \
+# Add the import metadata to the import item using fragment identifier
+add-csv-import.sh \
     -b "$base" \
     -f "$cert_pem_file" \
     -p "$cert_password" \
     --proxy "$proxy" \
     --title "$title" \
-    --slug "$import_slug" \
+    --uri "#${import_id}" \
     --query "$query" \
-    --file "$file" \
-    --delimiter "$delimiter"
+    --file "$file_uri" \
+    --delimiter "$delimiter" \
+    "$target"
\ No newline at end of file
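Net effect of the rewritten pipeline: a single item document receives the query, the file reference, and the import metadata. A sketch of the URIs involved (the target document value is an assumption):

    target="https://localhost:4443/imports/city-import/"  # item document passed as TARGET_URI
    query="${target}#${query_id}"                         # CONSTRUCT query, a fragment of the item
    file_uri="${base}uploads/${sha1sum}"                  # content-addressed upload
    import="${target}#${import_id}"                       # ldh:CSVImport metadata, another fragment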
diff --git a/bin/imports/import-rdf.sh b/bin/imports/import-rdf.sh
index 086d4d303..d0ce8dae0 100755
--- a/bin/imports/import-rdf.sh
+++ b/bin/imports/import-rdf.sh
@@ -10,9 +10,9 @@ function onexit() {
 
 print_usage()
 {
-    printf "Transforms CSV data into RDF using a SPARQL query and imports it.\n"
+    printf "Transforms RDF data using a SPARQL query and imports it.\n"
     printf "\n"
-    printf "Usage: %s options\n" "$0"
+    printf "Usage: %s options TARGET_URI\n" "$0"
     printf "\n"
     printf "Options:\n"
     printf "  -f, --cert-pem-file CERT_FILE        .pem file with the WebID certificate of the agent\n"
@@ -25,13 +25,9 @@ print_usage()
     printf "  --slug STRING                        String that will be used as URI path segment (optional)\n"
     printf "\n"
     printf "  --query-file ABS_PATH                Absolute path to the text file with the SPARQL query string (optional)\n"
-    printf "  --query-doc-slug STRING              String that will be used as the query's URI path segment (optional)\n"
     printf "  --graph GRAPH_URI                    URI of the graph (optional)\n"
-    printf "  --file ABS_PATH                      Absolute path to the CSV file (optional)\n"
-    printf "  --file-slug STRING                   String that will be used as the file's URI path segment (optional)\n"
-    printf "  --file-doc-slug STRING               String that will be used as the file document's URI path segment (optional)\n"
-    printf "  --file-content-type MEDIA_TYPE       Media type of the file\n"
-    printf "  --import-slug STRING                 String that will be used as the import's URI path segment (optional)\n"
+    printf "  --rdf-file ABS_PATH                  Absolute path to the RDF file (optional)\n"
+    printf "  --content-type MEDIA_TYPE            Media type of the file\n"
 }
 
 args=()
@@ -75,33 +71,13 @@ do
         shift # past argument
         shift # past value
         ;;
-        --query-doc-slug)
-        query_doc_slug="$2"
+        --rdf-file)
+        rdf_file="$2"
         shift # past argument
         shift # past value
         ;;
-        --file)
-        file="$2"
-        shift # past argument
-        shift # past value
-        ;;
-        --file-slug)
-        file_slug="$2"
-        shift # past argument
-        shift # past value
-        ;;
-        --file-doc-slug)
-        file_doc_slug="$2"
-        shift # past argument
-        shift # past value
-        ;;
-        --file-content-type)
-        file_content_type="$2"
-        shift # past argument
-        shift # past value
-        ;;
-        --import-slug)
-        import_slug="$2"
+        --content-type)
+        content_type="$2"
         shift # past argument
         shift # past value
         ;;
@@ -113,6 +89,8 @@ do
 done
 set -- "${args[@]}" # restore args
 
+target="$1"
+
 if [ -z "$cert_pem_file" ] ; then
     print_usage
     exit 1
@@ -129,11 +107,11 @@ if [ -z "$title" ] ; then
     print_usage
     exit 1
 fi
-if [ -z "$file" ] ; then
+if [ -z "$rdf_file" ] ; then
     print_usage
     exit 1
 fi
-if [ -z "$file_content_type" ] ; then
+if [ -z "$content_type" ] ; then
     print_usage
     exit 1
 fi
@@ -143,67 +121,64 @@ if [ -z "$proxy" ] ; then
 fi
 
 if [ -n "$query_file" ] ; then
-    query_doc=$(create-query.sh \
+    # Generate query ID for fragment identifier
+    query_id=$(uuidgen | tr '[:upper:]' '[:lower:]')
+
+    # Add the CONSTRUCT query to the item using fragment identifier
+    # TO-DO: fix ambiguous add-construct.sh script names
+    "$(dirname "$0")/../add-construct.sh" \
         -b "$base" \
         -f "$cert_pem_file" \
         -p "$cert_password" \
         --proxy "$proxy" \
         --title "$title" \
-        --slug "$query_doc_slug" \
-        --query-file "$query_file"
-    )
-
-    query_ntriples=$(get.sh \
-        -f "$cert_pem_file" \
-        -p "$cert_password" \
-        --proxy "$proxy" \
-        --accept 'application/n-triples' \
-        "$query_doc"
-    )
+        --uri "#${query_id}" \
+        --query-file "$query_file" \
+        "$target"
 
-    query=$(echo "$query_ntriples" | sed -rn "s/<${query_doc//\//\\/}> <(.*)> \./\1/p" | head -1)
+    # The query URI is the document with fragment
+    query="${target}#${query_id}"
 fi
 
-file_doc=$(create-file.sh \
+# Add the file to the import item
+add-file.sh \
     -b "$base" \
     -f "$cert_pem_file" \
     -p "$cert_password" \
     --proxy "$proxy" \
     --title "$title" \
-    --slug "$file_doc_slug" \
-    --file-slug "$file_slug" \
-    --file "$file" \
-    --file-content-type "$file_content_type"
-)
+    --file "$rdf_file" \
+    --content-type "$content_type" \
+    "$target"
 
-file_ntriples=$(get.sh \
-    -f "$cert_pem_file" \
-    -p "$cert_password" \
-    --proxy "$proxy" \
-    --accept 'application/n-triples' \
-    "$file_doc"
-)
+# Calculate file URI from SHA1 hash
+sha1sum=$(shasum -a 1 "$rdf_file" | awk '{print $1}')
+rdf_file_uri="${base}uploads/${sha1sum}"
 
-file=$(echo "$file_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p" | head -1)
+# Generate import ID for fragment identifier
+import_id=$(uuidgen | tr '[:upper:]' '[:lower:]')
 
+# Add the import metadata to the import item using fragment identifier
 if [ -n "$query" ] ; then
-    create-rdf-import.sh \
+    add-rdf-import.sh \
         -b "$base" \
         -f "$cert_pem_file" \
         -p "$cert_password" \
         --proxy "$proxy" \
         --title "$title" \
-        --slug "$import_slug" \
+        --uri "#${import_id}" \
         --query "$query" \
-        --file "$file"
+        --file "$rdf_file_uri" \
+        "$target"
 else
-    create-rdf-import.sh \
+    add-rdf-import.sh \
         -b "$base" \
         -f "$cert_pem_file" \
         -p "$cert_password" \
         --proxy "$proxy" \
         --title "$title" \
-        --slug "$import_slug" \
+        --uri "#${import_id}" \
         --graph "$graph" \
-        --file "$file"
+        --file "$rdf_file_uri" \
+        "$target"
 fi
\ No newline at end of file
diff --git a/http-tests/admin/model/ontology-import-upload-no-deadlock.sh b/http-tests/admin/model/ontology-import-upload-no-deadlock.sh
new file mode 100755
index 000000000..935facd7e
--- /dev/null
+++ b/http-tests/admin/model/ontology-import-upload-no-deadlock.sh
@@ -0,0 +1,108 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# Test that ontology imports of uploaded files do not cause deadlock
+# This verifies the fix for circular dependency when:
+# 1. Request arrives for /uploads/xyz
+# 2. OntologyFilter intercepts it and loads ontology
+# 3. Ontology has owl:imports for /uploads/xyz
+# 4. Jena FileManager makes HTTP request to /uploads/xyz
+# 5. Would cause infinite loop/deadlock without the fix
+
+initialize_dataset "$END_USER_BASE_URL" "$TMP_END_USER_DATASET" "$END_USER_ENDPOINT_URL"
+initialize_dataset "$ADMIN_BASE_URL" "$TMP_ADMIN_DATASET" "$ADMIN_ENDPOINT_URL"
+purge_cache "$END_USER_VARNISH_SERVICE"
+purge_cache "$ADMIN_VARNISH_SERVICE"
+purge_cache "$FRONTEND_VARNISH_SERVICE"
+
+pwd=$(realpath "$PWD")
+
+# add agent to the writers group so they can upload files
+
+add-agent-to-group.sh \
+    -f "$OWNER_CERT_FILE" \
+    -p "$OWNER_CERT_PWD" \
+    --agent "$AGENT_URI" \
+    "${ADMIN_BASE_URL}acl/groups/writers/"
+
+# Step 1: Upload an RDF file
+
+file_content_type="text/turtle"
+slug=$(uuidgen | tr '[:upper:]' '[:lower:]')
+
+# Create an item document to hold the file
+file_doc=$(create-item.sh \
+    -f "$AGENT_CERT_FILE" \
+    -p "$AGENT_CERT_PWD" \
+    -b "$END_USER_BASE_URL" \
+    --title "Test ontology for upload import" \
+    --container "$END_USER_BASE_URL" \
+    --slug "$slug")
+
+# Add the file to the document
+add-file.sh \
+    -f "$AGENT_CERT_FILE" \
+    -p "$AGENT_CERT_PWD" \
+    -b "$END_USER_BASE_URL" \
+    --title "Test ontology for upload import" \
+    --file "$pwd/test-ontology-import.ttl" \
+    --content-type "${file_content_type}" \
+    "$file_doc"
+
+# Step 2: Extract the uploaded file URI (content-addressed)
+
+# Calculate file URI from SHA1 hash
+sha1sum=$(shasum -a 1 "$pwd/test-ontology-import.ttl" | awk '{print $1}')
+upload_uri="${END_USER_BASE_URL}uploads/${sha1sum}"
+
+# Verify the uploaded file is accessible before we add it as an import
+curl -k -f -s \
+    -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \
+    -H "Accept: ${file_content_type}" \
+    "$upload_uri" > /dev/null
+
+# Step 3: Add the uploaded file as an owl:import to the namespace ontology
+
+namespace_doc="${END_USER_BASE_URL}ns"
+namespace="${namespace_doc}#"
+ontology_doc="${ADMIN_BASE_URL}ontologies/namespace/"
+
+add-ontology-import.sh \
+    -f "$OWNER_CERT_FILE" \
+    -p "$OWNER_CERT_PWD" \
+    --import "$upload_uri" \
+    "$ontology_doc"
+
+# Step 4: Clear the namespace ontology from memory to force reload on next request
+
+clear-ontology.sh \
+    -f "$OWNER_CERT_FILE" \
+    -p "$OWNER_CERT_PWD" \
+    -b "$ADMIN_BASE_URL" \
+    --ontology "$namespace"
+
+# Step 5: Verify the import is present in the loaded ontology
+# This request also triggers ontology loading and would detect deadlock
+
+curl -k -f -s \
+    -H "Accept: application/n-triples" \
+    "$namespace_doc" \
+| grep "<${namespace}> <http://www.w3.org/2002/07/owl#imports> <${upload_uri}>" > /dev/null
+
+# Step 6: Verify the uploaded file is still accessible after ontology loading
+
+curl -k -f -s \
+    -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \
+    -H "Accept: ${file_content_type}" \
+    "$upload_uri" > /dev/null
+
+# Step 7: Verify that the imported ontology content is accessible via the namespace document
+# This confirms the import was actually loaded (not just skipped)
+
+curl -k -f -s \
+    -G \
+    -E "$OWNER_CERT_FILE":"$OWNER_CERT_PWD" \
+    -H 'Accept: application/sparql-results+xml' \
+    --data-urlencode "query=SELECT * { ?p ?o }" \
+    "$namespace_doc" \
+| grep 'Test Class' > /dev/null
diff --git a/http-tests/admin/model/test-ontology-import.ttl b/http-tests/admin/model/test-ontology-import.ttl
new file mode 100644
index 000000000..24361b035
--- /dev/null
+++ b/http-tests/admin/model/test-ontology-import.ttl
@@ -0,0 +1,17 @@
+@prefix :     <#> .
+@prefix owl:  <http://www.w3.org/2002/07/owl#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix xsd:  <http://www.w3.org/2001/XMLSchema#> .
+
+: a owl:Ontology ;
+    rdfs:label "Test ontology for upload import" ;
+    rdfs:comment "This ontology is uploaded to test that ontology imports of uploaded files do not cause deadlock" .
+
+:TestClass a owl:Class ;
+    rdfs:label "Test Class" ;
+    rdfs:comment "A test class to verify ontology was loaded" .
+
+:testProperty a owl:DatatypeProperty ;
+    rdfs:label "Test Property" ;
+    rdfs:domain :TestClass ;
+    rdfs:range xsd:string .
diff --git a/http-tests/imports/GET-file-304.sh b/http-tests/imports/GET-file-304.sh
index 8b4f3728c..1f38581f7 100755
--- a/http-tests/imports/GET-file-304.sh
+++ b/http-tests/imports/GET-file-304.sh
@@ -7,6 +7,7 @@ purge_cache "$END_USER_VARNISH_SERVICE"
 purge_cache "$ADMIN_VARNISH_SERVICE"
 purge_cache "$FRONTEND_VARNISH_SERVICE"
 
+# Run the create-file test and capture the file URI it outputs
 file=$(./create-file.sh)
 
 etag=$(
diff --git a/http-tests/imports/GET-file-range.sh b/http-tests/imports/GET-file-range.sh
index 649215916..c9c416308 100755
--- a/http-tests/imports/GET-file-range.sh
+++ b/http-tests/imports/GET-file-range.sh
@@ -22,22 +22,30 @@ add-agent-to-group.sh \
 filename="/tmp/random-file"
 time dd if=/dev/urandom of="$filename" bs=1 count=1024
 file_content_type="application/octet-stream"
+slug=$(uuidgen | tr '[:upper:]' '[:lower:]')
 
-file_doc=$(create-file.sh \
--f "$AGENT_CERT_FILE" \
--p "$AGENT_CERT_PWD" \
--b "$END_USER_BASE_URL" \
---title "Random file" \
---file "$filename" \
---file-content-type "${file_content_type}")
-
-file_doc_ntriples=$(get.sh \
+# Create an item document to hold the file
+file_doc=$(create-item.sh \
     -f "$AGENT_CERT_FILE" \
     -p "$AGENT_CERT_PWD" \
-    --accept 'application/n-triples' \
-    "$file_doc")
+    -b "$END_USER_BASE_URL" \
+    --title "Random file" \
+    --container "$END_USER_BASE_URL" \
+    --slug "$slug")
 
-file=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p")
+# Add the file to the document
+add-file.sh \
+    -f "$AGENT_CERT_FILE" \
+    -p "$AGENT_CERT_PWD" \
+    -b "$END_USER_BASE_URL" \
+    --title "Random file" \
+    --file "$filename" \
+    --content-type "${file_content_type}" \
+    "$file_doc"
+
+# Calculate file URI from SHA1 hash
+sha1sum=$(shasum -a 1 "$filename" | awk '{print $1}')
+file="${END_USER_BASE_URL}uploads/${sha1sum}"
 
 from=100
 length=42
diff --git a/http-tests/imports/GET-file-sha1sum.sh b/http-tests/imports/GET-file-sha1sum.sh
index 3384ffc4a..5b62d6bbc 100755
--- a/http-tests/imports/GET-file-sha1sum.sh
+++ b/http-tests/imports/GET-file-sha1sum.sh
@@ -23,21 +23,36 @@ filename="/tmp/random-file"
 time dd if=/dev/urandom of="$filename" bs=1 count=1024
 file_content_type="application/octet-stream"
 
-file_doc=$(create-file.sh \
--f "$AGENT_CERT_FILE" \
--p "$AGENT_CERT_PWD" \
--b "$END_USER_BASE_URL" \
---title "Random file" \
---file "$filename" \
---file-content-type "${file_content_type}")
-
-file_doc_ntriples=$(get.sh \
+# Create a container for files first
+create-container.sh \
     -f "$AGENT_CERT_FILE" \
     -p "$AGENT_CERT_PWD" \
-    --accept 'application/n-triples' \
-    "$file_doc")
+    -b "$END_USER_BASE_URL" \
+    --title "Files" \
+    --parent "$END_USER_BASE_URL" \
+    --slug "files"
 
-file=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p")
+# Create an item document to hold the file
+file_doc=$(create-item.sh \
+    -f "$AGENT_CERT_FILE" \
+    -p "$AGENT_CERT_PWD" \
+    -b "$END_USER_BASE_URL" \
+    --title "Random file" \
+    --container "${END_USER_BASE_URL}files/")
+
+# Add the file to the document
+add-file.sh \
+    -f "$AGENT_CERT_FILE" \
+    -p "$AGENT_CERT_PWD" \
+    -b "$END_USER_BASE_URL" \
+    --title "Random file" \
+    --file "$filename" \
+    --content-type "${file_content_type}" \
+    "$file_doc"
+
+# Calculate file URI from SHA1 hash
+sha1sum=$(shasum -a 1 "$filename" | awk '{print $1}')
+file="${END_USER_BASE_URL}uploads/${sha1sum}"
 
 server_sha1sum=$(echo "$file" | cut -d "/" -f 5) # cut the last URL path segment
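GET-file-range.sh goes on to exercise partial content against the content-addressed URI; a sketch of the request shape it tests, reusing the test's own from/length variables:

    to=$((from + length - 1))
    curl -k -f -s \
        -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \
        -H "Range: bytes=${from}-${to}" \
        -o /dev/null \
        "$file"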
diff --git a/http-tests/imports/PUT-file-format-explicit.sh b/http-tests/imports/PUT-file-format-explicit.sh
new file mode 100755
index 000000000..d480fcb4a
--- /dev/null
+++ b/http-tests/imports/PUT-file-format-explicit.sh
@@ -0,0 +1,111 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+initialize_dataset "$END_USER_BASE_URL" "$TMP_END_USER_DATASET" "$END_USER_ENDPOINT_URL"
+initialize_dataset "$ADMIN_BASE_URL" "$TMP_ADMIN_DATASET" "$ADMIN_ENDPOINT_URL"
+purge_cache "$END_USER_VARNISH_SERVICE"
+purge_cache "$ADMIN_VARNISH_SERVICE"
+purge_cache "$FRONTEND_VARNISH_SERVICE"
+
+pwd=$(realpath "$PWD")
+
+# add agent to the writers group
+
+add-agent-to-group.sh \
+    -f "$OWNER_CERT_FILE" \
+    -p "$OWNER_CERT_PWD" \
+    --agent "$AGENT_URI" \
+    "${ADMIN_BASE_URL}acl/groups/writers/"
+
+# create test file with sample content
+
+test_file=$(mktemp)
+echo "test,data,sample" > "$test_file"
+echo "1,2,3" >> "$test_file"
+echo "4,5,6" >> "$test_file"
+
+slug=$(uuidgen | tr '[:upper:]' '[:lower:]')
+
+# Create an item document to hold the file
+file_doc=$(create-item.sh \
+    -f "$AGENT_CERT_FILE" \
+    -p "$AGENT_CERT_PWD" \
+    -b "$END_USER_BASE_URL" \
+    --title "Test File for Media Type Update" \
+    --container "$END_USER_BASE_URL" \
+    --slug "$slug")
+
+# upload file with explicit media type: text/plain
+add-file.sh \
+    -f "$AGENT_CERT_FILE" \
+    -p "$AGENT_CERT_PWD" \
+    -b "$END_USER_BASE_URL" \
+    --title "Test File for Media Type Update" \
+    --file "$test_file" \
+    --content-type "text/plain" \
+    "$file_doc"
+
+# Calculate file URI from SHA1 hash
+sha1sum=$(shasum -a 1 "$test_file" | awk '{print $1}')
+file_uri="${END_USER_BASE_URL}uploads/${sha1sum}"
+
+# get the file resource URI and initial dct:format
+
+file_doc_ntriples=$(get.sh \
+    -f "$AGENT_CERT_FILE" \
+    -p "$AGENT_CERT_PWD" \
+    --accept 'application/n-triples' \
+    "$file_doc")
+
+# get initial SHA1 hash
+initial_sha1=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_uri//\//\\/}> \"(.*)\" \./\1/p")
+
+# get initial dct:format
+initial_format=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_uri//\//\\/}> <(.*)> \./\1/p")
+
+# verify initial format is text/plain
+if [[ ! "$initial_format" =~ text/plain ]]; then
+    echo "ERROR: Initial format should contain text/plain but got: $initial_format"
+    exit 1
+fi
+
+# re-upload the same file but different explicit media type: text/csv
+# this simulates editing the file document through the UI and uploading a new file
+
+add-file.sh \
+    -f "$AGENT_CERT_FILE" \
+    -p "$AGENT_CERT_PWD" \
+    -b "$END_USER_BASE_URL" \
+    --title "Test File for Media Type Update" \
+    --file "$test_file" \
+    --content-type "text/csv" \
+    "$file_doc"
+
+# get updated document
+
+updated_ntriples=$(get.sh \
+    -f "$AGENT_CERT_FILE" \
+    -p "$AGENT_CERT_PWD" \
+    --accept 'application/n-triples' \
+    "$file_doc")
+
+# get updated SHA1 hash (should be same as initial)
+updated_sha1=$(echo "$updated_ntriples" | sed -rn "s/<${file_uri//\//\\/}> \"(.*)\" \./\1/p")
+
+# get updated dct:format (should be text/csv)
+updated_format=$(echo "$updated_ntriples" | sed -rn "s/<${file_uri//\//\\/}> <(.*)> \./\1/p")
+
+# verify SHA1 is unchanged (same file content)
+if [ "$initial_sha1" != "$updated_sha1" ]; then
+    echo "ERROR: SHA1 hash changed! Initial: $initial_sha1, Updated: $updated_sha1"
+    exit 1
+fi
+
+# verify dct:format was updated to text/csv
+if [[ ! "$updated_format" =~ text/csv ]]; then
+    echo "ERROR: Format should have been updated to text/csv but got: $updated_format"
+    exit 1
+fi
+
+# cleanup
+rm -f "$test_file"
"$updated_format" =~ text/csv ]]; then + echo "ERROR: Format should have been updated to text/csv but got: $updated_format" + exit 1 +fi + +# cleanup +rm -f "$test_file" diff --git a/http-tests/imports/PUT-file-format.sh b/http-tests/imports/PUT-file-format.sh new file mode 100755 index 000000000..f066be396 --- /dev/null +++ b/http-tests/imports/PUT-file-format.sh @@ -0,0 +1,106 @@ +#!/usr/bin/env bash +set -euo pipefail + +initialize_dataset "$END_USER_BASE_URL" "$TMP_END_USER_DATASET" "$END_USER_ENDPOINT_URL" +initialize_dataset "$ADMIN_BASE_URL" "$TMP_ADMIN_DATASET" "$ADMIN_ENDPOINT_URL" +purge_cache "$END_USER_VARNISH_SERVICE" +purge_cache "$ADMIN_VARNISH_SERVICE" +purge_cache "$FRONTEND_VARNISH_SERVICE" + +pwd=$(realpath "$PWD") + +# add agent to the writers group + +add-agent-to-group.sh \ + -f "$OWNER_CERT_FILE" \ + -p "$OWNER_CERT_PWD" \ + --agent "$AGENT_URI" \ + "${ADMIN_BASE_URL}acl/groups/writers/" + +# create test file with sample content + +test_file=$(mktemp) +echo "test,data,sample" > "$test_file" +echo "1,2,3" >> "$test_file" +echo "4,5,6" >> "$test_file" + +slug=$(uuidgen | tr '[:upper:]' '[:lower:]') + +# Create an item document to hold the file +file_doc=$(create-item.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "Test File for Browser Media Type" \ + --container "$END_USER_BASE_URL" \ + --slug "$slug") + +# upload file WITHOUT explicit media type (rely on browser detection via `file -b --mime-type`) +add-file.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "Test File for Browser Media Type" \ + --file "$test_file" \ + "$file_doc" + +# Calculate file URI from SHA1 hash +sha1sum=$(shasum -a 1 "$test_file" | awk '{print $1}') +file_uri="${END_USER_BASE_URL}uploads/${sha1sum}" + +# get the file resource URI and initial dct:format + +file_doc_ntriples=$(get.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + --accept 'application/n-triples' \ + "$file_doc") + +# get initial SHA1 hash +initial_sha1=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_uri//\//\\/}> \"(.*)\" \./\1/p") + +# get initial dct:format (should be browser-detected) +initial_format=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_uri//\//\\/}> <(.*)> \./\1/p") + +# re-upload the same file but WITH explicit media type: text/csv +# this simulates editing and uploading with a corrected format after browser auto-detection was wrong + +add-file.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "Test File for Browser Media Type" \ + --file "$test_file" \ + --content-type "text/csv" \ + "$file_doc" \ + > /dev/null + +# get updated document + +updated_ntriples=$(get.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + --accept 'application/n-triples' \ + "$file_doc") + +# get updated SHA1 hash (should be same as initial) +updated_sha1=$(echo "$updated_ntriples" | sed -rn "s/<${file_uri//\//\\/}> \"(.*)\" \./\1/p") + +# get updated dct:format (should be text/csv) +updated_format=$(echo "$updated_ntriples" | sed -rn "s/<${file_uri//\//\\/}> <(.*)> \./\1/p") + +# verify SHA1 is unchanged (same file content) +if [ "$initial_sha1" != "$updated_sha1" ]; then + echo "ERROR: SHA1 hash changed! Initial: $initial_sha1, Updated: $updated_sha1" + exit 1 +fi + +# verify dct:format was updated to text/csv +if [[ ! 
"$updated_format" =~ text/csv ]]; then + echo "ERROR: Format should have been updated to text/csv but got: $updated_format" + echo "Initial format was: $initial_format" + exit 1 +fi + +# cleanup +rm -f "$test_file" diff --git a/http-tests/imports/create-file.sh b/http-tests/imports/create-file.sh index d7e5c462c..e5d5c5541 100755 --- a/http-tests/imports/create-file.sh +++ b/http-tests/imports/create-file.sh @@ -20,24 +20,30 @@ add-agent-to-group.sh \ # create file file_content_type="text/csv" +slug=$(uuidgen | tr '[:upper:]' '[:lower:]') -file_doc=$(create-file.sh \ --f "$AGENT_CERT_FILE" \ --p "$AGENT_CERT_PWD" \ --b "$END_USER_BASE_URL" \ ---title "Test CSV" \ ---file "$pwd/test.csv" \ ---file-content-type "${file_content_type}") - -file_doc_ntriples=$(get.sh \ +# Create an item document to hold the file +file_doc=$(create-item.sh \ -f "$AGENT_CERT_FILE" \ -p "$AGENT_CERT_PWD" \ - --accept 'application/n-triples' \ - "$file_doc") - -# echo "FILE NTRIPLES: $file_doc_ntriples" + -b "$END_USER_BASE_URL" \ + --title "Test CSV" \ + --container "$END_USER_BASE_URL" \ + --slug "$slug") -file=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p") +# Add the file to the document +add-file.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "Test CSV" \ + --file "$pwd/test.csv" \ + --content-type "${file_content_type}" \ + "$file_doc" + +# Calculate file URI from SHA1 hash +sha1sum=$(shasum -a 1 "$pwd/test.csv" | awk '{print $1}') +file="${END_USER_BASE_URL}uploads/${sha1sum}" echo "$file" # file URL used in other tests diff --git a/http-tests/imports/import-csv.sh b/http-tests/imports/import-csv.sh index 89d8458b9..85835aaaa 100755 --- a/http-tests/imports/import-csv.sh +++ b/http-tests/imports/import-csv.sh @@ -17,7 +17,16 @@ add-agent-to-group.sh \ --agent "$AGENT_URI" \ "${ADMIN_BASE_URL}acl/groups/writers/" -# create container +# create import item + +item=$(create-item.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "RDF import" \ + --container "$END_USER_BASE_URL") + +# create target container container=$(create-container.sh \ -f "$AGENT_CERT_FILE" \ @@ -35,7 +44,8 @@ import-csv.sh \ -b "$END_USER_BASE_URL" \ --title "Test" \ --query-file "$pwd/csv-test.rq" \ - --file "$pwd/test.csv" + --csv-file "$pwd/test.csv" \ + "$item" csv_id="test-item" csv_value="42" diff --git a/http-tests/imports/import-rdf-no-query.sh b/http-tests/imports/import-rdf-no-query.sh index d33158689..1b63a5bd1 100755 --- a/http-tests/imports/import-rdf-no-query.sh +++ b/http-tests/imports/import-rdf-no-query.sh @@ -17,9 +17,18 @@ add-agent-to-group.sh \ --agent "$AGENT_URI" \ "${ADMIN_BASE_URL}acl/groups/writers/" -# create item +# create import item item=$(create-item.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "RDF import" \ + --container "$END_USER_BASE_URL") + +# create target item + +graph=$(create-item.sh \ -f "$AGENT_CERT_FILE" \ -p "$AGENT_CERT_PWD" \ -b "$END_USER_BASE_URL" \ @@ -34,9 +43,10 @@ import-rdf.sh \ -p "$AGENT_CERT_PWD" \ -b "$END_USER_BASE_URL" \ --title "Test" \ - --file "$pwd/test.ttl" \ - --file-content-type "text/turtle" \ - --graph "$item" + --rdf-file "$pwd/test.ttl" \ + --content-type "text/turtle" \ + --graph "$graph" \ + "$item" # wait until the imported data appears (since import is executed asynchronously) @@ -51,7 +61,7 @@ do test_triples=$(curl -G -k -f -s -N \ -E "$AGENT_CERT_FILE":"$AGENT_CERT_PWD" \ -H "Accept: 
application/n-triples" \ - "$item" \ + "$graph" \ | grep " " || [[ $? == 1 ]]) sleep 1 ; diff --git a/http-tests/imports/import-rdf.sh b/http-tests/imports/import-rdf.sh index 2e4e75acd..20ed50376 100755 --- a/http-tests/imports/import-rdf.sh +++ b/http-tests/imports/import-rdf.sh @@ -17,7 +17,16 @@ add-agent-to-group.sh \ --agent "$AGENT_URI" \ "${ADMIN_BASE_URL}acl/groups/writers/" -# create container +# create import item + +item=$(create-item.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "RDF import" \ + --container "$END_USER_BASE_URL") + +# create target container container=$(create-container.sh \ -f "$AGENT_CERT_FILE" \ @@ -35,8 +44,9 @@ import-rdf.sh \ -b "$END_USER_BASE_URL" \ --title "Test" \ --query-file "$pwd/rdf-test.rq" \ - --file "$pwd/test.ttl" \ - --file-content-type "text/turtle" + --rdf-file "$pwd/test.ttl" \ + --content-type "text/turtle" \ + "$item" rdf_id="concept7367" rdf_value="http://vocabularies.unesco.org/thesaurus/concept7367" diff --git a/platform/datasets/admin.trig b/platform/datasets/admin.trig index 4756fa90b..07eac47c0 100644 --- a/platform/datasets/admin.trig +++ b/platform/datasets/admin.trig @@ -9,7 +9,6 @@ @prefix sioc: . @prefix foaf: . @prefix dct: . -@prefix spin: . <> { @@ -58,287 +57,6 @@ } -# CONTAINERS - - -{ - - a dh:Container ; - dct:title "Queries" ; - dct:description "SPARQL queries" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Queries" ; - spin:query . - - a sp:Select ; - dct:title "Select query resources" ; - sp:text """PREFIX sp: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { { ?s a sp:Select } - UNION - { ?s a sp:Construct } - UNION - { ?s a sp:Describe } - UNION - { ?s a sp:Ask } - } - }""" . - -} - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "Select instances" ; - foaf:primaryTopic . - - a sp:Select ; - dct:title "Select instances" ; - dct:description "Selects instances of type from the default graph" ; - sp:text """SELECT DISTINCT ?s -WHERE - { ?s a $type ; - ?p ?o - }""" . - -} - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "Select instances in graphs" ; - foaf:primaryTopic . - - a sp:Select ; - dct:title "Select instances in graphs" ; - dct:description "Selects instances of type from named graphs" ; - sp:text """SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { ?s a $type ; - ?p ?o - } - }""" . - -} - - -{ - - a dh:Container ; - dct:title "Files" ; - dct:description "Uploaded files" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Files" ; - spin:query . - - a sp:Select ; - dct:title "Select file resources" ; - sp:text """PREFIX nfo: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { ?s a nfo:FileDataObject } - }""" . - -} - - -{ - - a dh:Container ; - dct:title "Imports" ; - dct:description "Data imports" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Imports" ; - spin:query . - - a sp:Select ; - dct:title "Select import resources" ; - sp:text """PREFIX ldh: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { { ?s a ldh:CSVImport } - UNION - { ?s a ldh:RDFImport } - } - }""" . - -} - - -{ - - a dh:Item ; - dct:title "Geo" ; - dct:description "Geolocated resources" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Geo resources" ; - spin:query ; - ac:mode ac:MapMode . 
diff --git a/platform/datasets/admin.trig b/platform/datasets/admin.trig
index 4756fa90b..07eac47c0 100644
--- a/platform/datasets/admin.trig
+++ b/platform/datasets/admin.trig
@@ -9,7 +9,7 @@
 @prefix sioc:    <http://rdfs.org/sioc/ns#> .
 @prefix foaf:    <http://xmlns.com/foaf/0.1/> .
 @prefix dct:     <http://purl.org/dc/terms/> .
-@prefix spin:    <http://spinrdf.org/spin#> .
 
 <>
 {
@@ -58,287 +57,6 @@
 }
 
-
-# CONTAINERS
-
-
-{
-
-    a dh:Container ;
-        dct:title "Queries" ;
-        dct:description "SPARQL queries" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Queries" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select query resources" ;
-        sp:text """PREFIX sp: <http://spinrdf.org/sp#>
-
-SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { { ?s a sp:Select }
-        UNION
-        { ?s a sp:Construct }
-        UNION
-        { ?s a sp:Describe }
-        UNION
-        { ?s a sp:Ask }
-      }
-  }""" .
-
-}
-
-
-{
-
-    a dh:Item ;
-        sioc:has_container ;
-        dct:title "Select instances" ;
-        foaf:primaryTopic .
-
-    a sp:Select ;
-        dct:title "Select instances" ;
-        dct:description "Selects instances of type from the default graph" ;
-        sp:text """SELECT DISTINCT ?s
-WHERE
-  { ?s a $type ;
-       ?p ?o
-  }""" .
-
-}
-
-
-{
-
-    a dh:Item ;
-        sioc:has_container ;
-        dct:title "Select instances in graphs" ;
-        foaf:primaryTopic .
-
-    a sp:Select ;
-        dct:title "Select instances in graphs" ;
-        dct:description "Selects instances of type from named graphs" ;
-        sp:text """SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { ?s a $type ;
-           ?p ?o
-      }
-  }""" .
-
-}
-
-
-{
-
-    a dh:Container ;
-        dct:title "Files" ;
-        dct:description "Uploaded files" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Files" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select file resources" ;
-        sp:text """PREFIX nfo: <http://www.semanticdesktop.org/ontologies/2007/03/22/nfo#>
-
-SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { ?s a nfo:FileDataObject }
-  }""" .
-
-}
-
-
-{
-
-    a dh:Container ;
-        dct:title "Imports" ;
-        dct:description "Data imports" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Imports" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select import resources" ;
-        sp:text """PREFIX ldh: <https://w3id.org/atomgraph/linkeddatahub#>
-
-SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { { ?s a ldh:CSVImport }
-        UNION
-        { ?s a ldh:RDFImport }
-      }
-  }""" .
-
-}
-
-
-{
-
-    a dh:Item ;
-        dct:title "Geo" ;
-        dct:description "Geolocated resources" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Geo resources" ;
-        spin:query ;
-        ac:mode ac:MapMode .
-
-    a sp:Select ;
-        dct:title "Select geo resources" ;
-        sp:text """PREFIX geo: <http://www.w3.org/2003/01/geo/wgs84_pos#>
-PREFIX dct: <http://purl.org/dc/terms/>
-
-SELECT DISTINCT ?resource
-WHERE
-{ GRAPH ?graph
-    { ?resource geo:lat ?lat ;
-                geo:long ?long
-      OPTIONAL
-        { ?resource a ?type }
-      OPTIONAL
-        { ?resource dct:title ?title }
-    }
-}
-ORDER BY ?title""" .
-
-}
-
-
-{
-
-    a dh:Item ;
-        dct:title "Latest" ;
-        dct:description "Latest resources" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Latest resources" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select latest" ;
-        sp:text """PREFIX dct: <http://purl.org/dc/terms/>
-
-SELECT DISTINCT ?dated
-WHERE
-{ GRAPH ?graph
-    { ?dated dct:created ?created }
-}
-ORDER BY DESC(?created)""" .
-
-}
-
-
-{
-
-    a dh:Container ;
-        dct:title "Charts" ;
-        dct:description "Saved charts" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Charts" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select chart resources" ;
-        sp:text """PREFIX ldh: <https://w3id.org/atomgraph/linkeddatahub#>
-
-SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { { ?s a ldh:GraphChart }
-        UNION
-        { ?s a ldh:ResultSetChart }
-      }
-  }""" .
-
-}
-
-
-{
-
-    a dh:Container ;
-        dct:title "Apps" ;
-        dct:description "Linked Data applications" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Applications" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select application resources" ;
-        sp:text """PREFIX lapp: <https://w3id.org/atomgraph/linkeddatahub/apps#>
-
-SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { ?s a lapp:Application }
-  }""" .
-
-}
-
-
-{
-
-    a dh:Container ;
-        dct:title "Services" ;
-        dct:description "SPARQL services" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Services" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select service resources" ;
-        sp:text """PREFIX sd: <http://www.w3.org/ns/sparql-service-description#>
-
-SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { ?s a sd:Service }
-  }""" .
-
-}
-
 ### ADMIN-SPECIFIC
 
 @prefix lacl:    <https://w3id.org/atomgraph/linkeddatahub/admin/acl#> .
diff --git a/platform/datasets/end-user.trig b/platform/datasets/end-user.trig
index 2608b6a39..65c624610 100644
--- a/platform/datasets/end-user.trig
+++ b/platform/datasets/end-user.trig
@@ -9,7 +9,7 @@
 @prefix sioc:    <http://rdfs.org/sioc/ns#> .
 @prefix foaf:    <http://xmlns.com/foaf/0.1/> .
 @prefix dct:     <http://purl.org/dc/terms/> .
-@prefix spin:    <http://spinrdf.org/spin#> .
 
 <>
 {
@@ -58,287 +57,6 @@
 }
 
-
-# CONTAINERS
-
-
-{
-
-    a dh:Container ;
-        dct:title "Queries" ;
-        dct:description "SPARQL queries" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Queries" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select query resources" ;
-        sp:text """PREFIX sp: <http://spinrdf.org/sp#>
-
-SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { { ?s a sp:Select }
-        UNION
-        { ?s a sp:Construct }
-        UNION
-        { ?s a sp:Describe }
-        UNION
-        { ?s a sp:Ask }
-      }
-  }""" .
-
-}
-
-
-{
-
-    a dh:Item ;
-        sioc:has_container ;
-        dct:title "Select instances" ;
-        foaf:primaryTopic .
-
-    a sp:Select ;
-        dct:title "Select instances" ;
-        dct:description "Selects instances of type from the default graph" ;
-        sp:text """SELECT DISTINCT ?s
-WHERE
-  { ?s a $type ;
-       ?p ?o
-  }""" .
-
-}
-
-
-{
-
-    a dh:Item ;
-        sioc:has_container ;
-        dct:title "Select instances in graphs" ;
-        foaf:primaryTopic .
-
-    a sp:Select ;
-        dct:title "Select instances in graphs" ;
-        dct:description "Selects instances of type from named graphs" ;
-        sp:text """SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { ?s a $type ;
-           ?p ?o
-      }
-  }""" .
-
-}
-
-
-{
-
-    a dh:Container ;
-        dct:title "Files" ;
-        dct:description "Uploaded files" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Files" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select file resources" ;
-        sp:text """PREFIX nfo: <http://www.semanticdesktop.org/ontologies/2007/03/22/nfo#>
-
-SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { ?s a nfo:FileDataObject }
-  }""" .
-
-}
-
-
-{
-
-    a dh:Container ;
-        dct:title "Imports" ;
-        dct:description "Data imports" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Imports" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select import resources" ;
-        sp:text """PREFIX ldh: <https://w3id.org/atomgraph/linkeddatahub#>
-
-SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { { ?s a ldh:CSVImport }
-        UNION
-        { ?s a ldh:RDFImport }
-      }
-  }""" .
-
-}
-
-
-{
-
-    a dh:Item ;
-        dct:title "Geo" ;
-        dct:description "Geolocated resources" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Geo resources" ;
-        spin:query ;
-        ac:mode ac:MapMode .
-
-    a sp:Select ;
-        dct:title "Select geo resources" ;
-        sp:text """PREFIX geo: <http://www.w3.org/2003/01/geo/wgs84_pos#>
-PREFIX dct: <http://purl.org/dc/terms/>
-
-SELECT DISTINCT ?resource
-WHERE
-{ GRAPH ?graph
-    { ?resource geo:lat ?lat ;
-                geo:long ?long
-      OPTIONAL
-        { ?resource a ?type }
-      OPTIONAL
-        { ?resource dct:title ?title }
-    }
-}
-ORDER BY ?title""" .
-
-}
-
-
-{
-
-    a dh:Item ;
-        dct:title "Latest" ;
-        dct:description "Latest resources" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Latest resources" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select latest" ;
-        sp:text """PREFIX dct: <http://purl.org/dc/terms/>
-
-SELECT DISTINCT ?dated
-WHERE
-{ GRAPH ?graph
-    { ?dated dct:created ?created }
-}
-ORDER BY DESC(?created)""" .
-
-}
-
-
-{
-
-    a dh:Container ;
-        dct:title "Charts" ;
-        dct:description "Saved charts" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Charts" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select chart resources" ;
-        sp:text """PREFIX ldh: <https://w3id.org/atomgraph/linkeddatahub#>
-
-SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { { ?s a ldh:GraphChart }
-        UNION
-        { ?s a ldh:ResultSetChart }
-      }
-  }""" .
-
-}
-
-
-{
-
-    a dh:Container ;
-        dct:title "Apps" ;
-        dct:description "Linked Data applications" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Applications" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select application resources" ;
-        sp:text """PREFIX lapp: <https://w3id.org/atomgraph/linkeddatahub/apps#>
-
-SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { ?s a lapp:Application }
-  }""" .
-
-}
-
-
-{
-
-    a dh:Container ;
-        dct:title "Services" ;
-        dct:description "SPARQL services" ;
-        rdf:_1 .
-
-    a ldh:Object ;
-        rdf:value .
-
-    a ldh:View ;
-        dct:title "Services" ;
-        spin:query .
-
-    a sp:Select ;
-        dct:title "Select service resources" ;
-        sp:text """PREFIX sd: <http://www.w3.org/ns/sparql-service-description#>
-
-SELECT DISTINCT ?s
-WHERE
-  { GRAPH ?g
-      { ?s a sd:Service }
-  }""" .
-
-}
-
 ### END-USER-SPECIFIC
diff --git a/pom.xml b/pom.xml
index 32e9246e3..6f440bfff 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3,7 +3,7 @@
 
     <groupId>com.atomgraph</groupId>
     <artifactId>linkeddatahub</artifactId>
-    <version>5.2.1</version>
+    <version>5.2.2-SNAPSHOT</version>
     <packaging>${packaging.type}</packaging>
     <name>AtomGraph LinkedDataHub</name>
 
@@ -46,7 +46,7 @@
         <url>https://github.com/AtomGraph/LinkedDataHub</url>
         <connection>scm:git:git://github.com/AtomGraph/LinkedDataHub.git</connection>
         <developerConnection>scm:git:git@github.com:AtomGraph/LinkedDataHub.git</developerConnection>
-        <tag>linkeddatahub-5.2.1</tag>
+        <tag>linkeddatahub-2.1.1</tag>
diff --git a/src/main/java/com/atomgraph/linkeddatahub/Application.java b/src/main/java/com/atomgraph/linkeddatahub/Application.java
index 0a5851110..ac7c6dba8 100644
--- a/src/main/java/com/atomgraph/linkeddatahub/Application.java
+++ b/src/main/java/com/atomgraph/linkeddatahub/Application.java
@@ -16,7 +16,6 @@
  */
 package com.atomgraph.linkeddatahub;
 
-import com.atomgraph.linkeddatahub.server.mapper.ResourceExistsExceptionMapper;
 import com.atomgraph.linkeddatahub.server.mapper.HttpHostConnectExceptionMapper;
 import com.atomgraph.linkeddatahub.server.mapper.InternalURLExceptionMapper;
 import com.atomgraph.linkeddatahub.server.mapper.MessagingExceptionMapper;
@@ -1104,7 +1103,6 @@ protected void registerExceptionMappers()
         register(WebIDDelegationExceptionMapper.class);
         register(WebIDLoadingExceptionMapper.class);
         register(TokenExpiredExceptionMapper.class);
-        register(ResourceExistsExceptionMapper.class);
         register(QueryParseExceptionMapper.class);
         register(AuthenticationExceptionMapper.class);
         register(ForbiddenExceptionMapper.class);
diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java
index 716289439..cecd10dd6 100644
--- a/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java
+++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java
@@ -18,8 +18,6 @@
 import com.atomgraph.core.MediaTypes;
 import com.atomgraph.linkeddatahub.apps.model.Application;
-import com.atomgraph.linkeddatahub.client.GraphStoreClient;
-import com.atomgraph.linkeddatahub.imports.QueryLoader;
 import com.atomgraph.linkeddatahub.server.model.impl.DirectGraphStoreImpl;
 import com.atomgraph.linkeddatahub.server.security.AgentContext;
 import com.atomgraph.linkeddatahub.server.util.Skolemizer;
@@ -44,8 +42,10 @@
 import jakarta.ws.rs.core.Response.Status;
 import jakarta.ws.rs.core.UriBuilder;
 import jakarta.ws.rs.core.UriInfo;
+import org.apache.jena.ontology.Ontology;
 import org.apache.jena.query.ParameterizedSparqlString;
 import org.apache.jena.query.Query;
+import org.apache.jena.query.QueryFactory;
 import org.apache.jena.query.Syntax;
 import org.apache.jena.rdf.model.Model;
 import org.apache.jena.rdf.model.ModelFactory;
@@ -69,10 +69,11 @@ public class Generate
     private final UriInfo uriInfo;
     private final MediaTypes mediaTypes;
     private final Application application;
+    private final Ontology ontology;
     private final Optional<AgentContext> agentContext;
     private final com.atomgraph.linkeddatahub.Application system;
     private final ResourceContext resourceContext;
-    
+
     /**
      * Constructs endpoint for container generation.
      * 
@@ -80,18 +81,21 @@ public class Generate
      * @param uriInfo current URI info
      * @param mediaTypes supported media types
      * @param application matched application
+     * @param ontology ontology of the current application
      * @param system system application
      * @param agentContext authenticated agent's context
      * @param resourceContext resource context for creating resources
      */
     @Inject
     public Generate(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes,
-            com.atomgraph.linkeddatahub.apps.model.Application application, Optional<AgentContext> agentContext,
+            com.atomgraph.linkeddatahub.apps.model.Application application, Optional<Ontology> ontology, Optional<AgentContext> agentContext,
             com.atomgraph.linkeddatahub.Application system, @Context ResourceContext resourceContext)
     {
+        if (ontology.isEmpty()) throw new InternalServerErrorException("Ontology is not specified");
         this.uriInfo = uriInfo;
         this.mediaTypes = mediaTypes;
         this.application = application;
+        this.ontology = ontology.get();
         this.agentContext = agentContext;
         this.system = system;
         this.resourceContext = resourceContext;
@@ -129,10 +133,13 @@ public Response post(Model model)
         Resource queryRes = part.getPropertyResourceValue(SPIN.query);
         if (queryRes == null) throw new BadRequestException("Container query string (spin:query) not provided");
 
-        GraphStoreClient gsc = GraphStoreClient.create(getSystem().getClient(), getSystem().getMediaTypes()).
-            delegation(getUriInfo().getBaseUri(), getAgentContext().orElse(null));
-        QueryLoader queryLoader = new QueryLoader(URI.create(queryRes.getURI()), getApplication().getBase().getURI(), Syntax.syntaxARQ, gsc);
-        Query query = queryLoader.get();
+        // Lookup query in ontology
+        Resource queryResource = getOntology().getOntModel().getResource(queryRes.getURI());
+        if (queryResource == null || !queryResource.hasProperty(SP.text))
+            throw new BadRequestException("Query resource not found in ontology: " + queryRes.getURI());
+
+        String queryString = queryResource.getProperty(SP.text).getString();
+        Query query = QueryFactory.create(queryString, Syntax.syntaxARQ);
 
         if (!query.isSelectType()) throw new BadRequestException("Container query is not of SELECT type");
         ParameterizedSparqlString pss = new ParameterizedSparqlString(query.toString());
@@ -253,6 +260,16 @@ public Application getApplication()
         return application;
     }
 
+    /**
+     * Returns the ontology.
+     * 
+     * @return the ontology
+     */
+    public Ontology getOntology()
+    {
+        return ontology;
+    }
+
     /**
      * Returns the current URI info.
      * 
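The replaced block swaps a network round-trip (GraphStoreClient plus QueryLoader dereferencing the query URI) for a lookup in the already-loaded application ontology: resolve the query resource in the OntModel, read its sp:text literal, parse it with QueryFactory, then parameterize. A self-contained sketch of the same flow, assuming only the standard SPIN sp:text property URI and a $type placeholder like the one in the :SelectInstances query added to ldh.ttl below; names such as lookupQuery are illustrative:

    import org.apache.jena.ontology.OntModel;
    import org.apache.jena.query.ParameterizedSparqlString;
    import org.apache.jena.query.Query;
    import org.apache.jena.query.QueryFactory;
    import org.apache.jena.query.Syntax;
    import org.apache.jena.rdf.model.Property;
    import org.apache.jena.rdf.model.Resource;
    import org.apache.jena.rdf.model.ResourceFactory;

    public class QueryLookup
    {
        // sp:text from the SPIN vocabulary, which the patch reads via SP.text
        static final Property SP_TEXT = ResourceFactory.createProperty("http://spinrdf.org/sp#text");

        // Resolves a query resource in the ontology model and parses its sp:text
        public static Query lookupQuery(OntModel ontModel, String queryURI, Resource type)
        {
            Resource queryRes = ontModel.getResource(queryURI);
            if (!queryRes.hasProperty(SP_TEXT))
                throw new IllegalArgumentException("Query resource not found in ontology: " + queryURI);

            String queryString = queryRes.getProperty(SP_TEXT).getString();
            Query query = QueryFactory.create(queryString, Syntax.syntaxARQ);

            // Bind the $type placeholder ($type and ?type name the same SPARQL variable)
            ParameterizedSparqlString pss = new ParameterizedSparqlString(query.toString());
            pss.setParam("type", type);
            return pss.asQuery();
        }
    }

Note that Jena's Model.getResource never returns null, so the hasProperty(sp:text) check is the one doing the work. The failure mode also changes: a dangling spin:query reference now surfaces as an immediate 400 Bad Request instead of a failed HTTP dereference.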
diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/OntologyFilter.java b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/OntologyFilter.java
index c996d5214..0390a989b 100644
--- a/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/OntologyFilter.java
+++ b/src/main/java/com/atomgraph/linkeddatahub/server/filter/request/OntologyFilter.java
@@ -54,12 +54,43 @@ public class OntologyFilter implements ContainerRequestFilter
 
     private static final Logger log = LoggerFactory.getLogger(OntologyFilter.class);
 
+    /**
+     * Paths that should not trigger ontology loading to avoid circular dependencies.
+     * 
+     * When an ontology contains owl:imports pointing to URIs within these paths,
+     * loading the ontology would trigger HTTP requests to those URIs. If those requests
+     * are intercepted by this filter, it creates a circular dependency:
+     * 
+     * 1. Request arrives for /uploads/xyz
+     * 2. OntologyFilter intercepts it and loads ontology
+     * 3. Ontology has owl:imports for /uploads/xyz
+     * 4. Jena FileManager makes HTTP request to /uploads/xyz
+     * 5. OntologyFilter intercepts it again → infinite loop/deadlock
+     * 
+     * Additionally, uploaded files are binary/RDF content that don't require
+     * ontology context for their serving logic.
+     */
+    private static final java.util.Set<String> IGNORED_PATH_PREFIXES = java.util.Set.of(
+        "uploads/"
+    );
+
     @Inject com.atomgraph.linkeddatahub.Application system;
     
     @Override
     public void filter(ContainerRequestContext crc) throws IOException
     {
+        String path = crc.getUriInfo().getPath();
+
+        // Skip ontology loading for paths that may be referenced in owl:imports
+        // to prevent circular dependency deadlocks during ontology resolution
+        if (IGNORED_PATH_PREFIXES.stream().anyMatch(path::startsWith))
+        {
+            if (log.isTraceEnabled()) log.trace("Skipping ontology loading for path: {}", path);
+            crc.setProperty(OWL.Ontology.getURI(), Optional.empty());
+            return;
+        }
+
         crc.setProperty(OWL.Ontology.getURI(), getOntology(crc));
     }
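The guard itself is a plain prefix test over the request path (JAX-RS UriInfo#getPath() returns it without a leading slash), so anything under uploads/ bypasses ontology resolution entirely. A standalone sketch of just that check, using the same IGNORED_PATH_PREFIXES value:

    import java.util.Set;

    public class PathSkip
    {
        private static final Set<String> IGNORED_PATH_PREFIXES = Set.of("uploads/");

        // Mirrors the guard added to OntologyFilter.filter() above
        static boolean skipOntologyLoading(String path)
        {
            return IGNORED_PATH_PREFIXES.stream().anyMatch(path::startsWith);
        }

        public static void main(String[] args)
        {
            System.out.println(skipOntologyLoading("uploads/xyz"));  // true: served without an ontology
            System.out.println(skipOntologyLoading("queries/xyz/")); // false: ontology is loaded as before
        }
    }

Skipped requests still get the request property set, but to Optional.empty(), so injection points that expect an Optional<Ontology> (such as the Generate constructor above) see an empty value rather than a missing property.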
diff --git a/src/main/java/com/atomgraph/linkeddatahub/server/mapper/ResourceExistsExceptionMapper.java b/src/main/java/com/atomgraph/linkeddatahub/server/mapper/ResourceExistsExceptionMapper.java
deleted file mode 100644
index 7cfe91c52..000000000
--- a/src/main/java/com/atomgraph/linkeddatahub/server/mapper/ResourceExistsExceptionMapper.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- *  Copyright 2019 Martynas Jusevičius
- *
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *
- */
-package com.atomgraph.linkeddatahub.server.mapper;
-
-import com.atomgraph.core.MediaTypes;
-import com.atomgraph.linkeddatahub.server.exception.ResourceExistsException;
-import com.atomgraph.server.mapper.ExceptionMapperBase;
-import jakarta.inject.Inject;
-import jakarta.ws.rs.core.HttpHeaders;
-import org.apache.jena.rdf.model.ResourceFactory;
-
-import jakarta.ws.rs.core.Response;
-import jakarta.ws.rs.ext.ExceptionMapper;
-import org.apache.jena.rdf.model.Resource;
-
-/**
- * JAX-RS mapper for resource conflict exceptions.
- * 
- * @author Martynas Jusevičius {@literal }
- */
-@Deprecated
-public class ResourceExistsExceptionMapper extends ExceptionMapperBase implements ExceptionMapper<ResourceExistsException>
-{
-
-    /**
-     * Constructs mapper from media types.
-     * 
-     * @param mediaTypes registry of readable/writeable media types
-     */
-    @Inject
-    public ResourceExistsExceptionMapper(MediaTypes mediaTypes)
-    {
-        super(mediaTypes);
-    }
-
-    @Override
-    public Response toResponse(ResourceExistsException ex)
-    {
-        Resource exception = toResource(ex, Response.Status.CONFLICT,
-                ResourceFactory.createResource("http://www.w3.org/2011/http-statusCodes#Conflict"));
-        ex.getModel().add(exception.getModel());
-
-        return getResponseBuilder(ex.getModel()).
-            status(Response.Status.CONFLICT).
-            header(HttpHeaders.LOCATION, ex.getURI()).
-            build();
-    }
-
-}
diff --git a/src/main/java/com/atomgraph/linkeddatahub/vocabulary/ACL.java b/src/main/java/com/atomgraph/linkeddatahub/vocabulary/ACL.java
index 1a528e187..259d34a0e 100644
--- a/src/main/java/com/atomgraph/linkeddatahub/vocabulary/ACL.java
+++ b/src/main/java/com/atomgraph/linkeddatahub/vocabulary/ACL.java
@@ -60,6 +60,9 @@ public static String getURI()
     /** acl:Append access mode */
     public static final OntClass Append = m_model.createClass( NS + "Append" );
 
+    /** acl:Control access mode */
+    public static final OntClass Control = m_model.createClass( NS + "Control" );
+
     /** acl:AuthenticatedAgent class */
     public static final OntClass AuthenticatedAgent = m_model.createClass( NS + "AuthenticatedAgent" );
diff --git a/src/main/resources/com/atomgraph/linkeddatahub/ldh.ttl b/src/main/resources/com/atomgraph/linkeddatahub/ldh.ttl
index 589ae75b9..caa46a07f 100644
--- a/src/main/resources/com/atomgraph/linkeddatahub/ldh.ttl
+++ b/src/main/resources/com/atomgraph/linkeddatahub/ldh.ttl
@@ -493,6 +493,28 @@
 ORDER BY ?title """ ;
     rdfs:isDefinedBy : .
 
+:SelectInstances a sp:Select ;
+    rdfs:label "Select instances" ;
+    dct:description "Selects instances of type from the default graph" ;
+    sp:text """SELECT DISTINCT ?s
+WHERE
+  { ?s a $type ;
+       ?p ?o
+  }""" ;
+    rdfs:isDefinedBy : .
+
+:SelectInstancesInGraphs a sp:Select ;
+    rdfs:label "Select instances in graphs" ;
+    dct:description "Selects instances of type from named graphs" ;
+    sp:text """SELECT DISTINCT ?s
+WHERE
+  { GRAPH ?g
+      { ?s a $type ;
+           ?p ?o
+      }
+  }""" ;
+    rdfs:isDefinedBy : .
+
 :ChildrenView a :View ;
     rdfs:label "Children view" ;
     spin:query :SelectChildren ;
diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/css/bootstrap.css b/src/main/webapp/static/com/atomgraph/linkeddatahub/css/bootstrap.css
index 66155e480..03abdc375 100644
--- a/src/main/webapp/static/com/atomgraph/linkeddatahub/css/bootstrap.css
+++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/css/bootstrap.css
@@ -37,6 +37,8 @@ button.btn.create-action { height: 30px; }
 a.external::after { content: "⤴"; padding-left: 0.2em; }
 a.btn.create-action { height: 20px; }
 .create-resource .btn.create-action { margin-top: 1em; }
+.btn-class { background: inherit; }
+.btn-class span { color: black; }
 .btn-group.open .btn.dropdown-toggle.create-action { background-image: url('../icons/ic_note_add_black_24px.svg'); }
 li button.btn-edit-constructors, li button.btn-add-data, li button.btn-add-ontology, li button.btn-generate-containers { text-align: left; width: 100%; background-color: inherit; }
 .btn-container { background-image: url('../icons/folder.svg'); }
@@ -48,8 +50,6 @@
 .btn-import { background-image: url('../icons/ic_transform_black_24px.svg'); }
 .btn-chart { background-image: url('../icons/ic_show_chart_black_24px.svg'); }
 .btn-view { background-image: url('../icons/ic_view_list_black_24px.svg'); }
-.btn-latest { background-image: url('../icons/ic_new_releases_black_24px.svg'); }
-.btn-geo { background-image: url('../icons/ic_location_on_black_24px.svg'); }
 .btn-logo { background-position: left; background-repeat: no-repeat; padding-left: 32px; }
 .dropdown-menu .btn-logo { background-position: 12px center; padding-left: 40px; }
 .btn.btn-toggle-content { font-size: 0; color: transparent; background-image: url('../icons/baseline-expand_less-24px.svg'); background-position: center center; background-repeat: no-repeat; width: 48px; }
@@ -82,17 +82,20 @@ li
button.btn-edit-constructors, li button.btn-add-data, li button.btn-add-ontol .dropdown-menu > li > a.btn-list { background-image: url('../icons/view_list_black_24dp.svg'); background-position: 12px center; background-repeat: no-repeat; padding: 5px 5px 5px 40px; } .dropdown-menu > li > a.btn-table { background-image: url('../icons/ic_border_all_black_24px.svg'); background-position: 12px center; background-repeat: no-repeat; padding: 5px 5px 5px 40px; } .dropdown-menu > li > a.btn-grid { background-image: url('../icons/ic_grid_on_black_24px.svg'); background-position: 12px center; background-repeat: no-repeat; padding: 5px 5px 5px 40px; } -#doc-tree { display: none; width: 15%; position: fixed; left: 0; top: 106px; height: calc(100% - 106px); } +#left-sidebar { display: none; width: 15%; position: fixed; left: 0; top: 106px; height: calc(100% - 106px); } @media (max-width: 979px) { body { padding-top: 0; } - #doc-tree { display: block; width: auto; position: unset; top: unset; height: auto; } - #doc-tree .nav { max-height: 20em; overflow: auto; } + #left-sidebar { display: block; width: auto; position: unset; top: unset; height: auto; } + #left-sidebar .nav { max-height: 20em; overflow: auto; } } -#doc-tree .nav-list > li > a { margin-left: 0; margin-right: 0; } -#doc-tree .nav-list > li > a.btn-container, #doc-tree .nav-list > li > a.btn-app, #doc-tree .nav-list > li > a.btn-chart, #doc-tree .nav-list > li > a.btn-file, #doc-tree .nav-list > li > a.btn-geo, #doc-tree .nav-list > li > a.btn-import, #doc-tree .nav-list > li > a.btn-latest, #doc-tree .nav-list > li > a.btn-query, #doc-tree .nav-list > li > a.btn-service { padding-left: 24px; } -#doc-tree li { max-height: 20em; overflow: auto; } -#doc-tree li > a { display: inline-block; } +#left-sidebar .nav-list > li > a.btn-container { padding-left: 24px; } +#left-sidebar .nav-list > li > a { margin-left: 0; margin-right: 0; } +#left-sidebar ul { max-height: 22em; overflow: auto; } +#left-sidebar li > a { display: inline-block; } +#left-sidebar .btn-latest { background-image: url('../icons/ic_new_releases_black_24px.svg'); background-color: inherit; } +#left-sidebar .btn-geo { background-image: url('../icons/ic_location_on_black_24px.svg'); background-color: inherit; } + .btn.btn-expand-tree { height: 24px; width: 24px; background-image: url('../icons/expand_more_black_24dp.svg'); } .btn.btn-expand-tree:hover, .btn.btn-expand-tree:focus { background-position: 0 0; } .btn.btn-expanded-tree { height: 24px; width: 24px; background-image: url('../icons/chevron_right_black_24dp.svg'); } diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block.xsl index 1060d7ba2..745d5aab7 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block.xsl @@ -151,28 +151,31 @@ exclude-result-prefixes="#all" - - - - - - - - - - + - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + @@ -454,7 +457,71 @@ exclude-result-prefixes="#all" - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/chart.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/chart.xsl index 5541a334b..53307ebbc 100644 
--- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/chart.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/chart.xsl @@ -286,11 +286,16 @@ exclude-result-prefixes="#all" - - - - - + + + + + + + + + + - - - - + + - - - - - - - @@ -816,8 +813,7 @@ exclude-result-prefixes="#all" - - + - @@ -867,11 +862,10 @@ exclude-result-prefixes="#all" - - - + + - + @@ -880,7 +874,10 @@ exclude-result-prefixes="#all" - + + + + @@ -896,8 +893,7 @@ exclude-result-prefixes="#all" - - + - ldh:block-object-metadata-response + ldh:block-object-metadata-response $block/@about: + + @@ -302,18 +304,29 @@ exclude-result-prefixes="#all" + - + + + + + + + + + + - - + + + diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/query.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/query.xsl index 550630534..fc2c10194 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/query.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/query.xsl @@ -280,6 +280,12 @@ exclude-result-prefixes="#all" + + + + + + @@ -325,7 +331,7 @@ exclude-result-prefixes="#all" - + @@ -536,7 +542,7 @@ exclude-result-prefixes="#all" - + @@ -581,11 +587,10 @@ exclude-result-prefixes="#all" - - - + + - + diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/view.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/view.xsl index 3ebcc4766..e5d3cd2db 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/view.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/view.xsl @@ -69,11 +69,15 @@ exclude-result-prefixes="#all" - - - - - + + + + + + + + + @@ -86,9 +90,13 @@ exclude-result-prefixes="#all" 'container': $container, 'mode': $mode, 'refresh-content': $refresh-content, - 'query-uri': $query-uri + 'query-uri': $query-uri, + 'cache': ixsl:get(ixsl:get(ixsl:window(), 'LinkedDataHub.contents'), '`' || $block/@about || '`') }"/> - + + + + - + ldh:view-results-thunk + + + + ldh:load-object-metadata + @@ -154,8 +167,12 @@ exclude-result-prefixes="#all" - + + + + + @@ -177,7 +194,6 @@ exclude-result-prefixes="#all" - - + + ldh:set-object-metadata + + + + @@ -208,7 +229,7 @@ exclude-result-prefixes="#all" - + @@ -331,6 +352,7 @@ exclude-result-prefixes="#all" + @@ -365,8 +387,13 @@ exclude-result-prefixes="#all" map { 'request': $request, 'container': ., - 'count-var-name': $count-var-name + 'count-var-name': $count-var-name, + 'cache': $cache }"/> + + + + - @@ -495,11 +521,8 @@ exclude-result-prefixes="#all" + - - - - @@ -522,7 +545,6 @@ exclude-result-prefixes="#all" + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - @@ -544,7 +606,7 @@ exclude-result-prefixes="#all" - + @@ -575,13 +637,27 @@ exclude-result-prefixes="#all" + + + + + + + + + + + + + + - - + + @@ -591,7 +667,7 @@ exclude-result-prefixes="#all" - + @@ -681,7 +757,6 @@ exclude-result-prefixes="#all" - @@ -694,13 +769,13 @@ exclude-result-prefixes="#all" + - - + $initial-load: @@ -769,6 +844,7 @@ exclude-result-prefixes="#all" + @@ -785,8 +861,10 @@ exclude-result-prefixes="#all" 'container': id($order-by-container-id, ixsl:page()), 'id': $id, 'predicate': $predicate, - 'order-by-predicate': $order-by-predicate + 'order-by-predicate': $order-by-predicate, + 
'cache': $cache }"/> + - - - - - @@ -809,7 +882,7 @@ exclude-result-prefixes="#all" - + @@ -1113,46 +1186,114 @@ exclude-result-prefixes="#all" - - + + BLOCK DELEGATION: view-mode handler triggered + - - - + BLOCK DELEGATION: block URI = + + BLOCK DELEGATION: cache found: + + + + + + + + + + BLOCK DELEGATION: pager previous triggered + + + BLOCK DELEGATION: block URI = + + BLOCK DELEGATION: cache found: + + + + + + + + + + BLOCK DELEGATION: pager next triggered + + + BLOCK DELEGATION: block URI = + + BLOCK DELEGATION: cache found: + + + + + + + + + + BLOCK DELEGATION: container-order triggered + + + BLOCK DELEGATION: block URI = + + BLOCK DELEGATION: cache found: + + + + + + + + + + + BLOCK DELEGATION: btn-order-by triggered + + + BLOCK DELEGATION: block URI = + + BLOCK DELEGATION: cache found: + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + - + + - - - - - - - + + + + + + + + - - - - - - - - - - - - - - + + + + - - - - + + + + - + - + + - - - - - - - + + + + + + + + + - - - - - - - - - - - - - - + + + + + - - - - + + + + - + - + + - - - - - - - + + + + + + + + + + - - - - - - - - - - - - - - + + + + - - - + + + - + - + + + + + - - - - - - - - - + + + + + + - - - - - - - + + + + - - - - - - - - - + + + + + + + + - + - + - + + - - - - + - + @@ -1371,8 +1486,8 @@ exclude-result-prefixes="#all" - - + + @@ -1395,9 +1510,19 @@ exclude-result-prefixes="#all" - - - + + + + + + + + + + + + + @@ -1459,7 +1584,7 @@ exclude-result-prefixes="#all" - + @@ -1469,12 +1594,11 @@ exclude-result-prefixes="#all" - - - - - - + + + + + @@ -1487,20 +1611,21 @@ exclude-result-prefixes="#all" - + - + - + - + + - + - - - - - - + + + + + @@ -1535,20 +1659,21 @@ exclude-result-prefixes="#all" - + - + - + - + + + ldh:view-query-response @@ -1605,14 +1731,11 @@ exclude-result-prefixes="#all" - + - - - - - - + + + @@ -1630,23 +1753,25 @@ exclude-result-prefixes="#all" - - - - - + + + + + + + + + + + + + + + + + - - - - - - - - - - - + @@ -1675,7 +1800,6 @@ exclude-result-prefixes="#all" - + - - - - - + + ldh:render-view + @@ -1737,7 +1860,6 @@ exclude-result-prefixes="#all" - @@ -1750,6 +1872,7 @@ exclude-result-prefixes="#all" + @@ -1773,11 +1896,16 @@ exclude-result-prefixes="#all" - + + + + + + - + @@ -1788,7 +1916,7 @@ exclude-result-prefixes="#all" ldh:facet-filter-response - + @@ -1804,7 +1932,7 @@ exclude-result-prefixes="#all" - + @@ -1816,7 +1944,7 @@ exclude-result-prefixes="#all" ldh:parallax-response - + @@ -1872,7 +2000,7 @@ exclude-result-prefixes="#all" ldh:parallax-property-response - + @@ -1932,7 +2060,7 @@ exclude-result-prefixes="#all" ldh:facet-value-response - + @@ -2078,6 +2206,9 @@ exclude-result-prefixes="#all" ldh:result-count-response + + + @@ -2101,7 +2232,7 @@ exclude-result-prefixes="#all" - + @@ -2116,7 +2247,7 @@ exclude-result-prefixes="#all" ldh:order-by-response - + @@ -2130,7 +2261,7 @@ exclude-result-prefixes="#all" - + diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/functions.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/functions.xsl index 5731d47c9..27531e32f 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/functions.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/functions.xsl @@ -93,7 +93,7 @@ exclude-result-prefixes="#all" - + @@ -494,7 +494,7 @@ exclude-result-prefixes="#all" - + diff --git 
a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/map.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/map.xsl index 0fd72e1b5..8c2a46433 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/map.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/map.xsl @@ -122,11 +122,11 @@ exclude-result-prefixes="#all" - - - + + + @@ -142,7 +142,8 @@ exclude-result-prefixes="#all" 'request': $request, 'container': $container, 'container-id': $container-id, - 'block-uri': $block-uri + 'map': $map, + 'initial-load': $initial-load }"/> - - + + - - - + @@ -331,17 +330,21 @@ exclude-result-prefixes="#all" - - + + + + + - - + + + @@ -429,6 +432,9 @@ exclude-result-prefixes="#all" + + + diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/translations.rdf b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/translations.rdf index a7dfaff31..df130e8df 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/translations.rdf +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/translations.rdf @@ -104,6 +104,10 @@ Geo Geo + + Other + Otro + Files Archivos diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/client.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/client.xsl index 052116f39..3f0ec1505 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/client.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/client.xsl @@ -286,7 +286,7 @@ WHERE - + @@ -323,15 +323,15 @@ WHERE - - + + - + - - - + + + @@ -481,22 +481,6 @@ WHERE - - - - - -
  • - - - - / - -
  • -
    - @@ -607,9 +591,19 @@ WHERE + + + + + + + + + - - + + + @@ -751,7 +745,7 @@ WHERE - + Application change. Base URI: @@ -842,7 +836,8 @@ WHERE - + + @@ -853,24 +848,22 @@ WHERE
    - - - - - - - - - - - + + - + - +