diff --git a/bin/imports/create-query.sh b/bin/add-construct.sh
similarity index 72%
rename from bin/imports/create-query.sh
rename to bin/add-construct.sh
index f9d793498..5549b25ab 100755
--- a/bin/imports/create-query.sh
+++ b/bin/add-construct.sh
@@ -5,7 +5,7 @@ print_usage()
 {
     printf "Creates a SPARQL CONSTRUCT query.\n"
     printf "\n"
-    printf "Usage:  %s options\n" "$0"
+    printf "Usage:  %s options TARGET_URI\n" "$0"
     printf "\n"
     printf "Options:\n"
     printf "  -f, --cert-pem-file CERT_FILE        .pem file with the WebID certificate of the agent\n"
@@ -13,20 +13,16 @@ print_usage()
     printf "  -b, --base BASE_URI                  Base URI of the application\n"
     printf "  --proxy PROXY_URL                    The host this request will be proxied through (optional)\n"
     printf "\n"
-    printf "  --title TITLE                        Title of the chart\n"
-    printf "  --description DESCRIPTION            Description of the chart (optional)\n"
-    printf "  --slug STRING                        String that will be used as URI path segment (optional)\n"
+    printf "  --title TITLE                        Title of the query\n"
+    printf "  --description DESCRIPTION            Description of the query (optional)\n"
+    printf "  --uri URI                            URI of the query (optional)\n"
     printf "\n"
     printf "  --query-file ABS_PATH                Absolute path to the text file with the SPARQL query string\n"
+    printf "  --service SERVICE_URI                URI of the SPARQL service specific to this query (optional)\n"
 }
 
 hash turtle 2>/dev/null || { echo >&2 "turtle not on \$PATH. Aborting."; exit 1; }
 
-urlencode() {
-    python -c 'import urllib.parse, sys; print(urllib.parse.quote(sys.argv[1], sys.argv[2]))' \
-        "$1" "$urlencode_safe"
-}
-
 args=()
 while [[ $# -gt 0 ]]
 do
@@ -63,8 +59,8 @@ do
         shift # past argument
         shift # past value
         ;;
-        --slug)
-        slug="$2"
+        --uri)
+        uri="$2"
         shift # past argument
         shift # past value
         ;;
@@ -73,6 +69,11 @@ do
         shift # past argument
         shift # past value
         ;;
+        --service)
+        service="$2"
+        shift # past argument
+        shift # past value
+        ;;
         *)    # unknown arguments
         args+=("$1") # save it in an array for later
         shift # past argument
@@ -81,6 +82,8 @@ do
 done
 set -- "${args[@]}" # restore args
 
+target="$1"
+
 if [ -z "$cert_pem_file" ] ; then
     print_usage
    exit 1
@@ -102,43 +105,38 @@ if [ -z "$query_file" ] ; then
    exit 1
 fi
 
-if [ -z "$slug" ] ; then
-    slug=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase
-fi
-encoded_slug=$(urlencode "$slug")
-
-container="${base}queries/"
 query=$(<"$query_file") # read query string from file
 
-target="${container}${encoded_slug}/"
-
 args+=("-f")
 args+=("$cert_pem_file")
 args+=("-p")
 args+=("$cert_password")
 args+=("-t")
 args+=("text/turtle") # content type
-args+=("$target")
 
 if [ -n "$proxy" ]; then
     args+=("--proxy")
     args+=("$proxy")
 fi
 
+if [ -n "$uri" ] ; then
+    subject="<${uri}>"
+else
+    subject="_:subject"
+fi
+
 turtle+="@prefix ldh:  <https://w3id.org/atomgraph/linkeddatahub#> .\n"
-turtle+="@prefix dh:   <https://www.w3.org/ns/ldt/document-hierarchy#> .\n"
 turtle+="@prefix dct:  <http://purl.org/dc/terms/> .\n"
-turtle+="@prefix foaf: <http://xmlns.com/foaf/0.1/> .\n"
 turtle+="@prefix sp:   <http://spinrdf.org/sp#> .\n"
-turtle+="_:query a sp:Construct .\n"
-turtle+="_:query dct:title \"${title}\" .\n"
-turtle+="_:query sp:text \"\"\"${query}\"\"\" .\n"
-turtle+="<${target}> a dh:Item .\n"
-turtle+="<${target}> foaf:primaryTopic _:query .\n"
-turtle+="<${target}> dct:title \"${title}\" .\n"
+turtle+="${subject} a sp:Construct .\n"
+turtle+="${subject} dct:title \"${title}\" .\n"
+turtle+="${subject} sp:text \"\"\"${query}\"\"\" .\n"
+if [ -n "$service" ] ; then
+    turtle+="${subject} ldh:service <${service}> .\n"
+fi
 if [ -n "$description" ] ; then
-    turtle+="_:query dct:description \"${description}\" .\n"
+    turtle+="${subject} dct:description \"${description}\" .\n"
 fi
 
 # submit Turtle doc to the server
-echo -e "$turtle" | turtle
--base="$target" | put.sh "${args[@]}" \ No newline at end of file +echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}" \ No newline at end of file diff --git a/bin/imports/create-file.sh b/bin/add-file.sh similarity index 61% rename from bin/imports/create-file.sh rename to bin/add-file.sh index 36413d34c..ecc6104b1 100755 --- a/bin/imports/create-file.sh +++ b/bin/add-file.sh @@ -5,7 +5,7 @@ print_usage() { printf "Uploads a file.\n" printf "\n" - printf "Usage: %s options\n" "$0" + printf "Usage: %s options TARGET_URI\n" "$0" printf "\n" printf "Options:\n" printf " -f, --cert-pem-file CERT_FILE .pem file with the WebID certificate of the agent\n" @@ -14,22 +14,14 @@ print_usage() printf " --proxy PROXY_URL The host this request will be proxied through (optional)\n" printf "\n" printf " --title TITLE Title of the file\n" - printf " --container CONTAINER_URI URI of the parent container (optional)\n" printf " --description DESCRIPTION Description of the file (optional)\n" - printf " --slug STRING String that will be used as URI path segment (optional)\n" printf "\n" printf " --file ABS_PATH Absolute path to the file\n" printf " --file-content-type MEDIA_TYPE Media type of the file (optional)\n" - #printf " --file-slug STRING String that will be used as the file's URI path segment (optional)\n" } hash curl 2>/dev/null || { echo >&2 "curl not on \$PATH. Aborting."; exit 1; } -urlencode() { - python -c 'import urllib.parse, sys; print(urllib.parse.quote(sys.argv[1], sys.argv[2]))' \ - "$1" "$urlencode_safe" -} - args=() while [[ $# -gt 0 ]] do @@ -66,16 +58,6 @@ do shift # past argument shift # past value ;; - --slug) - slug="$2" - shift # past argument - shift # past value - ;; - --container) - container="$2" - shift # past argument - shift # past value - ;; --file) file="$2" shift # past argument @@ -86,11 +68,6 @@ do shift # past argument shift # past value ;; - --file-slug) - file_slug="$2" - shift # past argument - shift # past value - ;; *) # unknown arguments args+=("$1") # save it in an array for later shift # past argument @@ -99,6 +76,8 @@ do done set -- "${args[@]}" # restore args +target="$1" + if [ -z "$cert_pem_file" ] ; then print_usage exit 1 @@ -124,23 +103,6 @@ if [ -z "$file_content_type" ] ; then file_content_type=$(file -b --mime-type "$file") fi -if [ -z "$slug" ] ; then - slug=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase -fi -encoded_slug=$(urlencode "$slug") - -# need to create explicit file URI since that is what this script returns (not the graph URI) - -#if [ -z "$file_slug" ] ; then -# file_slug=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase -#fi - -if [ -z "$container" ] ; then - container="${base}files/" -fi - -target="${container}${encoded_slug}/" - # https://stackoverflow.com/questions/19116016/what-is-the-right-way-to-post-multipart-form-data-using-curl rdf_post+="-F \"rdf=\"\n" @@ -151,18 +113,8 @@ rdf_post+="-F \"pu=http://purl.org/dc/terms/title\"\n" rdf_post+="-F \"ol=${title}\"\n" rdf_post+="-F \"pu=http://www.w3.org/1999/02/22-rdf-syntax-ns#type\"\n" rdf_post+="-F \"ou=http://www.semanticdesktop.org/ontologies/2007/03/22/nfo#FileDataObject\"\n" -rdf_post+="-F \"su=${target}\"\n" -rdf_post+="-F \"pu=http://purl.org/dc/terms/title\"\n" -rdf_post+="-F \"ol=${title}\"\n" -rdf_post+="-F \"pu=http://www.w3.org/1999/02/22-rdf-syntax-ns#type\"\n" -rdf_post+="-F \"ou=https://www.w3.org/ns/ldt/document-hierarchy#Item\"\n" -rdf_post+="-F \"pu=http://xmlns.com/foaf/0.1/primaryTopic\"\n" -rdf_post+="-F \"ob=file\"\n" -rdf_post+="-F 
\"pu=http://rdfs.org/sioc/ns#has_container\"\n" -rdf_post+="-F \"ou=${container}\"\n" if [ -n "$description" ] ; then - rdf_post+="-F \"sb=file\"\n" rdf_post+="-F \"pu=http://purl.org/dc/terms/description\"\n" rdf_post+="-F \"ol=${description}\"\n" fi @@ -176,14 +128,5 @@ if [ -n "$proxy" ]; then target="${target/$target_host/$proxy_host}" fi -# POST RDF/POST multipart form and capture the effective URL -effective_url=$(echo -e "$rdf_post" | curl -w '%{url_effective}' -f -v -s -k -X PUT -H "Accept: text/turtle" -E "$cert_pem_file":"$cert_password" -o /dev/null --config - "$target") - -# If using proxy, rewrite the effective URL back to original hostname -if [ -n "$proxy" ]; then - # Replace proxy host with original host in the effective URL - rewritten_url="${effective_url/$proxy_host/$target_host}" - echo "$rewritten_url" -else - echo "$effective_url" -fi +# POST RDF/POST multipart form +echo -e "$rdf_post" | curl -f -v -s -k -X POST -H "Accept: text/turtle" -E "$cert_pem_file":"$cert_password" -o /dev/null --config - "$target" diff --git a/bin/imports/create-csv-import.sh b/bin/imports/add-csv-import.sh similarity index 73% rename from bin/imports/create-csv-import.sh rename to bin/imports/add-csv-import.sh index f7edac6cd..5b01392b1 100755 --- a/bin/imports/create-csv-import.sh +++ b/bin/imports/add-csv-import.sh @@ -5,7 +5,7 @@ print_usage() { printf "Transforms CSV data into RDF using a SPARQL query and imports it.\n" printf "\n" - printf "Usage: %s options\n" "$0" + printf "Usage: %s options TARGET_URI\n" "$0" printf "\n" printf "Options:\n" printf " -f, --cert-pem-file CERT_FILE .pem file with the WebID certificate of the agent\n" @@ -13,9 +13,9 @@ print_usage() printf " -b, --base BASE_URI Base URI of the application\n" printf " --proxy PROXY_URL The host this request will be proxied through (optional)\n" printf "\n" - printf " --title TITLE Title of the container\n" - printf " --description DESCRIPTION Description of the container (optional)\n" - printf " --slug STRING String that will be used as URI path segment (optional)\n" + printf " --title TITLE Title of the import\n" + printf " --description DESCRIPTION Description of the import (optional)\n" + printf " --uri URI URI of the import resource (optional)\n" printf "\n" printf " --query QUERY_URI URI of the CONSTRUCT mapping query\n" printf " --file FILE_URI URI of the CSV file\n" @@ -24,11 +24,6 @@ print_usage() hash turtle 2>/dev/null || { echo >&2 "turtle not on \$PATH. 
Aborting."; exit 1; }
 
-urlencode() {
-    python -c 'import urllib.parse, sys; print(urllib.parse.quote(sys.argv[1], sys.argv[2]))' \
-        "$1" "$urlencode_safe"
-}
-
 args=()
 while [[ $# -gt 0 ]]
 do
@@ -65,8 +60,8 @@ do
         shift # past argument
         shift # past value
         ;;
-        --slug)
-        slug="$2"
+        --uri)
+        uri="$2"
         shift # past argument
         shift # past value
         ;;
@@ -93,6 +88,8 @@ do
 done
 set -- "${args[@]}" # restore args
 
+target="$1"
+
 if [ -z "$cert_pem_file" ] ; then
     print_usage
     exit 1
@@ -122,14 +119,11 @@ if [ -z "$delimiter" ] ; then
     exit 1
 fi
 
-if [ -z "$slug" ] ; then
-    slug=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase
+if [ -n "$uri" ] ; then
+    subject="<${uri}>"
+else
+    subject="_:import"
 fi
-encoded_slug=$(urlencode "$slug")
-
-container="${base}imports/"
-
-target="${container}${encoded_slug}/"
 
 args+=("-f")
 args+=("$cert_pem_file")
@@ -137,29 +131,23 @@ args+=("-p")
 args+=("$cert_password")
 args+=("-t")
 args+=("text/turtle") # content type
-args+=("$target")
 
 if [ -n "$proxy" ]; then
     args+=("--proxy")
     args+=("$proxy")
 fi
 
 turtle+="@prefix ldh:  <https://w3id.org/atomgraph/linkeddatahub#> .\n"
-turtle+="@prefix dh:   <https://www.w3.org/ns/ldt/document-hierarchy#> .\n"
 turtle+="@prefix dct:  <http://purl.org/dc/terms/> .\n"
-turtle+="@prefix foaf: <http://xmlns.com/foaf/0.1/> .\n"
 turtle+="@prefix spin: <http://spinrdf.org/spin#> .\n"
-turtle+="_:import a ldh:CSVImport .\n"
-turtle+="_:import dct:title \"${title}\" .\n"
-turtle+="_:import spin:query <${query}> .\n"
-turtle+="_:import ldh:file <${file}> .\n"
-turtle+="_:import ldh:delimiter \"${delimiter}\" .\n"
-turtle+="<${target}> a dh:Item .\n"
-turtle+="<${target}> foaf:primaryTopic _:import .\n"
-turtle+="<${target}> dct:title \"${title}\" .\n"
+turtle+="${subject} a ldh:CSVImport .\n"
+turtle+="${subject} dct:title \"${title}\" .\n"
+turtle+="${subject} spin:query <${query}> .\n"
+turtle+="${subject} ldh:file <${file}> .\n"
+turtle+="${subject} ldh:delimiter \"${delimiter}\" .\n"
 if [ -n "$description" ] ; then
-    turtle+="_:import dct:description \"${description}\" .\n"
+    turtle+="${subject} dct:description \"${description}\" .\n"
 fi
 
 # submit Turtle doc to the server
-echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}"
\ No newline at end of file
+echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}"
\ No newline at end of file
diff --git a/bin/imports/create-rdf-import.sh b/bin/imports/add-rdf-import.sh
similarity index 73%
rename from bin/imports/create-rdf-import.sh
rename to bin/imports/add-rdf-import.sh
index 8d76b5e48..c47e68011 100755
--- a/bin/imports/create-rdf-import.sh
+++ b/bin/imports/add-rdf-import.sh
@@ -5,7 +5,7 @@ print_usage()
 {
     printf "Imports RDF data.\n"
     printf "\n"
-    printf "Usage:  %s options\n" "$0"
+    printf "Usage:  %s options TARGET_URI\n" "$0"
     printf "\n"
     printf "Options:\n"
     printf "  -f, --cert-pem-file CERT_FILE        .pem file with the WebID certificate of the agent\n"
@@ -13,9 +13,9 @@ print_usage()
     printf "  -b, --base BASE_URI                  Base URI of the application\n"
     printf "  --proxy PROXY_URL                    The host this request will be proxied through (optional)\n"
     printf "\n"
-    printf "  --title TITLE                        Title of the container\n"
-    printf "  --description DESCRIPTION            Description of the container (optional)\n"
-    printf "  --slug STRING                        String that will be used as URI path segment (optional)\n"
+    printf "  --title TITLE                        Title of the import\n"
+    printf "  --description DESCRIPTION            Description of the import (optional)\n"
+    printf "  --uri URI                            URI of the import resource (optional)\n"
     printf "\n"
     printf "  --query QUERY_URI                    URI of the CONSTRUCT mapping query (optional)\n"
     printf "  --graph GRAPH_URI                    URI of the graph (optional)\n"
@@ -24,11 +24,6 @@ print_usage()
 }
 
 hash turtle 2>/dev/null || { echo >&2 "turtle not on \$PATH. Aborting."; exit 1; }
 
-urlencode() {
-    python -c 'import urllib.parse, sys; print(urllib.parse.quote(sys.argv[1], sys.argv[2]))' \
-        "$1" "$urlencode_safe"
-}
-
 args=()
 while [[ $# -gt 0 ]]
 do
@@ -65,8 +60,8 @@ do
         shift # past argument
         shift # past value
         ;;
-        --slug)
-        slug="$2"
+        --uri)
+        uri="$2"
         shift # past argument
         shift # past value
         ;;
@@ -93,6 +88,8 @@ do
 done
 set -- "${args[@]}" # restore args
 
+target="$1"
+
 if [ -z "$cert_pem_file" ] ; then
     print_usage
     exit 1
@@ -114,14 +111,11 @@ if [ -z "$file" ] ; then
     exit 1
 fi
 
-if [ -z "$slug" ] ; then
-    slug=$(uuidgen | tr '[:upper:]' '[:lower:]') # lowercase
+if [ -n "$uri" ] ; then
+    subject="<${uri}>"
+else
+    subject="_:import"
 fi
-encoded_slug=$(urlencode "$slug")
-
-container="${base}imports/"
-
-target="${container}${encoded_slug}/"
 
 args+=("-f")
 args+=("$cert_pem_file")
@@ -129,34 +123,28 @@ args+=("-p")
 args+=("$cert_password")
 args+=("-t")
 args+=("text/turtle") # content type
-args+=("$target")
 
 if [ -n "$proxy" ]; then
     args+=("--proxy")
     args+=("$proxy")
 fi
 
 turtle+="@prefix ldh:  <https://w3id.org/atomgraph/linkeddatahub#> .\n"
-turtle+="@prefix dh:   <https://www.w3.org/ns/ldt/document-hierarchy#> .\n"
 turtle+="@prefix dct:  <http://purl.org/dc/terms/> .\n"
-turtle+="@prefix foaf: <http://xmlns.com/foaf/0.1/> .\n"
-turtle+="_:import a ldh:RDFImport .\n"
-turtle+="_:import dct:title \"${title}\" .\n"
-turtle+="_:import ldh:file <${file}> .\n"
-turtle+="<${target}> a dh:Item .\n"
-turtle+="<${target}> foaf:primaryTopic _:import .\n"
-turtle+="<${target}> dct:title \"${title}\" .\n"
+turtle+="${subject} a ldh:RDFImport .\n"
+turtle+="${subject} dct:title \"${title}\" .\n"
+turtle+="${subject} ldh:file <${file}> .\n"
 if [ -n "$graph" ] ; then
     turtle+="@prefix sd: <http://www.w3.org/ns/sparql-service-description#> .\n"
-    turtle+="_:import sd:name <${graph}> .\n"
+    turtle+="${subject} sd:name <${graph}> .\n"
 fi
 if [ -n "$query" ] ; then
     turtle+="@prefix spin: <http://spinrdf.org/spin#> .\n"
-    turtle+="_:import spin:query <${query}> .\n"
+    turtle+="${subject} spin:query <${query}> .\n"
 fi
 if [ -n "$description" ] ; then
-    turtle+="_:import dct:description \"${description}\" .\n"
+    turtle+="${subject} dct:description \"${description}\" .\n"
 fi
 
 # submit Turtle doc to the server
-echo -e "$turtle" | turtle --base="$target" | put.sh "${args[@]}"
\ No newline at end of file
+echo -e "$turtle" | turtle --base="$target" | post.sh "${args[@]}"
\ No newline at end of file
diff --git a/bin/imports/import-csv.sh b/bin/imports/import-csv.sh
index d7c55dd38..5ebde7b7b 100755
--- a/bin/imports/import-csv.sh
+++ b/bin/imports/import-csv.sh
@@ -139,55 +139,73 @@ if [ -z "$proxy" ] ; then
     proxy="$base"
 fi
 
-query_doc=$(create-query.sh \
+# Generate query ID for fragment identifier
+query_id=$(uuidgen | tr '[:upper:]' '[:lower:]')
+
+# Create the imports/ container first (ignore error if it already exists)
+create-container.sh \
     -b "$base" \
     -f "$cert_pem_file" \
     -p "$cert_password" \
     --proxy "$proxy" \
-    --title "$title" \
-    --slug "$query_doc_slug" \
-    --query-file "$query_file"
-)
+    --title "Imports" \
+    --parent "$base" \
+    --slug "imports" 2>/dev/null || true
 
-query_ntriples=$(get.sh \
+# Create the import item document
+import_doc=$(create-item.sh \
+    -b "$base" \
     -f "$cert_pem_file" \
     -p "$cert_password" \
     --proxy "$proxy" \
-    --accept 'application/n-triples' \
-    "$query_doc"
+    --title "$title" \
+    --container "${base}imports/" \
+    --slug "$query_doc_slug"
 )
 
-query=$(echo "$query_ntriples" | sed -rn "s/<${query_doc//\//\\/}> <(.*)> \./\1/p" | head -1)
-
-file_doc=$(create-file.sh \
+# Add the CONSTRUCT query to the item using fragment identifier
+# TODO: fix ambiguous add-construct.sh script names
+"$(dirname "$0")/../add-construct.sh" \
     -b "$base" \
     -f "$cert_pem_file" \
     -p "$cert_password" \
     --proxy "$proxy" \
     --title "$title" \
-    --slug "$file_doc_slug" \
-    --file-slug "$file_slug" \
-    --file "$file" \
-    --file-content-type "text/csv"
-)
+    --uri "#${query_id}" \
+    --query-file "$query_file" \
+    "$import_doc"
+
+# The query URI is the document URI with the fragment
+query="${import_doc}#${query_id}"
 
-file_ntriples=$(get.sh \
+# Add the file to the import item
+add-file.sh \
+    -b "$base" \
     -f "$cert_pem_file" \
     -p "$cert_password" \
     --proxy "$proxy" \
-    --accept 'application/n-triples' \
-    "$file_doc")
+    --title "$title" \
+    --file "$file" \
+    --file-content-type "text/csv" \
+    "$import_doc"
+
+# Calculate file URI from SHA1 hash
+sha1sum=$(shasum -a 1 "$file" | awk '{print $1}')
+file_uri="${base}uploads/${sha1sum}"
 
-file=$(echo "$file_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p" | head -1)
+# Generate import ID for fragment identifier
+import_id=$(uuidgen | tr '[:upper:]' '[:lower:]')
 
-create-csv-import.sh \
+# Add the import metadata to the import item using fragment identifier
+add-csv-import.sh \
     -b "$base" \
     -f "$cert_pem_file" \
     -p "$cert_password" \
     --proxy "$proxy" \
     --title "$title" \
-    --slug "$import_slug" \
+    --uri "#${import_id}" \
     --query "$query" \
-    --file "$file" \
-    --delimiter "$delimiter"
+    --file "$file_uri" \
+    --delimiter "$delimiter" \
+    "$import_doc"
\ No newline at end of file
diff --git a/bin/imports/import-rdf.sh b/bin/imports/import-rdf.sh
index 086d4d303..cdf398ac5 100755
--- a/bin/imports/import-rdf.sh
+++ b/bin/imports/import-rdf.sh
@@ -142,68 +142,86 @@ if [ -z "$proxy" ] ; then
     proxy="$base"
 fi
 
+# Create the imports/ container first
+create-container.sh \
+    -b "$base" \
+    -f "$cert_pem_file" \
+    -p "$cert_password" \
+    --proxy "$proxy" \
+    --title "Imports" \
+    --parent "$base" \
+    --slug "imports"
+
+# Create the import item document
+import_doc=$(create-item.sh \
+    -b "$base" \
+    -f "$cert_pem_file" \
+    -p "$cert_password" \
+    --proxy "$proxy" \
+    --title "$title" \
+    --container "${base}imports/" \
+    --slug "$query_doc_slug"
+)
+
 if [ -n "$query_file" ] ; then
-    query_doc=$(create-query.sh \
+    # Generate query ID for fragment identifier
+    query_id=$(uuidgen | tr '[:upper:]' '[:lower:]')
+
+    # Add the CONSTRUCT query to the item using fragment identifier
+    # TODO: fix ambiguous add-construct.sh script names
+    "$(dirname "$0")/../add-construct.sh" \
         -b "$base" \
         -f "$cert_pem_file" \
         -p "$cert_password" \
         --proxy "$proxy" \
         --title "$title" \
-        --slug "$query_doc_slug" \
-        --query-file "$query_file"
-    )
-
-    query_ntriples=$(get.sh \
-        -f "$cert_pem_file" \
-        -p "$cert_password" \
-        --proxy "$proxy" \
-        --accept 'application/n-triples' \
-        "$query_doc"
-    )
+        --uri "#${query_id}" \
+        --query-file "$query_file" \
+        "$import_doc"
 
-    query=$(echo "$query_ntriples" | sed -rn "s/<${query_doc//\//\\/}> <(.*)> \./\1/p" | head -1)
+    # The query URI is the document URI with the fragment
+    query="${import_doc}#${query_id}"
 fi
 
-file_doc=$(create-file.sh \
+# Add the file to the import item
+add-file.sh \
     -b "$base" \
     -f "$cert_pem_file" \
     -p "$cert_password" \
     --proxy "$proxy" \
     --title "$title" \
-    --slug "$file_doc_slug" \
-    --file-slug "$file_slug" \
     --file "$file" \
-    --file-content-type "$file_content_type"
-)
+    --file-content-type "$file_content_type" \
+    "$import_doc"
 
-file_ntriples=$(get.sh \
-    -f "$cert_pem_file" \
-    -p "$cert_password" \
-    --proxy "$proxy" \
-    --accept 'application/n-triples' \
-    "$file_doc"
-)
+# Calculate file URI from SHA1 hash
+sha1sum=$(shasum -a 1 "$file" | awk '{print $1}')
+file_uri="${base}uploads/${sha1sum}" -file=$(echo "$file_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p" | head -1) +# Generate import ID for fragment identifier +import_id=$(uuidgen | tr '[:upper:]' '[:lower:]') +# Add the import metadata to the import item using fragment identifier if [ -n "$query" ] ; then - create-rdf-import.sh \ + add-rdf-import.sh \ -b "$base" \ -f "$cert_pem_file" \ -p "$cert_password" \ --proxy "$proxy" \ --title "$title" \ - --slug "$import_slug" \ + --uri "#${import_id}" \ --query "$query" \ - --file "$file" + --file "$file_uri" \ + "$import_doc" else - create-rdf-import.sh \ + add-rdf-import.sh \ -b "$base" \ -f "$cert_pem_file" \ -p "$cert_password" \ --proxy "$proxy" \ --title "$title" \ - --slug "$import_slug" \ + --uri "#${import_id}" \ --graph "$graph" \ - --file "$file" + --file "$file_uri" \ + "$import_doc" fi \ No newline at end of file diff --git a/http-tests/admin/model/ontology-import-upload-no-deadlock.sh b/http-tests/admin/model/ontology-import-upload-no-deadlock.sh index 939da9687..7079f86d5 100755 --- a/http-tests/admin/model/ontology-import-upload-no-deadlock.sh +++ b/http-tests/admin/model/ontology-import-upload-no-deadlock.sh @@ -28,24 +28,32 @@ add-agent-to-group.sh \ # Step 1: Upload an RDF file file_content_type="text/turtle" +slug=$(uuidgen | tr '[:upper:]' '[:lower:]') -file_doc=$(create-file.sh \ +# Create an item document to hold the file +file_doc=$(create-item.sh \ -f "$AGENT_CERT_FILE" \ -p "$AGENT_CERT_PWD" \ -b "$END_USER_BASE_URL" \ --title "Test ontology for upload import" \ - --file "$pwd/test-ontology-import.ttl" \ - --file-content-type "${file_content_type}") - -# Step 2: Extract the uploaded file URI (content-addressed) + --container "$END_USER_BASE_URL" \ + --slug "$slug") -file_doc_ntriples=$(get.sh \ +# Add the file to the document +add-file.sh \ -f "$AGENT_CERT_FILE" \ -p "$AGENT_CERT_PWD" \ - --accept 'application/n-triples' \ - "$file_doc") + -b "$END_USER_BASE_URL" \ + --title "Test ontology for upload import" \ + --file "$pwd/test-ontology-import.ttl" \ + --file-content-type "${file_content_type}" \ + "$file_doc" + +# Step 2: Extract the uploaded file URI (content-addressed) -upload_uri=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p") +# Calculate file URI from SHA1 hash +sha1sum=$(shasum -a 1 "$pwd/test-ontology-import.ttl" | awk '{print $1}') +upload_uri="${END_USER_BASE_URL}uploads/${sha1sum}" # Verify the uploaded file is accessible before we add it as an import curl -k -f -s \ diff --git a/http-tests/imports/GET-file-304.sh b/http-tests/imports/GET-file-304.sh index 8b4f3728c..1f38581f7 100755 --- a/http-tests/imports/GET-file-304.sh +++ b/http-tests/imports/GET-file-304.sh @@ -7,6 +7,7 @@ purge_cache "$END_USER_VARNISH_SERVICE" purge_cache "$ADMIN_VARNISH_SERVICE" purge_cache "$FRONTEND_VARNISH_SERVICE" +# Run the create-file test and capture the file URI it outputs file=$(./create-file.sh) etag=$( diff --git a/http-tests/imports/GET-file-range.sh b/http-tests/imports/GET-file-range.sh index 649215916..e7eceb0a8 100755 --- a/http-tests/imports/GET-file-range.sh +++ b/http-tests/imports/GET-file-range.sh @@ -22,22 +22,30 @@ add-agent-to-group.sh \ filename="/tmp/random-file" time dd if=/dev/urandom of="$filename" bs=1 count=1024 file_content_type="application/octet-stream" +slug=$(uuidgen | tr '[:upper:]' '[:lower:]') -file_doc=$(create-file.sh \ --f "$AGENT_CERT_FILE" \ --p "$AGENT_CERT_PWD" \ --b "$END_USER_BASE_URL" \ ---title "Random file" \ ---file "$filename" \ 
---file-content-type "${file_content_type}") - -file_doc_ntriples=$(get.sh \ +# Create an item document to hold the file +file_doc=$(create-item.sh \ -f "$AGENT_CERT_FILE" \ -p "$AGENT_CERT_PWD" \ - --accept 'application/n-triples' \ - "$file_doc") + -b "$END_USER_BASE_URL" \ + --title "Random file" \ + --container "$END_USER_BASE_URL" \ + --slug "$slug") -file=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p") +# Add the file to the document +add-file.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "Random file" \ + --file "$filename" \ + --file-content-type "${file_content_type}" \ + "$file_doc" + +# Calculate file URI from SHA1 hash +sha1sum=$(shasum -a 1 "$filename" | awk '{print $1}') +file="${END_USER_BASE_URL}uploads/${sha1sum}" from=100 length=42 diff --git a/http-tests/imports/GET-file-sha1sum.sh b/http-tests/imports/GET-file-sha1sum.sh index 3384ffc4a..08a0bd3fb 100755 --- a/http-tests/imports/GET-file-sha1sum.sh +++ b/http-tests/imports/GET-file-sha1sum.sh @@ -23,21 +23,36 @@ filename="/tmp/random-file" time dd if=/dev/urandom of="$filename" bs=1 count=1024 file_content_type="application/octet-stream" -file_doc=$(create-file.sh \ --f "$AGENT_CERT_FILE" \ --p "$AGENT_CERT_PWD" \ --b "$END_USER_BASE_URL" \ ---title "Random file" \ ---file "$filename" \ ---file-content-type "${file_content_type}") - -file_doc_ntriples=$(get.sh \ +# Create a container for files first +create-container.sh \ -f "$AGENT_CERT_FILE" \ -p "$AGENT_CERT_PWD" \ - --accept 'application/n-triples' \ - "$file_doc") + -b "$END_USER_BASE_URL" \ + --title "Files" \ + --parent "$END_USER_BASE_URL" \ + --slug "files" -file=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p") +# Create an item document to hold the file +file_doc=$(create-item.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "Random file" \ + --container "${END_USER_BASE_URL}files/") + +# Add the file to the document +add-file.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "Random file" \ + --file "$filename" \ + --file-content-type "${file_content_type}" \ + "$file_doc" + +# Calculate file URI from SHA1 hash +sha1sum=$(shasum -a 1 "$filename" | awk '{print $1}') +file="${END_USER_BASE_URL}uploads/${sha1sum}" server_sha1sum=$(echo "$file" | cut -d "/" -f 5) # cut the last URL path segment diff --git a/http-tests/imports/PUT-file-format-explicit.sh b/http-tests/imports/PUT-file-format-explicit.sh index 3c9dffd8b..3d5b3d38c 100755 --- a/http-tests/imports/PUT-file-format-explicit.sh +++ b/http-tests/imports/PUT-file-format-explicit.sh @@ -24,20 +24,30 @@ echo "test,data,sample" > "$test_file" echo "1,2,3" >> "$test_file" echo "4,5,6" >> "$test_file" -# generate slug for the file document - slug=$(uuidgen | tr '[:upper:]' '[:lower:]') -# upload file with explicit media type: text/plain +# Create an item document to hold the file +file_doc=$(create-item.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "Test File for Media Type Update" \ + --container "$END_USER_BASE_URL" \ + --slug "$slug") -file_doc=$(create-file.sh \ +# upload file with explicit media type: text/plain +add-file.sh \ -f "$AGENT_CERT_FILE" \ -p "$AGENT_CERT_PWD" \ -b "$END_USER_BASE_URL" \ --title "Test File for Media Type Update" \ - --slug "$slug" \ --file "$test_file" \ - --file-content-type "text/plain") + --file-content-type "text/plain" \ + 
"$file_doc" + +# Calculate file URI from SHA1 hash +sha1sum=$(shasum -a 1 "$test_file" | awk '{print $1}') +file_uri="${END_USER_BASE_URL}uploads/${sha1sum}" # get the file resource URI and initial dct:format @@ -47,8 +57,6 @@ file_doc_ntriples=$(get.sh \ --accept 'application/n-triples' \ "$file_doc") -file_uri=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p") - # get initial SHA1 hash initial_sha1=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_uri//\//\\/}> \"(.*)\" \./\1/p") @@ -61,18 +69,17 @@ if [[ ! "$initial_format" =~ text/plain ]]; then exit 1 fi -# re-upload the same file with same slug but different explicit media type: text/csv +# re-upload the same file but different explicit media type: text/csv # this simulates editing the file document through the UI and uploading a new file -create-file.sh \ +add-file.sh \ -f "$AGENT_CERT_FILE" \ -p "$AGENT_CERT_PWD" \ -b "$END_USER_BASE_URL" \ --title "Test File for Media Type Update" \ - --slug "$slug" \ --file "$test_file" \ --file-content-type "text/csv" \ - > /dev/null + "$file_doc" # get updated document diff --git a/http-tests/imports/PUT-file-format.sh b/http-tests/imports/PUT-file-format.sh index 4a30ad9d6..fa503fbac 100755 --- a/http-tests/imports/PUT-file-format.sh +++ b/http-tests/imports/PUT-file-format.sh @@ -24,19 +24,29 @@ echo "test,data,sample" > "$test_file" echo "1,2,3" >> "$test_file" echo "4,5,6" >> "$test_file" -# generate slug for the file document - slug=$(uuidgen | tr '[:upper:]' '[:lower:]') -# upload file WITHOUT explicit media type (rely on browser detection via `file -b --mime-type`) +# Create an item document to hold the file +file_doc=$(create-item.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "Test File for Browser Media Type" \ + --container "$END_USER_BASE_URL" \ + --slug "$slug") -file_doc=$(create-file.sh \ +# upload file WITHOUT explicit media type (rely on browser detection via `file -b --mime-type`) +add-file.sh \ -f "$AGENT_CERT_FILE" \ -p "$AGENT_CERT_PWD" \ -b "$END_USER_BASE_URL" \ --title "Test File for Browser Media Type" \ - --slug "$slug" \ - --file "$test_file") + --file "$test_file" \ + "$file_doc" + +# Calculate file URI from SHA1 hash +sha1sum=$(shasum -a 1 "$test_file" | awk '{print $1}') +file_uri="${END_USER_BASE_URL}uploads/${sha1sum}" # get the file resource URI and initial dct:format @@ -46,25 +56,23 @@ file_doc_ntriples=$(get.sh \ --accept 'application/n-triples' \ "$file_doc") -file_uri=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p") - # get initial SHA1 hash initial_sha1=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_uri//\//\\/}> \"(.*)\" \./\1/p") # get initial dct:format (should be browser-detected) initial_format=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_uri//\//\\/}> <(.*)> \./\1/p") -# re-upload the same file with same slug but WITH explicit media type: text/csv +# re-upload the same file but WITH explicit media type: text/csv # this simulates editing and uploading with a corrected format after browser auto-detection was wrong -create-file.sh \ +add-file.sh \ -f "$AGENT_CERT_FILE" \ -p "$AGENT_CERT_PWD" \ -b "$END_USER_BASE_URL" \ --title "Test File for Browser Media Type" \ - --slug "$slug" \ --file "$test_file" \ --file-content-type "text/csv" \ + "$file_doc" \ > /dev/null # get updated document diff --git a/http-tests/imports/create-file.sh b/http-tests/imports/create-file.sh index d7e5c462c..a054bb1af 100755 --- a/http-tests/imports/create-file.sh +++ 
b/http-tests/imports/create-file.sh @@ -20,24 +20,30 @@ add-agent-to-group.sh \ # create file file_content_type="text/csv" +slug=$(uuidgen | tr '[:upper:]' '[:lower:]') -file_doc=$(create-file.sh \ --f "$AGENT_CERT_FILE" \ --p "$AGENT_CERT_PWD" \ --b "$END_USER_BASE_URL" \ ---title "Test CSV" \ ---file "$pwd/test.csv" \ ---file-content-type "${file_content_type}") - -file_doc_ntriples=$(get.sh \ +# Create an item document to hold the file +file_doc=$(create-item.sh \ -f "$AGENT_CERT_FILE" \ -p "$AGENT_CERT_PWD" \ - --accept 'application/n-triples' \ - "$file_doc") - -# echo "FILE NTRIPLES: $file_doc_ntriples" + -b "$END_USER_BASE_URL" \ + --title "Test CSV" \ + --container "$END_USER_BASE_URL" \ + --slug "$slug") -file=$(echo "$file_doc_ntriples" | sed -rn "s/<${file_doc//\//\\/}> <(.*)> \./\1/p") +# Add the file to the document +add-file.sh \ + -f "$AGENT_CERT_FILE" \ + -p "$AGENT_CERT_PWD" \ + -b "$END_USER_BASE_URL" \ + --title "Test CSV" \ + --file "$pwd/test.csv" \ + --file-content-type "${file_content_type}" \ + "$file_doc" + +# Calculate file URI from SHA1 hash +sha1sum=$(shasum -a 1 "$pwd/test.csv" | awk '{print $1}') +file="${END_USER_BASE_URL}uploads/${sha1sum}" echo "$file" # file URL used in other tests diff --git a/platform/datasets/admin.trig b/platform/datasets/admin.trig index 4756fa90b..07eac47c0 100644 --- a/platform/datasets/admin.trig +++ b/platform/datasets/admin.trig @@ -9,7 +9,6 @@ @prefix sioc: . @prefix foaf: . @prefix dct: . -@prefix spin: . <> { @@ -58,287 +57,6 @@ } -# CONTAINERS - - -{ - - a dh:Container ; - dct:title "Queries" ; - dct:description "SPARQL queries" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Queries" ; - spin:query . - - a sp:Select ; - dct:title "Select query resources" ; - sp:text """PREFIX sp: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { { ?s a sp:Select } - UNION - { ?s a sp:Construct } - UNION - { ?s a sp:Describe } - UNION - { ?s a sp:Ask } - } - }""" . - -} - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "Select instances" ; - foaf:primaryTopic . - - a sp:Select ; - dct:title "Select instances" ; - dct:description "Selects instances of type from the default graph" ; - sp:text """SELECT DISTINCT ?s -WHERE - { ?s a $type ; - ?p ?o - }""" . - -} - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "Select instances in graphs" ; - foaf:primaryTopic . - - a sp:Select ; - dct:title "Select instances in graphs" ; - dct:description "Selects instances of type from named graphs" ; - sp:text """SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { ?s a $type ; - ?p ?o - } - }""" . - -} - - -{ - - a dh:Container ; - dct:title "Files" ; - dct:description "Uploaded files" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Files" ; - spin:query . - - a sp:Select ; - dct:title "Select file resources" ; - sp:text """PREFIX nfo: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { ?s a nfo:FileDataObject } - }""" . - -} - - -{ - - a dh:Container ; - dct:title "Imports" ; - dct:description "Data imports" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Imports" ; - spin:query . - - a sp:Select ; - dct:title "Select import resources" ; - sp:text """PREFIX ldh: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { { ?s a ldh:CSVImport } - UNION - { ?s a ldh:RDFImport } - } - }""" . - -} - - -{ - - a dh:Item ; - dct:title "Geo" ; - dct:description "Geolocated resources" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . 
- - a ldh:View ; - dct:title "Geo resources" ; - spin:query ; - ac:mode ac:MapMode . - - a sp:Select ; - dct:title "Select geo resources" ; - sp:text """PREFIX geo: -PREFIX dct: - -SELECT DISTINCT ?resource -WHERE -{ GRAPH ?graph - { ?resource geo:lat ?lat ; - geo:long ?long - OPTIONAL - { ?resource a ?type } - OPTIONAL - { ?resource dct:title ?title } - } -} -ORDER BY ?title""" . - -} - - -{ - - a dh:Item ; - dct:title "Latest" ; - dct:description "Latest resources" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Latest resources" ; - spin:query . - - a sp:Select ; - dct:title "Select latest" ; - sp:text """PREFIX dct: - -SELECT DISTINCT ?dated -WHERE -{ GRAPH ?graph - { ?dated dct:created ?created } -} -ORDER BY DESC(?created)""" . - -} - - -{ - - a dh:Container ; - dct:title "Charts" ; - dct:description "Saved charts" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Charts" ; - spin:query . - - a sp:Select ; - dct:title "Select chart resources" ; - sp:text """PREFIX ldh: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { { ?s a ldh:GraphChart } - UNION - { ?s a ldh:ResultSetChart } - } - }""" . - -} - - -{ - - a dh:Container ; - dct:title "Apps" ; - dct:description "Linked Data applications" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Applications" ; - spin:query . - - a sp:Select ; - dct:title "Select application resources" ; - sp:text """PREFIX lapp: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { ?s a lapp:Application } - }""" . - -} - - -{ - - a dh:Container ; - dct:title "Services" ; - dct:description "SPARQL services" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Services" ; - spin:query . - - a sp:Select ; - dct:title "Select service resources" ; - sp:text """PREFIX sd: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { ?s a sd:Service } - }""" . - -} - ### ADMIN-SPECIFIC @prefix lacl: . diff --git a/platform/datasets/end-user.trig b/platform/datasets/end-user.trig index 2608b6a39..65c624610 100644 --- a/platform/datasets/end-user.trig +++ b/platform/datasets/end-user.trig @@ -9,7 +9,6 @@ @prefix sioc: . @prefix foaf: . @prefix dct: . -@prefix spin: . <> { @@ -58,287 +57,6 @@ } -# CONTAINERS - - -{ - - a dh:Container ; - dct:title "Queries" ; - dct:description "SPARQL queries" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Queries" ; - spin:query . - - a sp:Select ; - dct:title "Select query resources" ; - sp:text """PREFIX sp: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { { ?s a sp:Select } - UNION - { ?s a sp:Construct } - UNION - { ?s a sp:Describe } - UNION - { ?s a sp:Ask } - } - }""" . - -} - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "Select instances" ; - foaf:primaryTopic . - - a sp:Select ; - dct:title "Select instances" ; - dct:description "Selects instances of type from the default graph" ; - sp:text """SELECT DISTINCT ?s -WHERE - { ?s a $type ; - ?p ?o - }""" . - -} - - -{ - - a dh:Item ; - sioc:has_container ; - dct:title "Select instances in graphs" ; - foaf:primaryTopic . - - a sp:Select ; - dct:title "Select instances in graphs" ; - dct:description "Selects instances of type from named graphs" ; - sp:text """SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { ?s a $type ; - ?p ?o - } - }""" . - -} - - -{ - - a dh:Container ; - dct:title "Files" ; - dct:description "Uploaded files" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Files" ; - spin:query . 
- - a sp:Select ; - dct:title "Select file resources" ; - sp:text """PREFIX nfo: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { ?s a nfo:FileDataObject } - }""" . - -} - - -{ - - a dh:Container ; - dct:title "Imports" ; - dct:description "Data imports" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Imports" ; - spin:query . - - a sp:Select ; - dct:title "Select import resources" ; - sp:text """PREFIX ldh: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { { ?s a ldh:CSVImport } - UNION - { ?s a ldh:RDFImport } - } - }""" . - -} - - -{ - - a dh:Item ; - dct:title "Geo" ; - dct:description "Geolocated resources" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Geo resources" ; - spin:query ; - ac:mode ac:MapMode . - - a sp:Select ; - dct:title "Select geo resources" ; - sp:text """PREFIX geo: -PREFIX dct: - -SELECT DISTINCT ?resource -WHERE -{ GRAPH ?graph - { ?resource geo:lat ?lat ; - geo:long ?long - OPTIONAL - { ?resource a ?type } - OPTIONAL - { ?resource dct:title ?title } - } -} -ORDER BY ?title""" . - -} - - -{ - - a dh:Item ; - dct:title "Latest" ; - dct:description "Latest resources" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Latest resources" ; - spin:query . - - a sp:Select ; - dct:title "Select latest" ; - sp:text """PREFIX dct: - -SELECT DISTINCT ?dated -WHERE -{ GRAPH ?graph - { ?dated dct:created ?created } -} -ORDER BY DESC(?created)""" . - -} - - -{ - - a dh:Container ; - dct:title "Charts" ; - dct:description "Saved charts" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Charts" ; - spin:query . - - a sp:Select ; - dct:title "Select chart resources" ; - sp:text """PREFIX ldh: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { { ?s a ldh:GraphChart } - UNION - { ?s a ldh:ResultSetChart } - } - }""" . - -} - - -{ - - a dh:Container ; - dct:title "Apps" ; - dct:description "Linked Data applications" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Applications" ; - spin:query . - - a sp:Select ; - dct:title "Select application resources" ; - sp:text """PREFIX lapp: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { ?s a lapp:Application } - }""" . - -} - - -{ - - a dh:Container ; - dct:title "Services" ; - dct:description "SPARQL services" ; - rdf:_1 . - - a ldh:Object ; - rdf:value . - - a ldh:View ; - dct:title "Services" ; - spin:query . - - a sp:Select ; - dct:title "Select service resources" ; - sp:text """PREFIX sd: - -SELECT DISTINCT ?s -WHERE - { GRAPH ?g - { ?s a sd:Service } - }""" . 
- -} - ### END-USER-SPECIFIC diff --git a/src/main/java/com/atomgraph/linkeddatahub/Application.java b/src/main/java/com/atomgraph/linkeddatahub/Application.java index 0a5851110..ac7c6dba8 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/Application.java +++ b/src/main/java/com/atomgraph/linkeddatahub/Application.java @@ -16,7 +16,6 @@ */ package com.atomgraph.linkeddatahub; -import com.atomgraph.linkeddatahub.server.mapper.ResourceExistsExceptionMapper; import com.atomgraph.linkeddatahub.server.mapper.HttpHostConnectExceptionMapper; import com.atomgraph.linkeddatahub.server.mapper.InternalURLExceptionMapper; import com.atomgraph.linkeddatahub.server.mapper.MessagingExceptionMapper; @@ -1104,7 +1103,6 @@ protected void registerExceptionMappers() register(WebIDDelegationExceptionMapper.class); register(WebIDLoadingExceptionMapper.class); register(TokenExpiredExceptionMapper.class); - register(ResourceExistsExceptionMapper.class); register(QueryParseExceptionMapper.class); register(AuthenticationExceptionMapper.class); register(ForbiddenExceptionMapper.class); diff --git a/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java b/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java index 716289439..cecd10dd6 100644 --- a/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java +++ b/src/main/java/com/atomgraph/linkeddatahub/resource/Generate.java @@ -18,8 +18,6 @@ import com.atomgraph.core.MediaTypes; import com.atomgraph.linkeddatahub.apps.model.Application; -import com.atomgraph.linkeddatahub.client.GraphStoreClient; -import com.atomgraph.linkeddatahub.imports.QueryLoader; import com.atomgraph.linkeddatahub.server.model.impl.DirectGraphStoreImpl; import com.atomgraph.linkeddatahub.server.security.AgentContext; import com.atomgraph.linkeddatahub.server.util.Skolemizer; @@ -44,8 +42,10 @@ import jakarta.ws.rs.core.Response.Status; import jakarta.ws.rs.core.UriBuilder; import jakarta.ws.rs.core.UriInfo; +import org.apache.jena.ontology.Ontology; import org.apache.jena.query.ParameterizedSparqlString; import org.apache.jena.query.Query; +import org.apache.jena.query.QueryFactory; import org.apache.jena.query.Syntax; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.ModelFactory; @@ -69,10 +69,11 @@ public class Generate private final UriInfo uriInfo; private final MediaTypes mediaTypes; private final Application application; + private final Ontology ontology; private final Optional agentContext; private final com.atomgraph.linkeddatahub.Application system; private final ResourceContext resourceContext; - + /** * Constructs endpoint for container generation. 
     *
     * @param request current request
     * @param uriInfo current URI info
     * @param mediaTypes supported media types
     * @param application matched application
+    * @param ontology ontology of the current application
     * @param system system application
     * @param agentContext authenticated agent's context
     * @param resourceContext resource context for creating resources
     */
    @Inject
    public Generate(@Context Request request, @Context UriInfo uriInfo, MediaTypes mediaTypes,
-            com.atomgraph.linkeddatahub.apps.model.Application application, Optional<AgentContext> agentContext,
+            com.atomgraph.linkeddatahub.apps.model.Application application, Optional<Ontology> ontology, Optional<AgentContext> agentContext,
            com.atomgraph.linkeddatahub.Application system, @Context ResourceContext resourceContext)
    {
+        if (ontology.isEmpty()) throw new InternalServerErrorException("Ontology is not specified");
        this.uriInfo = uriInfo;
        this.mediaTypes = mediaTypes;
        this.application = application;
+        this.ontology = ontology.get();
        this.agentContext = agentContext;
        this.system = system;
        this.resourceContext = resourceContext;
    }
@@ -129,10 +133,13 @@ public Response post(Model model)
        Resource queryRes = part.getPropertyResourceValue(SPIN.query);
        if (queryRes == null) throw new BadRequestException("Container query string (spin:query) not provided");
 
-        GraphStoreClient gsc = GraphStoreClient.create(getSystem().getClient(), getSystem().getMediaTypes()).
-            delegation(getUriInfo().getBaseUri(), getAgentContext().orElse(null));
-        QueryLoader queryLoader = new QueryLoader(URI.create(queryRes.getURI()), getApplication().getBase().getURI(), Syntax.syntaxARQ, gsc);
-        Query query = queryLoader.get();
+        // Lookup query in ontology
+        Resource queryResource = getOntology().getOntModel().getResource(queryRes.getURI());
+        if (queryResource == null || !queryResource.hasProperty(SP.text))
+            throw new BadRequestException("Query resource not found in ontology: " + queryRes.getURI());
+
+        String queryString = queryResource.getProperty(SP.text).getString();
+        Query query = QueryFactory.create(queryString, Syntax.syntaxARQ);
        if (!query.isSelectType()) throw new BadRequestException("Container query is not of SELECT type");
 
        ParameterizedSparqlString pss = new ParameterizedSparqlString(query.toString());
@@ -253,6 +260,16 @@ public Application getApplication()
        return application;
    }
 
+    /**
+     * Returns the ontology.
+     *
+     * @return the ontology
+     */
+    public Ontology getOntology()
+    {
+        return ontology;
+    }
+
    /**
     * Returns the current URI info.
     *
diff --git a/src/main/resources/com/atomgraph/linkeddatahub/ldh.ttl b/src/main/resources/com/atomgraph/linkeddatahub/ldh.ttl
index 589ae75b9..caa46a07f 100644
--- a/src/main/resources/com/atomgraph/linkeddatahub/ldh.ttl
+++ b/src/main/resources/com/atomgraph/linkeddatahub/ldh.ttl
@@ -493,6 +493,28 @@ ORDER BY ?title
 """ ;
     rdfs:isDefinedBy : .
 
+:SelectInstances a sp:Select ;
+    rdfs:label "Select instances" ;
+    dct:description "Selects instances of type from the default graph" ;
+    sp:text """SELECT DISTINCT ?s
+WHERE
+  { ?s a $type ;
+       ?p ?o
+  }""" ;
+    rdfs:isDefinedBy : .
+
+:SelectInstancesInGraphs a sp:Select ;
+    rdfs:label "Select instances in graphs" ;
+    dct:description "Selects instances of type from named graphs" ;
+    sp:text """SELECT DISTINCT ?s
+WHERE
+  { GRAPH ?g
+      { ?s a $type ;
+           ?p ?o
+      }
+  }""" ;
+    rdfs:isDefinedBy : .
+ :ChildrenView a :View ; rdfs:label "Children view" ; spin:query :SelectChildren ; diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/css/bootstrap.css b/src/main/webapp/static/com/atomgraph/linkeddatahub/css/bootstrap.css index 66155e480..fcfc0fb23 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/css/bootstrap.css +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/css/bootstrap.css @@ -37,6 +37,8 @@ button.btn.create-action { height: 30px; } a.external::after { content: "⤴"; padding-left: 0.2em; } a.btn.create-action { height: 20px; } .create-resource .btn.create-action { margin-top: 1em; } +.btn-class { background: inherit; } +.btn-class span { color: black; } .btn-group.open .btn.dropdown-toggle.create-action { background-image: url('../icons/ic_note_add_black_24px.svg'); } li button.btn-edit-constructors, li button.btn-add-data, li button.btn-add-ontology, li button.btn-generate-containers { text-align: left; width: 100%; background-color: inherit; } .btn-container { background-image: url('../icons/folder.svg'); } @@ -48,8 +50,6 @@ li button.btn-edit-constructors, li button.btn-add-data, li button.btn-add-ontol .btn-import { background-image: url('../icons/ic_transform_black_24px.svg'); } .btn-chart { background-image: url('../icons/ic_show_chart_black_24px.svg'); } .btn-view { background-image: url('../icons/ic_view_list_black_24px.svg'); } -.btn-latest { background-image: url('../icons/ic_new_releases_black_24px.svg'); } -.btn-geo { background-image: url('../icons/ic_location_on_black_24px.svg'); } .btn-logo { background-position: left; background-repeat: no-repeat; padding-left: 32px; } .dropdown-menu .btn-logo { background-position: 12px center; padding-left: 40px; } .btn.btn-toggle-content { font-size: 0; color: transparent; background-image: url('../icons/baseline-expand_less-24px.svg'); background-position: center center; background-repeat: no-repeat; width: 48px; } @@ -82,17 +82,21 @@ li button.btn-edit-constructors, li button.btn-add-data, li button.btn-add-ontol .dropdown-menu > li > a.btn-list { background-image: url('../icons/view_list_black_24dp.svg'); background-position: 12px center; background-repeat: no-repeat; padding: 5px 5px 5px 40px; } .dropdown-menu > li > a.btn-table { background-image: url('../icons/ic_border_all_black_24px.svg'); background-position: 12px center; background-repeat: no-repeat; padding: 5px 5px 5px 40px; } .dropdown-menu > li > a.btn-grid { background-image: url('../icons/ic_grid_on_black_24px.svg'); background-position: 12px center; background-repeat: no-repeat; padding: 5px 5px 5px 40px; } -#doc-tree { display: none; width: 15%; position: fixed; left: 0; top: 106px; height: calc(100% - 106px); } +#left-sidebar { display: none; width: 15%; position: fixed; left: 0; top: 106px; height: calc(100% - 106px); } @media (max-width: 979px) { body { padding-top: 0; } - #doc-tree { display: block; width: auto; position: unset; top: unset; height: auto; } - #doc-tree .nav { max-height: 20em; overflow: auto; } + #left-sidebar { display: block; width: auto; position: unset; top: unset; height: auto; } + #left-sidebar .nav { max-height: 20em; overflow: auto; } } -#doc-tree .nav-list > li > a { margin-left: 0; margin-right: 0; } -#doc-tree .nav-list > li > a.btn-container, #doc-tree .nav-list > li > a.btn-app, #doc-tree .nav-list > li > a.btn-chart, #doc-tree .nav-list > li > a.btn-file, #doc-tree .nav-list > li > a.btn-geo, #doc-tree .nav-list > li > a.btn-import, #doc-tree .nav-list > li > a.btn-latest, #doc-tree .nav-list 
> li > a.btn-query, #doc-tree .nav-list > li > a.btn-service { padding-left: 24px; } -#doc-tree li { max-height: 20em; overflow: auto; } -#doc-tree li > a { display: inline-block; } +#left-sidebar .nav-list > li > a.btn-container { padding-left: 24px; } +#left-sidebar .nav-list > li > a { margin-left: 0; margin-right: 0; } +#left-sidebar .nav-list > li > a.btn-container +#left-sidebar li { max-height: 20em; overflow: auto; } +#left-sidebar li > a { display: inline-block; } +#left-sidebar .btn-latest { background-image: url('../icons/ic_new_releases_black_24px.svg'); background-color: inherit; } +#left-sidebar .btn-geo { background-image: url('../icons/ic_location_on_black_24px.svg'); background-color: inherit; } + .btn.btn-expand-tree { height: 24px; width: 24px; background-image: url('../icons/expand_more_black_24dp.svg'); } .btn.btn-expand-tree:hover, .btn.btn-expand-tree:focus { background-position: 0 0; } .btn.btn-expanded-tree { height: 24px; width: 24px; background-image: url('../icons/chevron_right_black_24dp.svg'); } diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block.xsl index 1060d7ba2..745d5aab7 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block.xsl @@ -151,28 +151,31 @@ exclude-result-prefixes="#all" - - - - - - - - - - + - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + @@ -454,7 +457,71 @@ exclude-result-prefixes="#all" - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/chart.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/chart.xsl index 5541a334b..53307ebbc 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/chart.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/chart.xsl @@ -286,11 +286,16 @@ exclude-result-prefixes="#all" - - - - - + + + + + + + + + + - - - - + + - - - - - - - @@ -816,8 +813,7 @@ exclude-result-prefixes="#all" - - + - @@ -867,11 +862,10 @@ exclude-result-prefixes="#all" - - - + + - + @@ -880,7 +874,10 @@ exclude-result-prefixes="#all" - + + + + @@ -896,8 +893,7 @@ exclude-result-prefixes="#all" - - + - ldh:block-object-metadata-response + ldh:block-object-metadata-response $block/@about: + + @@ -302,18 +304,29 @@ exclude-result-prefixes="#all" + - + + + + + + + + + + - - + + + diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/query.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/query.xsl index 550630534..fc2c10194 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/query.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/query.xsl @@ -280,6 +280,12 @@ exclude-result-prefixes="#all" + + + + + + @@ -325,7 +331,7 @@ exclude-result-prefixes="#all" - + @@ -536,7 +542,7 @@ exclude-result-prefixes="#all" - + @@ -581,11 +587,10 @@ exclude-result-prefixes="#all" - - - + + - + diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/view.xsl 
b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/view.xsl index 3ebcc4766..e5d3cd2db 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/view.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/block/view.xsl @@ -69,11 +69,15 @@ exclude-result-prefixes="#all" - - - - - + + + + + + + + + @@ -86,9 +90,13 @@ exclude-result-prefixes="#all" 'container': $container, 'mode': $mode, 'refresh-content': $refresh-content, - 'query-uri': $query-uri + 'query-uri': $query-uri, + 'cache': ixsl:get(ixsl:get(ixsl:window(), 'LinkedDataHub.contents'), '`' || $block/@about || '`') }"/> - + + + + - + ldh:view-results-thunk + + + + ldh:load-object-metadata + @@ -154,8 +167,12 @@ exclude-result-prefixes="#all" - + + + + + @@ -177,7 +194,6 @@ exclude-result-prefixes="#all" - - + + ldh:set-object-metadata + + + + @@ -208,7 +229,7 @@ exclude-result-prefixes="#all" - + @@ -331,6 +352,7 @@ exclude-result-prefixes="#all" + @@ -365,8 +387,13 @@ exclude-result-prefixes="#all" map { 'request': $request, 'container': ., - 'count-var-name': $count-var-name + 'count-var-name': $count-var-name, + 'cache': $cache }"/> + + + + - @@ -495,11 +521,8 @@ exclude-result-prefixes="#all" + - - - - @@ -522,7 +545,6 @@ exclude-result-prefixes="#all" + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - @@ -544,7 +606,7 @@ exclude-result-prefixes="#all" - + @@ -575,13 +637,27 @@ exclude-result-prefixes="#all" + + + + + + + + + + + + + + - - + + @@ -591,7 +667,7 @@ exclude-result-prefixes="#all" - + @@ -681,7 +757,6 @@ exclude-result-prefixes="#all" - @@ -694,13 +769,13 @@ exclude-result-prefixes="#all" + - - + $initial-load: @@ -769,6 +844,7 @@ exclude-result-prefixes="#all" + @@ -785,8 +861,10 @@ exclude-result-prefixes="#all" 'container': id($order-by-container-id, ixsl:page()), 'id': $id, 'predicate': $predicate, - 'order-by-predicate': $order-by-predicate + 'order-by-predicate': $order-by-predicate, + 'cache': $cache }"/> + - - - - - @@ -809,7 +882,7 @@ exclude-result-prefixes="#all" - + @@ -1113,46 +1186,114 @@ exclude-result-prefixes="#all" - - + + BLOCK DELEGATION: view-mode handler triggered + - - - + BLOCK DELEGATION: block URI = + + BLOCK DELEGATION: cache found: + + + + + + + + + + BLOCK DELEGATION: pager previous triggered + + + BLOCK DELEGATION: block URI = + + BLOCK DELEGATION: cache found: + + + + + + + + + + BLOCK DELEGATION: pager next triggered + + + BLOCK DELEGATION: block URI = + + BLOCK DELEGATION: cache found: + + + + + + + + + + BLOCK DELEGATION: container-order triggered + + + BLOCK DELEGATION: block URI = + + BLOCK DELEGATION: cache found: + + + + + + + + + + + BLOCK DELEGATION: btn-order-by triggered + + + BLOCK DELEGATION: block URI = + + BLOCK DELEGATION: cache found: + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + - + + - - - - - - - + + + + + + + + - - - - - - - - - - - - - - + + + + - - - - + + + + - + - + + - - - - - - - + + + + + + + + + - - - - - - - - - - - - - - + + + + + - - - - + + + + - + - + + - - - - - - - + + + + + + + + + + - - - - - - - - - - - - - - + + + + - - - + + + - + - + + + + + - - - - - - - - - + + + + + + - - - - - - - + + + + - - - - - - - - - + + + + + + + + - + - + - + + - - - - + - + @@ -1371,8 +1486,8 @@ exclude-result-prefixes="#all" - - + + @@ -1395,9 +1510,19 @@ exclude-result-prefixes="#all" - - - + + + + + + + + + + + + + @@ -1459,7 +1584,7 @@ exclude-result-prefixes="#all" - + @@ 
-1469,12 +1594,11 @@ exclude-result-prefixes="#all" - - - - - - + + + + + @@ -1487,20 +1611,21 @@ exclude-result-prefixes="#all" - + - + - + - + + - + - - - - - - + + + + + @@ -1535,20 +1659,21 @@ exclude-result-prefixes="#all" - + - + - + - + + + ldh:view-query-response @@ -1605,14 +1731,11 @@ exclude-result-prefixes="#all" - + - - - - - - + + + @@ -1630,23 +1753,25 @@ exclude-result-prefixes="#all" - - - - - + + + + + + + + + + + + + + + + + - - - - - - - - - - - + @@ -1675,7 +1800,6 @@ exclude-result-prefixes="#all" - + - - - - - + + ldh:render-view + @@ -1737,7 +1860,6 @@ exclude-result-prefixes="#all" - @@ -1750,6 +1872,7 @@ exclude-result-prefixes="#all" + @@ -1773,11 +1896,16 @@ exclude-result-prefixes="#all" - + + + + + + - + @@ -1788,7 +1916,7 @@ exclude-result-prefixes="#all" ldh:facet-filter-response - + @@ -1804,7 +1932,7 @@ exclude-result-prefixes="#all" - + @@ -1816,7 +1944,7 @@ exclude-result-prefixes="#all" ldh:parallax-response - + @@ -1872,7 +2000,7 @@ exclude-result-prefixes="#all" ldh:parallax-property-response - + @@ -1932,7 +2060,7 @@ exclude-result-prefixes="#all" ldh:facet-value-response - + @@ -2078,6 +2206,9 @@ exclude-result-prefixes="#all" ldh:result-count-response + + + @@ -2101,7 +2232,7 @@ exclude-result-prefixes="#all" - + @@ -2116,7 +2247,7 @@ exclude-result-prefixes="#all" ldh:order-by-response - + @@ -2130,7 +2261,7 @@ exclude-result-prefixes="#all" - + diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/functions.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/functions.xsl index 5731d47c9..1b0e1f2e4 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/functions.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/functions.xsl @@ -93,7 +93,7 @@ exclude-result-prefixes="#all" - + @@ -495,6 +495,8 @@ exclude-result-prefixes="#all" + ldh:handle-response + diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/map.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/map.xsl index 0fd72e1b5..8c2a46433 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/map.xsl +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/client/map.xsl @@ -122,11 +122,11 @@ exclude-result-prefixes="#all" - - - + + + @@ -142,7 +142,8 @@ exclude-result-prefixes="#all" 'request': $request, 'container': $container, 'container-id': $container-id, - 'block-uri': $block-uri + 'map': $map, + 'initial-load': $initial-load }"/> - - + + - - - + @@ -331,17 +330,21 @@ exclude-result-prefixes="#all" - - + + + + + - - + + + @@ -429,6 +432,9 @@ exclude-result-prefixes="#all" + + + diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/translations.rdf b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/translations.rdf index a7dfaff31..df130e8df 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/translations.rdf +++ b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/bootstrap/2.3.2/translations.rdf @@ -104,6 +104,10 @@ Geo Geo + + Other + Otro + Files Archivos diff --git a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/client.xsl b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/client.xsl index 052116f39..3f0ec1505 100644 --- a/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/client.xsl +++ 
b/src/main/webapp/static/com/atomgraph/linkeddatahub/xsl/client.xsl @@ -286,7 +286,7 @@ WHERE - + @@ -323,15 +323,15 @@ WHERE - - + + - + - - - + + + @@ -481,22 +481,6 @@ WHERE - - - - - -
  • - - - - / - -
  • -
    - @@ -607,9 +591,19 @@ WHERE + + + + + + + + + - - + + + @@ -751,7 +745,7 @@ WHERE - + Application change. Base URI: @@ -842,7 +836,8 @@ WHERE - + + @@ -853,24 +848,22 @@ WHERE
    - - - - - - - - - - - + + - + - +
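
Usage note (reviewer sketch, not part of the patch): with create-file.sh removed, uploading a file is now a two-step flow — create-item.sh creates the document, add-file.sh attaches the file to it, and the file resource itself becomes content-addressed under uploads/. A minimal sketch, assuming the bin/ scripts are on $PATH and that CERT_FILE, CERT_PWD and BASE are placeholder variables for the agent certificate, its password and the application base URI:

file_doc=$(create-item.sh \
    -f "$CERT_FILE" \
    -p "$CERT_PWD" \
    -b "$BASE" \
    --title "My CSV" \
    --container "${BASE}files/" \
    --slug "$(uuidgen | tr '[:upper:]' '[:lower:]')")

add-file.sh \
    -f "$CERT_FILE" \
    -p "$CERT_PWD" \
    -b "$BASE" \
    --title "My CSV" \
    --file "/tmp/data.csv" \
    --file-content-type "text/csv" \
    "$file_doc"

# the uploaded file URI is derived from the file's SHA1 hash (content-addressed)
file_uri="${BASE}uploads/$(shasum -a 1 /tmp/data.csv | awk '{print $1}')"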
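
A second sketch, under the same assumptions, of the fragment-identifier pattern the import scripts switch to: instead of slug-named query and import documents, the CONSTRUCT query and the import metadata are added to a single import item document as hash URIs via the new --uri option (the mapping query path is illustrative):

import_doc=$(create-item.sh -f "$CERT_FILE" -p "$CERT_PWD" -b "$BASE" \
    --title "My import" --container "${BASE}imports/" --slug "my-import")

query_id=$(uuidgen | tr '[:upper:]' '[:lower:]')
add-construct.sh -f "$CERT_FILE" -p "$CERT_PWD" -b "$BASE" \
    --title "My mapping" --uri "#${query_id}" --query-file "/tmp/mapping.rq" \
    "$import_doc"

# the import metadata references the query by its full fragment URI
add-csv-import.sh -f "$CERT_FILE" -p "$CERT_PWD" -b "$BASE" \
    --title "My import" --uri "#$(uuidgen | tr '[:upper:]' '[:lower:]')" \
    --query "${import_doc}#${query_id}" --file "$file_uri" --delimiter "," \
    "$import_doc"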