diff --git a/src/ontology/Makefile b/src/ontology/Makefile
index ab606a5..5ba20a4 100644
--- a/src/ontology/Makefile
+++ b/src/ontology/Makefile
@@ -1,7 +1,7 @@
 # ----------------------------------------
 # Makefile for geno
 # Generated using ontology-development-kit
-# ODK Version: v1.3.1
+# ODK Version: v1.4
 # ----------------------------------------
 # IMPORTANT: DO NOT EDIT THIS FILE. To override default make goals, use geno.Makefile instead
@@ -15,11 +15,13 @@
 # ----------------------------------------
 # these can be overwritten on the command line
+OBOBASE= http://purl.obolibrary.org/obo
 URIBASE= http://purl.obolibrary.org/obo
 ONT= geno
-ONTBASE= $(URIBASE)/$(ONT)
+ONTBASE= http://purl.obolibrary.org/obo/geno
 EDIT_FORMAT= owl
 SRC = $(ONT)-edit.$(EDIT_FORMAT)
+MAKE_FAST= $(MAKE) IMP=false PAT=false COMP=false MIR=false
 CATALOG= catalog-v001.xml
 ROBOT= robot --catalog $(CATALOG)
@@ -32,26 +34,29 @@ MIRRORDIR= mirror
 IMPORTDIR= imports
 SUBSETDIR= subsets
 SCRIPTSDIR= ../scripts
+UPDATEREPODIR= target
 SPARQLDIR = ../sparql
 COMPONENTSDIR = components
 REPORT_FAIL_ON = None
 REPORT_LABEL = -l true
 REPORT_PROFILE_OPTS =
 OBO_FORMAT_OPTIONS =
-SPARQL_VALIDATION_CHECKS = owldef-self-reference iri-range label-with-iri
+SPARQL_VALIDATION_CHECKS = owldef-self-reference iri-range label-with-iri multiple-replaced_by
 SPARQL_EXPORTS = basic-report class-count-by-prefix edges xrefs obsoletes synonyms
-ODK_VERSION_MAKEFILE = v1.3.1
+ODK_VERSION_MAKEFILE = v1.4
 TODAY ?= $(shell date +%Y-%m-%d)
 OBODATE ?= $(shell date +'%d:%m:%Y %H:%M')
 VERSION= $(TODAY)
 ANNOTATE_ONTOLOGY_VERSION = annotate -V $(ONTBASE)/releases/$(VERSION)/$@ --annotation owl:versionInfo $(VERSION)
+ANNOTATE_CONVERT_FILE = annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) convert -f ofn --output $@.tmp.owl && mv $@.tmp.owl $@
 OTHER_SRC =
 ONTOLOGYTERMS = $(TMPDIR)/ontologyterms.txt
+EDIT_PREPROCESSED = $(TMPDIR)/$(ONT)-preprocess.owl
 FORMATS = $(sort owl obo json owl)
 FORMATS_INCL_TSV = $(sort $(FORMATS) tsv)
-RELEASE_ARTEFACTS = $(sort $(ONT)-base $(ONT)-full $(ONT)-base $(ONT)-full)
+RELEASE_ARTEFACTS = $(sort $(ONT)-base $(ONT)-full )
 # ----------------------------------------
 # Top-level targets
 # ----------------------------------------
@@ -66,10 +71,16 @@ all: all_odk
 all_odk: odkversion test all_assets
 .PHONY: test
-test: odkversion sparql_test robot_reports $(REPORTDIR)/validate_profile_owl2dl_$(ONT).owl.txt
-	$(ROBOT) reason --input $(SRC) --reasoner ELK --equivalent-classes-allowed asserted-only \
-	--exclude-tautologies structural --output test.owl && rm test.owl &&\
-	echo "Success"
+test: odkversion reason_test sparql_test robot_reports $(REPORTDIR)/validate_profile_owl2dl_$(ONT).owl.txt
+	echo "Finished running all tests successfully."
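The rewritten test target above now delegates reasoning to the new reason_test target and runs the SPARQL checks against the preprocessed edit file. A minimal usage sketch, assuming the standard ODK layout in which make is invoked through run.sh from src/ontology; the IMP/MIR/PAT/COMP switches are the same ones collected in the new MAKE_FAST variable:

    # run the full QC suite inside the ODK container
    sh run.sh make test

    # same tests, but skip refreshing imports, mirrors, patterns and components
    sh run.sh make IMP=false MIR=false PAT=false COMP=false test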
+
+.PHONY: release_diff
+release_diff: $(REPORTDIR)/release-diff.md
+
+.PHONY: reason_test
+reason_test: $(EDIT_PREPROCESSED)
+	$(ROBOT) reason --input $< --reasoner ELK --equivalent-classes-allowed asserted-only \
+	--exclude-tautologies structural --output test.owl && rm test.owl
 .PHONY: odkversion
 odkversion:
@@ -120,6 +131,19 @@ SUBSET_FILES = $(foreach n,$(SUBSET_ROOTS), $(foreach f,$(FORMATS_INCL_TSV), $(n
 .PHONY: all_subsets
 all_subsets: $(SUBSET_FILES)
+# ----------------------------------------
+# Mapping assets
+# ----------------------------------------
+
+
+MAPPINGS =
+
+MAPPING_FILES = $(patsubst %, $(MAPPINGDIR)/%.sssom.tsv, $(MAPPINGS))
+
+.PHONY: all_mappings
+all_mappings: $(MAPPING_FILES)
+
+
 # ----------------------------------------
 # QC Reports & Utilities
 # ----------------------------------------
@@ -138,7 +162,7 @@ all_reports: custom_reports robot_reports
 # ROBOT OWL Profile checking
 # ----------------------------------------
-# The conversion to functional syntax is necessary to avoid undeclared entity violations.
+# The merge step is necessary to avoid undeclared entity violations.
 $(REPORTDIR)/validate_profile_owl2dl_%.txt: % | $(REPORTDIR) $(TMPDIR)
 	$(ROBOT) merge -i $< convert -f ofn -o $(TMPDIR)/validate.ofn
 	$(ROBOT) validate-profile --profile DL -i $(TMPDIR)/validate.ofn -o $@ || { cat $@ && exit 1; }
@@ -156,9 +180,10 @@ validate_profile_%: $(REPORTDIR)/validate_profile_owl2dl_%.txt
 SPARQL_VALIDATION_QUERIES = $(foreach V,$(SPARQL_VALIDATION_CHECKS),$(SPARQLDIR)/$(V)-violation.sparql)
-sparql_test: $(SRC) catalog-v001.xml | $(REPORTDIR)
+sparql_test: $(EDIT_PREPROCESSED) catalog-v001.xml | $(REPORTDIR)
 ifneq ($(SPARQL_VALIDATION_QUERIES),)
-	$(ROBOT) verify --catalog catalog-v001.xml -i $< --queries $(SPARQL_VALIDATION_QUERIES) -O $(REPORTDIR)
+
+	$(ROBOT) verify --catalog catalog-v001.xml -i $(EDIT_PREPROCESSED) --queries $(SPARQL_VALIDATION_QUERIES) -O $(REPORTDIR)
 endif
 # ----------------------------------------
@@ -179,25 +204,32 @@ ASSETS = \
 	$(IMPORT_FILES) \
 	$(MAIN_FILES) \
 	$(REPORT_FILES) \
-	$(SUBSET_FILES)
+	$(SUBSET_FILES) \
+	$(MAPPING_FILES)
 RELEASE_ASSETS = \
 	$(MAIN_FILES) \
 	$(SUBSET_FILES)
 .PHONY: all_assets
-all_assets: $(ASSETS)
+all_assets: $(ASSETS) 
 .PHONY: show_assets
 show_assets:
 	echo $(ASSETS)
 	du -sh $(ASSETS)
+check_rdfxml_%: %
+	@check-rdfxml $<
+
+.PHONY: check_rdfxml_assets
+check_rdfxml_assets: $(foreach product,$(MAIN_PRODUCTS),check_rdfxml_$(product).owl)
+
 # ----------------------------------------
 # Release Management
 # ----------------------------------------
-CLEANFILES=$(MAIN_FILES) $(SRCMERGED)
+CLEANFILES=$(MAIN_FILES) $(SRCMERGED) $(EDIT_PREPROCESSED)
 # This should be executed by the release manager whenever time comes to make a release.
 # It will ensure that all assets/files are fresh, and will copy to release folder
@@ -218,6 +250,14 @@ prepare_initial_release: all_assets
 prepare_release_fast:
 	$(MAKE) prepare_release IMP=false PAT=false MIR=false COMP=false
+CURRENT_RELEASE=$(ONTBASE).owl
+
+$(TMPDIR)/current-release.owl:
+	wget $(CURRENT_RELEASE) -O $@
+
+$(REPORTDIR)/release-diff.md: $(ONT).owl $(TMPDIR)/current-release.owl
+	$(ROBOT) diff --labels true --left $(TMPDIR)/current-release.owl --right $(ONT).owl -f markdown -o $@
+
 # ------------------------
 # Imports: Seeding system
 # ------------------------
@@ -226,10 +266,13 @@ prepare_release_fast:
 IMPORTSEED=$(TMPDIR)/seed.txt
 PRESEED=$(TMPDIR)/pre_seed.txt
-$(SRCMERGED): $(SRC) $(OTHER_SRC)
+$(SRCMERGED): $(EDIT_PREPROCESSED) $(OTHER_SRC)
 	$(ROBOT) remove --input $< --select imports --trim false \
 	merge $(patsubst %, -i %, $(OTHER_SRC)) -o $@
+$(EDIT_PREPROCESSED): $(SRC)
+	$(ROBOT) convert --input $< --format ofn --output $@
+
 $(PRESEED): $(SRCMERGED)
 	$(ROBOT) query -f csv -i $< --query ../sparql/terms.sparql $@.tmp &&\
 	cat $@.tmp | sort | uniq > $@
@@ -262,16 +305,15 @@ $(IMPORTDIR)/%_import.owl: $(MIRRORDIR)/%.owl $(IMPORTDIR)/%_terms_combined.txt
 	if [ $(IMP) = true ]; then $(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \
 	extract -T $(IMPORTDIR)/$*_terms_combined.txt --force true --copy-ontology-annotations true --individuals include --method BOT \
 	query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
-	annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@; fi
+	$(ANNOTATE_CONVERT_FILE); fi
 .PRECIOUS: $(IMPORTDIR)/%_import.owl
 ## Module for ontology: omo
 $(IMPORTDIR)/omo_import.owl: $(MIRRORDIR)/omo.owl $(IMPORTDIR)/omo_terms_combined.txt
-	if [ $(IMP) = true ]; then $(ROBOT) merge -i $< \
-	query --update ../sparql/preprocess-module.ru --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
-	annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@; fi
+	if [ $(IMP) = true ]; then $(ROBOT) merge -i $< query --update ../sparql/preprocess-module.ru --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
+	$(ANNOTATE_CONVERT_FILE); fi
 .PHONY: refresh-imports
@@ -308,7 +350,7 @@ IMP_LARGE=true # Global parameter to bypass handling of large imports
 .PHONY: mirror-ro
 .PRECIOUS: $(MIRRORDIR)/ro.owl
 mirror-ro: | $(TMPDIR)
-	if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(URIBASE)/ro.owl --create-dirs -o $(MIRRORDIR)/ro.owl --retry 4 --max-time 200 &&\
+	if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/ro.owl --create-dirs -o $(MIRRORDIR)/ro.owl --retry 4 --max-time 200 &&\
 	$(ROBOT) convert -i $(MIRRORDIR)/ro.owl -o $@.tmp.owl &&\
 	mv $@.tmp.owl $(TMPDIR)/$@.owl; fi
@@ -317,7 +359,7 @@ mirror-ro: | $(TMPDIR)
 .PHONY: mirror-omo
 .PRECIOUS: $(MIRRORDIR)/omo.owl
 mirror-omo: | $(TMPDIR)
-	if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(URIBASE)/omo.owl --create-dirs -o $(MIRRORDIR)/omo.owl --retry 4 --max-time 200 &&\
+	if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/omo.owl --create-dirs -o $(MIRRORDIR)/omo.owl --retry 4 --max-time 200 &&\
 	$(ROBOT) convert -i $(MIRRORDIR)/omo.owl -o $@.tmp.owl &&\
 	mv $@.tmp.owl $(TMPDIR)/$@.owl; fi
@@ -326,7 +368,7 @@ mirror-omo: | $(TMPDIR)
 .PHONY: mirror-obi
 .PRECIOUS: $(MIRRORDIR)/obi.owl
 mirror-obi: | $(TMPDIR)
-	if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(URIBASE)/obi.owl --create-dirs -o $(MIRRORDIR)/obi.owl --retry 4 --max-time 200 &&\
+	if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/obi.owl --create-dirs -o $(MIRRORDIR)/obi.owl --retry 4 --max-time 200 &&\
 	$(ROBOT) convert -i $(MIRRORDIR)/obi.owl -o $@.tmp.owl &&\
 	mv $@.tmp.owl $(TMPDIR)/$@.owl; fi
@@ -356,7 +398,7 @@ $(SUBSETDIR)/%.obo: $(SUBSETDIR)/%.owl
 $(SUBSETDIR)/%.json: $(SUBSETDIR)/%.owl
 	$(ROBOT) convert --input $< --check false -f json -o $@.tmp.json &&\
-	jq -S 'walk(if type == "array" then sort else . end)' $@.tmp.json > $@ && rm $@.tmp.json
+	mv $@.tmp.json $@
 # ---------------------------------------------
@@ -367,7 +409,7 @@ SPARQL_EXPORTS_ARGS = $(foreach V,$(SPARQL_EXPORTS),-s $(SPARQLDIR)/$(V).sparql
 # This combines all into one single command
 .PHONY: custom_reports
-custom_reports: $(SRC) | $(REPORTDIR)
+custom_reports: $(EDIT_PREPROCESSED) | $(REPORTDIR)
 ifneq ($(SPARQL_EXPORTS_ARGS),)
 	$(ROBOT) query -f tsv --use-graphs true -i $< $(SPARQL_EXPORTS_ARGS)
 endif
@@ -382,13 +424,13 @@ $(ONT)-base.obo: $(ONT)-base.owl
 $(ONT)-base.json: $(ONT)-base.owl
 	$(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \
 	convert --check false -f json -o $@.tmp.json &&\
-	jq -S 'walk(if type == "array" then sort else . end)' $@.tmp.json > $@ && rm $@.tmp.json
+	mv $@.tmp.json $@
 $(ONT)-full.obo: $(ONT)-full.owl
 	$(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo
 $(ONT)-full.json: $(ONT)-full.owl
 	$(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \
 	convert --check false -f json -o $@.tmp.json &&\
-	jq -S 'walk(if type == "array" then sort else . end)' $@.tmp.json > $@ && rm $@.tmp.json
+	mv $@.tmp.json $@
 # ----------------------------------------
 # Release artefacts: main release artefacts
 # ----------------------------------------
@@ -399,43 +441,55 @@ $(ONT).owl: $(ONT)-full.owl
 $(ONT).obo: $(ONT).owl
 	$(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo
-$(ONT).json: $(ONT)-full.owl
+$(ONT).json: $(ONT).owl
 	$(ROBOT) annotate --input $< --ontology-iri $(URIBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \
-	convert --check false -f json -o $@.tmp.json && \
-	jq -S 'walk(if type == "array" then sort else . end)' $@.tmp.json > $@ && rm $@.tmp.json
+	convert --check false -f json -o $@.tmp.json &&\
+	mv $@.tmp.json $@
 # -----------------------------------------------------
 # Release artefacts: variants (base, full, simple, etc)
 # -----------------------------------------------------
 SHARED_ROBOT_COMMANDS =
-$(ONTOLOGYTERMS): $(SRC) $(OTHER_SRC)
-	touch $(ONTOLOGYTERMS) && \
-	$(ROBOT) query --use-graphs true -f csv -i $< --query ../sparql/geno_terms.sparql $@
+$(ONTOLOGYTERMS): $(SRCMERGED)
+	$(ROBOT) query -f csv -i $< --query ../sparql/geno_terms.sparql $@
-# base: OTHER sources of interest, such as definitions owl
-$(ONT)-base.owl: $(SRC) $(OTHER_SRC)
-	$(ROBOT) remove --input $< --select imports --trim false \
-	merge $(patsubst %, -i %, $(OTHER_SRC)) \
-	$(SHARED_ROBOT_COMMANDS) annotate --link-annotation http://purl.org/dc/elements/1.1/type http://purl.obolibrary.org/obo/IAO_8000001 \
+# ROBOT pipeline that merges imports, including components.
+ROBOT_RELEASE_IMPORT_MODE=$(ROBOT) merge --input $<
+
+# ROBOT pipeline that removes imports, then merges components. This is for release artefacts that start from "base"
+ROBOT_RELEASE_IMPORT_MODE_BASE=$(ROBOT) remove --input $< --select imports --trim false merge $(patsubst %, -i %, $(OTHER_SRC))
+
+# base: All the axioms as they are edited by the editors, excluding reasoning
+$(ONT)-base.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC)
+	$(ROBOT_RELEASE_IMPORT_MODE_BASE) \
+	$(SHARED_ROBOT_COMMANDS) \
+	annotate --link-annotation http://purl.org/dc/elements/1.1/type http://purl.obolibrary.org/obo/IAO_8000001 \
 	--ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \
 	--output $@.tmp.owl && mv $@.tmp.owl $@
-
-# Full: The full artefacts with imports merged, reasoned
-$(ONT)-full.owl: $(SRC) $(OTHER_SRC) $(IMPORT_FILES)
-	$(ROBOT) merge --input $< \
+# Full: The full artefacts with imports merged, reasoned.
+$(ONT)-full.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC) $(IMPORT_FILES)
+	$(ROBOT_RELEASE_IMPORT_MODE) \
 	reason --reasoner ELK --equivalent-classes-allowed asserted-only --exclude-tautologies structural \
 	relax \
 	reduce -r ELK \
 	$(SHARED_ROBOT_COMMANDS) annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@
-
 # ----------------------------------------
 # Debugging Tools
 # ----------------------------------------
-explain_unsat: $(SRC)
-	$(ROBOT) explain -i $(SRC) -M unsatisfiability --unsatisfiable random:10 --explanation $(TMPDIR)/$@.md
+explain_unsat: $(EDIT_PREPROCESSED)
+	$(ROBOT) explain -i $< -M unsatisfiability --unsatisfiable random:10 --explanation $(TMPDIR)/$@.md
+
+
+RELEASE_ASSETS_AFTER_RELEASE=$(foreach n,$(RELEASE_ASSETS), ../../$(n))
+GHVERSION=v$(VERSION)
+.PHONY: public_release
+public_release:
+	@test $(GHVERSION)
+	ls -alt $(RELEASE_ASSETS_AFTER_RELEASE)
+	gh release create $(GHVERSION) --title "$(VERSION) Release" --draft $(RELEASE_ASSETS_AFTER_RELEASE) --generate-notes
 # ----------------------------------------
 # General Validation
@@ -478,6 +532,7 @@ update_repo:
 clean:
 	[ -n "$(MIRRORDIR)" ] && [ $(MIRRORDIR) != "." ] && [ $(MIRRORDIR) != "/" ] && [ $(MIRRORDIR) != ".." ] && [ -d ./$(MIRRORDIR) ] && rm -rf ./$(MIRRORDIR)/*
 	[ -n "$(TMPDIR)" ] && [ $(TMPDIR) != "." ] && [ $(TMPDIR) != "/" ] && [ $(TMPDIR) != ".." ] && [ -d ./$(TMPDIR) ] && rm -rf ./$(TMPDIR)/*
+	[ -n "$(UPDATEREPODIR)" ] && [ $(UPDATEREPODIR) != "." ] && [ $(UPDATEREPODIR) != "/" ] && [ $(UPDATEREPODIR) != ".." ] && [ -d ./$(UPDATEREPODIR) ] && rm -rf ./$(UPDATEREPODIR)/*
 	rm -f $(CLEANFILES)
 .PHONY: help
@@ -499,11 +554,12 @@ Core commands:
 * odkversion: Show the current version of the ODK Makefile and ROBOT.
 * clean: Delete all temporary files
 * help: Print ODK Usage information
+* public_release: Uploads the release file to a release management system, such as GitHub releases. Must be configured.
 Imports management:
 * refresh-imports: Refresh all imports and mirrors.
-* refresh-components: Refresh all components.
+* recreate-components: Recreate all components.
 * no-mirror-refresh-imports: Refresh all imports without downloading mirrors.
 * refresh-imports-excluding-large: Refresh all imports and mirrors, but skipping the ones labelled as 'is_large'.
 * refresh-%: Refresh a single import, i.e. refresh-go will refresh 'imports/go_import.owl'.
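A hedged usage sketch for the import-refresh targets listed above (the refresh-% pattern comes from the help text; omo is one of the imports this Makefile actually manages, and runtimes depend on mirror download sizes):

    # refresh a single import module, re-downloading its mirror first
    sh run.sh make refresh-omo

    # refresh every import but reuse previously downloaded mirrors
    sh run.sh make no-mirror-refresh-imports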
@@ -516,6 +572,7 @@ Editor utilities:
 * explain_unsat: If you have unsatisfiable classes, this command will create a markdown file (tmp/explain_unsat.md) which will explain all your unsatisfiable classes
 * validate-all-tsv: Check all your tsv files for possible problems in syntax. Use ALL_TSV_FILES variable to list files
 * validate-tsv: Check a tsv file for syntactic problems with tsvalid. Use TSV variable to pass filepath, e.g. make TSV=../my.tsv validate-tsv.
+* release_diff: Create a diff between the current release and the new release
 Additional build commands (advanced users)
 * all: Run the entire pipeline (like prepare_release), but without copying the release files to the release directory.
@@ -523,10 +580,12 @@ Additional build commands (advanced users)
 * custom_reports: Generate all custom sparql reports you have configured in your geno-odk.yaml file.
 * all_assets: Build all assets
 * show_assets: Print a list of all assets that would be build by the release pipeline
+* all_mappings: Update all SSSOM mapping sets
 Additional QC commands (advanced users)
 * robot_reports: Run all configured ROBOT reports
 * validate_profile_%: Run an OWL2 DL profile validation check, for example validate_profile_geno-edit.owl.
+* reason_test: Run a basic reasoning test
 Examples:
 * sh run.sh make IMP=false prepare_release
diff --git a/src/ontology/run.sh b/src/ontology/run.sh
index 9a498dd..2718ff6 100755
--- a/src/ontology/run.sh
+++ b/src/ontology/run.sh
@@ -8,10 +8,36 @@
 # The assumption is that you are working in the src/ontology folder;
 # we therefore map the whole repo (../..) to a docker volume.
 #
+# To use singularity instead of docker, please issue
+# export USE_SINGULARITY=
+# before running this script.
+#
 # See README-editors.md for more details.
-IMAGE=${IMAGE:-odkfull}
-ODK_JAVA_OPTS=-Xmx8G
+if [ -f run.sh.conf ]; then
+    . ./run.sh.conf
+fi
+
+# Look for a GitHub token
+if [ -n "$GH_TOKEN" ]; then
+    :
+elif [ -f ../../.github/token.txt ]; then
+    GH_TOKEN=$(cat ../../.github/token.txt)
+elif [ -f $XDG_CONFIG_HOME/ontology-development-kit/github/token ]; then
+    GH_TOKEN=$(cat $XDG_CONFIG_HOME/ontology-development-kit/github/token)
+elif [ -f "$HOME/Library/Application Support/ontology-development-kit/github/token" ]; then
+    GH_TOKEN=$(cat "$HOME/Library/Application Support/ontology-development-kit/github/token")
+fi
+
+ODK_IMAGE=${ODK_IMAGE:-odkfull}
+TAG_IN_IMAGE=$(echo $ODK_IMAGE | awk -F':' '{ print $2 }')
+if [ -n "$TAG_IN_IMAGE" ]; then
+    # Override ODK_TAG env var if IMAGE already includes a tag
+    ODK_TAG=$TAG_IN_IMAGE
+    ODK_IMAGE=$(echo $ODK_IMAGE | awk -F':' '{ print $1 }')
+fi
+ODK_TAG=${ODK_TAG:-latest}
+ODK_JAVA_OPTS=${ODK_JAVA_OPTS:--Xmx8G}
 ODK_DEBUG=${ODK_DEBUG:-no}
 TIMECMD=
@@ -23,7 +49,26 @@ if [ x$ODK_DEBUG = xyes ]; then
 	TIMECMD="/usr/bin/time -f ### DEBUG STATS ###\nElapsed time: %E\nPeak memory: %M kb"
 fi
-docker run -v $PWD/../../:/work -w /work/src/ontology -e ROBOT_JAVA_ARGS="$ODK_JAVA_OPTS" -e JAVA_OPTS="$ODK_JAVA_OPTS" --rm -ti obolibrary/$IMAGE $TIMECMD "$@"
+VOLUME_BIND=$PWD/../../:/work
+WORK_DIR=/work/src/ontology
+
+if [ -n "$ODK_BINDS" ]; then
+    VOLUME_BIND="$VOLUME_BIND,$ODK_BINDS"
+fi
+
+if [ -n "$USE_SINGULARITY" ]; then
+
+    singularity exec --cleanenv $ODK_SINGULARITY_OPTIONS \
+        --env "ROBOT_JAVA_ARGS=$ODK_JAVA_OPTS,JAVA_OPTS=$ODK_JAVA_OPTS" \
+        --bind $VOLUME_BIND \
+        -W $WORK_DIR \
+        docker://obolibrary/$ODK_IMAGE:$ODK_TAG $TIMECMD "$@"
+else
+    BIND_OPTIONS="-v $(echo $VOLUME_BIND | sed 's/,/ -v /')"
+    docker run $ODK_DOCKER_OPTIONS $BIND_OPTIONS -w $WORK_DIR \
+        -e ROBOT_JAVA_ARGS="$ODK_JAVA_OPTS" -e JAVA_OPTS="$ODK_JAVA_OPTS" \
+        --rm -ti obolibrary/$ODK_IMAGE:$ODK_TAG $TIMECMD "$@"
+fi
 case "$@" in
 *update_repo*|*release*)
diff --git a/src/sparql/edges.sparql b/src/sparql/edges.sparql
index 5fec04e..edf658b 100644
--- a/src/sparql/edges.sparql
+++ b/src/sparql/edges.sparql
@@ -1,4 +1,3 @@
-prefix obo: <http://purl.obolibrary.org/obo/>
 prefix owl: <http://www.w3.org/2002/07/owl#>
 prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>
 prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
diff --git a/src/sparql/inject-subset-declaration.ru b/src/sparql/inject-subset-declaration.ru
index 18a8430..788313b 100644
--- a/src/sparql/inject-subset-declaration.ru
+++ b/src/sparql/inject-subset-declaration.ru
@@ -7,5 +7,5 @@ INSERT {
 ?y rdfs:subPropertyOf ?y .
 FILTER(isIRI(?y))
-  FILTER(regex(str(?y),"^(http://purl.obolibrary.org/obo/)") || regex(str(?y),"^(http://www.ebi.ac.uk/efo/)") || regex(str(?y),"^(https://w3id.org/biolink/)"))
+  FILTER(regex(str(?y),"^(http://purl.obolibrary.org/obo/)") || regex(str(?y),"^(http://www.ebi.ac.uk/efo/)") || regex(str(?y),"^(https://w3id.org/biolink/)") || regex(str(?y),"^(http://purl.obolibrary.org/obo)"))
 }
\ No newline at end of file
diff --git a/src/sparql/inject-synonymtype-declaration.ru b/src/sparql/inject-synonymtype-declaration.ru
index 9906089..cad57ed 100644
--- a/src/sparql/inject-synonymtype-declaration.ru
+++ b/src/sparql/inject-synonymtype-declaration.ru
@@ -7,5 +7,5 @@ INSERT {
 ?y rdfs:subPropertyOf ?y .
 FILTER(isIRI(?y))
-  FILTER(regex(str(?y),"^(http://purl.obolibrary.org/obo/)") || regex(str(?y),"^(http://www.ebi.ac.uk/efo/)") || regex(str(?y),"^(https://w3id.org/biolink/)"))
+  FILTER(regex(str(?y),"^(http://purl.obolibrary.org/obo/)") || regex(str(?y),"^(http://www.ebi.ac.uk/efo/)") || regex(str(?y),"^(https://w3id.org/biolink/)") || regex(str(?y),"^(http://purl.obolibrary.org/obo)"))
 }
\ No newline at end of file
diff --git a/src/sparql/multiple-replaced_by-violation.sparql b/src/sparql/multiple-replaced_by-violation.sparql
new file mode 100644
index 0000000..65bb8de
--- /dev/null
+++ b/src/sparql/multiple-replaced_by-violation.sparql
@@ -0,0 +1,12 @@
+PREFIX replaced_by: <http://purl.obolibrary.org/obo/IAO_0100001>
+
+SELECT DISTINCT ?entity ?property ?value WHERE {
+  VALUES ?property {
+    replaced_by:
+  }
+  ?entity ?property ?value1 .
+  ?entity ?property ?value2 .
+  FILTER(?value1!=?value2)
+  BIND(CONCAT(str(?value1), CONCAT("|", str(?value2))) as ?value)
+}
+
diff --git a/src/sparql/simple-seed.sparql b/src/sparql/simple-seed.sparql
index a96d263..247fbde 100644
--- a/src/sparql/simple-seed.sparql
+++ b/src/sparql/simple-seed.sparql
@@ -1,5 +1,4 @@
 prefix owl: <http://www.w3.org/2002/07/owl#>
-prefix obo: <http://purl.obolibrary.org/obo/>
 SELECT DISTINCT ?cls WHERE {
diff --git a/src/sparql/subsets-labeled.sparql b/src/sparql/subsets-labeled.sparql
index 7bc992f..5ca7e31 100644
--- a/src/sparql/subsets-labeled.sparql
+++ b/src/sparql/subsets-labeled.sparql
@@ -1,5 +1,4 @@
 prefix oio: <http://www.geneontology.org/formats/oboInOwl#>
-prefix def: <http://purl.obolibrary.org/obo/IAO_0000115>
 prefix owl: <http://www.w3.org/2002/07/owl#>
 prefix inSubset: <http://www.geneontology.org/formats/oboInOwl#inSubset>
 prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>
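The new multiple-replaced_by check is wired into SPARQL_VALIDATION_CHECKS, so it runs automatically as part of sparql_test. For debugging a single reported violation it can also be run on its own with ROBOT verify; a minimal sketch mirroring the Makefile recipe, assuming the preprocessed edit file has already been built and that TMPDIR and REPORTDIR keep their usual defaults of tmp/ and reports/:

    # flag terms that carry more than one IAO:0100001 ("term replaced by") value
    robot verify --catalog catalog-v001.xml \
      -i tmp/geno-preprocess.owl \
      --queries ../sparql/multiple-replaced_by-violation.sparql \
      -O reports/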