From 2fd97a23b99697188c27283aecb13c526154b29a Mon Sep 17 00:00:00 2001 From: Anita Caron Date: Fri, 22 Mar 2024 09:16:54 +0000 Subject: [PATCH] update odk 1.5 (#3242) Co-authored-by: Anita Caron --- .github/workflows/qc.yml | 11 +- docs/odk-workflows/RepoManagement.md | 2 +- docs/odk-workflows/RepositoryFileStructure.md | 4 +- src/ontology/Makefile | 186 ++++++++++++------ src/ontology/run.sh | 38 +++- src/scripts/run-command.sh | 4 + src/scripts/update_repo.sh | 5 +- 7 files changed, 174 insertions(+), 76 deletions(-) create mode 100755 src/scripts/run-command.sh diff --git a/.github/workflows/qc.yml b/.github/workflows/qc.yml index 7e6162fd41..3d602e27da 100644 --- a/.github/workflows/qc.yml +++ b/.github/workflows/qc.yml @@ -1,4 +1,4 @@ -# This is a basic workflow to help you get started with Actions +# Basic ODK workflow name: CI @@ -23,14 +23,11 @@ jobs: ontology_qc: # The type of runner that the job will run on runs-on: ubuntu-latest - container: obolibrary/odkfull:v1.4.3 - permissions: - contents: read - pull-requests: write + container: obolibrary/odkfull:v1.5 # Steps represent a sequence of tasks that will be executed as part of the job steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Run ontology QC checks id: check @@ -59,7 +56,7 @@ jobs: github_token: ${{secrets.GITHUB_TOKEN}} file: "../../comment.md" identifier: "TAXON_CONSTRAINTS_REASONING" - + - name: Last rows of QC report if: steps.check.outcome == 'failure' run: | diff --git a/docs/odk-workflows/RepoManagement.md b/docs/odk-workflows/RepoManagement.md index 46d39130ad..e337f4392b 100644 --- a/docs/odk-workflows/RepoManagement.md +++ b/docs/odk-workflows/RepoManagement.md @@ -2,7 +2,7 @@ ## Updating your ODK repository -Your ODK repositories configuration is managed in `src/ontology/uberon-odk.yaml`. Once you have made your changes, you can run the following to apply your changes to the repository: +Your ODK repositories configuration is managed in `src/ontology/uberon-odk.yaml`. The [ODK Project Configuration Schema](https://github.com/INCATools/ontology-development-kit/blob/master/docs/project-schema.md) defines all possible parameters that can be used in this config YAML. 
Once you have made your changes, you can run the following to apply your changes to the repository: ``` diff --git a/docs/odk-workflows/RepositoryFileStructure.md b/docs/odk-workflows/RepositoryFileStructure.md index aaea0d9af9..f5e4d4a8fb 100644 --- a/docs/odk-workflows/RepositoryFileStructure.md +++ b/docs/odk-workflows/RepositoryFileStructure.md @@ -18,7 +18,7 @@ These are the current imports in UBERON | ------ | --- | ---- | | pr | https://raw.githubusercontent.com/obophenotype/pro_obo_slim/master/pr_slim.owl | None | | cl | http://purl.obolibrary.org/obo/cl.owl | None | -| go | http://purl.obolibrary.org/obo/go.owl | None | +| go | http://purl.obolibrary.org/obo/go/go-base.owl | None | | envo | http://purl.obolibrary.org/obo/envo.owl | None | | ro | http://purl.obolibrary.org/obo/ro.owl | None | | bspo | http://purl.obolibrary.org/obo/bspo.owl | None | @@ -26,6 +26,7 @@ These are the current imports in UBERON | pato | http://purl.obolibrary.org/obo/pato.owl | None | | bfo | http://purl.obolibrary.org/obo/bfo.owl | None | | ncbitaxon | http://purl.obolibrary.org/obo/ncbitaxon/subsets/taxslim.owl | None | +| ncbitaxondisjoints | http://purl.obolibrary.org/obo/ncbitaxon/subsets/taxslim-disjoint-over-in-taxon.owl | None | | nbo | http://purl.obolibrary.org/obo/nbo.owl | None | | orcidio | https://w3id.org/orcidio/orcidio.owl | None | | omo | http://purl.obolibrary.org/obo/omo.owl | mirror | @@ -41,6 +42,7 @@ These are the components in UBERON | Filename | URL | | -------- | --- | +| disjoint_union_over.owl | None | | mappings.owl | None | | in-subset.owl | None | | hra_subset.owl | None | diff --git a/src/ontology/Makefile b/src/ontology/Makefile index 79b91ad7d3..e56fc3a44f 100644 --- a/src/ontology/Makefile +++ b/src/ontology/Makefile @@ -1,7 +1,7 @@ # ---------------------------------------- # Makefile for uberon # Generated using ontology-development-kit -# ODK Version: v1.4.3 +# ODK Version: v1.5 # ---------------------------------------- # IMPORTANT: DO NOT EDIT THIS FILE. 
To override default make goals, use uberon.Makefile instead @@ -9,6 +9,9 @@ # ---------------------------------------- # More information: https://github.com/INCATools/ontology-development-kit/ +# Fingerprint of the configuration file when this Makefile was last generated +CONFIG_HASH= c8fa86ab8f984cb6c9ea20468b4ae4ff2e446501d9c361b8a3993841a38177eb + # ---------------------------------------- # Standard Constants @@ -44,7 +47,7 @@ REPORT_PROFILE_OPTS = --profile $(ROBOT_PROFILE) OBO_FORMAT_OPTIONS = SPARQL_VALIDATION_CHECKS = equivalent-classes owldef-self-reference illegal-annotation-property taxon-range orcid-contributor obsolete-replaced_by xrefs-mesh-pattern label-synonym-polysemy id-format SPARQL_EXPORTS = basic-report -ODK_VERSION_MAKEFILE = v1.4.3 +ODK_VERSION_MAKEFILE = v1.5 TODAY ?= $(shell date +%Y-%m-%d) OBODATE ?= $(shell date +'%d:%m:%Y %H:%M') @@ -64,6 +67,11 @@ FORMATS = $(sort owl obo json owl) FORMATS_INCL_TSV = $(sort $(FORMATS) tsv) RELEASE_ARTEFACTS = $(sort $(ONT)-base $(ONT)-full $(ONT)-simple $(ONT)-basic collected-metazoan composite-metazoan composite-metazoan-basic composite-vertebrate composite-vertebrate-basic common-anatomy ) +ifeq ($(ODK_DEBUG),yes) +ODK_DEBUG_FILE = debug.log +SHELL = $(SCRIPTSDIR)/run-command.sh +endif + # ---------------------------------------- # Top-level targets # ---------------------------------------- @@ -74,12 +82,16 @@ RELEASE_ARTEFACTS = $(sort $(ONT)-base $(ONT)-full $(ONT)-simple $(ONT)-basic co all: all_odk .PHONY: all_odk -all_odk: odkversion test all_assets +all_odk: odkversion config_check test custom_reports all_assets .PHONY: test test: odkversion dosdp_validation reason_test sparql_test robot_reports $(REPORTDIR)/validate_profile_owl2dl_$(ONT).owl.txt echo "Finished running all tests successfully." +.PHONY: test +test_fast: + $(MAKE_FAST) test + .PHONY: release_diff release_diff: $(REPORTDIR)/release-diff.md @@ -90,13 +102,52 @@ reason_test: $(EDIT_PREPROCESSED) .PHONY: odkversion odkversion: - echo "ODK Makefile version: $(ODK_VERSION_MAKEFILE) (this is the version of the ODK with which this Makefile was generated, \ - not the version of the ODK you are running)" &&\ - echo "ROBOT version (ODK): " && $(ROBOT) --version + @echo "ODK Makefile $(ODK_VERSION_MAKEFILE)" + @odk-info --tools +.PHONY: config_check +config_check: + @if [ "$$(sha256sum $(ONT)-odk.yaml | cut -c1-64)" = "$(CONFIG_HASH)" ]; then \ + echo "Repository is up-to-date." ; else \ + echo "Your ODK configuration has changed since this Makefile was generated. You may need to run 'make update_repo'." 
; fi + $(TMPDIR) $(REPORTDIR) $(MIRRORDIR) $(IMPORTDIR) $(COMPONENTSDIR) $(SUBSETDIR): mkdir -p $@ +# ---------------------------------------- +# ODK-managed ROBOT plugins +# ---------------------------------------- + +# Make sure ROBOT knows where to find plugins +export ROBOT_PLUGINS_DIRECTORY=$(TMPDIR)/plugins + +# Override this rule in uberon.Makefile to install custom plugins +.PHONY: custom_robot_plugins +custom_robot_plugins: + + +.PHONY: extra_robot_plugins +extra_robot_plugins: + + +# Install all ROBOT plugins to the runtime plugins directory +.PHONY: all_robot_plugins +all_robot_plugins: $(foreach plugin,$(notdir $(wildcard /tools/robot-plugins/*.jar)),$(ROBOT_PLUGINS_DIRECTORY)/$(plugin)) \ + $(foreach plugin,$(notdir $(wildcard ../../plugins/*.jar)),$(ROBOT_PLUGINS_DIRECTORY)/$(plugin)) \ + custom_robot_plugins extra_robot_plugins \ + +# Default rule to install plugins +$(ROBOT_PLUGINS_DIRECTORY)/%.jar: + @mkdir -p $(ROBOT_PLUGINS_DIRECTORY) + @if [ -f ../../plugins/$*.jar ]; then \ + ln ../../plugins/$*.jar $@ ; \ + elif [ -f /tools/robot-plugins/$*.jar ]; then \ + cp /tools/robot-plugins/$*.jar $@ ; \ + fi + +# Specific rules for supplementary plugins defined in configuration + + # ---------------------------------------- # Release assets # ---------------------------------------- @@ -186,10 +237,10 @@ validate_profile_%: $(REPORTDIR)/validate_profile_owl2dl_%.txt SPARQL_VALIDATION_QUERIES = $(foreach V,$(SPARQL_VALIDATION_CHECKS),$(SPARQLDIR)/$(V)-violation.sparql) -sparql_test: $(EDIT_PREPROCESSED) catalog-v001.xml | $(REPORTDIR) +sparql_test: $(EDIT_PREPROCESSED) | $(REPORTDIR) ifneq ($(SPARQL_VALIDATION_QUERIES),) - $(ROBOT) verify --catalog catalog-v001.xml -i $(EDIT_PREPROCESSED) --queries $(SPARQL_VALIDATION_QUERIES) -O $(REPORTDIR) + $(ROBOT) verify -i $(EDIT_PREPROCESSED) --queries $(SPARQL_VALIDATION_QUERIES) -O $(REPORTDIR) endif # ---------------------------------------- @@ -202,6 +253,14 @@ $(REPORTDIR)/$(SRC)-obo-report.tsv: $(SRCMERGED) | $(REPORTDIR) $(REPORTDIR)/%-obo-report.tsv: % | $(REPORTDIR) $(ROBOT) report -i $< $(REPORT_LABEL) $(REPORT_PROFILE_OPTS) --fail-on $(REPORT_FAIL_ON) --print 5 -o $@ +check_for_robot_updates: + @cut -f2 "/tools/robot_report_profile.txt" | sort > $(TMPDIR)/sorted_tsv2.txt + @cut -f2 "$(ROBOT_PROFILE)" | sort > $(TMPDIR)/sorted_tsv1.txt + @comm -23 $(TMPDIR)/sorted_tsv2.txt $(TMPDIR)/sorted_tsv1.txt > $(TMPDIR)/missing.txt + @echo "Missing tests:" + @cat $(TMPDIR)/missing.txt + @rm $(TMPDIR)/sorted_tsv1.txt $(TMPDIR)/sorted_tsv2.txt $(TMPDIR)/missing.txt $(TMPDIR)/report_profile_robot.txt + # ---------------------------------------- # Release assets # ---------------------------------------- @@ -219,7 +278,7 @@ RELEASE_ASSETS = \ $(SUBSET_FILES) .PHONY: all_assets -all_assets: $(ASSETS) +all_assets: $(ASSETS) check_rdfxml_assets .PHONY: show_assets show_assets: @@ -227,7 +286,7 @@ show_assets: du -sh $(ASSETS) check_rdfxml_%: % - @check-rdfxml $< + @check-rdfxml $< .PHONY: check_rdfxml_assets check_rdfxml_assets: $(foreach product,$(MAIN_PRODUCTS),check_rdfxml_$(product).owl) @@ -325,6 +384,7 @@ $(IMPORTDIR)/merged_import.owl: $(MIRRORDIR)/merged.owl $(IMPORTDIR)/merged_term if [ $(IMP) = true ]; then $(ROBOT) merge -i $< \ remove --select "" remove --select "" remove --select "" remove --select "" remove --select "" remove --select "" \ extract -T $(IMPORTDIR)/merged_terms_combined.txt --force true --copy-ontology-annotations true --individuals exclude --method BOT \ + remove $(patsubst %, --term %, $(ANNOTATION_PROPERTIES)) -T 
$(IMPORTDIR)/merged_terms_combined.txt --select complement --select annotation-properties \ query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \ $(ANNOTATE_CONVERT_FILE); fi @@ -393,9 +453,9 @@ recreate-%: no-mirror-recreate-%: $(MAKE) COMP=true IMP=false IMP_LARGE=false MIR=false PAT=true $(COMPONENTSDIR)/$*.owl -B -$(COMPONENTSDIR)/%: | $(COMPONENTSDIR) - touch $@ -.PRECIOUS: $(COMPONENTSDIR)/% +$(COMPONENTSDIR)/%.owl: | $(COMPONENTSDIR) + test -f $@ || touch $@ +.PRECIOUS: $(COMPONENTSDIR)/%.owl @@ -411,136 +471,132 @@ IMP=true # Global parameter to bypass import generation MIR=true # Global parameter to bypass mirror generation IMP_LARGE=true # Global parameter to bypass handling of large imports +ifeq ($(strip $(MIR)),true) ## ONTOLOGY: pr .PHONY: mirror-pr .PRECIOUS: $(MIRRORDIR)/pr.owl mirror-pr: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then $(ROBOT) convert -I https://raw.githubusercontent.com/obophenotype/pro_obo_slim/master/pr_slim.owl -o $@.tmp.owl && \ - $(ROBOT) remove -i $@.tmp.owl --base-iri $(URIBASE)/PR --axioms external --preserve-structure false --trim false -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + $(ROBOT) remove -I https://raw.githubusercontent.com/obophenotype/pro_obo_slim/master/pr_slim.owl --base-iri $(OBOBASE)/PR --axioms external --preserve-structure false --trim false -o $(TMPDIR)/$@.owl ## ONTOLOGY: cl .PHONY: mirror-cl .PRECIOUS: $(MIRRORDIR)/cl.owl mirror-cl: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/cl/cl-base.owl --create-dirs -o $(MIRRORDIR)/cl.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/cl.owl -o $@.tmp.owl && mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/cl/cl-base.owl --create-dirs -o $(TMPDIR)/cl-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/cl-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: go .PHONY: mirror-go .PRECIOUS: $(MIRRORDIR)/go.owl mirror-go: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then $(ROBOT) convert -I http://purl.obolibrary.org/obo/go/go-base.owl -o $@.tmp.owl && \ - $(ROBOT) remove -i $@.tmp.owl --base-iri http://purl.obolibrary.org/obo/GO_ --base-iri http://purl.obolibrary.org/obo/GOREL_ --base-iri http://purl.obolibrary.org/obo/GOCHE_ --axioms external --preserve-structure false --trim false -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + $(ROBOT) remove -I http://purl.obolibrary.org/obo/go/go-base.owl --base-iri http://purl.obolibrary.org/obo/GO_ --base-iri http://purl.obolibrary.org/obo/GOREL_ --base-iri http://purl.obolibrary.org/obo/GOCHE_ --axioms external --preserve-structure false --trim false -o $(TMPDIR)/$@.owl ## ONTOLOGY: envo .PHONY: mirror-envo .PRECIOUS: $(MIRRORDIR)/envo.owl mirror-envo: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/envo/envo-base.owl --create-dirs -o $(MIRRORDIR)/envo.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/envo.owl -o $@.tmp.owl && mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/envo/envo-base.owl --create-dirs -o $(TMPDIR)/envo-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/envo-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: ro .PHONY: mirror-ro .PRECIOUS: $(MIRRORDIR)/ro.owl mirror-ro: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/ro/ro-base.owl --create-dirs -o $(MIRRORDIR)/ro.owl --retry 4 --max-time 200 &&\ - 
$(ROBOT) convert -i $(MIRRORDIR)/ro.owl -o $@.tmp.owl && mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/ro/ro-base.owl --create-dirs -o $(TMPDIR)/ro-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/ro-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: bspo .PHONY: mirror-bspo .PRECIOUS: $(MIRRORDIR)/bspo.owl mirror-bspo: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/bspo/bspo-base.owl --create-dirs -o $(MIRRORDIR)/bspo.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/bspo.owl -o $@.tmp.owl && mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/bspo/bspo-base.owl --create-dirs -o $(TMPDIR)/bspo-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/bspo-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: chebi .PHONY: mirror-chebi .PRECIOUS: $(MIRRORDIR)/chebi.owl mirror-chebi: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then $(ROBOT) convert -I https://raw.githubusercontent.com/obophenotype/chebi_obo_slim/main/chebi_slim.owl -o $@.tmp.owl && \ - $(ROBOT) remove -i $@.tmp.owl --base-iri $(URIBASE)/CHEBI --axioms external --preserve-structure false --trim false -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + $(ROBOT) remove -I https://raw.githubusercontent.com/obophenotype/chebi_obo_slim/main/chebi_slim.owl --base-iri $(OBOBASE)/CHEBI --axioms external --preserve-structure false --trim false -o $(TMPDIR)/$@.owl ## ONTOLOGY: pato .PHONY: mirror-pato .PRECIOUS: $(MIRRORDIR)/pato.owl mirror-pato: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/pato/pato-base.owl --create-dirs -o $(MIRRORDIR)/pato.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/pato.owl -o $@.tmp.owl && mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/pato/pato-base.owl --create-dirs -o $(TMPDIR)/pato-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/pato-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: bfo .PHONY: mirror-bfo .PRECIOUS: $(MIRRORDIR)/bfo.owl mirror-bfo: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/bfo.owl --create-dirs -o $(MIRRORDIR)/bfo.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/bfo.owl -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/bfo.owl --create-dirs -o $(TMPDIR)/bfo-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/bfo-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: ncbitaxon .PHONY: mirror-ncbitaxon .PRECIOUS: $(MIRRORDIR)/ncbitaxon.owl mirror-ncbitaxon: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then $(ROBOT) convert -I http://purl.obolibrary.org/obo/ncbitaxon/subsets/taxslim.owl -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + $(ROBOT) convert -I http://purl.obolibrary.org/obo/ncbitaxon/subsets/taxslim.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: ncbitaxondisjoints .PHONY: mirror-ncbitaxondisjoints .PRECIOUS: $(MIRRORDIR)/ncbitaxondisjoints.owl mirror-ncbitaxondisjoints: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then $(ROBOT) convert -I http://purl.obolibrary.org/obo/ncbitaxon/subsets/taxslim-disjoint-over-in-taxon.owl -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + $(ROBOT) convert -I http://purl.obolibrary.org/obo/ncbitaxon/subsets/taxslim-disjoint-over-in-taxon.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: nbo .PHONY: mirror-nbo .PRECIOUS: $(MIRRORDIR)/nbo.owl mirror-nbo: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L 
$(OBOBASE)/nbo/nbo-base.owl --create-dirs -o $(MIRRORDIR)/nbo.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/nbo.owl -o $@.tmp.owl && mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/nbo/nbo-base.owl --create-dirs -o $(TMPDIR)/nbo-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/nbo-download.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: orcidio .PHONY: mirror-orcidio .PRECIOUS: $(MIRRORDIR)/orcidio.owl mirror-orcidio: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then $(ROBOT) convert -I https://w3id.org/orcidio/orcidio.owl -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + $(ROBOT) convert -I https://w3id.org/orcidio/orcidio.owl -o $(TMPDIR)/$@.owl ## ONTOLOGY: omo .PHONY: mirror-omo .PRECIOUS: $(MIRRORDIR)/omo.owl mirror-omo: | $(TMPDIR) - if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/omo.owl --create-dirs -o $(MIRRORDIR)/omo.owl --retry 4 --max-time 200 &&\ - $(ROBOT) convert -i $(MIRRORDIR)/omo.owl -o $@.tmp.owl &&\ - mv $@.tmp.owl $(TMPDIR)/$@.owl; fi + curl -L $(OBOBASE)/omo.owl --create-dirs -o $(TMPDIR)/omo-download.owl --retry 4 --max-time 200 && \ + $(ROBOT) convert -i $(TMPDIR)/omo-download.owl -o $(TMPDIR)/$@.owl ALL_MIRRORS = $(patsubst %, $(MIRRORDIR)/%.owl, $(IMPORTS)) MERGE_MIRRORS = true +ifeq ($(strip $(MERGE_MIRRORS)),true) $(MIRRORDIR)/merged.owl: $(ALL_MIRRORS) - if [ $(IMP) = true ] && [ $(MERGE_MIRRORS) = true ]; then $(ROBOT) merge $(patsubst %, -i %, $^) -o $@; fi + $(ROBOT) merge $(patsubst %, -i %, $^) -o $@ .PRECIOUS: $(MIRRORDIR)/merged.owl +endif $(MIRRORDIR)/%.owl: mirror-% | $(MIRRORDIR) - if [ $(IMP) = true ] && [ $(MIR) = true ] && [ -f $(TMPDIR)/mirror-$*.owl ]; then if cmp -s $(TMPDIR)/mirror-$*.owl $@ ; then echo "Mirror identical, ignoring."; else echo "Mirrors different, updating." &&\ + if [ -f $(TMPDIR)/mirror-$*.owl ]; then if cmp -s $(TMPDIR)/mirror-$*.owl $@ ; then echo "Mirror identical, ignoring."; else echo "Mirrors different, updating." &&\ cp $(TMPDIR)/mirror-$*.owl $@; fi; fi +else # MIR=false +$(MIRRORDIR)/%.owl: + @echo "Not refreshing $@ because the mirrorring pipeline is disabled (MIR=$(MIR))." +endif @@ -638,7 +694,7 @@ DOSDP_TERM_FILES_DEFAULT = $(patsubst %.tsv, $(PATTERNDIR)/data/default/%.txt, $ DOSDP_PATTERN_NAMES_DEFAULT = $(strip $(patsubst %.tsv,%, $(notdir $(wildcard $(PATTERNDIR)/data/default/*.tsv)))) $(DOSDP_OWL_FILES_DEFAULT): $(EDIT_PREPROCESSED) $(DOSDP_TSV_FILES_DEFAULT) $(ALL_PATTERN_FILES) - if [ $(PAT) = true ] && [ "${DOSDP_PATTERN_NAMES_DEFAULT}" ]; then $(DOSDPT) generate --catalog=catalog-v001.xml \ + if [ $(PAT) = true ] && [ "${DOSDP_PATTERN_NAMES_DEFAULT}" ]; then $(DOSDPT) generate --catalog=$(CATALOG) \ --infile=$(PATTERNDIR)/data/default/ --template=$(PATTERNDIR)/dosdp-patterns --batch-patterns="$(DOSDP_PATTERN_NAMES_DEFAULT)" \ --ontology=$< --obo-prefixes=true --outfile=$(PATTERNDIR)/data/default; fi @@ -765,9 +821,13 @@ ROBOT_RELEASE_IMPORT_MODE=$(ROBOT) merge --input $< # ROBOT pipeline that removes imports, then merges components. This is for release artefacts that start from "base" ROBOT_RELEASE_IMPORT_MODE_BASE=$(ROBOT) remove --input $< --select imports --trim false merge $(patsubst %, -i %, $(OTHER_SRC)) -# base: All the axioms as they are editted by the editors, excluding reasoning -$(ONT)-base.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC) - $(ROBOT_RELEASE_IMPORT_MODE_BASE) \ +# base: A version of the ontology that does not include any externally imported axioms. 
+$(ONT)-base.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC) $(IMPORT_FILES) + $(ROBOT_RELEASE_IMPORT_MODE) \ + reason --reasoner ELK --equivalent-classes-allowed asserted-only --exclude-tautologies structural --annotate-inferred-axioms False \ + relax \ + reduce -r ELK \ + remove --base-iri http://purl.obolibrary.org/obo/UBERON_ --base-iri http://purl.obolibrary.org/obo/uberon# --base-iri http://purl.obolibrary.org/obo/UBPROP_ --base-iri http://purl.obolibrary.org/obo/uberon/core# --axioms external --preserve-structure false --trim false \ $(SHARED_ROBOT_COMMANDS) \ annotate --link-annotation http://purl.org/dc/elements/1.1/type http://purl.obolibrary.org/obo/IAO_8000001 \ --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \ @@ -784,7 +844,7 @@ $(ONT)-full.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC) $(IMPORT_FILES) # remove --select imports --trim false $(ONT)-simple.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC) $(SIMPLESEED) $(IMPORT_FILES) $(ROBOT_RELEASE_IMPORT_MODE) \ - reason --reasoner ELK --equivalent-classes-allowed asserted-only --exclude-tautologies structural \ + reason --reasoner ELK --equivalent-classes-allowed asserted-only --exclude-tautologies structural --annotate-inferred-axioms False \ relax \ remove --axioms equivalent \ relax \ @@ -798,7 +858,7 @@ $(ONT)-simple.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC) $(SIMPLESEED) $(IMPORT_FILE # removes any axioms that contains one of the ops that not in the whitelist file $(ONT)-basic.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC) $(SIMPLESEED) $(KEEPRELATIONS) $(IMPORT_FILES) $(ROBOT_RELEASE_IMPORT_MODE) \ - reason --reasoner ELK --equivalent-classes-allowed asserted-only --exclude-tautologies structural \ + reason --reasoner ELK --equivalent-classes-allowed asserted-only --exclude-tautologies structural --annotate-inferred-axioms False \ relax \ remove --axioms equivalent \ remove --axioms disjoint \ @@ -884,14 +944,17 @@ update_repo: update_docs: mkdocs gh-deploy --config-file ../../mkdocs.yaml -# Note to future generations: prepending ./ is a safety measure to ensure that -# the environment does not malicously set `CLEANFILES` to `\`. +# Note to future generations: computing the real path relative to the +# current directory is a way to ensure we only clean up directories that +# are located below the current directory, regardless of the contents of +# the *DIR variables. .PHONY: clean clean: $(MAKE) pattern_clean - [ -n "$(MIRRORDIR)" ] && [ $(MIRRORDIR) != "." ] && [ $(MIRRORDIR) != "/" ] && [ $(MIRRORDIR) != ".." ] && [ -d ./$(MIRRORDIR) ] && rm -rf ./$(MIRRORDIR)/* - [ -n "$(TMPDIR)" ] && [ $(TMPDIR) != "." ] && [ $(TMPDIR) != "/" ] && [ $(TMPDIR) != ".." ] && [ -d ./$(TMPDIR) ] && rm -rf ./$(TMPDIR)/* - [ -n "$(UPDATEREPODIR)" ] && [ $(UPDATEREPODIR) != "." ] && [ $(UPDATEREPODIR) != "/" ] && [ $(UPDATEREPODIR) != ".." ] && [ -d ./$(UPDATEREPODIR) ] && rm -rf ./$(UPDATEREPODIR)/* + for dir in $(MIRRORDIR) $(TMPDIR) $(UPDATEREPODIR) ; do \ + reldir=$$(realpath --relative-to=$$(pwd) $$dir) ; \ + case $$reldir in .*|"") ;; *) rm -rf $$reldir/* ;; esac \ + done rm -f $(CLEANFILES) .PHONY: help @@ -910,6 +973,7 @@ Core commands: * prepare_release_fast: Run the entire release pipeline without refreshing imports, recreating components or recompiling patterns. * update_repo: Update the ODK repository setup using the config file uberon-odk.yaml * test: Running all validation tests +* test_fast: Runs the test suite, but without updating imports or components * odkversion: Show the current version of the ODK Makefile and ROBOT. 
* clean: Delete all temporary files * help: Print ODK Usage information @@ -936,8 +1000,8 @@ DOSDP templates Editor utilities: * validate_idranges: Make sure your ID ranges file is formatted correctly -* normalize_src: Load and safe your uberon-edit file after you to make sure its serialised correctly -* normalize_obo_src: Load and safe your uberon-edit.obo file after you to merge duplicate annotation assertions +* normalize_src: Load and save your uberon-edit file after you to make sure its serialised correctly +* normalize_obo_src: Load and save your uberon-edit.obo file after you to merge duplicate annotation assertions * explain_unsat: If you have unsatisfiable classes, this command will create a markdown file (tmp/explain_unsat.md) which will explain all your unsatisfiable classes * validate-all-tsv: Check all your tsv files for possible problems in syntax. Use ALL_TSV_FILES variable to list files * validate-tsv: Check a tsv file for syntactic problems with tsvalid. Use TSV variable to pass filepath, e.g. make TSV=../my.tsv validate-tsv. diff --git a/src/ontology/run.sh b/src/ontology/run.sh index 0de0d7ee1f..28d557cff6 100755 --- a/src/ontology/run.sh +++ b/src/ontology/run.sh @@ -14,6 +14,8 @@ # # See README-editors.md for more details. +set -e + if [ -f run.sh.conf ]; then . ./run.sh.conf fi @@ -29,6 +31,22 @@ elif [ -f "$HOME/Library/Application Support/ontology-development-kit/github/tok GH_TOKEN=$(cat "$HOME/Library/Application Support/ontology-development-kit/github/token") fi +# SSH agent socket +# On macOS, we cannot use $SSH_AUTH_SOCK directly, +# we need to use a "magic" socket instead. +case "$(uname)" in +Darwin) + ODK_SSH_AUTH_SOCKET=/run/host-services/ssh-auth.sock + ;; +*) + ODK_SSH_AUTH_SOCKET=$SSH_AUTH_SOCK + ;; +esac +ODK_SSH_BIND= +if [ -n "$ODK_SSH_AUTH_SOCKET" ]; then + ODK_SSH_BIND=",$ODK_SSH_AUTH_SOCKET:/run/host-services/ssh-auth.sock" +fi + ODK_IMAGE=${ODK_IMAGE:-odkfull} TAG_IN_IMAGE=$(echo $ODK_IMAGE | awk -F':' '{ print $2 }') if [ -n "$TAG_IN_IMAGE" ]; then @@ -40,16 +58,28 @@ ODK_TAG=${ODK_TAG:-latest} ODK_JAVA_OPTS=${ODK_JAVA_OPTS:--Xmx20G} ODK_DEBUG=${ODK_DEBUG:-no} +ODK_USER_ID=${ODK_USER_ID:-$(id -u)} +ODK_GROUP_ID=${ODK_GROUP_ID:-$(id -g)} + +# Convert OWLAPI_* environment variables to the OWLAPI as Java options +# See http://owlcs.github.io/owlapi/apidocs_4/org/semanticweb/owlapi/model/parameters/ConfigurationOptions.html +# for a list of allowed options +OWLAPI_OPTIONS_NAMESPACE=org.semanticweb.owlapi.model.parameters.ConfigurationOptions +for owlapi_var in $(env | sed -n s/^OWLAPI_//p) ; do + ODK_JAVA_OPTS="$ODK_JAVA_OPTS -D$OWLAPI_OPTIONS_NAMESPACE.${owlapi_var%=*}=${owlapi_var#*=}" +done + TIMECMD= if [ x$ODK_DEBUG = xyes ]; then # If you wish to change the format string, take care of using # non-breaking spaces (U+00A0) instead of normal spaces, to # prevent the shell from tokenizing the format string. - echo "Running ${IMAGE} with ${ODK_JAVA_OPTS} of memory for ROBOT and Java-based pipeline steps." + echo "Running obolibrary/${ODK_IMAGE}:${ODK_TAG} with '${ODK_JAVA_OPTS}' as options for ROBOT and other Java-based pipeline steps." 
TIMECMD="/usr/bin/time -f ### DEBUG STATS ###\nElapsed time: %E\nPeak memory: %M kb" fi +rm -f tmp/debug.log -VOLUME_BIND=$PWD/../../:/work +VOLUME_BIND=$PWD/../../:/work$ODK_SSH_BIND WORK_DIR=/work/src/ontology if [ -n "$ODK_BINDS" ]; then @@ -59,14 +89,14 @@ fi if [ -n "$USE_SINGULARITY" ]; then singularity exec --cleanenv $ODK_SINGULARITY_OPTIONS \ - --env "ROBOT_JAVA_ARGS=$ODK_JAVA_OPTS,JAVA_OPTS=$ODK_JAVA_OPTS" \ + --env "ROBOT_JAVA_ARGS=$ODK_JAVA_OPTS,JAVA_OPTS=$ODK_JAVA_OPTS,SSH_AUTH_SOCK=/run/host-services/ssh-auth.sock,ODK_USER_ID=$ODK_USER_ID,ODK_GROUP_ID=$ODK_GROUP_ID,ODK_DEBUG=$ODK_DEBUG" \ --bind $VOLUME_BIND \ -W $WORK_DIR \ docker://obolibrary/$ODK_IMAGE:$ODK_TAG $TIMECMD "$@" else BIND_OPTIONS="-v $(echo $VOLUME_BIND | sed 's/,/ -v /')" docker run $ODK_DOCKER_OPTIONS $BIND_OPTIONS -w $WORK_DIR \ - -e ROBOT_JAVA_ARGS="$ODK_JAVA_OPTS" -e JAVA_OPTS="$ODK_JAVA_OPTS" \ + -e ROBOT_JAVA_ARGS="$ODK_JAVA_OPTS" -e JAVA_OPTS="$ODK_JAVA_OPTS" -e SSH_AUTH_SOCK=/run/host-services/ssh-auth.sock -e ODK_USER_ID=$ODK_USER_ID -e ODK_GROUP_ID=$ODK_GROUP_ID -e ODK_DEBUG=$ODK_DEBUG \ --rm -ti obolibrary/$ODK_IMAGE:$ODK_TAG $TIMECMD "$@" fi diff --git a/src/scripts/run-command.sh b/src/scripts/run-command.sh new file mode 100755 index 0000000000..45d431d10a --- /dev/null +++ b/src/scripts/run-command.sh @@ -0,0 +1,4 @@ +#!/bin/sh +ODK_DEBUG_FILE=${ODK_DEBUG_FILE:-debug.log} +echo "Command: sh $@" >> $ODK_DEBUG_FILE +/usr/bin/time -a -o $ODK_DEBUG_FILE -f "Elapsed time: %E\nPeak memory: %M kb" /bin/sh "$@" diff --git a/src/scripts/update_repo.sh b/src/scripts/update_repo.sh index 66d8466749..0b9fde2072 100644 --- a/src/scripts/update_repo.sh +++ b/src/scripts/update_repo.sh @@ -26,7 +26,8 @@ cp target/$OID/src/ontology/run.sh $SRCDIR/ontology/ cp -r target/$OID/src/sparql/* $SRCDIR/sparql/ mkdir -p $ROOTDIR/.github mkdir -p $ROOTDIR/.github/workflows -cp -n target/$OID/.github/workflows/qc.yml $ROOTDIR/.github/workflows/qc.yml +cp target/$OID/.github/workflows/qc.yml $ROOTDIR/.github/workflows/qc.yml + cp target/$OID/.github/workflows/docs.yml $ROOTDIR/.github/workflows/docs.yml @@ -35,5 +36,5 @@ cp target/$OID/.github/workflows/docs.yml $ROOTDIR/.github/workflows/docs.yml cp -n target/$OID/mkdocs.yaml $ROOTDIR/ echo "WARNING: These files should be manually migrated: mkdocs.yaml, .gitignore, src/ontology/catalog.xml (if you added a new import or component)" -echo "WARNING: Your QC workflows have not been updated automatically. Please update the ODK version number(s) in .github/workflows/qc.yml." + echo "Ontology repository update successfully completed." \ No newline at end of file
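
Note on the new `config_check` goal: the generated Makefile now records a SHA-256 fingerprint of the ODK config (`CONFIG_HASH`) and compares it against the current `uberon-odk.yaml` whenever `make all_odk` runs. The same check can be reproduced by hand; the sketch below assumes it is run from `src/ontology` and reuses the hash value shown in this patch.

```
#!/bin/sh
# Sketch of the config_check comparison from the generated Makefile,
# assuming the working directory is src/ontology.
ONT=uberon
# CONFIG_HASH as recorded in this patch; it changes whenever the repo is regenerated.
CONFIG_HASH=c8fa86ab8f984cb6c9ea20468b4ae4ff2e446501d9c361b8a3993841a38177eb

# sha256sum prints "<digest>  <filename>"; the digest is the first 64 characters.
current=$(sha256sum "$ONT-odk.yaml" | cut -c1-64)

if [ "$current" = "$CONFIG_HASH" ]; then
    echo "Repository is up-to-date."
else
    echo "Your ODK configuration has changed since this Makefile was generated."
    echo "You may need to run 'make update_repo'."
fi
```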
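
Note on the `OWLAPI_*` handling added to run.sh: every environment variable with that prefix is turned into a Java system property under `org.semanticweb.owlapi.model.parameters.ConfigurationOptions` and appended to `ODK_JAVA_OPTS`. A standalone sketch of that conversion follows; `OWLAPI_REPAIR_ILLEGAL_PUNNINGS` is only an illustrative variable name, and (as in run.sh) values containing spaces are not handled.

```
#!/bin/sh
# Sketch of the OWLAPI_* -> Java system property conversion used in run.sh.
# OWLAPI_REPAIR_ILLEGAL_PUNNINGS is only an example; any OWLAPI_* variable is picked up.
export OWLAPI_REPAIR_ILLEGAL_PUNNINGS=false

ODK_JAVA_OPTS=-Xmx20G
OWLAPI_OPTIONS_NAMESPACE=org.semanticweb.owlapi.model.parameters.ConfigurationOptions

# 'env | sed -n s/^OWLAPI_//p' keeps only OWLAPI_-prefixed variables, with the
# prefix stripped, as NAME=value pairs; each becomes -D<namespace>.NAME=value.
for owlapi_var in $(env | sed -n s/^OWLAPI_//p) ; do
    ODK_JAVA_OPTS="$ODK_JAVA_OPTS -D$OWLAPI_OPTIONS_NAMESPACE.${owlapi_var%=*}=${owlapi_var#*=}"
done

echo "$ODK_JAVA_OPTS"
# -Xmx20G -Dorg.semanticweb.owlapi.model.parameters.ConfigurationOptions.REPAIR_ILLEGAL_PUNNINGS=false
```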
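
Note on debugging: with `ODK_DEBUG=yes`, run.sh now passes the flag into the container and the Makefile overrides `SHELL` with `src/scripts/run-command.sh`, so every recipe line is appended to a debug log together with its elapsed time and peak memory. A usage example, with the caveat that the log file name comes from `ODK_DEBUG_FILE` (default `debug.log`) while run.sh clears `tmp/debug.log` before each run:

```
# Run any goal with per-command timing and memory statistics collected.
ODK_DEBUG=yes sh run.sh make test_fast
# Each executed recipe line is logged as "Command: ..." followed by
# "Elapsed time: ..." and "Peak memory: ... kb".
cat debug.log   # the log may also end up under tmp/, depending on the working directory
```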