From 3a76dfe5a97c88c28edc0aa6aeffc8d73ae0c985 Mon Sep 17 00:00:00 2001
From: Damien Goutte-Gattat
Date: Mon, 13 Jan 2025 17:49:24 +0000
Subject: [PATCH 1/8] Use Make conditionals for IMP=true.

Instead of using shell conditionals (`if [ $(IMP) = true ]; then ... ; fi`)
in all targets of the imports section, bracket the entire section in a
single Make conditional.
---
 template/src/ontology/Makefile.jinja2 | 65 ++++++++++++++++-----------
 1 file changed, 39 insertions(+), 26 deletions(-)

diff --git a/template/src/ontology/Makefile.jinja2 b/template/src/ontology/Makefile.jinja2
index ef0b836e..2ce0a31f 100644
--- a/template/src/ontology/Makefile.jinja2
+++ b/template/src/ontology/Makefile.jinja2
@@ -464,23 +464,30 @@ ANNOTATION_PROPERTIES={% for p in project.import_group.annotation_properties %}{
 # These live in the imports/ folder
 # This pattern uses ROBOT to generate an import module

+# Set to false to bypass all code to generate/refresh import modules
+IMP = true
+
+# Likewise, but for "large" modules only
+IMP_LARGE = true
+
+ifeq ($(IMP),true)
 # Should be able to drop this if robot can just take a big messy list of terms as input.
 $(IMPORTDIR)/%_terms_combined.txt: $(IMPORTSEED) $(IMPORTDIR)/%_terms.txt
-	if [ $(IMP) = true ]; then cat $^ | grep -v ^# | sort | uniq > $@; fi
+	cat $^ | grep -v ^# | sort | uniq > $@

 {% if project.import_group.use_base_merging %}
 ALL_TERMS_COMBINED = $(patsubst %, $(IMPORTDIR)/%_terms_combined.txt, $(IMPORTS))
 $(IMPORTDIR)/merged_terms_combined.txt: $(ALL_TERMS_COMBINED)
-	if [ $(IMP) = true ]; then cat $^ | grep -v ^# | sort | uniq > $@; fi
+	cat $^ | grep -v ^# | sort | uniq > $@

 {% if 'slme' == project.import_group.module_type %}
 $(IMPORTDIR)/merged_import.owl: $(MIRRORDIR)/merged.owl $(IMPORTDIR)/merged_terms_combined.txt
-	if [ $(IMP) = true ]; then $(ROBOT) merge -i $< \{% if project.import_group.exclude_iri_patterns is not none %}
+	$(ROBOT) merge -i $< \{% if project.import_group.exclude_iri_patterns is not none %}
 	{% for pattern in project.import_group.exclude_iri_patterns %}remove --select "{{ pattern }}" {% endfor %} \{% endif %}
 	extract -T $(IMPORTDIR)/merged_terms_combined.txt --force true --copy-ontology-annotations true --individuals {{ project.import_group.slme_individuals }} --method {{ project.import_group.module_type_slme }} \
 	remove $(patsubst %, --term %, $(ANNOTATION_PROPERTIES)) -T $(IMPORTDIR)/merged_terms_combined.txt --select complement --select annotation-properties \
 	query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
-	$(ANNOTATE_CONVERT_FILE); fi
+	$(ANNOTATE_CONVERT_FILE)
 {% else %}
 $(IMPORTDIR)/merged_import.owl: $(MIRRORDIR)/merged.owl $(IMPORTDIR)/merged_terms_combined.txt
 	echo "ERROR: You have configured your default module type to be {{project.import_group.module_type}}; this behavior needs to be overwritten in {{ project.id }}.Makefile!" && false
@@ -489,30 +496,30 @@ $(IMPORTDIR)/merged_import.owl: $(MIRRORDIR)/merged.owl $(IMPORTDIR)/merged_term

 {% if 'slme' == project.import_group.module_type %}
 $(IMPORTDIR)/%_import.owl: {% if project.import_group.use_base_merging %}$(MIRRORDIR)/merged.owl{% else %}$(MIRRORDIR)/%.owl{% endif %} $(IMPORTDIR)/%_terms_combined.txt
-	if [ $(IMP) = true ]; then $(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \
+	$(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \
 	extract -T $(IMPORTDIR)/$*_terms_combined.txt --force true --copy-ontology-annotations true --individuals {{ project.import_group.slme_individuals }} --method {{ project.import_group.module_type_slme }} \
 	query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
-	$(ANNOTATE_CONVERT_FILE); fi
+	$(ANNOTATE_CONVERT_FILE)
 {% elif 'minimal' == project.import_group.module_type %}
 $(IMPORTDIR)/%_import.owl: {% if project.import_group.use_base_merging %}$(MIRRORDIR)/merged.owl{% else %}$(MIRRORDIR)/%.owl{% endif %} $(IMPORTDIR)/%_terms_combined.txt
-	if [ $(IMP) = true ]; then $(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \
+	$(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \
 	extract -T $(IMPORTDIR)/$*_terms_combined.txt --force true --copy-ontology-annotations true --method BOT \
 	remove --base-iri $(OBOBASE)"/$(shell echo $* | tr a-z A-Z)_" --axioms external --preserve-structure false --trim false \
 	query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
 	remove $(patsubst %, --term %, $(ANNOTATION_PROPERTIES)) -T $(IMPORTDIR)/$*_terms_combined.txt --select complement --select "classes individuals annotation-properties" \
-	$(ANNOTATE_CONVERT_FILE); fi
+	$(ANNOTATE_CONVERT_FILE)
 {% elif 'mirror' == project.import_group.module_type %}
 $(IMPORTDIR)/%_import.owl: {% if project.import_group.use_base_merging %}$(MIRRORDIR)/merged.owl{% else %}$(MIRRORDIR)/%.owl{% endif %} $(IMPORTDIR)/%_terms_combined.txt
-	if [ $(IMP) = true ]; then $(ROBOT) merge -i $< query --update ../sparql/preprocess-module.ru --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
-	$(ANNOTATE_CONVERT_FILE); fi
+	$(ROBOT) merge -i $< query --update ../sparql/preprocess-module.ru --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
+	$(ANNOTATE_CONVERT_FILE)
 {% elif 'filter' == project.import_group.module_type %}
 $(IMPORTDIR)/%_import.owl: {% if project.import_group.use_base_merging %}$(MIRRORDIR)/merged.owl{% else %}$(MIRRORDIR)/%.owl{% endif %} $(IMPORTDIR)/%_terms_combined.txt
-	if [ $(IMP) = true ]; then $(ROBOT) merge -i $< \
+	$(ROBOT) merge -i $< \
 	query --update ../sparql/preprocess-module.ru \
 	remove --base-iri $(OBOBASE)"/$(shell echo $* | tr a-z A-Z)_" --axioms external --preserve-structure false --trim false \
 	remove $(patsubst %, --term %, $(ANNOTATION_PROPERTIES)) -T $(IMPORTDIR)/$*_terms_combined.txt --select complement \
 	query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
-	$(ANNOTATE_CONVERT_FILE); fi
+	$(ANNOTATE_CONVERT_FILE)
 {% elif 'custom' == project.import_group.module_type %}
 $(IMPORTDIR)/%_import.owl: {% if project.import_group.use_base_merging %}$(MIRRORDIR)/merged.owl{% else %}$(MIRRORDIR)/%.owl{% endif %}
 	echo "ERROR: You have configured your default module type to be custom; this behavior needs to be overwritten in {{ project.id }}.Makefile!" && false
@@ -523,49 +530,56 @@ $(IMPORTDIR)/%_import.owl: {% if project.import_group.use_base_merging %}$(MIRRO
 # this can be useful for spot-checks and diffs.
 # we set strict mode to false by default. For discussion see https://github.com/owlcs/owlapi/issues/752
 $(IMPORTDIR)/%_import.obo: $(IMPORTDIR)/%_import.owl
-	if [ $(IMP) = true ]; then $(ROBOT) convert --check false -i $< -f obo -o $@.tmp.obo && mv $@.tmp.obo $@; fi
+	$(ROBOT) convert --check false -i $< -f obo -o $@.tmp.obo && mv $@.tmp.obo $@
 {% endif -%}

 {%- for ont in project.import_group.products -%}
 {% if ont.is_large or ont.module_type is not none %}
 ## Module for ontology: {{ ont.id }}

+{% if ont.is_large -%}
+ifeq ($(IMP_LARGE),true)
+{% endif -%}
 {% if (ont.is_large and ('slme' == ont.module_type or (ont.module_type is none and 'slme' == project.import_group.module_type))) or ('fast_slme' == ont.module_type) or (ont.module_type is none and 'fast_slme' == project.import_group.module_type) %}
 $(IMPORTDIR)/{{ ont.id }}_import.owl: {% if project.import_group.use_base_merging %}$(MIRRORDIR)/merged.owl{% else %}$(MIRRORDIR)/{{ ont.id }}.owl{% endif %} $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt
-	if [ $(IMP) = true ]{% if ont.is_large %} && [ $(IMP_LARGE) = true ]{% endif %}; then $(ROBOT) extract -i $< -T $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt --force true --copy-ontology-annotations true --individuals {% if ont.module_type is none %}{{ project.import_group.slme_individuals }} --method {{ project.import_group.module_type_slme }}{% else %}{{ ont.slme_individuals }} --method {{ ont.module_type_slme }}{% endif %} \
+	$(ROBOT) extract -i $< -T $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt --force true --copy-ontology-annotations true --individuals {% if ont.module_type is none %}{{ project.import_group.slme_individuals }} --method {{ project.import_group.module_type_slme }}{% else %}{{ ont.slme_individuals }} --method {{ ont.module_type_slme }}{% endif %} \
 	query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
-	$(ANNOTATE_CONVERT_FILE); fi
+	$(ANNOTATE_CONVERT_FILE)
 {% elif ('slme' == ont.module_type) or (ont.module_type is none and 'slme' == project.import_group.module_type) %}
 $(IMPORTDIR)/{{ ont.id }}_import.owl: {% if project.import_group.use_base_merging %}$(MIRRORDIR)/merged.owl{% else %}$(MIRRORDIR)/{{ ont.id }}.owl{% endif %} $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt
-	if [ $(IMP) = true ]; then $(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \
+	$(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \
 	extract -T $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt --copy-ontology-annotations true --force true --individuals {% if ont.module_type is none %}{{ project.import_group.slme_individuals }} --method {{ project.import_group.module_type_slme }}{% else %}{{ ont.slme_individuals }} --method {{ ont.module_type_slme }}{% endif %} \
 	query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
-	$(ANNOTATE_CONVERT_FILE); fi
+	$(ANNOTATE_CONVERT_FILE)
 {% elif ('filter' == ont.module_type) or (ont.module_type is none and 'filter' == project.import_group.module_type) %}
 $(IMPORTDIR)/{{ ont.id }}_import.owl: {% if project.import_group.use_base_merging %}$(MIRRORDIR)/merged.owl{% else %}$(MIRRORDIR)/{{ ont.id }}.owl{% endif %} $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt
-	if [ $(IMP) = true ]{% if ont.is_large %} && [ $(IMP_LARGE) = true ]{% endif %}; then $(ROBOT) {% if ont.is_large %}extract -i $< -T $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt --copy-ontology-annotations true --force true --method BOT \{% else %}query -i $< --update ../sparql/preprocess-module.ru \
+	$(ROBOT) {% if ont.is_large %}extract -i $< -T $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt --copy-ontology-annotations true --force true --method BOT \{% else %}query -i $< --update ../sparql/preprocess-module.ru \
 	extract -T $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt --copy-ontology-annotations true --force true --method BOT \{% endif %}
 	remove {% if ont.base_iris is not none %}{% for iri in ont.base_iris %}--base-iri {{ iri }} {% endfor %}{% else %}--base-iri $(OBOBASE)/{{ ont.id.upper() }} {% endif %}--axioms external --preserve-structure false --trim false \
 	remove $(patsubst %, --term %, $(ANNOTATION_PROPERTIES)) {% if ont.annotation_properties is defined %}{% for p in ont.annotation_properties %}--term {{ p }} {% endfor %}{% endif %} -T $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt --select complement \
 	query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
-	$(ANNOTATE_CONVERT_FILE); fi
+	$(ANNOTATE_CONVERT_FILE)
 {% elif ('mirror' == ont.module_type) or (ont.module_type is none and 'mirror' == project.import_group.module_type) %}
 $(IMPORTDIR)/{{ ont.id }}_import.owl: {% if project.import_group.use_base_merging %}$(MIRRORDIR)/merged.owl{% else %}$(MIRRORDIR)/{{ ont.id }}.owl{% endif %} $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt
-	if [ $(IMP) = true ]{% if ont.is_large %} && [ $(IMP_LARGE) = true ]{% endif %}; then $(ROBOT) merge -i $< {% if not ont.is_large %}query --update ../sparql/preprocess-module.ru --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru {% endif %} \
-	$(ANNOTATE_CONVERT_FILE); fi
+	$(ROBOT) merge -i $< {% if not ont.is_large %}query --update ../sparql/preprocess-module.ru --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru {% endif %} \
+	$(ANNOTATE_CONVERT_FILE)
 {% elif ('minimal' == ont.module_type) or (ont.module_type is none and 'minimal' == project.import_group.module_type) %}
 $(IMPORTDIR)/{{ ont.id }}_import.owl: {% if project.import_group.use_base_merging %}$(MIRRORDIR)/merged.owl{% else %}$(MIRRORDIR)/{{ ont.id }}.owl{% endif %} $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt
-	if [ $(IMP) = true ]{% if ont.is_large %} && [ $(IMP_LARGE) = true ]{% endif %}; then $(ROBOT) {% if ont.is_large %}extract -i $< -T $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt --copy-ontology-annotations true --force true --method BOT \{% else %}query -i $< --update ../sparql/preprocess-module.ru \
+	$(ROBOT) {% if ont.is_large %}extract -i $< -T $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt --copy-ontology-annotations true --force true --method BOT \{% else %}query -i $< --update ../sparql/preprocess-module.ru \
 	extract -T $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt --copy-ontology-annotations true --force true --method BOT \{% endif %}
 	remove {% if ont.base_iris is not none %}{% for iri in ont.base_iris %}--base-iri {{iri}} {% endfor %}{% else %}--base-iri $(OBOBASE)/{{ ont.id.upper() }} {% endif %}--axioms external --preserve-structure false --trim false \
 	query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru \
 	remove $(patsubst %, --term %, $(ANNOTATION_PROPERTIES)) {% if ont.annotation_properties is defined %}{% for p in ont.annotation_properties %}--term {{ p }} {% endfor %}{% endif %} -T $(IMPORTDIR)/{{ ont.id }}_terms_combined.txt --select complement --select "classes individuals annotation-properties" \
-	$(ANNOTATE_CONVERT_FILE); fi
+	$(ANNOTATE_CONVERT_FILE)
 {% elif ('custom' == ont.module_type) or (ont.module_type is none and 'custom' == project.import_group.module_type) %}
 $(IMPORTDIR)/{{ ont.id }}_import.owl: {% if project.import_group.use_base_merging %}$(MIRRORDIR)/merged.owl{% elif 'no_mirror' != ont.mirror_type %}$(MIRRORDIR)/{{ ont.id }}.owl{% endif %}
 	echo "ERROR: You have configured your default module type to be custom; this behavior needs to be overwritten in {{ project.id }}.Makefile!" && false
 {%- endif %}
+{% if ont.is_large -%}
+endif # IMP_LARGE=true
+{% endif -%}
 {%- endif %}
 {%- endfor %}
+endif # IMP=true

 .PHONY: refresh-imports
 refresh-imports:
@@ -657,9 +671,8 @@ $(COMPONENTSDIR)/{{ component.filename }}:{% if component.mappings is not none %
 # Mirroring upstream ontologies
 # ----------------------------------------

-IMP=true # Global parameter to bypass import generation
-MIR=true # Global parameter to bypass mirror generation
-IMP_LARGE=true # Global parameter to bypass handling of large imports
+# Set to false to bypass all code to create/refresh mirrors
+MIR=true

 ifeq ($(strip $(MIR)),true)
 {% for ont in project.import_group.products %}

From 1ac8acef44039eed43d6e4773da4d1ec2520418b Mon Sep 17 00:00:00 2001
From: Damien Goutte-Gattat
Date: Mon, 13 Jan 2025 17:52:55 +0000
Subject: [PATCH 2/8] Simplify Make conditionals.

Some pre-existing Make conditionals were using the following form:

    ifeq ($(strip $(VARIABLE)),true)

This is because some VARIABLES were declared with a comment on the same
line, as in

    VARIABLE = true # some comment

The presence of the comment causes Make to treat the whitespace on that
line as significant: VARIABLE in the example above has the value
"true " (with a trailing space), instead of "true". Using the `strip`
function in the condition made it possible to test against the intended
value of the variable, rather than the value effectively set.

But it is actually better to fix the declaration of the variable
(moving the comment to the line above), once and for all, than to strip
whitespace whenever the variable is tested. This has been done for all
affected variables, so the calls to `strip` are no longer necessary.
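For illustration only (not part of the patch): a minimal GNU Make sketch
with hypothetical variables FOO and BAR showing the gotcha and both
remedies. Running `make check` prints only the last two messages,
because the value of FOO carries a stray trailing space:

    # Illustrative only -- not taken from the ODK Makefile.
    FOO = true # comment on the same line: the value of FOO is "true " (trailing space)

    # Comment moved to its own line: the value of BAR is exactly "true".
    BAR = true

    check:
    ifeq ($(FOO),true)
    	@echo "FOO matched"            # never printed: "true " != "true"
    endif
    ifeq ($(strip $(FOO)),true)
    	@echo "stripped FOO matched"   # the old workaround
    endif
    ifeq ($(BAR),true)
    	@echo "BAR matched"            # the fix adopted by this patch
    endif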
---
 template/src/ontology/Makefile.jinja2 | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/template/src/ontology/Makefile.jinja2 b/template/src/ontology/Makefile.jinja2
index 2ce0a31f..9d1f8d45 100644
--- a/template/src/ontology/Makefile.jinja2
+++ b/template/src/ontology/Makefile.jinja2
@@ -674,7 +674,7 @@ $(COMPONENTSDIR)/{{ component.filename }}:{% if component.mappings is not none %
 # Set to false to bypass all code to create/refresh mirrors
 MIR=true

-ifeq ($(strip $(MIR)),true)
+ifeq ($(MIR),true)
 {% for ont in project.import_group.products %}

 ## ONTOLOGY: {{ ont.id }}
@@ -688,7 +688,7 @@ ifeq ($(strip $(MIR)),true)
 {%- endif -%}

 {%- if ont.is_large %}
-ifeq ($(strip $(IMP_LARGE)),true)
+ifeq ($(IMP_LARGE),true)
 {%- endif %}
 {%- if ont.mirror_from %}
 mirror-{{ ont.id }}: | $(TMPDIR)
@@ -730,7 +730,7 @@ endif
 ALL_MIRRORS = $(patsubst %, $(MIRRORDIR)/%.owl, $(IMPORTS))

 MERGE_MIRRORS = true
-ifeq ($(strip $(MERGE_MIRRORS)),true)
+ifeq ($(MERGE_MIRRORS),true)
 $(MIRRORDIR)/merged.owl: $(ALL_MIRRORS)
 	$(ROBOT) merge $(patsubst %, -i %, $^) {% if project.import_group.annotate_defined_by %}--annotate-defined-by true{% endif %} {% if project.import_group.base_merge_drop_equivalent_class_axioms %}remove --axioms equivalent --preserve-structure false {% endif %}-o $@
 .PRECIOUS: $(MIRRORDIR)/merged.owl

From 06dbf6249e850f1d543e6e2dfea169d8070c41a6 Mon Sep 17 00:00:00 2001
From: Damien Goutte-Gattat
Date: Mon, 13 Jan 2025 18:23:51 +0000
Subject: [PATCH 3/8] Use Make conditionals for components.

Apply to targets of the components pipeline the same principle as for
mirrors and imports: bracket the entire section within a single Make
conditional on the COMP variable.

Also, use a Make conditional on the MIR variable to avoid re-generating
a component that is downloaded from a remote source when MIR is false.
---
 template/src/ontology/Makefile.jinja2 | 23 ++++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/template/src/ontology/Makefile.jinja2 b/template/src/ontology/Makefile.jinja2
index 9d1f8d45..4e1f368a 100644
--- a/template/src/ontology/Makefile.jinja2
+++ b/template/src/ontology/Makefile.jinja2
@@ -608,8 +608,10 @@ no-mirror-refresh-%:
 # ----------------------------------------
 # Some ontologies contain external and internal components. A component is included in the ontology in its entirety.
-COMP=true # Global parameter to bypass component generation
+# Set to false to bypass all code to generate/refresh components
+COMP=true

+ifeq ($(COMP),true)
 .PHONY: all_components
 all_components: $(OTHER_SRC)
@@ -635,36 +637,39 @@ $(COMPONENTSDIR)/%.owl: | $(COMPONENTSDIR)

 {% for component in project.components.products %}
 {% if component.source is not none %}
+ifeq ($(MIR),true)
 .PHONY: component-download-{{ component.filename }}
 component-download-{{ component.filename }}: | $(TMPDIR)
-	if [ $(MIR) = true ] && [ $(COMP) = true ]; then $(ROBOT) merge -I {{ component.source }} \{%- if component.make_base %}
+	$(ROBOT) merge -I {{ component.source }} \{%- if component.make_base %}
 	remove {% if component.base_iris is not none %}{% for iri in component.base_iris %}--base-iri {{iri}} {% endfor %}{% else %}--base-iri $(OBOBASE)/{{ project.id.upper() }} {% endif %}--axioms external --preserve-structure false --trim false \{% endif %}
-	annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) -o $(TMPDIR)/$@.owl; fi
+	annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) -o $(TMPDIR)/$@.owl

 $(COMPONENTSDIR)/{{ component.filename }}: component-download-{{ component.filename }}
-	if [ $(COMP) = true ]; then if cmp -s $(TMPDIR)/component-download-{{ component.filename }}.owl $(TMPDIR)/component-download-{{ component.filename }}.tmp.owl ; then echo "Component identical."; \
+	if cmp -s $(TMPDIR)/component-download-{{ component.filename }}.owl $(TMPDIR)/component-download-{{ component.filename }}.tmp.owl ; then echo "Component identical."; \
 	else echo "Component is different, updating." &&\
 	cp $(TMPDIR)/component-download-{{ component.filename }}.owl $(TMPDIR)/component-download-{{ component.filename }}.tmp.owl &&\
-	$(ROBOT) annotate -i $(TMPDIR)/component-download-{{ component.filename }}.owl --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) -o $@; fi; fi
+	$(ROBOT) annotate -i $(TMPDIR)/component-download-{{ component.filename }}.owl --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) -o $@; fi
 .PRECIOUS: $(COMPONENTSDIR)/{{ component.filename }}
+endif # MIR=true
 {% elif component.use_template %}
 $(COMPONENTSDIR)/{{ component.filename }}:{% if component.templates is not none %}{% for template in component.templates %} $(TEMPLATEDIR)/{{ template }}{% endfor %}{% else %} $(TEMPLATEDIR)/{{ component.filename.split('.') | first }}.tsv{% endif %}
-	if [ $(COMP) = true ] ; then $(ROBOT) template {% if component.template_options is not none %}{{ component.template_options }}{% endif %} \
+	$(ROBOT) template {% if component.template_options is not none %}{{ component.template_options }}{% endif %} \
 	$(patsubst %, --template %, $^) \
-	$(ANNOTATE_CONVERT_FILE); fi
+	$(ANNOTATE_CONVERT_FILE)
 .PRECIOUS: $(COMPONENTSDIR)/{{ component.filename }}
 {% elif component.use_mappings %}
 $(COMPONENTSDIR)/{{ component.filename }}:{% if component.mappings is not none %}{% for mapping in component.mappings %} $(MAPPINGDIR)/{{ mapping }}{% endfor %}{% else %} $(MAPPINGDIR)/{{ component.filename.split('.') | first }}sssom.tsv{% endif %}
-	if [ $(COMP) = true ] ; then $(SSSOMPY) merge $^ --output $(TMPDIR)/{{ component.filename }}-merged.sssom.tsv &&\
+	$(SSSOMPY) merge $^ --output $(TMPDIR)/{{ component.filename }}-merged.sssom.tsv &&\
 	$(SSSOMPY) convert $(TMPDIR)/{{ component.filename }}-merged.sssom.tsv {{ component.sssom_tool_options }} --output $(TMPDIR)/{{ component.filename }}-converted.sssom.owl &&\
 	$(ROBOT) merge -i $(TMPDIR)/{{ component.filename }}-converted.sssom.owl \
-	$(ANNOTATE_CONVERT_FILE); fi
+	$(ANNOTATE_CONVERT_FILE)
 .PRECIOUS: $(COMPONENTSDIR)/{{ component.filename }}
 {% endif -%}
 {% endfor -%}
+endif # COMP=true
 {% endif -%}

 {% if project.import_group is defined -%}
 # ----------------------------------------

From b7bd7532d590d67a5a91d55a12f858e4f53cd69b Mon Sep 17 00:00:00 2001
From: Damien Goutte-Gattat
Date: Mon, 13 Jan 2025 18:26:05 +0000
Subject: [PATCH 4/8] Do not download remote SSSOM mapping set unless MIR is true.

The general rule is that when MIR is set to false, nothing should ever
be fetched from a remote source -- pipelines should only use locally
available resources. So if a mapping set is defined as having a remote
source, we bracket the rule that downloads that set in a Make
conditional on MIR.
---
 template/src/ontology/Makefile.jinja2 | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/template/src/ontology/Makefile.jinja2 b/template/src/ontology/Makefile.jinja2
index 4e1f368a..dea46339 100644
--- a/template/src/ontology/Makefile.jinja2
+++ b/template/src/ontology/Makefile.jinja2
@@ -966,8 +966,10 @@ $(MAPPINGDIR)/{{ mapping.id }}.sssom.tsv: $(TMPDIR)/{{ mapping.id }}.obographs.j
 $(MAPPINGDIR)/{{ mapping.id }}.sssom.tsv:
 	test -f $@
 {%- elif mapping.maintenance == "mirror" %}
+ifeq ($(MIR),true)
 $(MAPPINGDIR)/{{ mapping.id }}.sssom.tsv:
 	wget "{{ mapping.mirror_from }}" -O $@
+endif
 {%- endif %}
 {%- endfor %}
 {%- endif %}

From 81a7e0a1ef11a8e80ba9a2d31d149e316d3e04b6 Mon Sep 17 00:00:00 2001
From: Damien Goutte-Gattat
Date: Mon, 13 Jan 2025 18:52:25 +0000
Subject: [PATCH 5/8] Use Make conditionals for the pattern pipeline.

Apply to the DOSDP pipeline targets the same principle as for the
imports, mirrors, and components pipelines: bracket the entire section
in a Make conditional on the PAT variable.
---
 template/src/ontology/Makefile.jinja2 | 39 +++++++++++++++------------
 1 file changed, 22 insertions(+), 17 deletions(-)

diff --git a/template/src/ontology/Makefile.jinja2 b/template/src/ontology/Makefile.jinja2
index dea46339..875abb89 100644
--- a/template/src/ontology/Makefile.jinja2
+++ b/template/src/ontology/Makefile.jinja2
@@ -799,7 +799,11 @@ endif
 # DOSDP Templates/Patterns
 # ----------------------------------------

-PAT=true # Global parameter to bypass pattern generation
+# Set to false to bypass all code to generate/refresh patterns
+PAT = true
+
+ifeq ($(PAT),true)
+
 ALL_PATTERN_FILES=$(wildcard $(PATTERNDIR)/dosdp-patterns/*.yaml)
 ALL_PATTERN_NAMES=$(strip $(patsubst %.yaml,%, $(notdir $(wildcard $(PATTERNDIR)/dosdp-patterns/*.yaml))))
@@ -837,13 +841,13 @@ update_patterns: download_patterns
 # It downloads all patterns specified in external.txt
 .PHONY: download_patterns
 download_patterns:
-	if [ $(PAT) = true ]; then rm -f $(TMPDIR)/dosdp/*.yaml.1 || true; fi
-	if [ $(PAT) = true ] && [ -s $(PATTERNDIR)/dosdp-patterns/external.txt ]; then wget -i $(PATTERNDIR)/dosdp-patterns/external.txt --backups=1 -P $(TMPDIR)/dosdp; fi
-	if [ $(PAT) = true ]; then rm -f $(TMPDIR)/dosdp/*.yaml.1 || true; fi
+	rm -f $(TMPDIR)/dosdp/*.yaml.1 || true
+	if [ -s $(PATTERNDIR)/dosdp-patterns/external.txt ]; then wget -i $(PATTERNDIR)/dosdp-patterns/external.txt --backups=1 -P $(TMPDIR)/dosdp; fi
+	rm -f $(TMPDIR)/dosdp/*.yaml.1 || true

 $(PATTERNDIR)/dospd-patterns/%.yml: download_patterns
-	if [ $(PAT) = true ] ; then if cmp -s $(TMPDIR)/dosdp-$*.yml $@ ; then echo "DOSDP templates identical."; else echo "DOSDP templates different, updating." &&\
-	cp $(TMPDIR)/dosdp-$*.yml $@; fi; fi
+	if cmp -s $(TMPDIR)/dosdp-$*.yml $@ ; then echo "DOSDP templates identical."; else echo "DOSDP templates different, updating." &&\
+	cp $(TMPDIR)/dosdp-$*.yml $@; fi


 # DOSDP Template: Pipelines
@@ -858,7 +862,7 @@ DOSDP_TERM_FILES_DEFAULT = $(foreach name, $(DOSDP_PATTERN_NAMES_DEFAULT), $(PAT
 DOSDP_YAML_FILES_DEFAULT = $(foreach name, $(DOSDP_PATTERN_NAMES_DEFAULT), $(PATTERNDIR)/dosdp-patterns/$(name).yaml)

 $(DOSDP_OWL_FILES_DEFAULT): $(EDIT_PREPROCESSED) $(DOSDP_TSV_FILES_DEFAULT) $(ALL_PATTERN_FILES)
-	if [ $(PAT) = true ] && [ "${DOSDP_PATTERN_NAMES_DEFAULT}" ]; then $(DOSDPT) generate --catalog=$(CATALOG) \
+	if [ "${DOSDP_PATTERN_NAMES_DEFAULT}" ]; then $(DOSDPT) generate --catalog=$(CATALOG) \
 	--infile=$(PATTERNDIR)/data/default/ --template=$(PATTERNDIR)/dosdp-patterns --batch-patterns="$(DOSDP_PATTERN_NAMES_DEFAULT)" \
 	--ontology=$< {{ project.dosdp_tools_options }} --outfile=$(PATTERNDIR)/data/default; fi
@@ -885,7 +889,7 @@ DOSDP_TERM_FILES_{{ pipeline.id.upper() }} = $(foreach name, $(DOSDP_PATTERN_NAM
 DOSDP_YAML_FILES_{{ pipeline.id.upper() }} = $(foreach name, $(DOSDP_PATTERN_NAMES_{{ pipeline.id.upper() }}), $(PATTERNDIR)/dosdp-patterns/$(name).yaml)

 $(DOSDP_OWL_FILES_{{ pipeline.id.upper() }}): $(EDIT_PREPROCESSED) $(DOSDP_TSV_FILES_{{ pipeline.id.upper() }}) $(ALL_PATTERN_FILES)
-	if [ $(PAT) = true ] && [ "${DOSDP_PATTERN_NAMES_{{ pipeline.id.upper() }}}" ]; then $(DOSDPT) generate --catalog=$(CATALOG) \
+	if [ "${DOSDP_PATTERN_NAMES_{{ pipeline.id.upper() }}}" ]; then $(DOSDPT) generate --catalog=$(CATALOG) \
 	--infile=$(PATTERNDIR)/data/{{ pipeline.id }} --template=$(PATTERNDIR)/dosdp-patterns/ --batch-patterns="$(DOSDP_PATTERN_NAMES_{{ pipeline.id.upper() }})" \
 	--ontology=$< {{ pipeline.dosdp_tools_options }} --outfile=$(PATTERNDIR)/data/{{ pipeline.id }}; fi
@@ -907,40 +911,41 @@ dosdp-docs-{{ pipeline.id }}: $(EDIT_PREPROCESSED) $(DOSDP_TSV_FILES_{{ pipeline

 ## Generate template file seeds
 $(PATTERNDIR)/data/default/%.txt: $(PATTERNDIR)/dosdp-patterns/%.yaml $(PATTERNDIR)/data/default/%.tsv
-	if [ $(PAT) = true ]; then $(DOSDPT) terms --infile=$(word 2, $^) --template=$< --obo-prefixes=true --outfile=$@; fi
+	$(DOSDPT) terms --infile=$(word 2, $^) --template=$< --obo-prefixes=true --outfile=$@

 {% if project.pattern_pipelines_group is defined -%}
 {% for pipeline in project.pattern_pipelines_group.products %}
 $(PATTERNDIR)/data/{{ pipeline.id }}/%.txt: $(PATTERNDIR)/dosdp-patterns/%.yaml $(PATTERNDIR)/data/{{ pipeline.id }}/%.tsv
-	if [ $(PAT) = true ]; then $(DOSDPT) terms --infile=$(word 2, $^) --template=$< --obo-prefixes=true --outfile=$@; fi
+	$(DOSDPT) terms --infile=$(word 2, $^) --template=$< --obo-prefixes=true --outfile=$@
 {% endfor %}

 {% if project.pattern_pipelines_group.matches is iterable -%}{% for matches in project.pattern_pipelines_group.matches %}
 dosdp-matches-{{ matches.id }}: {{ matches.ontology }} $(ALL_PATTERN_FILES)
-	if [ $(PAT) = true ]; then $(DOSDPT) query --ontology=$< --catalog=$(CATALOG) --reasoner=elk {{ matches.dosdp_tools_options }} \
-	--batch-patterns="$(ALL_PATTERN_NAMES)" --template="$(PATTERNDIR)/dosdp-patterns" --outfile="$(PATTERNDIR)/data/{{ matches.id }}/"; fi
+	$(DOSDPT) query --ontology=$< --catalog=$(CATALOG) --reasoner=elk {{ matches.dosdp_tools_options }} \
+	--batch-patterns="$(ALL_PATTERN_NAMES)" --template="$(PATTERNDIR)/dosdp-patterns" --outfile="$(PATTERNDIR)/data/{{ matches.id }}/"
 {% endfor %}{% endif -%}
 {% endif -%}

 # Generating the seed file from all the TSVs. If Pattern generation is deactivated, we still extract a seed from definitions.owl
 $(TMPDIR)/all_pattern_terms.txt: $(DOSDP_TERM_FILES_DEFAULT) {% if project.pattern_pipelines_group is defined %} {% for pipeline in project.pattern_pipelines_group.products %} $(DOSDP_TERM_FILES_{{ pipeline.id.upper() }}){% endfor %}{% endif %} $(TMPDIR)/pattern_owl_seed.txt
-	if [ $(PAT) = true ]; then cat $^ | sort | uniq > $@; else $(ROBOT) query --use-graphs true -f csv -i $(PATTERNDIR)/definitions.owl \
-	--query ../sparql/terms.sparql $@; fi
+	cat $^ | sort | uniq > $@; else $(ROBOT) query --use-graphs true -f csv -i $(PATTERNDIR)/definitions.owl \
+	--query ../sparql/terms.sparql $@

 $(TMPDIR)/pattern_owl_seed.txt: $(PATTERNDIR)/pattern.owl
-	if [ $(PAT) = true ]; then $(ROBOT) query --use-graphs true -f csv -i $< --query ../sparql/terms.sparql $@; fi
+	$(ROBOT) query --use-graphs true -f csv -i $< --query ../sparql/terms.sparql $@

 # Pattern pipeline main targets: the generated OWL files

 # Create pattern.owl, an ontology of all DOSDP patterns
 $(PATTERNDIR)/pattern.owl: $(ALL_PATTERN_FILES)
-	if [ $(PAT) = true ]; then $(DOSDPT) prototype --obo-prefixes true --template=$(PATTERNDIR)/dosdp-patterns --outfile=$@; fi
+	$(DOSDPT) prototype --obo-prefixes true --template=$(PATTERNDIR)/dosdp-patterns --outfile=$@

 # Generating the individual pattern modules and merging them into definitions.owl
 $(PATTERNDIR)/definitions.owl: $(DOSDP_OWL_FILES_DEFAULT) {% if project.pattern_pipelines_group is defined %} {% for pipeline in project.pattern_pipelines_group.products %} $(DOSDP_OWL_FILES_{{ pipeline.id.upper() }}){% endfor %}{% endif %}
-	if [ $(PAT) = true ] && [ "${DOSDP_PATTERN_NAMES_DEFAULT}" ] {% if project.pattern_pipelines_group is defined %} {% for pipeline in project.pattern_pipelines_group.products %} || [ "${DOSDP_PATTERN_NAMES_{{ pipeline.id.upper() }}}" ]{% endfor %}{% endif %} && [ $(PAT) = true ]; then $(ROBOT) merge $(addprefix -i , $^) \
+	if [ "${DOSDP_PATTERN_NAMES_DEFAULT}" ] {% if project.pattern_pipelines_group is defined %} {% for pipeline in project.pattern_pipelines_group.products %} || [ "${DOSDP_PATTERN_NAMES_{{ pipeline.id.upper() }}}" ]{% endfor %}{% endif %} && [ $(PAT) = true ]; then $(ROBOT) merge $(addprefix -i , $^) \
 	annotate --ontology-iri $(ONTBASE)/patterns/definitions.owl --version-iri $(ONTBASE)/releases/$(TODAY)/patterns/definitions.owl \
 	--annotation owl:versionInfo $(VERSION) -o definitions.ofn && mv definitions.ofn $@; fi

+endif # PAT=true
 {% endif %}
 {%- if project.use_mappings %}
 # ----------------------------------------

From d2e38ee4055728592697b42586d23d68c9ab4018 Mon Sep 17 00:00:00 2001
From: Damien Goutte-Gattat
Date: Mon, 13 Jan 2025 18:55:23 +0000
Subject: [PATCH 6/8] Remove dummy rules when MIR or IMP_LARGE are false.

It shouldn't be necessary to have a rule that does nothing but display
a message that says that nothing is being done.
---
 template/src/ontology/Makefile.jinja2 | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/template/src/ontology/Makefile.jinja2 b/template/src/ontology/Makefile.jinja2
index 875abb89..8f261a5e 100644
--- a/template/src/ontology/Makefile.jinja2
+++ b/template/src/ontology/Makefile.jinja2
@@ -725,9 +725,6 @@ mirror-{{ ont.id }}: | $(TMPDIR)
 {%- endif %}
 {%- endif %}
 {%- if ont.is_large %}
-else
-mirror-{{ ont.id }}:
-	@echo "Not refreshing {{ ont.id }} because refreshing large imports is disabled (IMP_LARGE=$(IMP_LARGE))."
 endif
 {%- endif %}
 {% endfor -%}
@@ -746,10 +743,7 @@ $(MIRRORDIR)/%.owl: mirror-% | $(MIRRORDIR)
 	if [ -f $(TMPDIR)/mirror-$*.owl ]; then if cmp -s $(TMPDIR)/mirror-$*.owl $@ ; then echo "Mirror identical, ignoring."; else echo "Mirrors different, updating." &&\
 	cp $(TMPDIR)/mirror-$*.owl $@; fi; fi

-else # MIR=false
-$(MIRRORDIR)/%.owl:
-	@echo "Not refreshing $@ because the mirrorring pipeline is disabled (MIR=$(MIR))."
-endif
+endif # MIR=true
 {% endif %}

 {% if project.subset_group is defined %}

From a8ee15d4a4f154d31c55f486ee32b26cf1c40e10 Mon Sep 17 00:00:00 2001
From: Damien Goutte-Gattat
Date: Mon, 13 Jan 2025 20:33:20 +0000
Subject: [PATCH 7/8] Fix rule to generate the all_pattern_terms seed list.

I missed the fact that this rule is supposed to behave differently
depending on whether the pattern pipeline is enabled or not. So in this
particular instance we need two rules: one in the block where PAT is
true, and one in the alternative block.
---
 template/src/ontology/Makefile.jinja2 | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/template/src/ontology/Makefile.jinja2 b/template/src/ontology/Makefile.jinja2
index 8f261a5e..b6d78e97 100644
--- a/template/src/ontology/Makefile.jinja2
+++ b/template/src/ontology/Makefile.jinja2
@@ -919,10 +919,9 @@ dosdp-matches-{{ matches.id }}: {{ matches.ontology }} $(ALL_PATTERN_FILES)
 {% endfor %}{% endif -%}
 {% endif -%}

-# Generating the seed file from all the TSVs. If Pattern generation is deactivated, we still extract a seed from definitions.owl
+# Generating the seed file from all the TSVs.
 $(TMPDIR)/all_pattern_terms.txt: $(DOSDP_TERM_FILES_DEFAULT) {% if project.pattern_pipelines_group is defined %} {% for pipeline in project.pattern_pipelines_group.products %} $(DOSDP_TERM_FILES_{{ pipeline.id.upper() }}){% endfor %}{% endif %} $(TMPDIR)/pattern_owl_seed.txt
-	cat $^ | sort | uniq > $@; else $(ROBOT) query --use-graphs true -f csv -i $(PATTERNDIR)/definitions.owl \
-	--query ../sparql/terms.sparql $@
+	cat $^ | sort | uniq > $@

 $(TMPDIR)/pattern_owl_seed.txt: $(PATTERNDIR)/pattern.owl
 	$(ROBOT) query --use-graphs true -f csv -i $< --query ../sparql/terms.sparql $@
@@ -939,7 +938,11 @@ $(PATTERNDIR)/definitions.owl: $(DOSDP_OWL_FILES_DEFAULT) {% if project.pattern_
 	annotate --ontology-iri $(ONTBASE)/patterns/definitions.owl --version-iri $(ONTBASE)/releases/$(TODAY)/patterns/definitions.owl \
 	--annotation owl:versionInfo $(VERSION) -o definitions.ofn && mv definitions.ofn $@; fi

-endif # PAT=true
+else # PAT=false
+# Even if pattern generation is disabled, we still extract a seed from definitions.owl
+$(TMPDIR)/all_pattern_terms.txt: $(PATTERNDIR)/definitions.owl
+	$(ROBOT) query --use-graphs true -f csv -i $< --query $(SPARQLDIR)/terms.sparql $@
+endif
 {% endif %}
 {%- if project.use_mappings %}
 # ----------------------------------------

From 643802caf80e8a1782d3ac2fd2af479a0509ab9a Mon Sep 17 00:00:00 2001
From: Damien Goutte-Gattat
Date: Tue, 14 Jan 2025 10:32:03 +0000
Subject: [PATCH 8/8] Regroup all variables to control workflows.

Declare all the variables that control whether workflows are enabled
(MIR, IMP/IMP_LARGE, COMP, and PAT) in a single block near the
beginning of the Makefile. This both ensures that all such variables
are always declared before they are used, and makes it easier for users
to find them and understand what they are for.
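A usage note added for illustration (not part of the patch): because
these are ordinary Make variable assignments, a command-line override
still takes precedence over the defaults declared in this block (a
standard GNU Make behaviour). For example, assuming the usual targets
of the generated Makefile:

    # refresh import modules, but skip the "large" ones and any remote download
    make refresh-imports IMP_LARGE=false MIR=false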
---
 template/src/ontology/Makefile.jinja2 | 38 ++++++++++++++++-----------
 1 file changed, 23 insertions(+), 15 deletions(-)

diff --git a/template/src/ontology/Makefile.jinja2 b/template/src/ontology/Makefile.jinja2
index b6d78e97..46d86b22 100644
--- a/template/src/ontology/Makefile.jinja2
+++ b/template/src/ontology/Makefile.jinja2
@@ -110,6 +110,29 @@ ODK_DEBUG_FILE = debug.log
 SHELL = $(SCRIPTSDIR)/run-command.sh
 endif

+# ----------------------------------------
+# Workflow control
+# ----------------------------------------
+# Set any of the following variables to false to completely disable the
+# corresponding workflows.
+
+# Refresh of mirrors (and all remote resources more generally)
+MIR = true
+
+# Re-generation of import modules
+IMP = true
+
+# Re-generation of "large" import modules
+# Note that IMP=false takes precedence over IMP_LARGE=true, that is,
+# IMP=false disables the generation of all import modules, large or not.
+IMP_LARGE = true
+
+# Re-generation of component modules
+COMP = true
+
+# Re-generation of pattern-derived files
+PAT = true
+
 # ----------------------------------------
 # Top-level targets
 # ----------------------------------------
@@ -464,12 +487,6 @@ ANNOTATION_PROPERTIES={% for p in project.import_group.annotation_properties %}{
 # These live in the imports/ folder
 # This pattern uses ROBOT to generate an import module

-# Set to false to bypass all code to generate/refresh import modules
-IMP = true
-
-# Likewise, but for "large" modules only
-IMP_LARGE = true
-
 ifeq ($(IMP),true)
 # Should be able to drop this if robot can just take a big messy list of terms as input.
 $(IMPORTDIR)/%_terms_combined.txt: $(IMPORTSEED) $(IMPORTDIR)/%_terms.txt
@@ -608,9 +625,6 @@ no-mirror-refresh-%:
 # ----------------------------------------
 # Some ontologies contain external and internal components. A component is included in the ontology in its entirety.

-# Set to false to bypass all code to generate/refresh components
-COMP=true
-
 ifeq ($(COMP),true)
 .PHONY: all_components
 all_components: $(OTHER_SRC)
@@ -676,9 +690,6 @@ endif # COMP=true
 # Mirroring upstream ontologies
 # ----------------------------------------

-# Set to false to bypass all code to create/refresh mirrors
-MIR=true
-
 ifeq ($(MIR),true)
 {% for ont in project.import_group.products %}

@@ -793,9 +804,6 @@ endif
 # DOSDP Templates/Patterns
 # ----------------------------------------

-# Set to false to bypass all code to generate/refresh patterns
-PAT = true
-
 ifeq ($(PAT),true)

 ALL_PATTERN_FILES=$(wildcard $(PATTERNDIR)/dosdp-patterns/*.yaml)
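Editor's condensed before/after of the first rule touched by PATCH 1,
shown here only to summarize the transformation the series applies
throughout the file. With the shell conditional the rule is always
defined and its recipe always runs, leaving the shell to decide at run
time; with the Make conditional, setting IMP=false removes the
bracketed rules entirely when the Makefile is parsed:

    # Before (shell conditional): the recipe runs and the shell test decides.
    $(IMPORTDIR)/%_terms_combined.txt: $(IMPORTSEED) $(IMPORTDIR)/%_terms.txt
    	if [ $(IMP) = true ]; then cat $^ | grep -v ^# | sort | uniq > $@; fi

    # After (Make conditional): with IMP=false the whole block, rule included,
    # is simply never defined.
    ifeq ($(IMP),true)
    $(IMPORTDIR)/%_terms_combined.txt: $(IMPORTSEED) $(IMPORTDIR)/%_terms.txt
    	cat $^ | grep -v ^# | sort | uniq > $@
    endif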