From 3c7274e7e3e9a24fe7bed702e04d0efb68ac554e Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Tue, 16 Jan 2024 14:43:54 -0800
Subject: [PATCH 01/18] update sphinx referencing

---
 src/hdmf/utils.py | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

diff --git a/src/hdmf/utils.py b/src/hdmf/utils.py
index b3c8129b7..3f0ec14e9 100644
--- a/src/hdmf/utils.py
+++ b/src/hdmf/utils.py
@@ -702,8 +702,10 @@ def to_str(argtype):
             module = argtype.__module__
             name = argtype.__name__
-            if module.startswith("h5py") or module.startswith("pandas") or module.startswith("builtins"):
+            if module.startswith("builtins"):
                 return ":py:class:`~{name}`".format(name=name)
+            elif module.startswith("h5py") or module.startswith('pandas'):
+                return ":py:class:`~{module}.{name}`".format(name=name, module=module.split('.')[0])
             else:
                 return ":py:class:`~{module}.{name}`".format(name=name, module=module)
         return argtype
@@ -712,18 +714,23 @@ def __sphinx_arg(arg):
         fmt = dict()
         fmt['name'] = arg.get('name')
         fmt['doc'] = arg.get('doc')
-        if isinstance(arg['type'], tuple) or isinstance(arg['type'], list):
-            fmt['type'] = " or ".join(map(to_str, arg['type']))
-        else:
-            fmt['type'] = to_str(arg['type'])
+        fmt['type'] = type_to_str(arg['type'])
         return arg_fmt.format(**fmt)
 
+    def type_to_str(type_arg, string=" or "):
+        if isinstance(type_arg, tuple) or isinstance(type_arg, list):
+            type_str = f"{string}".join(type_to_str(t, string=', ') for t in type_arg)
+        else:
+            type_str = to_str(type_arg)
+        return type_str
+
     sig = "%s(%s)\n\n" % (func.__name__, ", ".join(map(__sig_arg, validator)))
     desc = func.__doc__.strip() if func.__doc__ is not None else ""
     sig += docstring_fmt.format(description=desc, args="\n".join(map(__sphinx_arg, validator)))
     if not (ret_fmt is None or returns is None or rtype is None):
-        sig += ret_fmt.format(returns=returns, rtype=rtype)
+        rtype_fmt = type_to_str(rtype)
+        sig += ret_fmt.format(returns=returns, rtype=rtype_fmt)
     return sig

From ee881c0159309664e47143bfbb6b5b6b061247b7 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Tue, 16 Jan 2024 15:11:32 -0800
Subject: [PATCH 02/18] update storage spec ref

---
 src/hdmf/spec/spec.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/hdmf/spec/spec.py b/src/hdmf/spec/spec.py
index 9a9d876c3..b0df0ab70 100644
--- a/src/hdmf/spec/spec.py
+++ b/src/hdmf/spec/spec.py
@@ -210,7 +210,7 @@ def is_region(self):
     {'name': 'dims', 'type': (list, tuple), 'doc': 'the dimensions of this dataset', 'default': None},
     {'name': 'required', 'type': bool,
      'doc': 'whether or not this attribute is required. ignored when "value" is specified', 'default': True},
-    {'name': 'parent', 'type': 'BaseStorageSpec', 'doc': 'the parent of this spec', 'default': None},
+    {'name': 'parent', 'type': '~hdmf.spec.spec.BaseStorageSpec', 'doc': 'the parent of this spec', 'default': None},
     {'name': 'value', 'type': None, 'doc': 'a constant value for this attribute', 'default': None},
     {'name': 'default_value', 'type': None, 'doc': 'a default value for this attribute', 'default': None}
 ]

From e3260fd47258e68298e61cd619603dc72d52ff9a Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Wed, 17 Jan 2024 11:01:13 -0800
Subject: [PATCH 03/18] add test for docstring generation

---
 tests/unit/utils_test/test_docval.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/tests/unit/utils_test/test_docval.py b/tests/unit/utils_test/test_docval.py
index d0ea934f7..50c487182 100644
--- a/tests/unit/utils_test/test_docval.py
+++ b/tests/unit/utils_test/test_docval.py
@@ -827,6 +827,17 @@ def test_enum_forbidden_values(self):
         def method(self, **kwargs):
             pass
 
+    def test_nested_return_types(self):
+        """Test that having nested tuple rtype creates valid sphinx references"""
+        @docval({'name': 'arg1', 'type': int, 'doc': 'an arg'},
+                returns='output', rtype=(list, (list, bool), (list, 'Test')))
+        def method(self, **kwargs):
+            return []
+
+        doc = ('method(arg1)\n\n\n\nArgs:\n    arg1 (:py:class:`~int`): an arg\n\nReturns:\n    '
+               ':py:class:`~list` or :py:class:`~list`, :py:class:`~bool` or :py:class:`~list`, Test: output')
+        self.assertEqual(method.__doc__, doc)
+
 
 class TestDocValidatorChain(TestCase):

From b3be2d4cb4cf0609e1f939c29e49053edfd570e6 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Wed, 17 Jan 2024 12:00:29 -0800
Subject: [PATCH 04/18] update changelog

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f9bd98450..996c5e8e2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@
 - Added `add_ref_termset`, updated helper methods for `HERD`, revised `add_ref` to support validations prior to
   populating the tables and added `add_ref_container`. @mavaylon1 [#968](https://github.com/hdmf-dev/hdmf/pull/968)
 - Use `stacklevel` in most warnings. @rly [#1027](https://github.com/hdmf-dev/hdmf/pull/1027)
+- Fixed broken links in documentation. @stephprince [#1031](https://github.com/hdmf-dev/hdmf/pull/1031)
 
 ### Minor Improvements
 - Updated `__gather_columns` to ignore the order of bases when generating columns from the super class. @mavaylon1 [#991](https://github.com/hdmf-dev/hdmf/pull/991)

From e648f9ca2491f02a03a1d92069ebf071219230b9 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Wed, 17 Jan 2024 14:10:24 -0800
Subject: [PATCH 05/18] add zarr and mpi mappings

---
 docs/source/conf.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index 58fa3f2ba..c0dd523b4 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -76,6 +76,8 @@
     "matplotlib": ("https://matplotlib.org/stable/", None),
     "h5py": ("https://docs.h5py.org/en/latest/", None),
     "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None),
+    "zarr": ("https://zarr.readthedocs.io/en/stable/", None),
+    "mpi": ("https://mpi4py.readthedocs.io/en/stable/", None)
 }
 
 # these links cannot be checked in github actions

From 7cb854d34f279ba1dd1e9257012dad3f78d6e0a2 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Wed, 17 Jan 2024 14:11:47 -0800
Subject: [PATCH 06/18] update tutorials

---
 docs/gallery/plot_external_resources.py          | 4 ++--
 docs/gallery/plot_generic_data_chunk_tutorial.py | 2 +-
 docs/gallery/plot_term_set.py                    | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/gallery/plot_external_resources.py b/docs/gallery/plot_external_resources.py
index 5bf8dd5d8..4229a9043 100644
--- a/docs/gallery/plot_external_resources.py
+++ b/docs/gallery/plot_external_resources.py
@@ -153,8 +153,8 @@ def __init__(self, **kwargs):
 # ------------------------------------------------------
 # It is important to keep in mind that when adding and :py:class:`~hdmf.common.resources.Object` to
 # the :py:class:~hdmf.common.resources.ObjectTable, the parent object identified by
-# :py:class:`~hdmf.common.resources.Object.object_id` must be the closest parent to the target object
-# (i.e., :py:class:`~hdmf.common.resources.Object.relative_path` must be the shortest possible path and
+# `Object.object_id` must be the closest parent to the target object
+# (i.e., `Object.relative_path` must be the shortest possible path and
 # as such cannot contain any objects with a ``data_type`` and associated ``object_id``).
 #
 # A common example would be with the :py:class:`~hdmf.common.table.DynamicTable` class, which holds
diff --git a/docs/gallery/plot_generic_data_chunk_tutorial.py b/docs/gallery/plot_generic_data_chunk_tutorial.py
index 96d55c8a4..9f791ef76 100644
--- a/docs/gallery/plot_generic_data_chunk_tutorial.py
+++ b/docs/gallery/plot_generic_data_chunk_tutorial.py
@@ -120,7 +120,7 @@ def _get_dtype(self):
 # HDMF acts as a block of data for writing data to dataset, and spans multiple HDF5 chunks to improve performance.
 # This is achieved by avoiding repeat
 # updates to the same `Chunk` in the HDF5 file, :py:class:`~hdmf.data_utils.DataChunk` objects for write
-# should align with `Chunks` in the HDF5 file, i.e., the :py:class:`~hdmf.data_utils.DataChunk.selection`
+# should align with `Chunks` in the HDF5 file, i.e., the `DataChunk.selection`
 # should fully cover one or more `Chunks` in the HDF5 file to avoid repeat updates to the same
 # `Chunks` in the HDF5 file. This is what the `buffer` of the :py:class`~hdmf.data_utils.GenericDataChunkIterator`
 # does, which upon each iteration returns a single
diff --git a/docs/gallery/plot_term_set.py b/docs/gallery/plot_term_set.py
index 71053bba5..c1f7c7257 100644
--- a/docs/gallery/plot_term_set.py
+++ b/docs/gallery/plot_term_set.py
@@ -107,7 +107,7 @@
 ######################################################
 # Viewing TermSet values
 # ----------------------------------------------------
-# :py:class:`~hdmf.term_set.TermSet` has methods to retrieve terms. The :py:func:`~hdmf.term_set.TermSet:view_set`
+# :py:class:`~hdmf.term_set.TermSet` has methods to retrieve terms. The :py:func:`~hdmf.term_set.TermSet.view_set`
 # method will return a dictionary of all the terms and the corresponding information for each term.
 # Users can index specific terms from the :py:class:`~hdmf.term_set.TermSet`. LinkML runtime will need to be installed.
 # You can do so by first running ``pip install linkml-runtime``.

From e61207d95b97c8846d1193ebd15f7f6063aa4a17 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Thu, 18 Jan 2024 10:52:53 -0800
Subject: [PATCH 07/18] fix italics in tutorials to code blocks

---
 docs/gallery/plot_external_resources.py          | 4 ++--
 docs/gallery/plot_generic_data_chunk_tutorial.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/gallery/plot_external_resources.py b/docs/gallery/plot_external_resources.py
index 4229a9043..36e84b357 100644
--- a/docs/gallery/plot_external_resources.py
+++ b/docs/gallery/plot_external_resources.py
@@ -153,8 +153,8 @@ def __init__(self, **kwargs):
 # ------------------------------------------------------
 # It is important to keep in mind that when adding and :py:class:`~hdmf.common.resources.Object` to
 # the :py:class:~hdmf.common.resources.ObjectTable, the parent object identified by
-# `Object.object_id` must be the closest parent to the target object
-# (i.e., `Object.relative_path` must be the shortest possible path and
+# ``Object.object_id`` must be the closest parent to the target object
+# (i.e., ``Object.relative_path`` must be the shortest possible path and
 # as such cannot contain any objects with a ``data_type`` and associated ``object_id``).
 #
 # A common example would be with the :py:class:`~hdmf.common.table.DynamicTable` class, which holds
diff --git a/docs/gallery/plot_generic_data_chunk_tutorial.py b/docs/gallery/plot_generic_data_chunk_tutorial.py
index 9f791ef76..e55ab0aa0 100644
--- a/docs/gallery/plot_generic_data_chunk_tutorial.py
+++ b/docs/gallery/plot_generic_data_chunk_tutorial.py
@@ -122,7 +122,7 @@ def _get_dtype(self):
 # updates to the same `Chunk` in the HDF5 file, :py:class:`~hdmf.data_utils.DataChunk` objects for write
 # should align with `Chunks` in the HDF5 file, i.e., the `DataChunk.selection`
 # should fully cover one or more `Chunks` in the HDF5 file to avoid repeat updates to the same
-# `Chunks` in the HDF5 file. This is what the `buffer` of the :py:class`~hdmf.data_utils.GenericDataChunkIterator`
+# ``Chunks`` in the HDF5 file. This is what the `buffer` of the :py:class`~hdmf.data_utils.GenericDataChunkIterator`
 # does, which upon each iteration returns a single
 # :py:class:`~hdmf.data_utils.DataChunk` object (by default > 1 GB) that perfectly spans many HDF5 chunks
 # (by default < 1 MB) to help reduce the number of small I/O operations

From 9a3c7d8aefd574c670ed79670cfacb5bcfc7a86d Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Thu, 18 Jan 2024 11:12:46 -0800
Subject: [PATCH 08/18] fix broken links and references

---
 .../source/overview_software_architecture.rst | 12 +++++-----
 docs/source/validation.rst                    |  2 +-
 src/hdmf/backends/hdf5/h5_utils.py            |  4 ++--
 src/hdmf/backends/hdf5/h5tools.py             |  6 ++---
 src/hdmf/build/builders.py                    |  2 +-
 src/hdmf/common/alignedtable.py               |  2 +-
 src/hdmf/common/resources.py                  |  2 +-
 src/hdmf/data_utils.py                        | 23 ++++++++-----------
 src/hdmf/spec/write.py                        |  4 ++--
 src/hdmf/testing/testcase.py                  |  4 ++--
 src/hdmf/utils.py                             | 14 +++++------
 src/hdmf/validate/validator.py                |  2 +-
 12 files changed, 36 insertions(+), 41 deletions(-)

diff --git a/docs/source/overview_software_architecture.rst b/docs/source/overview_software_architecture.rst
index 05b808ff2..973a01b2f 100644
--- a/docs/source/overview_software_architecture.rst
+++ b/docs/source/overview_software_architecture.rst
@@ -81,19 +81,19 @@ Spec
 * Interface for writing extensions or custom specification
 * There are several main specification classes:
 
-  * :py:class:`~hdmf.spec.AttributeSpec` - specification for metadata
-  * :py:class:`~hdmf.spec.GroupSpec` - specification for a collection of
+  * :py:class:`~hdmf.spec.spec.AttributeSpec` - specification for metadata
+  * :py:class:`~hdmf.spec.spec.GroupSpec` - specification for a collection of
     objects (i.e. subgroups, datasets, link)
-  * :py:class:`~hdmf.spec.DatasetSpec` - specification for dataset (like
+  * :py:class:`~hdmf.spec.spec.DatasetSpec` - specification for dataset (like
    and n-dimensional array). Specifies data type, dimensions, etc.
-  * :py:class:`~hdmf.spec.LinkSpec` - specification for link (like a POSIX
+  * :py:class:`~hdmf.spec.spec.LinkSpec` - specification for link (like a POSIX
     soft link)
   * :py:class:`~hdmf.spec.spec.RefSpec` - specification for references
     (References are like links, but stored as data)
-  * :py:class:`~hdmf.spec.DtypeSpec` - specification for compound data
+  * :py:class:`~hdmf.spec.spec.DtypeSpec` - specification for compound data
    types. Used to build complex data type specification, e.g., to define
    tables (used only in :py:class:`~hdmf.spec.spec.DatasetSpec` and
-    correspondingly :py:class:`~hdmf.spec.DatasetSpec`)
+    correspondingly :py:class:`~hdmf.spec.spec.DatasetSpec`)
 
 * **Main Modules:** :py:class:`hdmf.spec`
diff --git a/docs/source/validation.rst b/docs/source/validation.rst
index c4034b87b..cd5168cb5 100644
--- a/docs/source/validation.rst
+++ b/docs/source/validation.rst
@@ -3,7 +3,7 @@
 Validating HDMF Data
 ====================
 
-Validation of NWB files is available through :py:mod:`~pynwb`. See the `PyNWB documentation
+Validation of NWB files is available through ``pynwb``. See the `PyNWB documentation
 `_ for more information.
 
 --------
 
diff --git a/src/hdmf/backends/hdf5/h5_utils.py b/src/hdmf/backends/hdf5/h5_utils.py
index 85be494c2..be3368f2b 100644
--- a/src/hdmf/backends/hdf5/h5_utils.py
+++ b/src/hdmf/backends/hdf5/h5_utils.py
@@ -77,7 +77,7 @@ def append(self, dataset, data):
         Append a value to the queue
 
         :param dataset: The dataset where the DataChunkIterator is written to
-        :type dataset: Dataset
+        :type dataset: :py:class:`~h5py.Dataset`
         :param data: DataChunkIterator with the data to be written
         :type data: AbstractDataChunkIterator
         """
@@ -604,7 +604,7 @@ def filter_available(filter, allow_plugin_filters):
 
        :param filter: String with the name of the filter, e.g., gzip, szip etc.
                       int with the registered filter ID, e.g. 307
-       :type filter: String, int
+       :type filter: str, int
        :param allow_plugin_filters: bool indicating whether the given filter can be dynamically loaded
        :return: bool indicating whether the given filter is available
diff --git a/src/hdmf/backends/hdf5/h5tools.py b/src/hdmf/backends/hdf5/h5tools.py
index 643d9a7be..3cf2e715b 100644
--- a/src/hdmf/backends/hdf5/h5tools.py
+++ b/src/hdmf/backends/hdf5/h5tools.py
@@ -484,7 +484,7 @@ def read(self, **kwargs):
             raise UnsupportedOperation("Cannot read data from file %s in mode '%s'. There are no values."
                                        % (self.source, self.__mode))
 
-    @docval(returns='a GroupBuilder representing the data object', rtype='GroupBuilder')
+    @docval(returns='a GroupBuilder representing the data object', rtype=GroupBuilder)
     def read_builder(self):
         """
         Read data and return the GroupBuilder representing it.
@@ -978,7 +978,7 @@ def _filler():
             'default': True},
            {'name': 'export_source', 'type': str,
             'doc': 'The source of the builders when exporting', 'default': None},
-            returns='the Group that was created', rtype='Group')
+            returns='the Group that was created', rtype=Group)
     def write_group(self, **kwargs):
         parent, builder = popargs('parent', 'builder', kwargs)
         self.logger.debug("Writing GroupBuilder '%s' to parent group '%s'" % (builder.name, parent.name))
@@ -1033,7 +1033,7 @@ def __get_path(self, builder):
            {'name': 'builder', 'type': LinkBuilder, 'doc': 'the LinkBuilder to write'},
            {'name': 'export_source', 'type': str,
             'doc': 'The source of the builders when exporting', 'default': None},
-            returns='the Link that was created', rtype='Link')
+            returns='the Link that was created', rtype=(SoftLink, ExternalLink))
     def write_link(self, **kwargs):
         parent, builder, export_source = getargs('parent', 'builder', 'export_source', kwargs)
         self.logger.debug("Writing LinkBuilder '%s' to parent group '%s'" % (builder.name, parent.name))
diff --git a/src/hdmf/build/builders.py b/src/hdmf/build/builders.py
index 05a71f80c..97cf10469 100644
--- a/src/hdmf/build/builders.py
+++ b/src/hdmf/build/builders.py
@@ -415,7 +415,7 @@ def builder(self):
 
 class RegionBuilder(ReferenceBuilder):
 
-    @docval({'name': 'region', 'type': (slice, tuple, list, RegionReference),
+    @docval({'name': 'region', 'type': (slice, tuple, list, 'RegionReference'),
              'doc': 'The region, i.e. slice or indices, into the target dataset.'},
            {'name': 'builder', 'type': DatasetBuilder, 'doc': 'The dataset this region reference applies to.'})
     def __init__(self, **kwargs):
diff --git a/src/hdmf/common/alignedtable.py b/src/hdmf/common/alignedtable.py
index 2cc20bbdc..f8126690a 100644
--- a/src/hdmf/common/alignedtable.py
+++ b/src/hdmf/common/alignedtable.py
@@ -29,7 +29,7 @@ class AlignedDynamicTable(DynamicTable):
     @docval(*get_docval(DynamicTable.__init__),
             {'name': 'category_tables', 'type': list,
-             'doc': 'List of DynamicTables to be added to the container. NOTE: Only regular '
+             'doc': 'List of DynamicTables to be added to the container. NOTE - Only regular '
                     'DynamicTables are allowed. Using AlignedDynamicTable as a category for '
                     'AlignedDynamicTable is currently not supported.',
             'default': None},
            {'name': 'categories', 'type': 'array_data',
diff --git a/src/hdmf/common/resources.py b/src/hdmf/common/resources.py
index f7f08b944..29d61ea79 100644
--- a/src/hdmf/common/resources.py
+++ b/src/hdmf/common/resources.py
@@ -897,7 +897,7 @@ def get_object_entities(self, **kwargs):
     @docval({'name': 'use_categories', 'type': bool, 'default': False,
             'doc': 'Use a multi-index on the columns to indicate which category each column belongs to.'},
-            rtype=pd.DataFrame, returns='A DataFrame with all data merged into a flat, denormalized table.')
+            rtype='pandas.DataFrame', returns='A DataFrame with all data merged into a flat, denormalized table.')
     def to_dataframe(self, **kwargs):
         """
         Convert the data from the keys, resources, entities, objects, and object_keys tables
diff --git a/src/hdmf/data_utils.py b/src/hdmf/data_utils.py
index f1eee655f..2df66106d 100644
--- a/src/hdmf/data_utils.py
+++ b/src/hdmf/data_utils.py
@@ -36,7 +36,7 @@ def extend_data(data, arg):
     """Add all the elements of the iterable arg to the end of data.
 
     :param data: The array to extend
-    :type data: list, DataIO, np.ndarray, h5py.Dataset
+    :type data: list, DataIO, numpy.ndarray, h5py.Dataset
     """
     if isinstance(data, (list, DataIO)):
         data.extend(arg)
@@ -383,15 +383,12 @@ def _get_data(self, selection: Tuple[slice]) -> np.ndarray:
         The developer of a new implementation of the GenericDataChunkIterator must ensure the data is actually
         loaded into memory, and not simply mapped.
 
-        :param selection: Tuple of slices, each indicating the selection indexed with respect to maxshape for that axis
-        :type selection: tuple of slices
+        :param selection: tuple of slices, each indicating the selection indexed with respect to maxshape for that axis.
+            Each axis of tuple is a slice of the full shape from which to pull data into the buffer.
+        :type selection: Tuple[slice]
 
         :returns: Array of data specified by selection
-        :rtype: np.ndarray
-        Parameters
-        ----------
-        selection : tuple of slices
-            Each axis of tuple is a slice of the full shape from which to pull data into the buffer.
+        :rtype: numpy.ndarray
         """
         raise NotImplementedError("The data fetching method has not been built for this DataChunkIterator!")
@@ -615,7 +612,7 @@ def __next__(self):
 
         .. tip::
 
-            :py:attr:`numpy.s_` provides a convenient way to generate index tuples using standard array slicing. This
+            :py:obj:`numpy.s_` provides a convenient way to generate index tuples using standard array slicing. This
             is often useful to define the DataChunk.selection of the current chunk
 
         :returns: DataChunk object with the data and selection of the current chunk
@@ -800,17 +797,17 @@ def assertEqualShape(data1,
     Ensure that the shape of data1 and data2 match along the given dimensions
 
     :param data1: The first input array
-    :type data1: List, Tuple, np.ndarray, DataChunkIterator etc.
+    :type data1: List, Tuple, numpy.ndarray, DataChunkIterator
     :param data2: The second input array
-    :type data2: List, Tuple, np.ndarray, DataChunkIterator etc.
+    :type data2: List, Tuple, numpy.ndarray, DataChunkIterator
     :param name1: Optional string with the name of data1
     :param name2: Optional string with the name of data2
     :param axes1: The dimensions of data1 that should be matched to the dimensions of data2. Set to None to
                   compare all axes in order.
-    :type axes1: int, Tuple of ints, List of ints, or None
+    :type axes1: int, Tuple(int), List(int), None
     :param axes2: The dimensions of data2 that should be matched to the dimensions of data1. Must have the same
                   length as axes1. Set to None to compare all axes in order.
-    :type axes1: int, Tuple of ints, List of ints, or None
+    :type axes1: int, Tuple(int), List(int), None
     :param ignore_undetermined: Boolean indicating whether non-matching unlimited dimensions should be ignored, i.e.,
                                 if two dimension don't match because we can't determine the shape of either one, then
                                 should we ignore that case or treat it as no match
diff --git a/src/hdmf/spec/write.py b/src/hdmf/spec/write.py
index 799ffb88a..d397c9f26 100644
--- a/src/hdmf/spec/write.py
+++ b/src/hdmf/spec/write.py
@@ -240,9 +240,9 @@ def export_spec(ns_builder, new_data_types, output_dir):
     the given data type specs.
 
     Args:
-        ns_builder - NamespaceBuilder instance used to build the
+        ns_builder: NamespaceBuilder instance used to build the
             namespace and extension
-        new_data_types - Iterable of specs that represent new data types
+        new_data_types: Iterable of specs that represent new data types
             to be added
     """
diff --git a/src/hdmf/testing/testcase.py b/src/hdmf/testing/testcase.py
index f36ecc186..798df6fe4 100644
--- a/src/hdmf/testing/testcase.py
+++ b/src/hdmf/testing/testcase.py
@@ -239,8 +239,8 @@ def assertBuilderEqual(self,
         :type check_path: bool
         :param check_source: Check that the builder.source values are equal
         :type check_source: bool
-        :param message: Custom message to add when any asserts as part of this assert are failing
-        :type message: str or None (default=None)
+        :param message: Custom message to add when any asserts as part of this assert are failing (default=None)
+        :type message: str or None
         """
         self.assertTrue(isinstance(builder1, Builder), message)
         self.assertTrue(isinstance(builder2, Builder), message)
diff --git a/src/hdmf/utils.py b/src/hdmf/utils.py
index 3f0ec14e9..12acebbc8 100644
--- a/src/hdmf/utils.py
+++ b/src/hdmf/utils.py
@@ -72,10 +72,10 @@ def check_type(value, argtype, allow_none=False):
 
     The difference between this function and :py:func:`isinstance` is that
     it allows specifying a type as a string. Furthermore, strings allow for specifying more general
-    types, such as a simple numeric type (i.e. ``argtype``="num").
+    types, such as a simple numeric type (i.e. ``argtype="num"``).
 
     Args:
-        value (any): the value to check
+        value (Any): the value to check
         argtype (type, str): the type to check for
         allow_none (bool): whether or not to allow None as a valid value
@@ -568,7 +568,7 @@ def foo(self, **kwargs):
     :param rtype: String describing the data type of the return values
     :param is_method: True if this is decorating an instance or class method, False otherwise (Default=True)
     :param enforce_shape: Enforce the dimensions of input arrays (Default=True)
-    :param validator: :py:func:`dict` objects specifying the method parameters
+    :param validator: :py:class:`dict` objects specifying the method parameters
     :param allow_extra: Allow extra arguments (Default=False)
     :param allow_positional: Allow positional arguments (Default=True)
     :param options: additional options for documenting and validating method parameters
@@ -668,8 +668,6 @@ def func_call(*args, **kwargs):
                 return func(**pargs)
 
         _rtype = rtype
-        if isinstance(rtype, type):
-            _rtype = rtype.__name__
         docstring = __googledoc(func, _docval[__docval_args_loc], returns=returns, rtype=_rtype)
         docval_idx = {a['name']: a for a in _docval[__docval_args_loc]}  # cache a name-indexed dictionary of args
         setattr(func_call, '__doc__', docstring)
@@ -859,7 +857,7 @@ def post_init(cls, func):
         An example use of this method would be to define a classmethod that gathers
         any defined methods or attributes after the base Python type construction (i.e. after
-        :py:func:`type` has been called)
+        :py:obj:`type` has been called)
         '''
         setattr(func, cls.__postinit, True)
         return classmethod(func)
@@ -887,8 +885,8 @@ def get_data_shape(data, strict_no_data_load=False):
     to enforce that this does not happen, at the cost that we may not be able to determine
     the shape of the array.
 
-    :param data: Array for which we should determine the shape.
-    :type data: List, numpy.ndarray, DataChunkIterator, any object that support __len__ or .shape.
+    :param data: Array for which we should determine the shape. Can be any object that supports __len__ or .shape.
+    :type data: List, numpy.ndarray, DataChunkIterator
     :param strict_no_data_load: If True and data is an out-of-core iterator, None may be returned. If False (default),
        the first element of data may be loaded into memory.
    :return: Tuple of ints indicating the size of known dimensions. Dimensions for which the size is unknown
             will be set to None.
diff --git a/src/hdmf/validate/validator.py b/src/hdmf/validate/validator.py
index 35e647e4a..6bea85975 100644
--- a/src/hdmf/validate/validator.py
+++ b/src/hdmf/validate/validator.py
@@ -635,7 +635,7 @@ def unmatched_builders(self):
 
     @property
     def spec_matches(self):
-        """Returns a list of tuples of: (spec, assigned builders)"""
+        """Returns a list of tuples of (spec, assigned builders)"""
         return [(sm.spec, sm.builders) for sm in self._spec_matches]
 
     def assign_to_specs(self, builders):

From 772215f5693422587b552aec8bfbeb5adbea8dd2 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Thu, 18 Jan 2024 11:13:38 -0800
Subject: [PATCH 09/18] set nitpicky link checking for sphinx

---
 docs/source/conf.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index c0dd523b4..1e7053a19 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -77,7 +77,6 @@
     "h5py": ("https://docs.h5py.org/en/latest/", None),
     "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None),
     "zarr": ("https://zarr.readthedocs.io/en/stable/", None),
-    "mpi": ("https://mpi4py.readthedocs.io/en/stable/", None)
 }
 
 # these links cannot be checked in github actions
@@ -86,6 +85,9 @@
     "https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request",
 ]
 
+nitpicky = True
+nitpick_ignore = [('py:class', 'Intracomm')]
+
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ["_templates"]

From d9465072ec841cfa85822dede4b004ba6bb504d5 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Thu, 18 Jan 2024 11:24:04 -0800
Subject: [PATCH 10/18] update conf file

---
 docs/source/conf.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index 1e7053a19..fc8be2960 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -86,7 +86,6 @@
 ]
 
 nitpicky = True
-nitpick_ignore = [('py:class', 'Intracomm')]
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ["_templates"]

From 7e3244c372d574d5d1966ab7898db3e722d901b2 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Thu, 18 Jan 2024 11:28:01 -0800
Subject: [PATCH 11/18] fix italics to code blocks in tutorials

---
 docs/gallery/plot_generic_data_chunk_tutorial.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/gallery/plot_generic_data_chunk_tutorial.py b/docs/gallery/plot_generic_data_chunk_tutorial.py
index e55ab0aa0..09607397b 100644
--- a/docs/gallery/plot_generic_data_chunk_tutorial.py
+++ b/docs/gallery/plot_generic_data_chunk_tutorial.py
@@ -119,9 +119,9 @@ def _get_dtype(self):
 # optimal performance (typically 1 MB or less). In contrast, a :py:class:`~hdmf.data_utils.DataChunk` in
 # HDMF acts as a block of data for writing data to dataset, and spans multiple HDF5 chunks to improve performance.
 # This is achieved by avoiding repeat
-# updates to the same `Chunk` in the HDF5 file, :py:class:`~hdmf.data_utils.DataChunk` objects for write
-# should align with `Chunks` in the HDF5 file, i.e., the `DataChunk.selection`
-# should fully cover one or more `Chunks` in the HDF5 file to avoid repeat updates to the same
+# updates to the same ``Chunk`` in the HDF5 file, :py:class:`~hdmf.data_utils.DataChunk` objects for write
+# should align with ``Chunks`` in the HDF5 file, i.e., the ``DataChunk.selection``
+# should fully cover one or more ``Chunks`` in the HDF5 file to avoid repeat updates to the same
 # ``Chunks`` in the HDF5 file. This is what the `buffer` of the :py:class`~hdmf.data_utils.GenericDataChunkIterator`
 # does, which upon each iteration returns a single
 # :py:class:`~hdmf.data_utils.DataChunk` object (by default > 1 GB) that perfectly spans many HDF5 chunks
 # (by default < 1 MB) to help reduce the number of small I/O operations

From bd479aaf5f047e58b5ee26f51c4ef0b346c2be03 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Thu, 18 Jan 2024 11:43:40 -0800
Subject: [PATCH 12/18] revert RegionReference to type

---
 src/hdmf/build/builders.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/hdmf/build/builders.py b/src/hdmf/build/builders.py
index 97cf10469..05a71f80c 100644
--- a/src/hdmf/build/builders.py
+++ b/src/hdmf/build/builders.py
@@ -415,7 +415,7 @@ def builder(self):
 
 class RegionBuilder(ReferenceBuilder):
 
-    @docval({'name': 'region', 'type': (slice, tuple, list, 'RegionReference'),
+    @docval({'name': 'region', 'type': (slice, tuple, list, RegionReference),
             'doc': 'The region, i.e. slice or indices, into the target dataset.'},
            {'name': 'builder', 'type': DatasetBuilder, 'doc': 'The dataset this region reference applies to.'})
     def __init__(self, **kwargs):

From ad2063eb64fbd583e3f6c0645dbeccb8ebf90e89 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Thu, 18 Jan 2024 14:52:37 -0800
Subject: [PATCH 13/18] Update src/hdmf/spec/spec.py

Co-authored-by: Ryan Ly
---
 src/hdmf/spec/spec.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/hdmf/spec/spec.py b/src/hdmf/spec/spec.py
index b0df0ab70..9a9d876c3 100644
--- a/src/hdmf/spec/spec.py
+++ b/src/hdmf/spec/spec.py
@@ -210,7 +210,7 @@ def is_region(self):
     {'name': 'dims', 'type': (list, tuple), 'doc': 'the dimensions of this dataset', 'default': None},
     {'name': 'required', 'type': bool,
      'doc': 'whether or not this attribute is required. ignored when "value" is specified', 'default': True},
-    {'name': 'parent', 'type': '~hdmf.spec.spec.BaseStorageSpec', 'doc': 'the parent of this spec', 'default': None},
+    {'name': 'parent', 'type': 'BaseStorageSpec', 'doc': 'the parent of this spec', 'default': None},
     {'name': 'value', 'type': None, 'doc': 'a constant value for this attribute', 'default': None},
     {'name': 'default_value', 'type': None, 'doc': 'a default value for this attribute', 'default': None}
 ]

From 81263e4ea53117cc92c792d8038f3d43f3d6090e Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Thu, 18 Jan 2024 15:48:20 -0800
Subject: [PATCH 14/18] add remaining warnings to nitpick_ignore

---
 docs/source/conf.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index b07c7005e..caff737e7 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -86,6 +86,12 @@
 ]
 
 nitpicky = True
+nitpick_ignore = [('py:class', 'Intracomm'),
+                  ('py:class', 'h5py.RegionReference'),
+                  ('py:class', 'h5py._hl.dataset.Dataset'),
+                  ('py:class', 'function'),
+                  ('py:class', 'unittest.case.TestCase'),
+                  ]
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ["_templates"]

From 46dbbea43c00bd7dde501807ac338878eb3f0b60 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Thu, 18 Jan 2024 15:59:54 -0800
Subject: [PATCH 15/18] raise sphinx warnings as errors

---
 .github/workflows/check_external_links.yml | 2 +-
 docs/Makefile                              | 2 +-
 docs/make.bat                              | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/check_external_links.yml b/.github/workflows/check_external_links.yml
index e030f37ae..312669fd9 100644
--- a/.github/workflows/check_external_links.yml
+++ b/.github/workflows/check_external_links.yml
@@ -30,4 +30,4 @@ jobs:
           python -m pip install .
 
       - name: Check Sphinx external links
-        run: sphinx-build -b linkcheck ./docs/source ./test_build
+        run: sphinx-build -W -b linkcheck ./docs/source ./test_build
diff --git a/docs/Makefile b/docs/Makefile
index 5129f2240..f01af1f8b 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -149,7 +149,7 @@ changes:
 	@echo "The overview file is in $(BUILDDIR)/changes."
 
 linkcheck:
-	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	$(SPHINXBUILD) -W -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
 	@echo
 	@echo "Link check complete; look for any errors in the above output " \
 	      "or in $(BUILDDIR)/linkcheck/output.txt."
diff --git a/docs/make.bat b/docs/make.bat
index 25d3a04d4..dc48f5b3e 100644
--- a/docs/make.bat
+++ b/docs/make.bat
@@ -183,7 +183,7 @@ if "%1" == "changes" (
 )
 
 if "%1" == "linkcheck" (
-	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+	%SPHINXBUILD% -W -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
 	if errorlevel 1 exit /b 1
 	echo.
 	echo.Link check complete; look for any errors in the above output ^

From 3d85fea3ced1c8e181c6977f75d76be796a99df0 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Thu, 18 Jan 2024 16:07:36 -0800
Subject: [PATCH 16/18] rename workflow to reflect linkcheck updates

---
 .../{check_external_links.yml => check_sphinx_links.yml} | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
 rename .github/workflows/{check_external_links.yml => check_sphinx_links.yml} (92%)

diff --git a/.github/workflows/check_external_links.yml b/.github/workflows/check_sphinx_links.yml
similarity index 92%
rename from .github/workflows/check_external_links.yml
rename to .github/workflows/check_sphinx_links.yml
index 312669fd9..5fdf31f35 100644
--- a/.github/workflows/check_external_links.yml
+++ b/.github/workflows/check_sphinx_links.yml
@@ -1,4 +1,4 @@
-name: Check Sphinx external links
+name: Check Sphinx links
 on:
   pull_request:
   schedule:
@@ -29,5 +29,5 @@ jobs:
           python -m pip install -r requirements-doc.txt -r requirements-opt.txt
           python -m pip install .
 
-      - name: Check Sphinx external links
+      - name: Check Sphinx internal and external links
         run: sphinx-build -W -b linkcheck ./docs/source ./test_build

From 5bc8b2807bf51c873c66b522639fa460f9d0a0ef Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Thu, 18 Jan 2024 16:09:54 -0800
Subject: [PATCH 17/18] Update CHANGELOG.md

---
 CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e39be903e..e256efeb0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,7 +7,7 @@
 - Added `add_ref_termset`, updated helper methods for `HERD`, revised `add_ref` to support validations prior to
   populating the tables and added `add_ref_container`. @mavaylon1 [#968](https://github.com/hdmf-dev/hdmf/pull/968)
 - Use `stacklevel` in most warnings. @rly [#1027](https://github.com/hdmf-dev/hdmf/pull/1027)
-- Fixed broken links in documentation. @stephprince [#1031](https://github.com/hdmf-dev/hdmf/pull/1031)
+- Fixed broken links in documentation and added internal link checking to workflows. @stephprince [#1031](https://github.com/hdmf-dev/hdmf/pull/1031)
 
 ### Minor Improvements
 - Updated `__gather_columns` to ignore the order of bases when generating columns from the super class. @mavaylon1 [#991](https://github.com/hdmf-dev/hdmf/pull/991)

From 4811e5aab4ebc001137b3053f3756e29abedb572 Mon Sep 17 00:00:00 2001
From: Steph Prince <40640337+stephprince@users.noreply.github.com>
Date: Thu, 18 Jan 2024 16:13:18 -0800
Subject: [PATCH 18/18] Update check_sphinx_links.yml job name

---
 .github/workflows/check_sphinx_links.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/check_sphinx_links.yml b/.github/workflows/check_sphinx_links.yml
index 5fdf31f35..15fc61e30 100644
--- a/.github/workflows/check_sphinx_links.yml
+++ b/.github/workflows/check_sphinx_links.yml
@@ -6,7 +6,7 @@ on:
   workflow_dispatch:
 
 jobs:
-  check-external-links:
+  check-sphinx-links:
     runs-on: ubuntu-latest
     concurrency:
       group: ${{ github.workflow }}-${{ github.ref }}