From d479009d79374dc4a56c9f4346b1af38f5ac182c Mon Sep 17 00:00:00 2001 From: Joe Hamman Date: Thu, 10 Feb 2022 11:44:51 -0800 Subject: [PATCH] [docs] update urls throughout documentation (#6262) * update urls throughout documentation * more url updates * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update doc/internals/how-to-add-new-backend.rst * Apply suggestions from code review Co-authored-by: Anderson Banihirwe Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Anderson Banihirwe --- CONTRIBUTING.md | 2 +- README.rst | 6 ++-- asv_bench/asv.conf.json | 2 +- ci/install-upstream-wheels.sh | 2 +- design_notes/flexible_indexes_notes.md | 2 +- doc/README.rst | 2 +- doc/conf.py | 6 ++-- doc/contributing.rst | 18 +++++------ doc/ecosystem.rst | 15 ++++----- doc/gallery.rst | 2 +- doc/gallery/plot_rasterio.py | 2 +- doc/gallery/plot_rasterio_rgb.py | 2 +- doc/getting-started-guide/faq.rst | 14 ++++----- doc/getting-started-guide/installing.rst | 28 ++++++++--------- doc/getting-started-guide/quick-overview.rst | 2 +- doc/index.rst | 12 ++++---- doc/internals/how-to-add-new-backend.rst | 8 ++--- doc/internals/zarr-encoding-spec.rst | 2 +- doc/roadmap.rst | 2 +- doc/tutorials-and-videos.rst | 4 +-- doc/user-guide/computation.rst | 10 +++--- doc/user-guide/dask.rst | 8 ++--- doc/user-guide/data-structures.rst | 8 ++--- doc/user-guide/groupby.rst | 6 ++-- doc/user-guide/indexing.rst | 10 +++--- doc/user-guide/io.rst | 32 ++++++++++---------- doc/user-guide/pandas.rst | 8 ++--- doc/user-guide/plotting.rst | 18 +++++------ doc/user-guide/reshaping.rst | 2 +- doc/user-guide/time-series.rst | 10 +++--- doc/user-guide/weather-climate.rst | 2 +- doc/whats-new.rst | 20 ++++++------ setup.cfg | 6 ++-- xarray/backends/api.py | 4 +-- xarray/backends/h5netcdf_.py | 2 +- xarray/backends/plugins.py | 12 ++++---- xarray/backends/rasterio_.py | 2 +- xarray/core/computation.py | 4 +-- xarray/core/dataarray.py | 2 +- xarray/core/dataset.py | 4 +-- xarray/core/dtypes.py | 2 +- xarray/core/indexing.py | 4 +-- xarray/core/nputils.py | 2 +- xarray/tests/test_backends.py | 6 ++-- xarray/tests/test_cupy.py | 2 +- xarray/tutorial.py | 6 ++-- 46 files changed, 162 insertions(+), 163 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7a909aefd08..dd9931f907b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1 +1 @@ -Xarray's contributor guidelines [can be found in our online documentation](http://xarray.pydata.org/en/stable/contributing.html) +Xarray's contributor guidelines [can be found in our online documentation](http://docs.xarray.dev/en/stable/contributing.html) diff --git a/README.rst b/README.rst index f58b0002b62..7a4ad4e1f9f 100644 --- a/README.rst +++ b/README.rst @@ -6,7 +6,7 @@ xarray: N-D labeled arrays and datasets .. image:: https://codecov.io/gh/pydata/xarray/branch/main/graph/badge.svg :target: https://codecov.io/gh/pydata/xarray .. image:: https://readthedocs.org/projects/xray/badge/?version=latest - :target: https://xarray.pydata.org/ + :target: https://docs.xarray.dev/ .. image:: https://img.shields.io/badge/benchmarked%20by-asv-green.svg?style=flat :target: https://pandas.pydata.org/speed/xarray/ .. image:: https://img.shields.io/pypi/v/xarray.svg @@ -69,12 +69,12 @@ powerful and concise interface. 
For example: Documentation ------------- -Learn more about xarray in its official documentation at https://xarray.pydata.org/ +Learn more about xarray in its official documentation at https://docs.xarray.dev/ Contributing ------------ -You can find information about contributing to xarray at our `Contributing page `_. +You can find information about contributing to xarray at our `Contributing page `_. Get in touch ------------ diff --git a/asv_bench/asv.conf.json b/asv_bench/asv.conf.json index 7e0b11b815a..3e4137cf807 100644 --- a/asv_bench/asv.conf.json +++ b/asv_bench/asv.conf.json @@ -7,7 +7,7 @@ "project": "xarray", // The project's homepage - "project_url": "http://xarray.pydata.org/", + "project_url": "http://docs.xarray.dev/", // The URL or local path of the source code repository for the // project being benchmarked diff --git a/ci/install-upstream-wheels.sh b/ci/install-upstream-wheels.sh index 5fde7045b7d..96a39ccd20b 100755 --- a/ci/install-upstream-wheels.sh +++ b/ci/install-upstream-wheels.sh @@ -40,7 +40,7 @@ python -m pip install \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ git+https://github.com/Unidata/cftime \ - git+https://github.com/mapbox/rasterio \ + git+https://github.com/rasterio/rasterio \ git+https://github.com/pypa/packaging \ git+https://github.com/hgrecco/pint \ git+https://github.com/pydata/bottleneck \ diff --git a/design_notes/flexible_indexes_notes.md b/design_notes/flexible_indexes_notes.md index c7eb718720c..b36ce3e46ed 100644 --- a/design_notes/flexible_indexes_notes.md +++ b/design_notes/flexible_indexes_notes.md @@ -133,7 +133,7 @@ A possible, more explicit solution to reuse a `pandas.MultiIndex` in a DataArray New indexes may also be built from existing sets of coordinates or variables in a Dataset/DataArray using the `.set_index()` method. -The [current signature](http://xarray.pydata.org/en/stable/generated/xarray.DataArray.set_index.html#xarray.DataArray.set_index) of `.set_index()` is tailored to `pandas.MultiIndex` and tied to the concept of a dimension-index. It is therefore hardly reusable as-is in the context of flexible indexes proposed here. +The [current signature](http://docs.xarray.dev/en/stable/generated/xarray.DataArray.set_index.html#xarray.DataArray.set_index) of `.set_index()` is tailored to `pandas.MultiIndex` and tied to the concept of a dimension-index. It is therefore hardly reusable as-is in the context of flexible indexes proposed here. The new signature may look like one of these: diff --git a/doc/README.rst b/doc/README.rst index 0579f85d85f..c1b6c63a4c0 100644 --- a/doc/README.rst +++ b/doc/README.rst @@ -3,4 +3,4 @@ xarray ------ -You can find information about building the docs at our `Contributing page `_. +You can find information about building the docs at our `Contributing page `_. 
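For reference, the ``.set_index()`` behaviour discussed in the flexible-indexes design note above can be illustrated with a minimal sketch; the coordinate names and values here are made up purely for illustration:

.. code:: python

    import numpy as np
    import xarray as xr

    da = xr.DataArray(
        np.zeros(4),
        dims="x",
        coords={"one": ("x", ["a", "a", "b", "b"]), "two": ("x", [0, 1, 0, 1])},
    )

    # Builds a pandas.MultiIndex along dimension "x" from the two coordinates;
    # this is the dimension-index-centric signature the design note describes
    # as hard to reuse for flexible indexes.
    indexed = da.set_index(x=["one", "two"])
    indexed.sel(one="a", two=0)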
diff --git a/doc/conf.py b/doc/conf.py index 93174c6aaec..5c4c0a52d43 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -260,12 +260,12 @@ # configuration for sphinxext.opengraph -ogp_site_url = "https://xarray.pydata.org/en/latest/" -ogp_image = "https://xarray.pydata.org/en/stable/_static/dataset-diagram-logo.png" +ogp_site_url = "https://docs.xarray.dev/en/latest/" +ogp_image = "https://docs.xarray.dev/en/stable/_static/dataset-diagram-logo.png" ogp_custom_meta_tags = [ '', '', - '', + '', ] # Redirects for pages that were moved to new locations diff --git a/doc/contributing.rst b/doc/contributing.rst index f5653fcc65e..df279caa54f 100644 --- a/doc/contributing.rst +++ b/doc/contributing.rst @@ -95,14 +95,14 @@ version control to allow many people to work together on the project. Some great resources for learning Git: -* the `GitHub help pages `_. -* the `NumPy's documentation `_. -* Matthew Brett's `Pydagogue `_. +* the `GitHub help pages `_. +* the `NumPy's documentation `_. +* Matthew Brett's `Pydagogue `_. Getting started with Git ------------------------ -`GitHub has instructions `__ for installing git, +`GitHub has instructions `__ for installing git, setting up your SSH key, and configuring git. All these steps need to be completed before you can work seamlessly between your local repository and GitHub. @@ -455,7 +455,7 @@ it is worth getting in the habit of writing tests ahead of time so that this is Like many packages, *xarray* uses `pytest `_ and the convenient extensions in `numpy.testing -`_. +`_. Writing tests ~~~~~~~~~~~~~ @@ -855,15 +855,15 @@ GitHub. To delete it there do:: PR checklist ------------ -- **Properly comment and document your code.** See `"Documenting your code" `_. -- **Test that the documentation builds correctly** by typing ``make html`` in the ``doc`` directory. This is not strictly necessary, but this may be easier than waiting for CI to catch a mistake. See `"Contributing to the documentation" `_. +- **Properly comment and document your code.** See `"Documenting your code" `_. +- **Test that the documentation builds correctly** by typing ``make html`` in the ``doc`` directory. This is not strictly necessary, but this may be easier than waiting for CI to catch a mistake. See `"Contributing to the documentation" `_. - **Test your code**. - - Write new tests if needed. See `"Test-driven development/code writing" `_. + - Write new tests if needed. See `"Test-driven development/code writing" `_. - Test the code using `Pytest `_. Running all tests (type ``pytest`` in the root directory) takes a while, so feel free to only run the tests you think are needed based on your PR (example: ``pytest xarray/tests/test_dataarray.py``). CI will catch any failing tests. - By default, the upstream dev CI is disabled on pull request and push events. You can override this behavior per commit by adding a [test-upstream] tag to the first line of the commit message. For documentation-only commits, you can skip the CI per commit by adding a "[skip-ci]" tag to the first line of the commit message. -- **Properly format your code** and verify that it passes the formatting guidelines set by `Black `_ and `Flake8 `_. See `"Code formatting" `_. You can use `pre-commit `_ to run these automatically on each commit. +- **Properly format your code** and verify that it passes the formatting guidelines set by `Black `_ and `Flake8 `_. See `"Code formatting" `_. You can use `pre-commit `_ to run these automatically on each commit. - Run ``pre-commit run --all-files`` in the root directory. 
This may modify some files. Confirm and commit any formatting changes. diff --git a/doc/ecosystem.rst b/doc/ecosystem.rst index a9cbf39b644..469f83d37c1 100644 --- a/doc/ecosystem.rst +++ b/doc/ecosystem.rst @@ -20,12 +20,12 @@ Geosciences - `infinite-diff `_: xarray-based finite-differencing, focused on gridded climate/meteorology data - `marc_analysis `_: Analysis package for CESM/MARC experiments and output. - `MetPy `_: A collection of tools in Python for reading, visualizing, and performing calculations with weather data. -- `MPAS-Analysis `_: Analysis for simulations produced with Model for Prediction Across Scales (MPAS) components and the Accelerated Climate Model for Energy (ACME). -- `OGGM `_: Open Global Glacier Model +- `MPAS-Analysis `_: Analysis for simulations produced with Model for Prediction Across Scales (MPAS) components and the Accelerated Climate Model for Energy (ACME). +- `OGGM `_: Open Global Glacier Model - `Oocgcm `_: Analysis of large gridded geophysical datasets - `Open Data Cube `_: Analysis toolkit of continental scale Earth Observation data from satellites. - `Pangaea: `_: xarray extension for gridded land surface & weather model output). -- `Pangeo `_: A community effort for big data geoscience in the cloud. +- `Pangeo `_: A community effort for big data geoscience in the cloud. - `PyGDX `_: Python 3 package for accessing data stored in GAMS Data eXchange (GDX) files. Also uses a custom subclass. @@ -41,13 +41,13 @@ Geosciences - `wradlib `_: An Open Source Library for Weather Radar Data Processing. - `wrf-python `_: A collection of diagnostic and interpolation routines for use with output of the Weather Research and Forecasting (WRF-ARW) Model. - `xarray-simlab `_: xarray extension for computer model simulations. -- `xarray-spatial `_: Numba-accelerated raster-based spatial processing tools (NDVI, curvature, zonal-statistics, proximity, hillshading, viewshed, etc.) -- `xarray-topo `_: xarray extension for topographic analysis and modelling. +- `xarray-spatial `_: Numba-accelerated raster-based spatial processing tools (NDVI, curvature, zonal-statistics, proximity, hillshading, viewshed, etc.) +- `xarray-topo `_: xarray extension for topographic analysis and modelling. - `xbpch `_: xarray interface for bpch files. - `xclim `_: A library for calculating climate science indices with unit handling built from xarray and dask. - `xESMF `_: Universal regridder for geospatial data. - `xgcm `_: Extends the xarray data model to understand finite volume grid cells (common in General Circulation Models) and provides interpolation and difference operations for such grids. -- `xmitgcm `_: a python package for reading `MITgcm `_ binary MDS files into xarray data structures. +- `xmitgcm `_: a python package for reading `MITgcm `_ binary MDS files into xarray data structures. - `xnemogcm `_: a package to read `NEMO `_ output files and add attributes to interface with xgcm. Machine Learning @@ -57,6 +57,7 @@ Machine Learning - `Elm `_: Parallel machine learning on xarray data structures - `sklearn-xarray (1) `_: Combines scikit-learn and xarray (1). - `sklearn-xarray (2) `_: Combines scikit-learn and xarray (2). +- `xbatcher `_: Batch Generation from Xarray Datasets. Other domains ~~~~~~~~~~~~~ @@ -90,7 +91,7 @@ Visualization Non-Python projects ~~~~~~~~~~~~~~~~~~~ -- `xframe `_: C++ data structures inspired by xarray. +- `xframe `_: C++ data structures inspired by xarray. - `AxisArrays `_ and `NamedArrays `_: similar data structures for Julia. 
diff --git a/doc/gallery.rst b/doc/gallery.rst index 9e5284cc2ee..36eb39d1a53 100644 --- a/doc/gallery.rst +++ b/doc/gallery.rst @@ -116,7 +116,7 @@ External Examples --- :img-top: https://avatars.githubusercontent.com/u/60833341?s=200&v=4 ++++ - .. link-button:: http://gallery.pangeo.io/ + .. link-button:: https://gallery.pangeo.io/ :type: url :text: Xarray and dask on the cloud with Pangeo :classes: btn-outline-dark btn-block stretched-link diff --git a/doc/gallery/plot_rasterio.py b/doc/gallery/plot_rasterio.py index 8294e01975f..853923a38bd 100644 --- a/doc/gallery/plot_rasterio.py +++ b/doc/gallery/plot_rasterio.py @@ -23,7 +23,7 @@ import xarray as xr # Read the data -url = "https://github.com/mapbox/rasterio/raw/master/tests/data/RGB.byte.tif" +url = "https://github.com/rasterio/rasterio/raw/master/tests/data/RGB.byte.tif" da = xr.open_rasterio(url) # Compute the lon/lat coordinates with pyproj diff --git a/doc/gallery/plot_rasterio_rgb.py b/doc/gallery/plot_rasterio_rgb.py index 758d4cd3c37..912224ac132 100644 --- a/doc/gallery/plot_rasterio_rgb.py +++ b/doc/gallery/plot_rasterio_rgb.py @@ -18,7 +18,7 @@ import xarray as xr # Read the data -url = "https://github.com/mapbox/rasterio/raw/master/tests/data/RGB.byte.tif" +url = "https://github.com/rasterio/rasterio/raw/master/tests/data/RGB.byte.tif" da = xr.open_rasterio(url) # The data is in UTM projection. We have to set it manually until diff --git a/doc/getting-started-guide/faq.rst b/doc/getting-started-guide/faq.rst index d6e1c812fb2..0eeb09c432c 100644 --- a/doc/getting-started-guide/faq.rst +++ b/doc/getting-started-guide/faq.rst @@ -136,7 +136,7 @@ With xarray, we draw a firm line between labels that the library understands example, we do not automatically interpret and enforce units or `CF conventions`_. (An exception is serialization to and from netCDF files.) -.. _CF conventions: http://cfconventions.org/latest.html +.. _CF conventions: https://cfconventions.org/latest.html An implication of this choice is that we do not propagate ``attrs`` through most operations unless explicitly flagged (some methods have a ``keep_attrs`` @@ -155,7 +155,7 @@ xarray, and have contributed a number of improvements and fixes upstream. Xarray does not yet support all of netCDF4-python's features, such as modifying files on-disk. -__ https://github.com/Unidata/netcdf4-python +__ https://unidata.github.io/netcdf4-python/ Iris_ (supported by the UK Met office) provides similar tools for in- memory manipulation of labeled arrays, aimed specifically at weather and @@ -166,13 +166,13 @@ different approaches to handling metadata: Iris strictly interprets integration with Cartopy_. .. _Iris: https://scitools-iris.readthedocs.io/en/stable/ -.. _Cartopy: http://scitools.org.uk/cartopy/docs/latest/ +.. _Cartopy: https://scitools.org.uk/cartopy/docs/latest/ `UV-CDAT`__ is another Python library that implements in-memory netCDF-like variables and `tools for working with climate data`__. -__ http://uvcdat.llnl.gov/ -__ http://drclimate.wordpress.com/2014/01/02/a-beginners-guide-to-scripting-with-uv-cdat/ +__ https://uvcdat.llnl.gov/ +__ https://drclimate.wordpress.com/2014/01/02/a-beginners-guide-to-scripting-with-uv-cdat/ We think the design decisions we have made for xarray (namely, basing it on pandas) make it a faster and more flexible data analysis tool. That said, Iris @@ -197,7 +197,7 @@ would certainly appreciate it. We recommend two citations. - Hoyer, S. & Hamman, J., (2017). xarray: N-D labeled Arrays and Datasets in Python. 
Journal of Open Research Software. 5(1), p.10. - DOI: http://doi.org/10.5334/jors.148 + DOI: https://doi.org/10.5334/jors.148 Here’s an example of a BibTeX entry:: @@ -210,7 +210,7 @@ would certainly appreciate it. We recommend two citations. year = {2017}, publisher = {Ubiquity Press}, doi = {10.5334/jors.148}, - url = {http://doi.org/10.5334/jors.148} + url = {https://doi.org/10.5334/jors.148} } 2. You may also want to cite a specific version of the xarray package. We diff --git a/doc/getting-started-guide/installing.rst b/doc/getting-started-guide/installing.rst index c14e7d36579..6177ba0aaac 100644 --- a/doc/getting-started-guide/installing.rst +++ b/doc/getting-started-guide/installing.rst @@ -27,21 +27,21 @@ For netCDF and IO - `netCDF4 `__: recommended if you want to use xarray for reading or writing netCDF files -- `scipy `__: used as a fallback for reading/writing netCDF3 -- `pydap `__: used as a fallback for accessing OPeNDAP -- `h5netcdf `__: an alternative library for +- `scipy `__: used as a fallback for reading/writing netCDF3 +- `pydap `__: used as a fallback for accessing OPeNDAP +- `h5netcdf `__: an alternative library for reading and writing netCDF4 files that does not use the netCDF-C libraries - `PyNIO `__: for reading GRIB and other geoscience specific file formats. Note that PyNIO is not available for Windows and that the PyNIO backend may be moved outside of xarray in the future. -- `zarr `__: for chunked, compressed, N-dimensional arrays. +- `zarr `__: for chunked, compressed, N-dimensional arrays. - `cftime `__: recommended if you want to encode/decode datetimes for non-standard calendars or dates before year 1678 or after year 2262. - `PseudoNetCDF `__: recommended for accessing CAMx, GEOS-Chem (bpch), NOAA ARL files, ICARTT files (ffi1001) and many other. -- `rasterio `__: for reading GeoTiffs and +- `rasterio `__: for reading GeoTiffs and other gridded raster datasets. - `iris `__: for conversion to and from iris' Cube objects @@ -51,26 +51,26 @@ For netCDF and IO For accelerating xarray ~~~~~~~~~~~~~~~~~~~~~~~ -- `scipy `__: necessary to enable the interpolation features for +- `scipy `__: necessary to enable the interpolation features for xarray objects - `bottleneck `__: speeds up NaN-skipping and rolling window aggregations by a large factor -- `numbagg `_: for exponential rolling +- `numbagg `_: for exponential rolling window operations For parallel computing ~~~~~~~~~~~~~~~~~~~~~~ -- `dask.array `__: required for :ref:`dask`. +- `dask.array `__: required for :ref:`dask`. For plotting ~~~~~~~~~~~~ -- `matplotlib `__: required for :ref:`plotting` -- `cartopy `__: recommended for :ref:`plot-maps` -- `seaborn `__: for better +- `matplotlib `__: required for :ref:`plotting` +- `cartopy `__: recommended for :ref:`plot-maps` +- `seaborn `__: for better color palettes -- `nc-time-axis `__: for plotting +- `nc-time-axis `__: for plotting cftime.datetime objects Alternative data containers @@ -115,11 +115,11 @@ with its recommended dependencies using the conda command line tool:: $ conda install -c conda-forge xarray dask netCDF4 bottleneck -.. _conda: http://conda.io/ +.. _conda: https://docs.conda.io If you require other :ref:`optional-dependencies` add them to the line above. -We recommend using the community maintained `conda-forge `__ channel, +We recommend using the community maintained `conda-forge `__ channel, as some of the dependencies are difficult to build. New releases may also appear in conda-forge before being updated in the default channel. 
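A quick way to confirm which of the optional dependencies listed above were actually picked up after installation is xarray's built-in ``show_versions`` helper; this is a small sanity check, not part of the install steps themselves:

.. code:: python

    import xarray as xr

    # Prints the versions of xarray and of its required and optional
    # dependencies (netCDF4, scipy, zarr, dask, matplotlib, ...),
    # reporting None for any library that is not installed.
    xr.show_versions()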
diff --git a/doc/getting-started-guide/quick-overview.rst b/doc/getting-started-guide/quick-overview.rst index 5bb5bb88ad3..cd4b66d2f6f 100644 --- a/doc/getting-started-guide/quick-overview.rst +++ b/doc/getting-started-guide/quick-overview.rst @@ -69,7 +69,7 @@ Unlike positional indexing, label-based indexing frees us from having to know ho Attributes ---------- -While you're setting up your DataArray, it's often a good idea to set metadata attributes. A useful choice is to set ``data.attrs['long_name']`` and ``data.attrs['units']`` since xarray will use these, if present, to automatically label your plots. These special names were chosen following the `NetCDF Climate and Forecast (CF) Metadata Conventions `_. ``attrs`` is just a Python dictionary, so you can assign anything you wish. +While you're setting up your DataArray, it's often a good idea to set metadata attributes. A useful choice is to set ``data.attrs['long_name']`` and ``data.attrs['units']`` since xarray will use these, if present, to automatically label your plots. These special names were chosen following the `NetCDF Climate and Forecast (CF) Metadata Conventions `_. ``attrs`` is just a Python dictionary, so you can assign anything you wish. .. ipython:: python diff --git a/doc/index.rst b/doc/index.rst index cffa450b6e8..c549c33aa62 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -17,10 +17,10 @@ It is particularly tailored to working with netCDF_ files, which were the source of xarray's data model, and integrates tightly with dask_ for parallel computing. -.. _NumPy: http://www.numpy.org -.. _pandas: http://pandas.pydata.org -.. _dask: http://dask.org -.. _netCDF: http://www.unidata.ucar.edu/software/netcdf +.. _NumPy: https://www.numpy.org +.. _pandas: https://pandas.pydata.org +.. _dask: https://dask.org +.. _netCDF: https://www.unidata.ucar.edu/software/netcdf .. toctree:: @@ -98,7 +98,7 @@ Hoyer, Alex Kleeman and Eugene Brevdo and was released as open source in May 2014. The project was renamed from "xray" in January 2016. Xarray became a fiscally sponsored project of NumFOCUS_ in August 2018. -__ http://climate.com/ +__ https://climate.com/ .. _NumFOCUS: https://numfocus.org License @@ -106,4 +106,4 @@ License Xarray is available under the open source `Apache License`__. -__ http://www.apache.org/licenses/LICENSE-2.0.html +__ https://www.apache.org/licenses/LICENSE-2.0.html diff --git a/doc/internals/how-to-add-new-backend.rst b/doc/internals/how-to-add-new-backend.rst index 22216997273..ceb59c8a3bd 100644 --- a/doc/internals/how-to-add-new-backend.rst +++ b/doc/internals/how-to-add-new-backend.rst @@ -311,9 +311,7 @@ The BackendArray subclass shall implement the following method and attributes: - the ``shape`` attribute - the ``dtype`` attribute. - -Xarray supports different type of -`indexing `__, that can be +Xarray supports different type of :doc:`/user-guide/indexing`, that can be grouped in three types of indexes :py:class:`~xarray.core.indexing.BasicIndexer`, :py:class:`~xarray.core.indexing.OuterIndexer` and @@ -372,7 +370,7 @@ input the ``key``, the array ``shape`` and the following parameters: For more details see :py:class:`~xarray.core.indexing.IndexingSupport` and :ref:`RST indexing`. -In order to support `Dask `__ distributed and +In order to support `Dask Distributed `__ and :py:mod:`multiprocessing`, ``BackendArray`` subclass should be serializable either with :ref:`io.pickle` or `cloudpickle `__. @@ -436,7 +434,7 @@ currently available in :py:mod:`~xarray.backends` module. 
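To make the pieces above concrete, here is a minimal sketch of a ``BackendArray`` subclass that declares only ``BASIC`` indexing support; the raw array handle is a stand-in for whatever object a real backend reads from:

.. code:: python

    from xarray.backends import BackendArray
    from xarray.core import indexing


    class MyBackendArray(BackendArray):
        def __init__(self, shape, dtype, raw_array):
            self.shape = shape
            self.dtype = dtype
            self._raw_array = raw_array  # hypothetical file or buffer handle

        def __getitem__(self, key: indexing.ExplicitIndexer):
            # explicit_indexing_adapter decomposes whatever indexer xarray
            # passes in into the BASIC subset supported here.
            return indexing.explicit_indexing_adapter(
                key,
                self.shape,
                indexing.IndexingSupport.BASIC,
                self._raw_indexing_method,
            )

        def _raw_indexing_method(self, key: tuple):
            # key is now a tuple containing only integers and slices
            return self._raw_array[key]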
Backend preferred chunks ^^^^^^^^^^^^^^^^^^^^^^^^ -The backend is not directly involved in `Dask `__ +The backend is not directly involved in `Dask `__ chunking, since it is internally managed by Xarray. However, the backend can define the preferred chunk size inside the variable’s encoding ``var.encoding["preferred_chunks"]``. The ``preferred_chunks`` may be useful diff --git a/doc/internals/zarr-encoding-spec.rst b/doc/internals/zarr-encoding-spec.rst index 082d7984f59..f809ea337d5 100644 --- a/doc/internals/zarr-encoding-spec.rst +++ b/doc/internals/zarr-encoding-spec.rst @@ -5,7 +5,7 @@ Zarr Encoding Specification ============================ -In implementing support for the `Zarr `_ storage +In implementing support for the `Zarr `_ storage format, Xarray developers made some *ad hoc* choices about how to store NetCDF data in Zarr. Future versions of the Zarr spec will likely include a more formal convention diff --git a/doc/roadmap.rst b/doc/roadmap.rst index b6ccb8d73db..c59d56fdd6d 100644 --- a/doc/roadmap.rst +++ b/doc/roadmap.rst @@ -20,7 +20,7 @@ Why has xarray been successful? In our opinion: - The dominant use-case for xarray is for analysis of gridded dataset in the geosciences, e.g., as part of the - `Pangeo `__ project. + `Pangeo `__ project. - Xarray is also used more broadly in the physical sciences, where we've found the needs for analyzing multidimensional datasets are remarkably consistent (e.g., see diff --git a/doc/tutorials-and-videos.rst b/doc/tutorials-and-videos.rst index 0a266c4f4a7..6a9602bcfa6 100644 --- a/doc/tutorials-and-videos.rst +++ b/doc/tutorials-and-videos.rst @@ -62,8 +62,8 @@ Books, Chapters and Articles .. _Xarray's Tutorials: https://xarray-contrib.github.io/xarray-tutorial/ -.. _Journal of Open Research Software paper: http://doi.org/10.5334/jors.148 +.. _Journal of Open Research Software paper: https://doi.org/10.5334/jors.148 .. _UW eScience Institute's Geohackweek : https://geohackweek.github.io/nDarrays/ .. _tutorial: https://github.com/Unidata/unidata-users-workshop/blob/master/notebooks/xray-tutorial.ipynb .. _with answers: https://github.com/Unidata/unidata-users-workshop/blob/master/notebooks/xray-tutorial-with-answers.ipynb -.. _Nicolas Fauchereau's 2015 tutorial: http://nbviewer.iPython.org/github/nicolasfauchereau/metocean/blob/master/notebooks/xray.ipynb +.. _Nicolas Fauchereau's 2015 tutorial: https://nbviewer.iPython.org/github/nicolasfauchereau/metocean/blob/master/notebooks/xray.ipynb diff --git a/doc/user-guide/computation.rst b/doc/user-guide/computation.rst index cb6eadc8e63..d830076e37b 100644 --- a/doc/user-guide/computation.rst +++ b/doc/user-guide/computation.rst @@ -38,7 +38,7 @@ numpy) over all array values: You can also use any of numpy's or scipy's many `ufunc`__ functions directly on a DataArray: -__ http://docs.scipy.org/doc/numpy/reference/ufuncs.html +__ https://numpy.org/doc/stable/reference/ufuncs.html .. ipython:: python @@ -200,7 +200,7 @@ From version 0.17, xarray supports multidimensional rolling, Note that rolling window aggregations are faster and use less memory when bottleneck_ is installed. This only applies to numpy-backed xarray objects with 1d-rolling. -.. _bottleneck: https://github.com/pydata/bottleneck/ +.. _bottleneck: https://github.com/pydata/bottleneck We can also manually iterate through ``Rolling`` objects: @@ -216,7 +216,7 @@ While ``rolling`` provides a simple moving average, ``DataArray`` also supports an exponential moving average with :py:meth:`~xarray.DataArray.rolling_exp`. 
This is similar to pandas' ``ewm`` method. numbagg_ is required. -.. _numbagg: https://github.com/shoyer/numbagg +.. _numbagg: https://github.com/numbagg/numbagg .. code:: python @@ -744,7 +744,7 @@ However, adding support for labels on both :py:class:`~xarray.Dataset` and To make this easier, xarray supplies the :py:func:`~xarray.apply_ufunc` helper function, designed for wrapping functions that support broadcasting and vectorization on unlabeled arrays in the style of a NumPy -`universal function `_ ("ufunc" for short). +`universal function `_ ("ufunc" for short). ``apply_ufunc`` takes care of everything needed for an idiomatic xarray wrapper, including alignment, broadcasting, looping over ``Dataset`` variables (if needed), and merging of coordinates. In fact, many internal xarray @@ -761,7 +761,7 @@ any additional arguments: For using more complex operations that consider some array values collectively, it's important to understand the idea of "core dimensions" from NumPy's -`generalized ufuncs `_. Core dimensions are defined as dimensions +`generalized ufuncs `_. Core dimensions are defined as dimensions that should *not* be broadcast over. Usually, they correspond to the fundamental dimensions over which an operation is defined, e.g., the summed axis in ``np.sum``. A good clue that core dimensions are needed is the presence of an diff --git a/doc/user-guide/dask.rst b/doc/user-guide/dask.rst index 4998cc68828..4d8715d9c51 100644 --- a/doc/user-guide/dask.rst +++ b/doc/user-guide/dask.rst @@ -5,7 +5,7 @@ Parallel computing with Dask ============================ -Xarray integrates with `Dask `__ to support parallel +Xarray integrates with `Dask `__ to support parallel computations and streaming computation on datasets that don't fit into memory. Currently, Dask is an entirely optional feature for xarray. However, the benefits of using Dask are sufficiently strong that Dask may become a required @@ -16,7 +16,7 @@ For a full example of how to use xarray's Dask integration, read the may be found at the `Pangeo project's gallery `_ and at the `Dask examples website `_. -.. _blog post introducing xarray and Dask: http://stephanhoyer.com/2015/06/11/xray-dask-out-of-core-labeled-arrays/ +.. _blog post introducing xarray and Dask: https://stephanhoyer.com/2015/06/11/xray-dask-out-of-core-labeled-arrays/ What is a Dask array? --------------------- @@ -39,7 +39,7 @@ The actual computation is controlled by a multi-processing or thread pool, which allows Dask to take full advantage of multiple processors available on most modern computers. -For more details on Dask, read `its documentation `__. +For more details on Dask, read `its documentation `__. Note that xarray only makes use of ``dask.array`` and ``dask.delayed``. .. _dask.io: @@ -225,7 +225,7 @@ disk. .. note:: For more on the differences between :py:meth:`~xarray.Dataset.persist` and - :py:meth:`~xarray.Dataset.compute` see this `Stack Overflow answer `_ and the `Dask documentation `_. + :py:meth:`~xarray.Dataset.compute` see this `Stack Overflow answer `_ and the `Dask documentation `_. For performance you may wish to consider chunk sizes. The correct choice of chunk size depends both on your data and on the operations you want to perform. 
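The lazy-compute workflow sketched above looks roughly like this; the file name, variable name, and chunk size are placeholders chosen for illustration:

.. code:: python

    import xarray as xr

    # Open lazily with one-year chunks along "time".
    ds = xr.open_dataset("air_temperature.nc", chunks={"time": 365})

    climatology = ds["air"].groupby("time.month").mean("time")  # still lazy
    climatology = climatology.persist()  # start computing, keep result in memory
    result = climatology.compute()  # or materialize the values eagerly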
diff --git a/doc/user-guide/data-structures.rst b/doc/user-guide/data-structures.rst index 1322c51248d..e0fd4bd0d25 100644 --- a/doc/user-guide/data-structures.rst +++ b/doc/user-guide/data-structures.rst @@ -227,7 +227,7 @@ container of labeled arrays (:py:class:`~xarray.DataArray` objects) with aligned dimensions. It is designed as an in-memory representation of the data model from the `netCDF`__ file format. -__ http://www.unidata.ucar.edu/software/netcdf/ +__ https://www.unidata.ucar.edu/software/netcdf/ In addition to the dict-like interface of the dataset itself, which can be used to access any variable in a dataset, datasets have four key properties: @@ -247,7 +247,7 @@ distinction for indexing and computations. Coordinates indicate constant/fixed/independent quantities, unlike the varying/measured/dependent quantities that belong in data. -.. _CF conventions: http://cfconventions.org/ +.. _CF conventions: https://cfconventions.org/ Here is an example of how we might structure a dataset for a weather forecast: @@ -520,7 +520,7 @@ in xarray: "non-dimension coordinates" are called "auxiliary coordinate variables" (see :issue:`1295` for more details). -.. _CF terminology: http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#terminology +.. _CF terminology: https://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#terminology Modifying coordinates @@ -628,4 +628,4 @@ it is recommended that you explicitly set the names of the levels. at which the forecast was made, rather than ``time`` which is the valid time for which the forecast applies. -__ http://en.wikipedia.org/wiki/Map_projection +__ https://en.wikipedia.org/wiki/Map_projection diff --git a/doc/user-guide/groupby.rst b/doc/user-guide/groupby.rst index 4c4f8d473ce..98f88a3d4ec 100644 --- a/doc/user-guide/groupby.rst +++ b/doc/user-guide/groupby.rst @@ -6,8 +6,8 @@ GroupBy: split-apply-combine Xarray supports `"group by"`__ operations with the same API as pandas to implement the `split-apply-combine`__ strategy: -__ http://pandas.pydata.org/pandas-docs/stable/groupby.html -__ http://www.jstatsoft.org/v40/i01/paper +__ https://pandas.pydata.org/pandas-docs/stable/groupby.html +__ https://www.jstatsoft.org/v40/i01/paper - Split your data into multiple independent groups. - Apply some function to each group. @@ -201,7 +201,7 @@ which is different from the logical grid dimensions (e.g. nx, ny). Such variables are valid under the `CF conventions`__. Xarray supports groupby operations over multidimensional coordinate variables: -__ http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#_two_dimensional_latitude_longitude_coordinate_variables +__ https://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#_two_dimensional_latitude_longitude_coordinate_variables .. ipython:: python diff --git a/doc/user-guide/indexing.rst b/doc/user-guide/indexing.rst index 89f00466fa4..29b48bf7c47 100644 --- a/doc/user-guide/indexing.rst +++ b/doc/user-guide/indexing.rst @@ -97,7 +97,7 @@ including indexing with individual, slices and arrays of labels, as well as indexing with boolean arrays. Like pandas, label based indexing in xarray is *inclusive* of both the start and stop bounds. -__ http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-label +__ https://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-label Setting values with label based indexing is also supported: @@ -145,7 +145,7 @@ Python :py:class:`slice` objects or 1-dimensional arrays. 
brackets, but unfortunately, Python `does yet not support`__ indexing with keyword arguments like ``da[space=0]`` -__ http://legacy.python.org/dev/peps/pep-0472/ +__ https://legacy.python.org/dev/peps/pep-0472/ .. _nearest neighbor lookups: @@ -373,7 +373,7 @@ indexing for xarray is based on our :ref:`broadcasting rules `. See :ref:`indexing.rules` for the complete specification. -.. _advanced indexing: https://docs.scipy.org/doc/numpy-1.13.0/reference/arrays.indexing.html +.. _advanced indexing: https://numpy.org/doc/stable/reference/arrays.indexing.html Vectorized indexing also works with ``isel``, ``loc``, and ``sel``: @@ -503,7 +503,7 @@ This is because ``v[0] = v[0] - 1`` is called three times, rather than ``v[0] = v[0] - 1 - 1 - 1``. See `Assigning values to indexed arrays`__ for the details. -__ https://docs.scipy.org/doc/numpy/user/basics.indexing.html#assigning-values-to-indexed-arrays +__ https://numpy.org/doc/stable/user/basics.indexing.html#assigning-values-to-indexed-arrays .. note:: @@ -751,7 +751,7 @@ Whether data is a copy or a view is more predictable in xarray than in pandas, s unlike pandas, xarray does not produce `SettingWithCopy warnings`_. However, you should still avoid assignment with chained indexing. -.. _SettingWithCopy warnings: http://pandas.pydata.org/pandas-docs/stable/indexing.html#returning-a-view-versus-a-copy +.. _SettingWithCopy warnings: https://pandas.pydata.org/pandas-docs/stable/indexing.html#returning-a-view-versus-a-copy .. _multi-level indexing: diff --git a/doc/user-guide/io.rst b/doc/user-guide/io.rst index 16b8708231e..28eeeeda99b 100644 --- a/doc/user-guide/io.rst +++ b/doc/user-guide/io.rst @@ -33,14 +33,14 @@ NetCDF is supported on almost all platforms, and parsers exist for the vast majority of scientific programming languages. Recent versions of netCDF are based on the even more widely used HDF5 file-format. -__ http://www.unidata.ucar.edu/software/netcdf/ +__ https://www.unidata.ucar.edu/software/netcdf/ .. tip:: If you aren't familiar with this data format, the `netCDF FAQ`_ is a good place to start. -.. _netCDF FAQ: http://www.unidata.ucar.edu/software/netcdf/docs/faq.html#What-Is-netCDF +.. _netCDF FAQ: https://www.unidata.ucar.edu/software/netcdf/docs/faq.html#What-Is-netCDF Reading and writing netCDF files with xarray requires scipy or the `netCDF4-Python`__ library to be installed (the latter is required to @@ -70,7 +70,7 @@ the ``format`` and ``engine`` arguments. .. tip:: - Using the `h5netcdf `_ package + Using the `h5netcdf `_ package by passing ``engine='h5netcdf'`` to :py:meth:`open_dataset` can sometimes be quicker than the default ``engine='netcdf4'`` that uses the `netCDF4 `_ package. @@ -255,7 +255,7 @@ See its docstring for more details. (``compat='override'``). -.. _dask: http://dask.pydata.org +.. _dask: http://dask.org .. _blog post: http://stephanhoyer.com/2015/06/11/xray-dask-out-of-core-labeled-arrays/ Sometimes multi-file datasets are not conveniently organized for easy use of :py:func:`open_mfdataset`. @@ -430,7 +430,7 @@ in the `documentation for createVariable`_ for netCDF4-Python. This only works for netCDF4 files and thus requires using ``format='netCDF4'`` and either ``engine='netcdf4'`` or ``engine='h5netcdf'``. -.. _documentation for createVariable: http://unidata.github.io/netcdf4-python/#netCDF4.Dataset.createVariable +.. 
_documentation for createVariable: https://unidata.github.io/netcdf4-python/#netCDF4.Dataset.createVariable Chunk based gzip compression can yield impressive space savings, especially for sparse data, but it comes with significant performance overhead. HDF5 @@ -529,7 +529,7 @@ Conversely, we can create a new ``DataArray`` object from a ``Cube`` using da_cube -.. _Iris: http://scitools.org.uk/iris +.. _Iris: https://scitools.org.uk/iris OPeNDAP @@ -538,13 +538,13 @@ OPeNDAP Xarray includes support for `OPeNDAP`__ (via the netCDF4 library or Pydap), which lets us access large datasets over HTTP. -__ http://www.opendap.org/ +__ https://www.opendap.org/ For example, we can open a connection to GBs of weather data produced by the `PRISM`__ project, and hosted by `IRI`__ at Columbia: -__ http://www.prism.oregonstate.edu/ -__ http://iri.columbia.edu/ +__ https://www.prism.oregonstate.edu/ +__ https://iri.columbia.edu/ .. ipython source code for this section we don't use this to avoid hitting the DAP server on every doc build. @@ -652,8 +652,8 @@ that require NASA's URS authentication:: ds = xr.open_dataset(store) -__ http://docs.python-requests.org -__ http://pydap.readthedocs.io/en/latest/client.html#authentication +__ https://docs.python-requests.org +__ https://www.pydap.org/en/latest/client.html#authentication .. _io.pickle: @@ -820,7 +820,7 @@ GDAL readable raster data using `rasterio`_ as well as for exporting to a geoTIF .. _rasterio: https://rasterio.readthedocs.io/en/latest/ .. _rioxarray: https://corteva.github.io/rioxarray/stable/ -.. _test files: https://github.com/mapbox/rasterio/blob/master/tests/data/RGB.byte.tif +.. _test files: https://github.com/rasterio/rasterio/blob/master/tests/data/RGB.byte.tif .. _pyproj: https://github.com/pyproj4/pyproj .. _io.zarr: @@ -923,17 +923,17 @@ instance and pass this, as follows: (or use the utility function ``fsspec.get_mapper()``). .. _fsspec: https://filesystem-spec.readthedocs.io/en/latest/ -.. _Zarr: http://zarr.readthedocs.io/ +.. _Zarr: https://zarr.readthedocs.io/ .. _Amazon S3: https://aws.amazon.com/s3/ .. _Google Cloud Storage: https://cloud.google.com/storage/ -.. _gcsfs: https://github.com/dask/gcsfs +.. _gcsfs: https://github.com/fsspec/gcsfs Zarr Compressors and Filters ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ There are many different options for compression and filtering possible with zarr. These are described in the -`zarr documentation `_. +`zarr documentation `_. These options can be passed to the ``to_zarr`` method as variable encoding. For example: @@ -1156,7 +1156,7 @@ To use PseudoNetCDF to read such files, supply Add ``backend_kwargs={'format': ''}`` where `` options are listed on the PseudoNetCDF page. -.. _PseudoNetCDF: http://github.com/barronh/PseudoNetCDF +.. _PseudoNetCDF: https://github.com/barronh/PseudoNetCDF CSV and other formats supported by pandas diff --git a/doc/user-guide/pandas.rst b/doc/user-guide/pandas.rst index acf1d16b6ee..a376b0a5cb8 100644 --- a/doc/user-guide/pandas.rst +++ b/doc/user-guide/pandas.rst @@ -11,8 +11,8 @@ ecosystem. For example, for plotting labeled data, we highly recommend using the visualization `built in to pandas itself`__ or provided by the pandas aware libraries such as `Seaborn`__. -__ http://pandas.pydata.org/pandas-docs/stable/visualization.html -__ http://seaborn.pydata.org/ +__ https://pandas.pydata.org/pandas-docs/stable/visualization.html +__ https://seaborn.pydata.org/ .. 
ipython:: python :suppress: @@ -32,7 +32,7 @@ Tabular data is easiest to work with when it meets the criteria for * Each column holds a different variable. * Each rows holds a different observation. -__ http://www.jstatsoft.org/v59/i10/ +__ https://www.jstatsoft.org/v59/i10/ In this "tidy data" format, we can represent any :py:class:`Dataset` and :py:class:`DataArray` in terms of :py:class:`~pandas.DataFrame` and @@ -241,5 +241,5 @@ While the xarray docs are relatively complete, a few items stand out for Panel u While xarray may take some getting used to, it's worth it! If anything is unclear, please post an issue on `GitHub `__ or -`StackOverflow `__, +`StackOverflow `__, and we'll endeavor to respond to the specific case or improve the general docs. diff --git a/doc/user-guide/plotting.rst b/doc/user-guide/plotting.rst index 1dce65b191c..d81ba30f12f 100644 --- a/doc/user-guide/plotting.rst +++ b/doc/user-guide/plotting.rst @@ -20,7 +20,7 @@ nicely into a pandas DataFrame then you're better off using one of the more developed tools there. Xarray plotting functionality is a thin wrapper around the popular -`matplotlib `_ library. +`matplotlib `_ library. Matplotlib syntax and function names were copied as much as possible, which makes for an easy transition between the two. Matplotlib must be installed before xarray can plot. @@ -32,11 +32,11 @@ needs to be installed. For more extensive plotting applications consider the following projects: -- `Seaborn `_: "provides +- `Seaborn `_: "provides a high-level interface for drawing attractive statistical graphics." Integrates well with pandas. -- `HoloViews `_ +- `HoloViews `_ and `GeoViews `_: "Composable, declarative data structures for building even complex visualizations easily." Includes native support for xarray objects. @@ -45,7 +45,7 @@ For more extensive plotting applications consider the following projects: dynamic plots (backed by ``Holoviews`` or ``Geoviews``) by adding a ``hvplot`` accessor to DataArrays. -- `Cartopy `_: Provides cartographic +- `Cartopy `_: Provides cartographic tools. Imports @@ -106,7 +106,7 @@ The simplest way to make a plot is to call the :py:func:`DataArray.plot()` metho @savefig plotting_1d_simple.png width=4in air1d.plot() -Xarray uses the coordinate name along with metadata ``attrs.long_name``, ``attrs.standard_name``, ``DataArray.name`` and ``attrs.units`` (if available) to label the axes. The names ``long_name``, ``standard_name`` and ``units`` are copied from the `CF-conventions spec `_. When choosing names, the order of precedence is ``long_name``, ``standard_name`` and finally ``DataArray.name``. The y-axis label in the above plot was constructed from the ``long_name`` and ``units`` attributes of ``air1d``. +Xarray uses the coordinate name along with metadata ``attrs.long_name``, ``attrs.standard_name``, ``DataArray.name`` and ``attrs.units`` (if available) to label the axes. The names ``long_name``, ``standard_name`` and ``units`` are copied from the `CF-conventions spec `_. When choosing names, the order of precedence is ``long_name``, ``standard_name`` and finally ``DataArray.name``. The y-axis label in the above plot was constructed from the ``long_name`` and ``units`` attributes of ``air1d``. .. ipython:: python @@ -123,7 +123,7 @@ matplotlib.pyplot.plot_ passing in the index and the array values as x and y, re So to make a line plot with blue triangles a matplotlib format string can be used: -.. _matplotlib.pyplot.plot: http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.plot +.. 
_matplotlib.pyplot.plot: https://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.plot .. ipython:: python :okwarning: @@ -563,7 +563,7 @@ You can also specify a list of discrete colors through the ``colors`` argument: @savefig plotting_custom_colors_levels.png width=4in air2d.plot(levels=[0, 12, 18, 30], colors=flatui) -Finally, if you have `Seaborn `_ +Finally, if you have `Seaborn `_ installed, you can also specify a seaborn color palette to the ``cmap`` argument. Note that ``levels`` *must* be specified with seaborn color palettes if using ``imshow`` or ``pcolormesh`` (but not with ``contour`` or ``contourf``, @@ -687,7 +687,7 @@ The object returned, ``g`` in the above examples, is a :py:class:`~xarray.plot.F that links a :py:class:`DataArray` to a matplotlib figure with a particular structure. This object can be used to control the behavior of the multiple plots. It borrows an API and code from `Seaborn's FacetGrid -`_. +`_. The structure is contained within the ``axes`` and ``name_dicts`` attributes, both 2d NumPy object arrays. @@ -1020,7 +1020,7 @@ You can however decide to infer the cell boundaries and use the yet. If you want to use these coordinates, you'll have to make the plots outside the xarray framework. -.. _cell boundaries: http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#cell-boundaries +.. _cell boundaries: https://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#cell-boundaries One can also make line plots with multidimensional coordinates. In this case, ``hue`` must be a dimension name, not a coordinate name. diff --git a/doc/user-guide/reshaping.rst b/doc/user-guide/reshaping.rst index 86dc5fbe51a..edfaaa49427 100644 --- a/doc/user-guide/reshaping.rst +++ b/doc/user-guide/reshaping.rst @@ -151,7 +151,7 @@ Stacking different variables together These stacking and unstacking operations are particularly useful for reshaping xarray objects for use in machine learning packages, such as `scikit-learn -`_, that usually require two-dimensional numpy +`_, that usually require two-dimensional numpy arrays as inputs. For datasets with only one variable, we only need ``stack`` and ``unstack``, but combining multiple variables in a :py:class:`xarray.Dataset` is more complicated. If the variables in the dataset diff --git a/doc/user-guide/time-series.rst b/doc/user-guide/time-series.rst index 1813c125eed..36a57e37475 100644 --- a/doc/user-guide/time-series.rst +++ b/doc/user-guide/time-series.rst @@ -46,7 +46,7 @@ When reading or writing netCDF files, xarray automatically decodes datetime and timedelta arrays using `CF conventions`_ (that is, by using a ``units`` attribute like ``'days since 2000-01-01'``). -.. _CF conventions: http://cfconventions.org +.. _CF conventions: https://cfconventions.org .. note:: @@ -111,7 +111,7 @@ Datetime components Similar `to pandas`_, the components of datetime objects contained in a given ``DataArray`` can be quickly computed using a special ``.dt`` accessor. -.. _to pandas: http://pandas.pydata.org/pandas-docs/stable/basics.html#basics-dt-accessors +.. _to pandas: https://pandas.pydata.org/pandas-docs/stable/basics.html#basics-dt-accessors .. ipython:: python @@ -128,7 +128,7 @@ Xarray also supports a notion of "virtual" or "derived" coordinates for "day", "hour", "minute", "second", "dayofyear", "week", "dayofweek", "weekday" and "quarter": -__ http://pandas.pydata.org/pandas-docs/stable/api.html#time-date-components +__ https://pandas.pydata.org/pandas-docs/stable/api.html#time-date-components .. 
ipython:: python @@ -150,7 +150,7 @@ You can use these shortcuts with both Datasets and DataArray coordinates. In addition, xarray supports rounding operations ``floor``, ``ceil``, and ``round``. These operations require that you supply a `rounding frequency as a string argument.`__ -__ http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases +__ https://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases .. ipython:: python @@ -200,7 +200,7 @@ For upsampling or downsampling temporal resolutions, xarray offers a offered by the pandas method of the same name. Resample uses essentially the same api as ``resample`` `in pandas`_. -.. _in pandas: http://pandas.pydata.org/pandas-docs/stable/timeseries.html#up-and-downsampling +.. _in pandas: https://pandas.pydata.org/pandas-docs/stable/timeseries.html#up-and-downsampling For example, we can downsample our dataset from hourly to 6-hourly: diff --git a/doc/user-guide/weather-climate.rst b/doc/user-guide/weather-climate.rst index 893e7b50429..d11c7c3a4f9 100644 --- a/doc/user-guide/weather-climate.rst +++ b/doc/user-guide/weather-climate.rst @@ -12,7 +12,7 @@ Weather and climate data Xarray can leverage metadata that follows the `Climate and Forecast (CF) conventions`_ if present. Examples include automatic labelling of plots with descriptive names and units if proper metadata is present (see :ref:`plotting`) and support for non-standard calendars used in climate science through the ``cftime`` module (see :ref:`CFTimeIndex`). There are also a number of geosciences-focused projects that build on xarray (see :ref:`ecosystem`). -.. _Climate and Forecast (CF) conventions: http://cfconventions.org +.. _Climate and Forecast (CF) conventions: https://cfconventions.org .. _cf_variables: diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 2cadf6ff478..9502beec327 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -118,7 +118,7 @@ Bug fixes - No longer raise an error for an all-nan-but-one argument to :py:meth:`DataArray.interpolate_na` when using `method='nearest'` (:issue:`5994`, :pull:`6144`). By `Michael Delgado `_. -- `dt.season `_ can now handle NaN and NaT. (:pull:`5876`). +- `dt.season `_ can now handle NaN and NaT. (:pull:`5876`). By `Pierre Loicq `_. - Determination of zarr chunks handles empty lists for encoding chunks or variable chunks that occurs in certain cirumstances (:pull:`5526`). By `Chris Roat `_. @@ -1933,7 +1933,7 @@ Bug fixes Documentation ~~~~~~~~~~~~~ -- Fix leap year condition in `monthly means example `_. +- Fix leap year condition in `monthly means example `_. By `Mickaël Lalande `_. - Fix the documentation of :py:meth:`DataArray.resample` and :py:meth:`Dataset.resample`, explicitly stating that a @@ -2272,7 +2272,7 @@ Bug fixes Documentation ~~~~~~~~~~~~~ -- Created a `PR checklist `_ +- Created a `PR checklist `_ as a quick reference for tasks before creating a new PR or pushing new commits. By `Gregory Gundersen `_. @@ -3337,7 +3337,7 @@ Backwards incompatible changes simple: convert your objects explicitly into NumPy arrays before calling the ufunc (e.g., with ``.values``). -.. _ufunc methods: https://docs.scipy.org/doc/numpy/reference/ufuncs.html#methods +.. _ufunc methods: https://numpy.org/doc/stable/reference/ufuncs.html#methods Enhancements ~~~~~~~~~~~~ @@ -4029,7 +4029,7 @@ Bug fixes Documentation ~~~~~~~~~~~~~ -- A new `gallery `_ +- A new `gallery `_ allows to add interactive examples to the documentation. By `Fabien Maussion `_. 
@@ -4781,8 +4781,8 @@ scientists who work with actual x-rays are interested in using this project in their work. Thanks for your understanding and patience in this transition. You can now find our documentation and code repository at new URLs: -- http://xarray.pydata.org -- http://github.com/pydata/xarray/ +- https://docs.xarray.dev +- https://github.com/pydata/xarray/ To ease the transition, we have simultaneously released v0.7.0 of both ``xray`` and ``xarray`` on the Python Package Index. These packages are @@ -5661,9 +5661,9 @@ is supporting out-of-core operations in xray using Dask_, a part of the Blaze_ project. For a preview of using Dask with weather data, read `this blog post`_ by Matthew Rocklin. See :issue:`328` for more details. -.. _Dask: http://dask.pydata.org -.. _Blaze: http://blaze.pydata.org -.. _this blog post: http://matthewrocklin.com/blog/work/2015/02/13/Towards-OOC-Slicing-and-Stacking/ +.. _Dask: https://dask.org +.. _Blaze: https://blaze.pydata.org +.. _this blog post: https://matthewrocklin.com/blog/work/2015/02/13/Towards-OOC-Slicing-and-Stacking v0.3.2 (23 December, 2014) -------------------------- diff --git a/setup.cfg b/setup.cfg index f9f8ae5c4dc..05b202810b4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -51,9 +51,9 @@ long_description = Learn more ---------- - - Documentation: ``_ - - Issue tracker: ``_ - - Source code: ``_ + - Documentation: ``_ + - Issue tracker: ``_ + - Source code: ``_ - SciPy2015 talk: ``_ url = https://github.com/pydata/xarray diff --git a/xarray/backends/api.py b/xarray/backends/api.py index 0ca82555c8f..548b98048ba 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -834,8 +834,8 @@ def open_mfdataset( References ---------- - .. [1] http://xarray.pydata.org/en/stable/dask.html - .. [2] http://xarray.pydata.org/en/stable/dask.html#chunking-and-performance + .. [1] https://docs.xarray.dev/en/stable/dask.html + .. [2] https://docs.xarray.dev/en/stable/dask.html#chunking-and-performance """ if isinstance(paths, str): if is_remote_uri(paths) and engine == "zarr": diff --git a/xarray/backends/h5netcdf_.py b/xarray/backends/h5netcdf_.py index 735aa5fc3bc..70fc3a76266 100644 --- a/xarray/backends/h5netcdf_.py +++ b/xarray/backends/h5netcdf_.py @@ -280,7 +280,7 @@ def prepare_variable( raise NotImplementedError( "h5netcdf does not yet support setting a fill value for " "variable-length strings " - "(https://github.com/shoyer/h5netcdf/issues/37). " + "(https://github.com/h5netcdf/h5netcdf/issues/37). " f"Either remove '_FillValue' from encoding on variable {name!r} " "or set {'dtype': 'S1'} in encoding to use the fixed width " "NC_CHAR type." diff --git a/xarray/backends/plugins.py b/xarray/backends/plugins.py index a45ee78efd0..7444fbf11eb 100644 --- a/xarray/backends/plugins.py +++ b/xarray/backends/plugins.py @@ -126,23 +126,23 @@ def guess_engine(store_spec): f"backends {installed_engines}. Consider explicitly selecting one of the " "installed engines via the ``engine`` parameter, or installing " "additional IO dependencies, see:\n" - "http://xarray.pydata.org/en/stable/getting-started-guide/installing.html\n" - "http://xarray.pydata.org/en/stable/user-guide/io.html" + "https://docs.xarray.dev/en/stable/getting-started-guide/installing.html\n" + "https://docs.xarray.dev/en/stable/user-guide/io.html" ) else: error_msg = ( "xarray is unable to open this file because it has no currently " "installed IO backends. 
Xarray's read/write support requires " "installing optional IO dependencies, see:\n" - "http://xarray.pydata.org/en/stable/getting-started-guide/installing.html\n" - "http://xarray.pydata.org/en/stable/user-guide/io" + "https://docs.xarray.dev/en/stable/getting-started-guide/installing.html\n" + "https://docs.xarray.dev/en/stable/user-guide/io" ) else: error_msg = ( "found the following matches with the input file in xarray's IO " f"backends: {compatible_engines}. But their dependencies may not be installed, see:\n" - "http://xarray.pydata.org/en/stable/user-guide/io.html \n" - "http://xarray.pydata.org/en/stable/getting-started-guide/installing.html" + "https://docs.xarray.dev/en/stable/user-guide/io.html \n" + "https://docs.xarray.dev/en/stable/getting-started-guide/installing.html" ) raise ValueError(error_msg) diff --git a/xarray/backends/rasterio_.py b/xarray/backends/rasterio_.py index 9600827a807..7f3791ffca2 100644 --- a/xarray/backends/rasterio_.py +++ b/xarray/backends/rasterio_.py @@ -189,7 +189,7 @@ def open_rasterio( >>> from affine import Affine >>> da = xr.open_rasterio( - ... "https://github.com/mapbox/rasterio/raw/1.2.1/tests/data/RGB.byte.tif" + ... "https://github.com/rasterio/rasterio/raw/1.2.1/tests/data/RGB.byte.tif" ... ) >>> da diff --git a/xarray/core/computation.py b/xarray/core/computation.py index 7273d25253d..88eefbdc441 100644 --- a/xarray/core/computation.py +++ b/xarray/core/computation.py @@ -1044,8 +1044,8 @@ def apply_ufunc( References ---------- - .. [1] http://docs.scipy.org/doc/numpy/reference/ufuncs.html - .. [2] http://docs.scipy.org/doc/numpy/reference/c-api.generalized-ufuncs.html + .. [1] https://numpy.org/doc/stable/reference/ufuncs.html + .. [2] https://numpy.org/doc/stable/reference/c-api/generalized-ufuncs.html """ from .dataarray import DataArray from .groupby import GroupBy diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index 6fe865a9f64..20e829d293e 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -3378,7 +3378,7 @@ def sortby( If multiple sorts along the same dimension is given, numpy's lexsort is performed along that dimension: - https://docs.scipy.org/doc/numpy/reference/generated/numpy.lexsort.html + https://numpy.org/doc/stable/reference/generated/numpy.lexsort.html and the FIRST key in the sequence is used as the primary sort key, followed by the 2nd key, etc. diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py index 83126f157a4..af59f5cd2f1 100644 --- a/xarray/core/dataset.py +++ b/xarray/core/dataset.py @@ -1892,7 +1892,7 @@ def to_netcdf( invalid_netcdf: bool, default: False Only valid along with ``engine="h5netcdf"``. If True, allow writing hdf5 files which are invalid netcdf as described in - https://github.com/shoyer/h5netcdf. + https://github.com/h5netcdf/h5netcdf. """ if encoding is None: encoding = {} @@ -6069,7 +6069,7 @@ def sortby(self, variables, ascending=True): If multiple sorts along the same dimension is given, numpy's lexsort is performed along that dimension: - https://docs.scipy.org/doc/numpy/reference/generated/numpy.lexsort.html + https://numpy.org/doc/stable/reference/generated/numpy.lexsort.html and the FIRST key in the sequence is used as the primary sort key, followed by the 2nd key, etc. 
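The multi-key ``sortby`` behaviour that this docstring describes can be sketched with made-up values:

.. code:: python

    import xarray as xr

    ds = xr.Dataset(
        {"temperature": ("time", [4.0, 2.0, 3.0, 1.0])},
        coords={"time": [3, 1, 2, 0], "flag": ("time", [1, 0, 1, 0])},
    )

    # "flag" is the primary sort key and "time" breaks ties:
    # the FIRST key in the sequence wins, per the docstring above.
    ds.sortby(["flag", "time"])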
diff --git a/xarray/core/dtypes.py b/xarray/core/dtypes.py index 5f9349051b7..1e87e782fb2 100644 --- a/xarray/core/dtypes.py +++ b/xarray/core/dtypes.py @@ -34,7 +34,7 @@ def __eq__(self, other): # Pairs of types that, if both found, should be promoted to object dtype # instead of following NumPy's own type-promotion rules. These type promotion # rules match pandas instead. For reference, see the NumPy type hierarchy: -# https://docs.scipy.org/doc/numpy-1.13.0/reference/arrays.scalars.html +# https://numpy.org/doc/stable/reference/arrays.scalars.html PROMOTE_TO_OBJECT = [ {np.number, np.character}, # numpy promotes to character {np.bool_, np.character}, # numpy promotes to character diff --git a/xarray/core/indexing.py b/xarray/core/indexing.py index 581572cd0e1..17d026baa59 100644 --- a/xarray/core/indexing.py +++ b/xarray/core/indexing.py @@ -35,7 +35,7 @@ def expanded_indexer(key, ndim): key = (key,) new_key = [] # handling Ellipsis right is a little tricky, see: - # http://docs.scipy.org/doc/numpy/reference/arrays.indexing.html#advanced-indexing + # https://numpy.org/doc/stable/reference/arrays.indexing.html#advanced-indexing found_ellipsis = False for k in key: if k is Ellipsis: @@ -1146,7 +1146,7 @@ def _indexing_array_and_key(self, key): array = self.array # We want 0d slices rather than scalars. This is achieved by # appending an ellipsis (see - # https://docs.scipy.org/doc/numpy/reference/arrays.indexing.html#detailed-notes). + # https://numpy.org/doc/stable/reference/arrays.indexing.html#detailed-notes). key = key.tuple + (Ellipsis,) else: raise TypeError(f"unexpected key type: {type(key)}") diff --git a/xarray/core/nputils.py b/xarray/core/nputils.py index 3e0f550dd30..1feb97c5aa4 100644 --- a/xarray/core/nputils.py +++ b/xarray/core/nputils.py @@ -103,7 +103,7 @@ def _advanced_indexer_subspaces(key): # Nothing to reorder: dimensions on the indexing result are already # ordered like vindex. See NumPy's rule for "Combining advanced and # basic indexing": - # https://docs.scipy.org/doc/numpy/reference/arrays.indexing.html#combining-advanced-and-basic-indexing + # https://numpy.org/doc/stable/reference/arrays.indexing.html#combining-advanced-and-basic-indexing return (), () non_slices = [k for k in key if not isinstance(k, slice)] diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index 321759f3ef6..c0e340dd723 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -1294,7 +1294,7 @@ def test_roundtrip_string_with_fill_value_vlen(self): # netCDF4-based backends don't support an explicit fillvalue # for variable length strings yet. 
# https://github.com/Unidata/netcdf4-python/issues/730 - # https://github.com/shoyer/h5netcdf/issues/37 + # https://github.com/h5netcdf/h5netcdf/issues/37 original = Dataset({"x": ("t", values, {}, {"_FillValue": "XXX"})}) with pytest.raises(NotImplementedError): with self.roundtrip(original) as actual: @@ -4733,7 +4733,7 @@ def test_rasterio_vrt_with_transform_and_size(self): # Test open_rasterio() support of WarpedVRT with transform, width and # height (issue #2864) - # https://github.com/mapbox/rasterio/1768 + # https://github.com/rasterio/rasterio/1768 rasterio = pytest.importorskip("rasterio", minversion="1.0.28") from affine import Affine from rasterio.warp import calculate_default_transform @@ -4763,7 +4763,7 @@ def test_rasterio_vrt_with_transform_and_size(self): def test_rasterio_vrt_with_src_crs(self): # Test open_rasterio() support of WarpedVRT with specified src_crs - # https://github.com/mapbox/rasterio/1768 + # https://github.com/rasterio/rasterio/1768 rasterio = pytest.importorskip("rasterio", minversion="1.0.28") # create geotiff with no CRS and specify it manually diff --git a/xarray/tests/test_cupy.py b/xarray/tests/test_cupy.py index e8f35e12ac6..79a540cdb38 100644 --- a/xarray/tests/test_cupy.py +++ b/xarray/tests/test_cupy.py @@ -11,7 +11,7 @@ def toy_weather_data(): """Construct the example DataSet from the Toy weather data example. - http://xarray.pydata.org/en/stable/examples/weather-data.html + https://docs.xarray.dev/en/stable/examples/weather-data.html Here we construct the DataSet exactly as shown in the example and then convert the numpy arrays to cupy. diff --git a/xarray/tutorial.py b/xarray/tutorial.py index d9ff3b1492d..fd8150bf8a6 100644 --- a/xarray/tutorial.py +++ b/xarray/tutorial.py @@ -33,8 +33,8 @@ def _construct_cache_dir(path): external_urls = {} # type: dict external_rasterio_urls = { - "RGB.byte": "https://github.com/mapbox/rasterio/raw/1.2.1/tests/data/RGB.byte.tif", - "shade": "https://github.com/mapbox/rasterio/raw/1.2.1/tests/data/shade.tif", + "RGB.byte": "https://github.com/rasterio/rasterio/raw/1.2.1/tests/data/RGB.byte.tif", + "shade": "https://github.com/rasterio/rasterio/raw/1.2.1/tests/data/shade.tif", } file_formats = { "air_temperature": 3, @@ -185,7 +185,7 @@ def open_rasterio( References ---------- - .. [1] https://github.com/mapbox/rasterio + .. [1] https://github.com/rasterio/rasterio """ try: import pooch