From ed2134717cb4a8984666133206c73f21224033d8 Mon Sep 17 00:00:00 2001 From: Wolfgang Preimesberger Date: Tue, 20 Jul 2021 13:23:57 +0200 Subject: [PATCH] Doc updates, prepare new release (#72) * Update docs * Update env for docs * Updates for nbsphinx * Updates for nbsphinx * Updates for nbsphinx * Update header * Blacken src --- .gitignore | 3 +- .readthedocs.yml | 16 + CHANGELOG.rst | 6 +- README.rst | 6 - doc_requirements.txt | 5 - ...Creating and working with grid objects.rst | 277 -------------- ...bsetting grid objects with shape files.rst | 57 --- docs/changes.rst | 2 - docs/conf.py | 33 ++ docs/examples.rst | 8 - ...ating_and_working_with_grid_objects.ipynb} | 208 +++++------ docs/examples/index.rst | 10 + ...tting_grid_objects_with_shape_files.ipynb} | 44 ++- docs/index.rst | 1 + docs/requirements.txt | 5 - .../output_6_1.png | Bin 7314 -> 0 bytes .../output_8_1.png | Bin 7743 -> 0 bytes environment.yml | 4 + environment_pinned.yml | 198 ++++++---- src/pygeogrids/__init__.py | 2 +- src/pygeogrids/_version.py | 72 ++-- src/pygeogrids/geodetic_datum.py | 32 +- src/pygeogrids/grids.py | 302 +++++++++------ src/pygeogrids/nearest_neighbor.py | 30 +- src/pygeogrids/netcdf.py | 350 +++++++++++------- src/pygeogrids/plotting.py | 64 ++-- src/pygeogrids/shapefile.py | 11 +- 27 files changed, 811 insertions(+), 935 deletions(-) create mode 100644 .readthedocs.yml delete mode 100644 doc_requirements.txt delete mode 100644 docs/Creating and working with grid objects.rst delete mode 100644 docs/Subsetting grid objects with shape files.rst delete mode 100644 docs/changes.rst delete mode 100644 docs/examples.rst rename docs/{Creating and working with grid objects.ipynb => examples/creating_and_working_with_grid_objects.ipynb} (74%) create mode 100644 docs/examples/index.rst rename docs/{Subsetting grid objects with shape files.ipynb => examples/subsetting_grid_objects_with_shape_files.ipynb} (89%) delete mode 100644 docs/requirements.txt delete mode 100644 docs/subsetting_grids_with_shape_files/output_6_1.png delete mode 100644 docs/subsetting_grids_with_shape_files/output_8_1.png diff --git a/.gitignore b/.gitignore index 44358fd..d343ef0 100644 --- a/.gitignore +++ b/.gitignore @@ -11,7 +11,7 @@ __pycache__/* .cache/* .*.swp -*/.ipynb_checkpoints/* +**/.ipynb_checkpoints/** .DS_Store # Project files @@ -49,3 +49,4 @@ MANIFEST # Per-project virtualenvs .venv*/ .artifacts/* +_example_out/* diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 0000000..a497c3a --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,16 @@ +version: 2 + +sphinx: + configuration: docs/conf.py + +submodules: + include: all + +conda: + environment: environment.yml + +python: + version: 3.8 + install: + - method: pip + path: . \ No newline at end of file diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 47f0ddf..50ee51d 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -2,10 +2,11 @@ Changelog ========= -Version x.x.x +Version 0.4.0 ============= -- +- Faster implementation of bbox search +- Uses pyscaffold v4 standard Version 0.3.2 ============= @@ -18,7 +19,6 @@ Version 0.3.1 - Add k parameter to nearest neighbor search (number of nearest neighbors to return) - Version 0.3.0 ============= diff --git a/README.rst b/README.rst index 4b12a6a..732ce3c 100644 --- a/README.rst +++ b/README.rst @@ -43,12 +43,6 @@ This package should be installable through pip: pip install pygeogrids -To install the package use pip 6.0 or later. 
On linux systems `pykdtree -`__ will be installed whereas on -windows systems `scipy `__ ``cKDTree`` will be used. -pykdtree is faster than the scipy implementation but it is at the moment -not available for Windows systems. - Features ======== diff --git a/doc_requirements.txt b/doc_requirements.txt deleted file mode 100644 index d076d90..0000000 --- a/doc_requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -mock -numpydoc -sphinx_rtd_theme -ipython -sphinx==1.5.6 \ No newline at end of file diff --git a/docs/Creating and working with grid objects.rst b/docs/Creating and working with grid objects.rst deleted file mode 100644 index 639365b..0000000 --- a/docs/Creating and working with grid objects.rst +++ /dev/null @@ -1,277 +0,0 @@ - -.. code:: python - - import pygeogrids.grids as grids - import numpy as np - -Let's create a simple regular 10x10 degree grid with grid points at the -center of each 10x10 degree cell. - -First by hand to understand what is going on underneath - -.. code:: python - - # create the longitudes - lons = np.arange(-180 + 5, 180, 10) - print(lons) - lats = np.arange(90 - 5, -90, -10) - print(lats) - - -.. parsed-literal:: - - [-175 -165 -155 -145 -135 -125 -115 -105 -95 -85 -75 -65 -55 -45 -35 - -25 -15 -5 5 15 25 35 45 55 65 75 85 95 105 115 - 125 135 145 155 165 175] - [ 85 75 65 55 45 35 25 15 5 -5 -15 -25 -35 -45 -55 -65 -75 -85] - - -These are just the dimensions or we can also call them the "sides" of -the array that defines all the gridpoints. - -.. code:: python - - # create all the grid points by using the numpy.meshgrid function - longrid, latgrid = np.meshgrid(lons, lats) - -now we can create a BasicGrid. We can also define the shape of the grid. -The first part of the shape must be in longitude direction. - -.. code:: python - - manualgrid = grids.BasicGrid(longrid.flatten(), latgrid.flatten(), shape=(36, 18)) - - # Each point of the grid automatically got a grid point number - gpis, gridlons, gridlats = manualgrid.get_grid_points() - print(gpis[:10], gridlons[:10], gridlats[:10]) - - -.. parsed-literal:: - - (array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), array([-175, -165, -155, -145, -135, -125, -115, -105, -95, -85]), array([85, 85, 85, 85, 85, 85, 85, 85, 85, 85])) - - -The grid point indices or numbers are useful when creating lookup tables -between grids. - -We can now use the manualgrid instance to find the nearest gpi to any -longitude and latitude - -.. code:: python - - ngpi, distance = manualgrid.find_nearest_gpi(15.84, 28.76) - print(ngpi, distance) - # convert the gpi to longitude and latitude - print(manualgrid.gpi2lonlat(ngpi)) - - -.. parsed-literal:: - - (235, 424808.51317782089) - (15, 25) - - -The same grid can also be created by a method for creating regular grids - -.. code:: python - - autogrid = grids.genreg_grid(10, 10) - autogrid == manualgrid - - - - -.. parsed-literal:: - - True - - - -If your grid has a 2D shape like the ones we just created then you can -also get the row and the column of a grid point. This can be useful if -you know that you have data stored on a specific grid and you want to -read the data from a grid point. - -.. code:: python - - row, col = autogrid.gpi2rowcol(ngpi) - print(row, col) - - -.. parsed-literal:: - - (6, 19) - - -Iteration over gridpoints -------------------------- - -.. code:: python - - for i, (gpi, lon, lat) in enumerate(autogrid.grid_points()): - print(gpi, lon, lat) - if i==10: # this is just to keep the example output short - break - - -.. 
parsed-literal:: - - (0, -175.0, 85.0) - (1, -165.0, 85.0) - (2, -155.0, 85.0) - (3, -145.0, 85.0) - (4, -135.0, 85.0) - (5, -125.0, 85.0) - (6, -115.0, 85.0) - (7, -105.0, 85.0) - (8, -95.0, 85.0) - (9, -85.0, 85.0) - (10, -75.0, 85.0) - - -Calculation of lookup tables ----------------------------- - -If you have a two grids and you know that you want to get the nearest -neighbors for all of its grid points in the second grid you can -calculate a lookup table once and reuse it later. - -.. code:: python - - # lets generate a second grid with 10 random points on the Earth surface. - - randlat = np.random.random(10) * 180 - 90 - randlon = np.random.random(10) * 360 - 180 - print(randlat) - print(randlon) - # This grid has no meaningful 2D shape so none is given - randgrid = grids.BasicGrid(randlon, randlat) - - -.. parsed-literal:: - - [-67.7701097 79.03856366 -71.6134622 63.7418792 -25.91579334 - 19.20630556 -79.29563693 11.49060401 33.88811903 41.03189655] - [ -65.98506205 -86.16694426 112.33747512 -49.55645505 -22.02287726 - 132.29787487 91.23860579 -92.31842844 94.96203201 -66.00963993] - - -Now lets calculate a lookup table to the regular 10x10° grid we created -earlier - -.. code:: python - - lut = randgrid.calc_lut(autogrid) - print(lut) - - -.. parsed-literal:: - - [551 45 605 85 411 283 603 260 207 155] - - -The lookup table contains the grid point indices of the other grid, -autogrid in this case. - -.. code:: python - - lut_lons, lut_lats = autogrid.gpi2lonlat(lut) - print(lut_lats) - print(lut_lons) - - -.. parsed-literal:: - - [-65. 75. -75. 65. -25. 15. -75. 15. 35. 45.] - [ -65. -85. 115. -45. -25. 135. 95. -95. 95. -65.] - - -Storing and loading grids -------------------------- - -Grids can be stored to disk as CF compliant netCDF files - -.. code:: python - - import pygeogrids.netcdf as nc - nc.save_grid('example.nc', randgrid) - -.. code:: python - - loadedgrid = nc.load_grid('example.nc') - -.. code:: python - - loadedgrid - - - - -.. parsed-literal:: - - - - - -.. code:: python - - randgrid - - - - -.. parsed-literal:: - - - - - -Define geodetic datum for grid ------------------------------- - -.. code:: python - - grid_WGS84 = grids.BasicGrid(randlon, randlat, geodatum='WGS84') - -.. code:: python - - grid_GRS80 = grids.BasicGrid(randlon, randlat, geodatum='GRS80') - -.. code:: python - - grid_WGS84.geodatum.a - - - - -.. parsed-literal:: - - 6378137.0 - - - -.. code:: python - - grid_GRS80.geodatum.a - - - - -.. parsed-literal:: - - 6378137.0 - - - -.. code:: python - - grid_WGS84.kdTree.geodatum.sphere - - - - -.. parsed-literal:: - - False - - diff --git a/docs/Subsetting grid objects with shape files.rst b/docs/Subsetting grid objects with shape files.rst deleted file mode 100644 index dc83740..0000000 --- a/docs/Subsetting grid objects with shape files.rst +++ /dev/null @@ -1,57 +0,0 @@ - -Subsetting grids using shapefiles. ----------------------------------- - -.. code:: ipython2 - - import pygeogrids.grids as grids - import pygeogrids.shapefile as shapefile - import numpy as np - -.. code:: ipython2 - - testgrid = grids.genreg_grid(0.1, 0.1) - -We can now subset the 0.1x0.1 degree regular grid with the shapefiles -from http://biogeo.ucdavis.edu/data/gadm2.8/gadm28\_levels.shp.zip which -were downloaded to ``~/Downloads/gadm`` - -.. code:: ipython2 - - austria = shapefile.get_gad_grid_points(testgrid, '/home/cpa/Downloads/gadm/', - 0, name='Austria') - - -We can the plot the resulting grid using a simple scatterplot. - -.. 
code:: ipython2 - - import matplotlib.pyplot as plt - %matplotlib inline - plt.scatter(austria.arrlon, austria.arrlat) - -.. image:: subsetting_grids_with_shape_files/output_6_1.png - - -Behind the scenes this functionality uses the ``get_shp_grid_points`` -function of the grid object. - -We can also use this directly using any ``ogr.Geometry`` object. - -.. code:: ipython2 - - ring = ogr.Geometry(ogr.wkbLinearRing) - ring.AddPoint(14, 47) - ring.AddPoint(14, 48) - ring.AddPoint(16, 48) - ring.AddPoint(16, 47) - ring.AddPoint(14, 47) - - poly = ogr.Geometry(ogr.wkbPolygon) - poly.AddGeometry(ring) - subgrid = austria.get_shp_grid_points(poly) - plt.scatter(austria.arrlon, austria.arrlat) - plt.scatter(subgrid.arrlon, subgrid.arrlat, c='orange') - -.. image:: subsetting_grids_with_shape_files/output_8_1.png - diff --git a/docs/changes.rst b/docs/changes.rst deleted file mode 100644 index 257630a..0000000 --- a/docs/changes.rst +++ /dev/null @@ -1,2 +0,0 @@ -.. _changes: -.. include:: ../CHANGES.rst diff --git a/docs/conf.py b/docs/conf.py index 32459a7..abf0075 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,6 +12,38 @@ import inspect import shutil +import subprocess + +# Create kernel for notebooks +on_rtd = "READTHEDOCS" in os.environ and os.environ["READTHEDOCS"] +if on_rtd: + rtd_project = os.environ["READTHEDOCS_PROJECT"] + rtd_version = os.environ["READTHEDOCS_VERSION"] + interpreter = ( + f"/home/docs/checkouts/readthedocs.org/user_builds/{rtd_project}/" + f"conda/{rtd_version}/bin/python" + ) +else: + interpreter = "python" + +print("Installing kernel") +subprocess.run( + [ + interpreter, + "-m", + "ipykernel", + "install", + "--user", + "--name", + "conda-env-pygeogrids-py", + "--display-name", + "Python [conda env:pygeogrids]" + ], + check=True, + capture_output=True, +) +print("Done") + # -- Path setup -------------------------------------------------------------- __location__ = os.path.join( @@ -78,6 +110,7 @@ "sphinx.ext.ifconfig", "sphinx.ext.mathjax", "sphinx.ext.napoleon", + "nbsphinx" ] # Add any paths that contain templates here, relative to this directory. diff --git a/docs/examples.rst b/docs/examples.rst deleted file mode 100644 index 5359157..0000000 --- a/docs/examples.rst +++ /dev/null @@ -1,8 +0,0 @@ -Examples -======== - -.. include:: - Creating and working with grid objects.rst - -.. 
include:: - Subsetting grid objects with shape files.rst diff --git a/docs/Creating and working with grid objects.ipynb b/docs/examples/creating_and_working_with_grid_objects.ipynb similarity index 74% rename from docs/Creating and working with grid objects.ipynb rename to docs/examples/creating_and_working_with_grid_objects.ipynb index f4636f9..c934a00 100644 --- a/docs/Creating and working with grid objects.ipynb +++ b/docs/examples/creating_and_working_with_grid_objects.ipynb @@ -4,15 +4,13 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Examples" + "# Basics" ] }, { "cell_type": "code", - "execution_count": 1, - "metadata": { - "collapsed": false - }, + "execution_count": 8, + "metadata": {}, "outputs": [], "source": [ "import pygeogrids.grids as grids\n", @@ -30,18 +28,16 @@ }, { "cell_type": "code", - "execution_count": 2, - "metadata": { - "collapsed": false - }, + "execution_count": 9, + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "[-175 -165 -155 -145 -135 -125 -115 -105 -95 -85 -75 -65 -55 -45 -35\n", - " -25 -15 -5 5 15 25 35 45 55 65 75 85 95 105 115\n", - " 125 135 145 155 165 175]\n", + "[-175 -165 -155 -145 -135 -125 -115 -105 -95 -85 -75 -65 -55 -45\n", + " -35 -25 -15 -5 5 15 25 35 45 55 65 75 85 95\n", + " 105 115 125 135 145 155 165 175]\n", "[ 85 75 65 55 45 35 25 15 5 -5 -15 -25 -35 -45 -55 -65 -75 -85]\n" ] } @@ -63,10 +59,8 @@ }, { "cell_type": "code", - "execution_count": 3, - "metadata": { - "collapsed": false - }, + "execution_count": 10, + "metadata": {}, "outputs": [], "source": [ "# create all the grid points by using the numpy.meshgrid function\n", @@ -82,21 +76,19 @@ }, { "cell_type": "code", - "execution_count": 4, - "metadata": { - "collapsed": false - }, + "execution_count": 12, + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "(array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), array([-175, -165, -155, -145, -135, -125, -115, -105, -95, -85]), array([85, 85, 85, 85, 85, 85, 85, 85, 85, 85]))\n" + "[0 1 2 3 4 5 6 7 8 9] [-175 -165 -155 -145 -135 -125 -115 -105 -95 -85] [85 85 85 85 85 85 85 85 85 85]\n" ] } ], "source": [ - "manualgrid = grids.BasicGrid(longrid.flatten(), latgrid.flatten(), shape=(36, 18))\n", + "manualgrid = grids.BasicGrid(longrid.flatten(), latgrid.flatten(), shape=(18, 36))\n", "\n", "# Each point of the grid automatically got a grid point number\n", "gpis, gridlons, gridlats = manualgrid.get_grid_points()\n", @@ -119,16 +111,14 @@ }, { "cell_type": "code", - "execution_count": 5, - "metadata": { - "collapsed": false - }, + "execution_count": 13, + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "(235, 424808.51317782089)\n", + "235 424808.5131778209\n", "(15, 25)\n" ] } @@ -149,10 +139,8 @@ }, { "cell_type": "code", - "execution_count": 6, - "metadata": { - "collapsed": false - }, + "execution_count": 14, + "metadata": {}, "outputs": [ { "data": { @@ -160,7 +148,7 @@ "True" ] }, - "execution_count": 6, + "execution_count": 14, "metadata": {}, "output_type": "execute_result" } @@ -180,16 +168,14 @@ }, { "cell_type": "code", - "execution_count": 7, - "metadata": { - "collapsed": false - }, + "execution_count": 15, + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "(6, 19)\n" + "6 19\n" ] } ], @@ -207,26 +193,24 @@ }, { "cell_type": "code", - "execution_count": 8, - "metadata": { - "collapsed": false - }, + "execution_count": 16, + "metadata": {}, "outputs": [ { "name": "stdout", 
"output_type": "stream", "text": [ - "(0, -175.0, 85.0)\n", - "(1, -165.0, 85.0)\n", - "(2, -155.0, 85.0)\n", - "(3, -145.0, 85.0)\n", - "(4, -135.0, 85.0)\n", - "(5, -125.0, 85.0)\n", - "(6, -115.0, 85.0)\n", - "(7, -105.0, 85.0)\n", - "(8, -95.0, 85.0)\n", - "(9, -85.0, 85.0)\n", - "(10, -75.0, 85.0)\n" + "0 -175.0 85.0\n", + "1 -165.0 85.0\n", + "2 -155.0 85.0\n", + "3 -145.0 85.0\n", + "4 -135.0 85.0\n", + "5 -125.0 85.0\n", + "6 -115.0 85.0\n", + "7 -105.0 85.0\n", + "8 -95.0 85.0\n", + "9 -85.0 85.0\n", + "10 -75.0 85.0\n" ] } ], @@ -248,19 +232,17 @@ }, { "cell_type": "code", - "execution_count": 9, - "metadata": { - "collapsed": false - }, + "execution_count": 17, + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "[-67.7701097 79.03856366 -71.6134622 63.7418792 -25.91579334\n", - " 19.20630556 -79.29563693 11.49060401 33.88811903 41.03189655]\n", - "[ -65.98506205 -86.16694426 112.33747512 -49.55645505 -22.02287726\n", - " 132.29787487 91.23860579 -92.31842844 94.96203201 -66.00963993]\n" + "[-30.89674861 59.08649779 -49.27545779 -69.52856569 -1.39290501\n", + " -10.88163146 53.67514799 60.14998259 55.15218786 -29.79836856]\n", + "[ 125.68580631 -169.379169 105.7270734 46.42576672 -28.28325787\n", + " 47.49761201 173.67561423 86.59161319 -178.39301312 115.68242529]\n" ] } ], @@ -284,16 +266,14 @@ }, { "cell_type": "code", - "execution_count": 10, - "metadata": { - "collapsed": false - }, + "execution_count": 18, + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "[551 45 605 85 411 283 603 260 207 155]\n" + "[462 109 496 562 339 382 143 98 108 425]\n" ] } ], @@ -311,17 +291,15 @@ }, { "cell_type": "code", - "execution_count": 11, - "metadata": { - "collapsed": false - }, + "execution_count": 19, + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "[-65. 75. -75. 65. -25. 15. -75. 15. 35. 45.]\n", - "[ -65. -85. 115. -45. -25. 135. 95. -95. 95. -65.]\n" + "[-35. 55. -45. -65. -5. -15. 55. 65. 55. -25.]\n", + "[ 125. -165. 105. 45. -25. 45. 175. 85. -175. 
115.]\n" ] } ], @@ -341,10 +319,8 @@ }, { "cell_type": "code", - "execution_count": 12, - "metadata": { - "collapsed": false - }, + "execution_count": 20, + "metadata": {}, "outputs": [], "source": [ "import pygeogrids.netcdf as nc\n", @@ -353,10 +329,8 @@ }, { "cell_type": "code", - "execution_count": 13, - "metadata": { - "collapsed": false - }, + "execution_count": 21, + "metadata": {}, "outputs": [], "source": [ "loadedgrid = nc.load_grid('example.nc')" @@ -364,18 +338,16 @@ }, { "cell_type": "code", - "execution_count": 15, - "metadata": { - "collapsed": false - }, + "execution_count": 22, + "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 15, + "execution_count": 22, "metadata": {}, "output_type": "execute_result" } @@ -386,18 +358,16 @@ }, { "cell_type": "code", - "execution_count": 16, - "metadata": { - "collapsed": false - }, + "execution_count": 23, + "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 16, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } @@ -415,10 +385,8 @@ }, { "cell_type": "code", - "execution_count": 17, - "metadata": { - "collapsed": true - }, + "execution_count": 24, + "metadata": {}, "outputs": [], "source": [ "grid_WGS84 = grids.BasicGrid(randlon, randlat, geodatum='WGS84')" @@ -426,10 +394,8 @@ }, { "cell_type": "code", - "execution_count": 18, - "metadata": { - "collapsed": true - }, + "execution_count": 25, + "metadata": {}, "outputs": [], "source": [ "grid_GRS80 = grids.BasicGrid(randlon, randlat, geodatum='GRS80')" @@ -437,10 +403,8 @@ }, { "cell_type": "code", - "execution_count": 19, - "metadata": { - "collapsed": false - }, + "execution_count": 29, + "metadata": {}, "outputs": [ { "data": { @@ -448,21 +412,19 @@ "6378137.0" ] }, - "execution_count": 19, + "execution_count": 29, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "grid_WGS84.geodatum.a" + "grid_WGS84.geodatum.geod.a" ] }, { "cell_type": "code", - "execution_count": 20, - "metadata": { - "collapsed": false - }, + "execution_count": 30, + "metadata": {}, "outputs": [ { "data": { @@ -470,21 +432,19 @@ "6378137.0" ] }, - "execution_count": 20, + "execution_count": 30, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "grid_GRS80.geodatum.a" + "grid_GRS80.geodatum.geod.a" ] }, { "cell_type": "code", - "execution_count": 21, - "metadata": { - "collapsed": false - }, + "execution_count": 32, + "metadata": {}, "outputs": [ { "data": { @@ -492,35 +452,35 @@ "False" ] }, - "execution_count": 21, + "execution_count": 32, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "grid_WGS84.kdTree.geodatum.sphere" + "grid_WGS84.kdTree.geodatum.geod.sphere" ] } ], "metadata": { "kernelspec": { - "display_name": "Python 2", + "display_name": "Python 3", "language": "python", - "name": "python2" + "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", - "version": 2 + "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.6" + "pygments_lexer": "ipython3", + "version": "3.8.10" } }, "nbformat": 4, - "nbformat_minor": 0 -} + "nbformat_minor": 1 +} \ No newline at end of file diff --git a/docs/examples/index.rst b/docs/examples/index.rst new file mode 100644 index 0000000..ba2b190 --- /dev/null +++ b/docs/examples/index.rst @@ -0,0 +1,10 @@ +Examples +******** + +.. 
toctree:: + :maxdepth: 1 + + ./creating_and_working_with_grid_objects.ipynb + ./subsetting_grid_objects_with_shape_files.ipynb + +All shown examples are also available as ipython notebooks in ``pygeogrids/docs/examples``. diff --git a/docs/Subsetting grid objects with shape files.ipynb b/docs/examples/subsetting_grid_objects_with_shape_files.ipynb similarity index 89% rename from docs/Subsetting grid objects with shape files.ipynb rename to docs/examples/subsetting_grid_objects_with_shape_files.ipynb index 3236d93..64d362a 100644 --- a/docs/Subsetting grid objects with shape files.ipynb +++ b/docs/examples/subsetting_grid_objects_with_shape_files.ipynb @@ -9,20 +9,19 @@ }, { "cell_type": "code", - "execution_count": 25, - "metadata": { - "collapsed": true - }, + "execution_count": 3, + "metadata": {}, "outputs": [], "source": [ "import pygeogrids.grids as grids\n", "import pygeogrids.shapefile as shapefile\n", - "import numpy as np" + "import numpy as np\n", + "import os" ] }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -33,17 +32,30 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "We can now subset the 0.1x0.1 degree regular grid with the shapefiles from http://biogeo.ucdavis.edu/data/gadm2.8/gadm28_levels.shp.zip which were downloaded to `~/Downloads/gadm`" + "We can now subset the 0.1x0.1 degree regular grid with the shapefiles from http://biogeo.ucdavis.edu/data/gadm2.8/gadm28_levels.shp.zip which were downloaded/extracted to `~/Downloads/gadm`" ] }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 11, "metadata": {}, - "outputs": [], + "outputs": [ + { + "ename": "AttributeError", + "evalue": "'NoneType' object has no attribute 'GetLayer'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m austria = shapefile.get_gad_grid_points(\n\u001b[0m\u001b[1;32m 2\u001b[0m testgrid, os.path.join('/home', os.environ['USER'], 'Downloads', 'gadm', 'gadm28_levels.shp.zip'), 0, name='Austria')\n", + "\u001b[0;32m/shares/wpreimes/home/code/pygeogrids/src/pygeogrids/shapefile.py\u001b[0m in \u001b[0;36mget_gad_grid_points\u001b[0;34m(grid, gadm_shp_path, level, name, oid)\u001b[0m\n\u001b[1;32m 73\u001b[0m \u001b[0mdrv\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mogr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mGetDriverByName\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'ESRI Shapefile'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 74\u001b[0m \u001b[0mds_in\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdrv\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mOpen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mgadm_shp_path\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m'gadm28_adm{:}.shp'\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlevel\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 75\u001b[0;31m \u001b[0mlyr_in\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mds_in\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mGetLayer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 76\u001b[0m \u001b[0;32mif\u001b[0m 
\u001b[0mname\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 77\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mlevel\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'GetLayer'" + ] + } + ], "source": [ - "austria = shapefile.get_gad_grid_points(testgrid, '/home/cpa/Downloads/gadm/',\n", - " 0, name='Austria')\n" + "austria = shapefile.get_gad_grid_points(\n", + " testgrid, os.path.join('/home', os.environ['USER'], 'Downloads', 'gadm', 'gadm28_levels.shp.zip'), 0, name='Austria')\n" ] }, { @@ -147,21 +159,21 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 2", + "display_name": "Python 3", "language": "python", - "name": "python2" + "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", - "version": 2 + "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.11" + "pygments_lexer": "ipython3", + "version": "3.8.10" } }, "nbformat": 4, diff --git a/docs/index.rst b/docs/index.rst index 3ad265c..6d2a87d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -32,6 +32,7 @@ Contents .. toctree:: :maxdepth: 2 + Examples License Authors Changelog diff --git a/docs/requirements.txt b/docs/requirements.txt deleted file mode 100644 index 2ddf98a..0000000 --- a/docs/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -# Requirements file for ReadTheDocs, check .readthedocs.yml. -# To build the module reference correctly, make sure every external package -# under `install_requires` in `setup.cfg` is also listed here! -sphinx>=3.2.1 -# sphinx_rtd_theme diff --git a/docs/subsetting_grids_with_shape_files/output_6_1.png b/docs/subsetting_grids_with_shape_files/output_6_1.png deleted file mode 100644 index a808e4faf5bf925b9ba881788b2f723dba33e79f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7314 zcmai32UwHY(gsnn0n1w0h=9^8fM}wj2Cjl)D54@F-6aqfDM9HifyLFh27)M^kX4$} zJ4nPt#YCh;KxznyMx=x;36T{358b=v-uvIn^FR)Ba?Z?|^Ulos&3F6USu43MySGS5 zNy)*_SYD8ll12k#(WZ^S-{3xa4)C!)3TxO7}3) zA}Ogo$KaMHE=K0gFfr5ayg1gv_*RP>N}aqP68fOYIS0MH@-yGRJ+os&_JMuW4H?>+ zb(XiA`>(328|&@8-1umwrtrmW>qDm=o2H*nI=p$idHMzY`*qjO6%^ifx83|={f*%F z+#Qb|24qoRD3F(F%gt_T#JT0snAs=ePw7ULx4f^>#iQIgE+9Z9&&4t6PKpUxS#mUZ zpzIMyEi!68Crb_|OU@diMEYXpZ*H(Nyo zV#x}Ir61AC3BJ?@qKbROIygylCZlfk7bTBOaC%l&=T$F)LytWB3)XP&WNNj(c+0#n zRiZb)lU*3~vt_I56$MH|AVcK;yu+A1dMkY}Zk4m=%lkV4m&xfit8L1Hl?O^iqtD(m zQ9q7ybJ%T)E8ctp|#Xzr|4&r5I@hH0Gs>^~- zmvhr(-dv4D_0URJi6R~?i0px7MljK`!p^TX2OQWmOp7ug2>mz`H( z3}sSEez9jE&2xNC>d`kPUk?xun9-KT6FgvsjTN`O3rCTYRWUblff3@kueJw8GPrN- za}MTIAl+IKTvf8Omk)EkVIYkNb%45e4u%h~)~u=xeVQ|s=sQPPKA$5T`OlL7wC)cX zqeW~SEdqfXn7oKmT1T1e2*<#+Dr(@Txs$$Th@xlSp?LBQxK?S-&=CS3_WbW|d2DR%+v3y_@_9G(1d-dpZNN&hP^2 zSZ(g#bn3t)t_jKs`G^7W#wyny2&Fh7$EgV=<#{jkA3YUs;GL@XklK_Pq(HPK;mUJRer_F+B~BlzM@3= z)S06rI)IFtF&8CnKXePa2MCPJsf=3NvJmT)9rGvHJJ}c0Jx8A?3!<*XS>Di;Ue!_W zFBcgk6tLDw>@8!X>lshF{IVP$hn!8VPR~5*a9#-vt$!L`9@d3zGE#@0YTd<_p%ATF z>vhRRtNiXxw0z`4&DUHlu^5q&5FyPXOcfuoDX}Jmq~hvZ*L6gX=u_gDHH9NIo~z5UXA-u{SA|d2=dU>)zfQ|2COPtO*jF9@U-${L|P+&%ABMa5%i~5$!I6i zXxFh^oZa#Y5S1+#`n%_@4v+#tkexg&gh~r~SrAzD#(2ejCc04%2#jam0nGz*&rvNO zlAnb+-lJ8>l6k+Htn@qDsghJh{W?A>;)?JVMmm53PcW3pU1m3W9OOFm_&lyZB0G(P4fp;OdJ@n*q?gno7NFdr9E&xah**Q9;@#XJ$rV; z72wUNx)#TtMopFbb)G%H{Jo;SL*mN=Pp5|tg15O*#^N3c5q9qS6$qme=X~! 
z3j{ole3=I4*U<1aAhz^1TswrC8`TA*7E3VJZotQdtSsIH4K1K1)G7oX&soSuXCqb> ztmdU2V6!RClxJgw@H+?CIy9AvaCxj^^=%{M1lLg^@MV^Uu#i$ThG|`MHT1f+qhc5r zHYQUMJB4VYhvpP8Wi{I^Bi%u#SzJN(|KhFx>cceInlBFw7c9-{5#SqOxs$eMY-*1B G{`!A`@>-n$ diff --git a/environment.yml b/environment.yml index 9d707be..529e833 100644 --- a/environment.yml +++ b/environment.yml @@ -9,8 +9,12 @@ dependencies: - gdal - pyproj - pykdtree + - scipy + - ipykernel - pip - pip: + - sphinx==4.0.3 # https://github.com/spatialaudio/nbsphinx/issues/584, when this is resolved this line can be deleted to use the latest version. + - nbsphinx - sphinx_rtd_theme - pytest - pytest-cov \ No newline at end of file diff --git a/environment_pinned.yml b/environment_pinned.yml index 3c8c3ee..228baca 100644 --- a/environment_pinned.yml +++ b/environment_pinned.yml @@ -4,96 +4,102 @@ channels: - defaults dependencies: - _libgcc_mutex=0.1 - - atomicwrites=1.3.0 - - attrs=19.1.0 - - boost-cpp=1.70.0 + - _openmp_mutex=4.5 + - boost-cpp=1.74.0 - bzip2=1.0.8 - - ca-certificates=2019.6.16 + - c-ares=1.17.1 + - ca-certificates=2021.5.30 - cairo=1.16.0 - - certifi=2019.6.16 + - certifi=2021.5.30 - cfitsio=3.470 - - cftime=1.0.3.4 - - curl=7.65.3 - - expat=2.2.5 + - cftime=1.5.0 + - curl=7.77.0 + - expat=2.4.1 - fontconfig=2.13.1 - - freetype=2.10.0 - - freexl=1.0.5 - - gdal=2.4.1 - - geos=3.7.2 - - geotiff=1.4.3 + - freetype=2.10.4 + - freexl=1.0.6 + - gdal=3.3.1 + - geos=3.9.1 + - geotiff=1.6.0 - gettext=0.19.8.1 - - giflib=5.1.7 - - glib=2.58.3 - - hdf4=4.2.13 - - hdf5=1.10.5 - - icu=58.2 - - importlib_metadata=0.18 - - jpeg=9c - - json-c=0.13.1 - - kealib=1.4.10 - - krb5=1.16.3 - - libblas=3.8.0 - - libcblas=3.8.0 - - libcurl=7.65.3 - - libdap4=3.20.2 - - libedit=3.1.20170329 - - libffi=3.2.1 - - libgcc-ng=9.1.0 - - libgdal=2.4.1 - - libgfortran-ng=7.3.0 - - libiconv=1.15 + - giflib=5.2.1 + - hdf4=4.2.15 + - hdf5=1.10.6 + - icu=68.1 + - jbig=2.1 + - jpeg=9d + - json-c=0.15 + - kealib=1.4.14 + - krb5=1.19.1 + - ld_impl_linux-64=2.36.1 + - lerc=2.2.1 + - libblas=3.9.0 + - libcblas=3.9.0 + - libcurl=7.77.0 + - libdap4=3.20.6 + - libdeflate=1.7 + - libedit=3.1.20191231 + - libev=4.33 + - libffi=3.3 + - libgcc-ng=9.3.0 + - libgdal=3.3.1 + - libgfortran-ng=9.3.0 + - libgfortran5=9.3.0 + - libglib=2.68.3 + - libgomp=9.3.0 + - libiconv=1.16 - libkml=1.3.0 - - liblapack=3.8.0 - - libnetcdf=4.6.2 - - libopenblas=0.3.6 + - liblapack=3.9.0 + - libnetcdf=4.8.0 + - libnghttp2=1.43.0 + - libopenblas=0.3.15 - libpng=1.6.37 - - libpq=11.4 - - libspatialite=4.3.0a - - libssh2=1.8.2 - - libstdcxx-ng=9.1.0 - - libtiff=4.0.10 + - libpq=13.3 + - librttopo=1.1.0 + - libspatialite=5.0.1 + - libssh2=1.9.0 + - libstdcxx-ng=9.3.0 + - libtiff=4.3.0 - libuuid=2.32.1 + - libwebp-base=1.2.0 - libxcb=1.13 - - libxml2=2.9.9 - - lz4-c=1.8.3 - - more-itertools=7.2.0 - - ncurses=6.1 - - netcdf4=1.5.1.2 - - numpy=1.17.0 - - openjpeg=2.3.1 - - openssl=1.1.1c - - packaging=19.0 - - pandas=0.25.0 - - pcre=8.41 - - pip=19.2.1 - - pixman=0.38.0 - - pluggy=0.12.0 - - poppler=0.67.0 - - poppler-data=0.4.9 - - postgresql=11.4 - - proj4=5.2.0 + - libxml2=2.9.12 + - libzip=1.8.0 + - lz4-c=1.9.3 + - ncurses=6.2 + - netcdf4=1.5.7 + - numpy=1.21.1 + - openjpeg=2.4.0 + - openssl=1.1.1k + - pandas=1.3.0 + - pcre=8.45 + - pip=21.1.3 + - pixman=0.40.0 + - poppler=21.03.0 + - poppler-data=0.4.10 + - postgresql=13.3 + - proj=8.0.1 - pthread-stubs=0.4 - - py=1.8.0 - - pykdtree=1.3.1 - - pyparsing=2.4.2 - - pyproj=1.9.6 - - pytest=5.0.1 - - python=3.6.7 - - python-dateutil=2.8.0 - - pytz=2019.2 - - readline=8.0 - - 
setuptools=41.0.1 - - six=1.12.0 - - sqlite=3.29.0 - - tk=8.6.9 - - tzcode=2019a - - wcwidth=0.1.7 - - wheel=0.33.4 - - xerces-c=3.2.2 + - pykdtree=1.3.4 + - pyproj=3.1.0 + - python=3.9.6 + - python-dateutil=2.8.2 + - python_abi=3.9 + - pytz=2021.1 + - readline=8.1 + - setuptools=49.6.0 + - six=1.16.0 + - sqlite=3.36.0 + - tiledb=2.3.2 + - tk=8.6.10 + - tzcode=2021a + - tzdata=2021a + - wheel=0.36.2 + - xerces-c=3.2.3 - xorg-kbproto=1.0.7 - xorg-libice=1.0.10 - xorg-libsm=1.2.3 - - xorg-libx11=1.6.8 + - xorg-libx11=1.7.2 - xorg-libxau=1.0.9 - xorg-libxdmcp=1.1.3 - xorg-libxext=1.3.4 @@ -101,9 +107,37 @@ dependencies: - xorg-renderproto=0.11.1 - xorg-xextproto=7.3.0 - xorg-xproto=7.0.31 - - xz=5.2.4 - - zipp=0.5.2 + - xz=5.2.5 - zlib=1.2.11 - - zstd=1.4.0 - - sphinx_rtd_theme - + - zstd=1.5.0 + - pip: + - alabaster==0.7.12 + - attrs==21.2.0 + - babel==2.9.1 + - charset-normalizer==2.0.3 + - coverage==5.5 + - docutils==0.16 + - idna==3.2 + - imagesize==1.2.0 + - iniconfig==1.1.1 + - jinja2==3.0.1 + - markupsafe==2.0.1 + - packaging==21.0 + - pluggy==0.13.1 + - py==1.10.0 + - pygments==2.9.0 + - pyparsing==2.4.7 + - pytest==6.2.4 + - pytest-cov==2.12.1 + - requests==2.26.0 + - snowballstemmer==2.1.0 + - sphinx==4.1.1 + - sphinx-rtd-theme==0.5.2 + - sphinxcontrib-applehelp==1.0.2 + - sphinxcontrib-devhelp==1.0.2 + - sphinxcontrib-htmlhelp==2.0.0 + - sphinxcontrib-jsmath==1.0.1 + - sphinxcontrib-qthelp==1.0.3 + - sphinxcontrib-serializinghtml==1.1.5 + - toml==0.10.2 + - urllib3==1.26.6 \ No newline at end of file diff --git a/src/pygeogrids/__init__.py b/src/pygeogrids/__init__.py index a4aa188..a58f308 100644 --- a/src/pygeogrids/__init__.py +++ b/src/pygeogrids/__init__.py @@ -15,4 +15,4 @@ finally: del version, PackageNotFoundError -from .grids import BasicGrid, CellGrid, genreg_grid, lonlat2cell, reorder_to_cellsize \ No newline at end of file +from .grids import BasicGrid, CellGrid, genreg_grid, lonlat2cell, reorder_to_cellsize diff --git a/src/pygeogrids/_version.py b/src/pygeogrids/_version.py index af10864..cb17738 100644 --- a/src/pygeogrids/_version.py +++ b/src/pygeogrids/_version.py @@ -17,20 +17,21 @@ import subprocess import sys -__location__ = os.path.join(os.getcwd(), os.path.dirname( - inspect.getfile(inspect.currentframe()))) +__location__ = os.path.join( + os.getcwd(), os.path.dirname(inspect.getfile(inspect.currentframe())) +) # these strings will be replaced by git during git-archive git_refnames = "$Format:%d$" git_full = "$Format:%H$" # general settings -tag_prefix = 'v' # tags are like v1.2.0 +tag_prefix = "v" # tags are like v1.2.0 package = "pygeogrids" namespace = [] root_pkg = namespace[0] if namespace else package if namespace: - pkg_path = os.path.join(*namespace[-1].split('.') + [package]) + pkg_path = os.path.join(*namespace[-1].split(".") + [package]) else: pkg_path = package @@ -42,13 +43,16 @@ def __init__(self, command, shell=True, cwd=None): self._cwd = cwd def __call__(self, *args): - command = "{cmd} {args}".format(cmd=self._command, - args=subprocess.list2cmdline(args)) - output = subprocess.check_output(command, - shell=self._shell, - cwd=self._cwd, - stderr=subprocess.STDOUT, - universal_newlines=True) + command = "{cmd} {args}".format( + cmd=self._command, args=subprocess.list2cmdline(args) + ) + output = subprocess.check_output( + command, + shell=self._shell, + cwd=self._cwd, + stderr=subprocess.STDOUT, + universal_newlines=True, + ) return self._yield_output(output) def _yield_output(self, msg): @@ -90,10 +94,9 @@ def version_from_git(tag_prefix, 
root, verbose=False): return None if not tag.startswith(tag_prefix): if verbose: - print("tag '{}' doesn't start with prefix '{}'".format(tag, - tag_prefix)) + print("tag '{}' doesn't start with prefix '{}'".format(tag, tag_prefix)) return None - tag = tag[len(tag_prefix):] + tag = tag[len(tag_prefix) :] sha1 = next(git("rev-parse", "HEAD")) full = sha1.strip() if tag.endswith("-dirty"): @@ -135,7 +138,7 @@ def version_from_keywords(keywords, tag_prefix, verbose=False): # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d @@ -144,24 +147,22 @@ def version_from_keywords(keywords, tag_prefix, verbose=False): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r'\d', r)]) + tags = set([r for r in refs if re.search(r"\d", r)]) if verbose: - print("discarding '{}', no digits".format(",".join(refs-tags))) + print("discarding '{}', no digits".format(",".join(refs - tags))) if verbose: print("likely tags: {}".format(",".join(sorted(tags)))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] + r = ref[len(tag_prefix) :] if verbose: print("picking {}".format(r)) - return {"version": r, - "full": keywords["full"].strip()} + return {"version": r, "full": keywords["full"].strip()} else: if verbose: print("no suitable tags, using full revision id") - return {"version": keywords["full"].strip(), - "full": keywords["full"].strip()} + return {"version": keywords["full"].strip(), "full": keywords["full"].strip()} def version_from_parentdir(parentdir_prefix, root, verbose=False): @@ -170,24 +171,26 @@ def version_from_parentdir(parentdir_prefix, root, verbose=False): dirname = os.path.basename(root) if not dirname.startswith(parentdir_prefix): if verbose: - print("guessing rootdir is '{}', but '{}' doesn't start with " - "prefix '{}'".format(root, dirname, parentdir_prefix)) + print( + "guessing rootdir is '{}', but '{}' doesn't start with " + "prefix '{}'".format(root, dirname, parentdir_prefix) + ) return None - version = dirname[len(parentdir_prefix):].split('-')[0] + version = dirname[len(parentdir_prefix) :].split("-")[0] return {"version": version, "full": ""} def git2pep440(ver_str): - dash_count = ver_str.count('-') + dash_count = ver_str.count("-") if dash_count == 0: return ver_str elif dash_count == 1: - return ver_str.split('-')[0] + "+dirty" + return ver_str.split("-")[0] + "+dirty" elif dash_count == 2: - tag, commits, sha1 = ver_str.split('-') + tag, commits, sha1 = ver_str.split("-") return "{}.post0.dev{}+{}".format(tag, commits, sha1) elif dash_count == 3: - tag, commits, sha1, _ = ver_str.split('-') + tag, commits, sha1, _ = ver_str.split("-") return "{}.post0.dev{}+{}.dirty".format(tag, commits, sha1) else: raise RuntimeError("Invalid version string") @@ -195,7 +198,7 @@ def git2pep440(ver_str): def get_versions(verbose=False): vcs_kwds = {"refnames": git_refnames, "full": git_full} - parentdir = package + '-' + parentdir = package + "-" root = __location__ # pkg_path is the 
relative path from the top of the source # tree (where the .git directory might live) to this file. @@ -205,10 +208,9 @@ def get_versions(verbose=False): # different version retrieval methods as (method, args, comment) ver_retrieval = [ - (version_from_keywords, (vcs_kwds, tag_prefix, verbose), - 'expanded keywords'), - (version_from_parentdir, (parentdir, root, verbose), 'parentdir'), - (version_from_git, (tag_prefix, root, verbose), 'git') + (version_from_keywords, (vcs_kwds, tag_prefix, verbose), "expanded keywords"), + (version_from_parentdir, (parentdir, root, verbose), "parentdir"), + (version_from_git, (tag_prefix, root, verbose), "git"), ] for method, args, comment in ver_retrieval: ver = method(*args) @@ -218,5 +220,5 @@ def get_versions(verbose=False): break else: ver = {"version": "unknown", "full": ""} - ver['version'] = git2pep440(ver['version']) + ver["version"] = git2pep440(ver["version"]) return ver diff --git a/src/pygeogrids/geodetic_datum.py b/src/pygeogrids/geodetic_datum.py index 4cc1a07..4bd37cd 100644 --- a/src/pygeogrids/geodetic_datum.py +++ b/src/pygeogrids/geodetic_datum.py @@ -30,7 +30,7 @@ import pyproj -class GeodeticDatum(): +class GeodeticDatum: """ Class representing a geodetic datum providing transformation and calculation functionality @@ -43,7 +43,7 @@ class GeodeticDatum(): """ def __init__(self, ellps, **kwargs): - kwargs['ellps'] = ellps + kwargs["ellps"] = ellps self.geod = pyproj.Geod(**kwargs) self.geod.e = np.sqrt(self.geod.es) self.name = ellps @@ -112,7 +112,7 @@ def ParallelRadi(self, lat): radius : np.array, float Radius of parallel """ - x, y, _ = self.toECEF(0., lat) + x, y, _ = self.toECEF(0.0, lat) return np.sqrt(x ** 2 + y ** 2) def GeocentricLat(self, lat): @@ -132,8 +132,7 @@ def GeocentricLat(self, lat): if _element_iterable(lat): lat = np.array(lat, dtype=np.float64) - return np.rad2deg(np.arctan((1 - self.geod.e ** 2) * - np.tan(np.deg2rad(lat)))) + return np.rad2deg(np.arctan((1 - self.geod.e ** 2) * np.tan(np.deg2rad(lat)))) def GeodeticLat(self, lat): """ @@ -151,8 +150,7 @@ def GeodeticLat(self, lat): """ if _element_iterable(lat): lat = np.array(lat, dtype=np.float64) - return np.rad2deg(np.arctan(np.tan(np.deg2rad(lat)) / - (1 - self.geod.e ** 2))) + return np.rad2deg(np.arctan(np.tan(np.deg2rad(lat)) / (1 - self.geod.e ** 2))) def ReducedLat(self, lat): """ @@ -170,8 +168,9 @@ def ReducedLat(self, lat): """ if _element_iterable(lat): lat = np.array(lat, dtype=np.float64) - return np.rad2deg(np.arctan(np.sqrt(1 - self.geod.e ** 2) * - np.tan(np.deg2rad(lat)))) + return np.rad2deg( + np.arctan(np.sqrt(1 - self.geod.e ** 2) * np.tan(np.deg2rad(lat))) + ) def GeocentricDistance(self, lon, lat): """ @@ -190,7 +189,7 @@ def GeocentricDistance(self, lon, lat): Geocentric radius """ x, y, z = self.toECEF(lon, lat) - return np.sqrt(x**2 + y**2 + z**2) + return np.sqrt(x ** 2 + y ** 2 + z ** 2) def EllN(self, lat): """ @@ -209,8 +208,9 @@ def EllN(self, lat): if _element_iterable(lat): lat = np.array(lat, dtype=np.float64) - return self.geod.a / np.sqrt(1 - (self.geod.es) * - (np.sin(np.deg2rad(lat))) ** 2) + return self.geod.a / np.sqrt( + 1 - (self.geod.es) * (np.sin(np.deg2rad(lat))) ** 2 + ) def EllM(self, lat): """ @@ -228,9 +228,9 @@ def EllM(self, lat): """ if _element_iterable(lat): lat = np.array(lat, dtype=np.float64) - return (self.geod.a * (1 - self.geod.es)) / \ - ((1 - self.geod.es) * - (np.sin(np.deg2rad(lat)) ** 2) ** (3. 
/ 2.)) + return (self.geod.a * (1 - self.geod.es)) / ( + (1 - self.geod.es) * (np.sin(np.deg2rad(lat)) ** 2) ** (3.0 / 2.0) + ) def GaussianRadi(self, lat): """ @@ -294,7 +294,7 @@ def MeridianArcDist(self, lat1, lat2): if _element_iterable(lat1) and lat1.shape == lat2.shape: lat1 = np.array(lat1, dtype=np.float64) lat2 = np.array(lat2, dtype=np.float64) - fazi, bazi, dist = self.geod.inv(0., lat1, 0., lat2) + fazi, bazi, dist = self.geod.inv(0.0, lat1, 0.0, lat2) return dist diff --git a/src/pygeogrids/grids.py b/src/pygeogrids/grids.py index 7c3720d..44e764b 100644 --- a/src/pygeogrids/grids.py +++ b/src/pygeogrids/grids.py @@ -32,8 +32,10 @@ import numpy as np import numpy.testing as nptest import warnings + try: from osgeo import ogr + ogr_installed = True except ImportError: ogr_installed = False @@ -154,9 +156,18 @@ class BasicGrid(object): the provided 2d-shape that make up the grid """ - def __init__(self, lon, lat, gpis=None, geodatum='WGS84', subset=None, - setup_kdTree=True, shape=None, transform_lon=None, - kd_tree_name='pykdtree'): + def __init__( + self, + lon, + lat, + gpis=None, + geodatum="WGS84", + subset=None, + setup_kdTree=True, + shape=None, + transform_lon=None, + kd_tree_name="pykdtree", + ): """ init method, prepares lon and lat arrays for _transform_lonlats if necessary @@ -170,8 +181,7 @@ def __init__(self, lon, lat, gpis=None, geodatum='WGS84', subset=None, subset = np.asanyarray(subset) if lat.shape != lon.shape: - raise GridDefinitionError( - "lat and lon np.arrays have to have equal shapes") + raise GridDefinitionError("lat and lon np.arrays have to have equal shapes") self.n_gpi = len(lon) @@ -194,15 +204,19 @@ def __init__(self, lon, lat, gpis=None, geodatum='WGS84', subset=None, if shape is not None and len(shape) == 2: if len(self.arrlat) % shape[0] != 0: - msg = ("Given shape does not have the correct first dimension." - " Length of lat array is not divisible by shape[0] " - " without rest") + msg = ( + "Given shape does not have the correct first dimension." + " Length of lat array is not divisible by shape[0] " + " without rest" + ) raise GridDefinitionError(msg) if len(self.arrlon) % shape[1] != 0: - msg = ("Given shape does not have the correct second " - "dimension. Length of lon array is not divisible by " - "shape[1] without rest") + msg = ( + "Given shape does not have the correct second " + "dimension. 
Length of lon array is not divisible by " + "shape[1] without rest" + ) raise GridDefinitionError(msg) self.shape = shape @@ -219,8 +233,9 @@ def __init__(self, lon, lat, gpis=None, geodatum='WGS84', subset=None, self.gpidirect = True else: if lat.shape != gpis.shape: - raise GridDefinitionError("lat, lon gpi np.arrays have to " - "have equal shapes") + raise GridDefinitionError( + "lat, lon gpi np.arrays have to " "have equal shapes" + ) self.gpis = gpis self.gpidirect = False @@ -250,8 +265,12 @@ def _setup_kdtree(self): Setup kdTree """ if self.kdTree is None: - self.kdTree = NN.findGeoNN(self.activearrlon, self.activearrlat, - self.geodatum, kd_tree_name=self.kd_tree_name) + self.kdTree = NN.findGeoNN( + self.activearrlon, + self.activearrlat, + self.geodatum, + kd_tree_name=self.kd_tree_name, + ) self.kdTree._build_kdtree() def split(self, n): @@ -303,9 +322,11 @@ def grid_points(self, *args): elif self.issplit and len(args) == 1: return self._split_grid_points(args[0]) - raise GridIterationError("this function only takes an argument if " - "the grid is split, and takes no argument " - "if the grid is not split") + raise GridIterationError( + "this function only takes an argument if " + "the grid is split, and takes no argument " + "if the grid is not split" + ) def get_grid_points(self, *args): """ @@ -329,15 +350,11 @@ def get_grid_points(self, *args): """ if not self.issplit and len(args) == 0: - return (self.activegpis, - self.activearrlon, - self.activearrlat) + return (self.activegpis, self.activearrlon, self.activearrlat) elif self.issplit and len(args) == 1: n = args[0] - return (self.subgpis[n], - self.subarrlons[n], - self.subarrlats[n]) + return (self.subgpis[n], self.subarrlons[n], self.subarrlats[n]) def _normal_grid_points(self): """ @@ -352,8 +369,7 @@ def _normal_grid_points(self): lat : float longitude of gpi """ - for i, (lon, lat) in enumerate(zip(self.activearrlon, - self.activearrlat)): + for i, (lon, lat) in enumerate(zip(self.activearrlon, self.activearrlat)): yield self.activegpis[i], lon, lat def _split_grid_points(self, n): @@ -375,8 +391,7 @@ def _split_grid_points(self, n): Longitude of gpi. 
""" - for i, (lon, lat) in enumerate(zip(self.subarrlons[n], - self.subarrlats[n])): + for i, (lon, lat) in enumerate(zip(self.subarrlons[n], self.subarrlats[n])): yield self.subgpis[n][i], lon, lat def find_nearest_gpi(self, lon, lat, max_dist=np.Inf): @@ -438,8 +453,7 @@ def find_k_nearest_gpi(self, lon, lat, max_dist=np.Inf, k=1): if self.kdTree is None: self._setup_kdtree() - distance, ind = self.kdTree.find_nearest_index( - lon, lat, max_dist=max_dist, k=k) + distance, ind = self.kdTree.find_nearest_index(lon, lat, max_dist=max_dist, k=k) if self.gpidirect and self.allpoints or len(ind) == 0: gpi = ind @@ -524,7 +538,7 @@ def gpi2rowcol(self, gpi): return index_lat, index_lon else: - raise(GridDefinitionError("Grid has no 2D shape")) + raise (GridDefinitionError("Grid has no 2D shape")) def calc_lut(self, other, max_dist=np.Inf, into_subset=False): """ @@ -556,7 +570,8 @@ def calc_lut(self, other, max_dist=np.Inf, into_subset=False): if self.kdTree.kdtree is not None and other.kdTree.kdtree is not None: dist, index = other.kdTree.find_nearest_index( - self.activearrlon, self.activearrlat, max_dist=max_dist) + self.activearrlon, self.activearrlat, max_dist=max_dist + ) valid_index = np.where(dist != np.inf)[0] dist = dist[valid_index] @@ -600,9 +615,9 @@ def get_shp_grid_points(self, ply): if ogr_installed: lonmin, lonmax, latmin, latmax = ply.GetEnvelope() - gpis, lats, lons = self.get_bbox_grid_points(latmin, latmax, - lonmin, lonmax, - both=True) + gpis, lats, lons = self.get_bbox_grid_points( + latmin, latmax, lonmin, lonmax, both=True + ) lon_ip = [] lat_ip = [] @@ -622,26 +637,36 @@ def get_shp_grid_points(self, ply): return None else: - raise Exception("No supported implementation installed.\ - Please install gdal and osgeo.") + raise Exception( + "No supported implementation installed.\ + Please install gdal and osgeo." + ) @staticmethod def issorted(a): return np.all(a[:-1] <= a[1:]) - def _index_bbox_grid_points(self, gp_info: np.array, - latmin:float, latmax:float, lonmin:float, - lonmax:float) -> (np.array, np.array): - - index = np.where((gp_info[2] <= latmax) & - (gp_info[2] >= latmin) & - (gp_info[1] <= lonmax) & - (gp_info[1] >= lonmin)) + def _index_bbox_grid_points( + self, + gp_info: np.array, + latmin: float, + latmax: float, + lonmin: float, + lonmax: float, + ) -> (np.array, np.array): + + index = np.where( + (gp_info[2] <= latmax) + & (gp_info[2] >= latmin) + & (gp_info[1] <= lonmax) + & (gp_info[1] >= lonmin) + ) return index - def get_bbox_grid_points(self, latmin=-90, latmax=90, lonmin=-180, - lonmax=180, coords=False, both=False): + def get_bbox_grid_points( + self, latmin=-90, latmax=90, lonmin=-180, lonmax=180, coords=False, both=False + ): """ Returns all grid points located in a submitted geographic box, optional as coordinates. @@ -676,8 +701,7 @@ def get_bbox_grid_points(self, latmin=-90, latmax=90, lonmin=-180, gp_info = self.get_grid_points() - index = self._index_bbox_grid_points(gp_info, latmin, latmax, - lonmin, lonmax) + index = self._index_bbox_grid_points(gp_info, latmin, latmax, lonmin, lonmax) gpis, lons, lats = gp_info if coords is True: @@ -705,17 +729,27 @@ def to_cell_grid(self, cellsize=5.0, cellsize_lat=None, cellsize_lon=None): cell_grid : CellGrid object Cell grid object. 
""" - cells = lonlat2cell(self.arrlon, self.arrlat, cellsize=cellsize, - cellsize_lat=cellsize_lat, - cellsize_lon=cellsize_lon) + cells = lonlat2cell( + self.arrlon, + self.arrlat, + cellsize=cellsize, + cellsize_lat=cellsize_lat, + cellsize_lon=cellsize_lon, + ) if self.gpidirect: gpis = None else: gpis = self.gpis - return CellGrid(self.arrlon, self.arrlat, cells, gpis=gpis, - subset=self.subset, shape=self.shape) + return CellGrid( + self.arrlon, + self.arrlat, + cells, + gpis=gpis, + subset=self.subset, + shape=self.shape, + ) def subgrid_from_gpis(self, gpis): """ @@ -751,20 +785,19 @@ def __eq__(self, other): idx_gpi_other = np.argsort(other.gpis) gpisame = np.all(self.gpis[idx_gpi] == other.gpis[idx_gpi_other]) try: - nptest.assert_allclose(self.arrlon[idx_gpi], - other.arrlon[idx_gpi_other]) + nptest.assert_allclose(self.arrlon[idx_gpi], other.arrlon[idx_gpi_other]) lonsame = True except AssertionError: lonsame = False try: - nptest.assert_allclose(self.arrlat[idx_gpi], - other.arrlat[idx_gpi_other]) + nptest.assert_allclose(self.arrlat[idx_gpi], other.arrlat[idx_gpi_other]) latsame = True except AssertionError: latsame = False if self.subset is not None and other.subset is not None: subsetsame = np.all( - sorted(self.gpis[self.subset]) == sorted(other.gpis[other.subset])) + sorted(self.gpis[self.subset]) == sorted(other.gpis[other.subset]) + ) elif self.subset is None and other.subset is None: subsetsame = True else: @@ -782,8 +815,7 @@ def __eq__(self, other): else: geosame = False - return np.all([lonsame, latsame, gpisame, subsetsame, shapesame, - geosame]) + return np.all([lonsame, latsame, gpisame, subsetsame, shapesame, geosame]) class CellGrid(BasicGrid): @@ -820,19 +852,35 @@ class CellGrid(BasicGrid): if a subset is given otherwise equal to arrlon. """ - def __init__(self, lon, lat, cells, gpis=None, geodatum='WGS84', - subset=None, setup_kdTree=False, **kwargs): - - super(CellGrid, self).__init__(lon, lat, gpis=gpis, - geodatum=geodatum, subset=subset, - setup_kdTree=setup_kdTree, **kwargs) + def __init__( + self, + lon, + lat, + cells, + gpis=None, + geodatum="WGS84", + subset=None, + setup_kdTree=False, + **kwargs, + ): + + super(CellGrid, self).__init__( + lon, + lat, + gpis=gpis, + geodatum=geodatum, + subset=subset, + setup_kdTree=setup_kdTree, + **kwargs, + ) self.gpi_lut = None cells = np.asanyarray(cells) if self.arrlon.shape != cells.shape: raise GridDefinitionError( - "lat, lon and cells np.arrays have to have equal shapes") + "lat, lon and cells np.arrays have to have equal shapes" + ) self.arrcell = cells if subset is not None: @@ -905,17 +953,21 @@ def get_grid_points(self, *args): Cell numbers. 
""" if not self.issplit and len(args) == 0: - return (self.activegpis, - self.activearrlon, - self.activearrlat, - self.activearrcell) + return ( + self.activegpis, + self.activearrlon, + self.activearrlat, + self.activearrcell, + ) elif self.issplit and len(args) == 1: n = args[0] - return (self.subgpis[n], - self.subarrlons[n], - self.subarrlats[n], - self.subcells[n]) + return ( + self.subgpis[n], + self.subarrlons[n], + self.subarrlats[n], + self.subcells[n], + ) def grid_points_for_cell(self, cells): """ @@ -991,8 +1043,9 @@ def _normal_grid_points(self): for cell in uniq_cells: cell_gpis = np.where(cell == self.activearrcell)[0] for gpi in cell_gpis: - yield self.activegpis[gpi], self.activearrlon[gpi], \ - self.activearrlat[gpi], cell + yield self.activegpis[gpi], self.activearrlon[gpi], self.activearrlat[ + gpi + ], cell def _split_grid_points(self, n): """ @@ -1019,8 +1072,9 @@ def _split_grid_points(self, n): for cell in uniq_cells: cell_gpis = np.where(cell == self.subcells[n])[0] for gpi in cell_gpis: - yield self.subgpis[n][gpi], self.subarrlons[n][gpi], \ - self.subarrlats[n][gpi], cell + yield self.subgpis[n][gpi], self.subarrlons[n][gpi], self.subarrlats[n][ + gpi + ], cell def subgrid_from_gpis(self, gpis): """ @@ -1039,8 +1093,7 @@ def subgrid_from_gpis(self, gpis): sublons, sublats = self.gpi2lonlat(gpis) subcells = self.gpi2cell(gpis) - return CellGrid(sublons, sublats, subcells, gpis, - geodatum=self.geodatum.name) + return CellGrid(sublons, sublats, subcells, gpis, geodatum=self.geodatum.name) def subgrid_from_cells(self, cells): """ @@ -1059,8 +1112,9 @@ def subgrid_from_cells(self, cells): subgpis, sublons, sublats = self.grid_points_for_cell(cells) subcells = self.gpi2cell(subgpis) - return CellGrid(sublons, sublats, subcells, subgpis, - geodatum=self.geodatum.name) + return CellGrid( + sublons, sublats, subcells, subgpis, geodatum=self.geodatum.name + ) def __eq__(self, other): """ @@ -1074,13 +1128,19 @@ def __eq__(self, other): basicsame = super(CellGrid, self).__eq__(other) idx_gpi = np.argsort(self.gpis) idx_gpi_other = np.argsort(other.gpis) - cellsame = np.all(self.arrcell[idx_gpi] - == other.arrcell[idx_gpi_other]) + cellsame = np.all(self.arrcell[idx_gpi] == other.arrcell[idx_gpi_other]) return np.all([basicsame, cellsame]) - def get_bbox_grid_points(self, latmin=-90, latmax=90, lonmin=-180, - lonmax=180, coords=False, both=False, - sort_by_cells=True): + def get_bbox_grid_points( + self, + latmin=-90, + latmax=90, + lonmin=-180, + lonmax=180, + coords=False, + both=False, + sort_by_cells=True, + ): """ Returns all grid points located in a submitted geographic box, optional as coordinates. The points are grouped by cells! @@ -1115,8 +1175,7 @@ def get_bbox_grid_points(self, latmin=-90, latmax=90, lonmin=-180, gp_info = self.get_grid_points() - index = self._index_bbox_grid_points( - gp_info, latmin, latmax, lonmin, lonmax) + index = self._index_bbox_grid_points(gp_info, latmin, latmax, lonmin, lonmax) gpis, lons, lats, cells = gp_info @@ -1133,7 +1192,8 @@ def get_bbox_grid_points(self, latmin=-90, latmax=90, lonmin=-180, else: return gpis -def lonlat2cell(lon, lat, cellsize=5., cellsize_lon=None, cellsize_lat=None): + +def lonlat2cell(lon, lat, cellsize=5.0, cellsize_lon=None, cellsize_lat=None): """ Partition lon, lat points into cells. 
@@ -1159,19 +1219,20 @@ def lonlat2cell(lon, lat, cellsize=5., cellsize_lon=None, cellsize_lat=None): cellsize_lon = cellsize if cellsize_lat is None: cellsize_lat = cellsize - y = np.clip(np.floor((np.double(lat) + - (np.double(90.0) + 1e-9)) / cellsize_lat), 0, 180) - x = np.clip(np.floor((np.double(lon) + (np.double(180.0) + 1e-9)) - / cellsize_lon), 0, 360) + y = np.clip( + np.floor((np.double(lat) + (np.double(90.0) + 1e-9)) / cellsize_lat), 0, 180 + ) + x = np.clip( + np.floor((np.double(lon) + (np.double(180.0) + 1e-9)) / cellsize_lon), 0, 360 + ) cells = np.int32(x * (np.double(180.0) / cellsize_lat) + y) - max_cells = ((np.double(180.0) / cellsize_lat) * - (np.double(360.0)) / cellsize_lon) + max_cells = (np.double(180.0) / cellsize_lat) * (np.double(360.0)) / cellsize_lon cells = np.where(cells > max_cells - 1, cells - max_cells, cells) return np.int32(cells) -def gridfromdims(londim, latdim, origin='top', **kwargs): +def gridfromdims(londim, latdim, origin="top", **kwargs): """ Defines new grid object from latitude and longitude dimensions. Latitude and longitude dimensions are 1D arrays that give the latitude and @@ -1195,20 +1256,30 @@ def gridfromdims(londim, latdim, origin='top', **kwargs): New grid object. """ lons, lats = np.meshgrid(londim, latdim) - if origin.lower() == 'bottom': + if origin.lower() == "bottom": lats = np.flipud(lats) - elif origin.lower() == 'top': + elif origin.lower() == "top": pass else: - raise ValueError(f"Unexpected origin passed, expected 'top' or 'bottom' " - f"got {origin.lower()}") - - return BasicGrid(lons.flatten(), lats.flatten(), - shape=(len(latdim), len(londim)), **kwargs) - - -def genreg_grid(grd_spc_lat=1, grd_spc_lon=1, minlat=-90.0, maxlat=90.0, - minlon=-180.0, maxlon=180.0, **kwargs): + raise ValueError( + f"Unexpected origin passed, expected 'top' or 'bottom' " + f"got {origin.lower()}" + ) + + return BasicGrid( + lons.flatten(), lats.flatten(), shape=(len(latdim), len(londim)), **kwargs + ) + + +def genreg_grid( + grd_spc_lat=1, + grd_spc_lon=1, + minlat=-90.0, + maxlat=90.0, + minlon=-180.0, + maxlon=180.0, + **kwargs, +): """ Define a global regular lon lat grid which starts in the North Western Corner of minlon, maxlat. The grid points are defined to be in the middle @@ -1283,8 +1354,7 @@ def reorder_to_cellsize(grid, cellsize_lat, cellsize_lon): different ordering. 
""" - cell_grid = grid.to_cell_grid(cellsize_lat=cellsize_lat, - cellsize_lon=cellsize_lon) + cell_grid = grid.to_cell_grid(cellsize_lat=cellsize_lat, cellsize_lon=cellsize_lon) cell_sort = np.argsort(cell_grid.arrcell) new_arrlon = grid.arrlon[cell_sort] new_arrlat = grid.arrlat[cell_sort] @@ -1296,8 +1366,6 @@ def reorder_to_cellsize(grid, cellsize_lat, cellsize_lon): full_subset[grid.subset] = 1 new_full_subset = full_subset[cell_sort] new_subset = np.where(new_full_subset == 1)[0] - return CellGrid(new_arrlon, new_arrlat, new_arrcell, - gpis=new_gpis, - subset=new_subset) - - + return CellGrid( + new_arrlon, new_arrlat, new_arrcell, gpis=new_gpis, subset=new_subset + ) diff --git a/src/pygeogrids/nearest_neighbor.py b/src/pygeogrids/nearest_neighbor.py index 3d501e8..cdb3399 100644 --- a/src/pygeogrids/nearest_neighbor.py +++ b/src/pygeogrids/nearest_neighbor.py @@ -29,12 +29,14 @@ try: import pykdtree.kdtree as pykd + pykdtree_installed = True except ImportError: pykdtree_installed = False try: import scipy.spatial as sc_spat + scipy_installed = True except ImportError: scipy_installed = False @@ -92,8 +94,7 @@ class that takes lat,lon coordinates, transformes them to cartesian (X,Y,Z) """ - def __init__(self, lon, lat, geodatum, grid=False, - kd_tree_name='pykdtree'): + def __init__(self, lon, lat, geodatum, grid=False, kd_tree_name="pykdtree"): """ init method, prepares lon and lat arrays for _transform_lonlats if necessary @@ -107,8 +108,7 @@ def __init__(self, lon, lat, geodatum, grid=False, self.lon_size = len(lon) else: if lat.shape != lon.shape: - raise Exception( - "lat and lon np.arrays have to have equal shapes") + raise Exception("lat and lon np.arrays have to have equal shapes") lat_init = lat lon_init = lon # Earth radius @@ -137,9 +137,7 @@ def _transform_lonlats(self, lon, lat): lon = np.array(lon) lat = np.array(lat) coords = np.zeros((lon.size, 3), dtype=np.float64) - (coords[:, 0], - coords[:, 1], - coords[:, 2]) = self.geodatum.toECEF(lon, lat) + (coords[:, 0], coords[:, 1], coords[:, 2]) = self.geodatum.toECEF(lon, lat) return coords @@ -147,13 +145,15 @@ def _build_kdtree(self): """ Build the kdtree and saves it in the self.kdtree attribute """ - if self.kd_tree_name == 'pykdtree' and pykdtree_installed: + if self.kd_tree_name == "pykdtree" and pykdtree_installed: self.kdtree = pykd.KDTree(self.coords) elif scipy_installed: self.kdtree = sc_spat.cKDTree(self.coords) else: - raise Exception("No supported kdtree implementation installed.\ - Please install pykdtree and/or scipy.") + raise Exception( + "No supported kdtree implementation installed.\ + Please install pykdtree and/or scipy." 
+ ) def find_nearest_index(self, lon, lat, max_dist=np.Inf, k=1): """ @@ -201,14 +201,14 @@ def find_nearest_index(self, lon, lat, max_dist=np.Inf, k=1): query_coords = self._transform_lonlats(lon, lat) if k is None: - if self.kd_tree_name != 'scipy': + if self.kd_tree_name != "scipy": raise NotImplementedError("Only available for the scipy kdTree") query_coords = query_coords[0] - k = self.kdtree.query_ball_point(query_coords, - r=max_dist, return_length=True) + k = self.kdtree.query_ball_point( + query_coords, r=max_dist, return_length=True + ) - d, ind = self.kdtree.query( - query_coords, distance_upper_bound=max_dist, k=k) + d, ind = self.kdtree.query(query_coords, distance_upper_bound=max_dist, k=k) # if no point was found, d == inf if not np.all(np.isfinite(d)): diff --git a/src/pygeogrids/netcdf.py b/src/pygeogrids/netcdf.py index 194b867..0af386c 100644 --- a/src/pygeogrids/netcdf.py +++ b/src/pygeogrids/netcdf.py @@ -36,12 +36,20 @@ from pygeogrids import CellGrid, BasicGrid -def save_lonlat(filename, arrlon, arrlat, geodatum, arrcell=None, - gpis=None, subsets={}, global_attrs=None, - format='NETCDF4', - zlib=False, - complevel=4, - shuffle=True): +def save_lonlat( + filename, + arrlon, + arrlat, + geodatum, + arrcell=None, + gpis=None, + subsets={}, + global_attrs=None, + format="NETCDF4", + zlib=False, + complevel=4, + shuffle=True, +): """ saves grid information to netCDF file @@ -81,21 +89,22 @@ def save_lonlat(filename, arrlon, arrlat, geodatum, arrcell=None, see netCDF documentation """ - with Dataset(filename, 'w', format=format) as ncfile: + with Dataset(filename, "w", format=format) as ncfile: - if (global_attrs is not None and 'shape' in global_attrs and - type(global_attrs['shape']) is not int and - len(global_attrs['shape']) == 2): + if ( + global_attrs is not None + and "shape" in global_attrs + and type(global_attrs["shape"]) is not int + and len(global_attrs["shape"]) == 2 + ): - latsize = global_attrs['shape'][0] - lonsize = global_attrs['shape'][1] + latsize = global_attrs["shape"][0] + lonsize = global_attrs["shape"][1] ncfile.createDimension("lat", latsize) ncfile.createDimension("lon", lonsize) - gpisize = global_attrs['shape'][0] * global_attrs['shape'][1] + gpisize = global_attrs["shape"][0] * global_attrs["shape"][1] if gpis is None: - gpivalues = np.arange(gpisize, - dtype=np.int32).reshape(latsize, - lonsize) + gpivalues = np.arange(gpisize, dtype=np.int32).reshape(latsize, lonsize) else: gpivalues = gpis.reshape(latsize, lonsize) @@ -103,7 +112,8 @@ def save_lonlat(filename, arrlon, arrlat, geodatum, arrcell=None, lats = arrlat.reshape(latsize, lonsize) # sort arrlon, arrlat and gpis arrlon_sorted, arrlat_sorted, gpivalues = sort_for_netcdf( - lons, lats, gpivalues) + lons, lats, gpivalues + ) # sorts arrlat descending arrlat_store = np.unique(arrlat_sorted)[::-1] @@ -121,108 +131,133 @@ def save_lonlat(filename, arrlon, arrlat, geodatum, arrcell=None, dim = list(ncfile.dimensions.keys()) - crs = ncfile.createVariable('crs', np.dtype('int32').char, - shuffle=shuffle, - zlib=zlib, complevel=complevel) - setattr(crs, 'grid_mapping_name', 'latitude_longitude') - setattr(crs, 'longitude_of_prime_meridian', 0.) - setattr(crs, 'semi_major_axis', geodatum.geod.a) - setattr(crs, 'inverse_flattening', 1. 
/ geodatum.geod.f) - setattr(crs, 'ellipsoid_name', geodatum.name) - - gpi = ncfile.createVariable('gpi', np.dtype('int32').char, dim, - shuffle=shuffle, - zlib=zlib, complevel=complevel) + crs = ncfile.createVariable( + "crs", + np.dtype("int32").char, + shuffle=shuffle, + zlib=zlib, + complevel=complevel, + ) + setattr(crs, "grid_mapping_name", "latitude_longitude") + setattr(crs, "longitude_of_prime_meridian", 0.0) + setattr(crs, "semi_major_axis", geodatum.geod.a) + setattr(crs, "inverse_flattening", 1.0 / geodatum.geod.f) + setattr(crs, "ellipsoid_name", geodatum.name) + + gpi = ncfile.createVariable( + "gpi", + np.dtype("int32").char, + dim, + shuffle=shuffle, + zlib=zlib, + complevel=complevel, + ) if gpis is None: gpi[:] = gpivalues - setattr(gpi, 'long_name', 'Grid point index') - setattr(gpi, 'units', '') - setattr(gpi, 'valid_range', [0, gpisize]) - gpidirect = 0x1b + setattr(gpi, "long_name", "Grid point index") + setattr(gpi, "units", "") + setattr(gpi, "valid_range", [0, gpisize]) + gpidirect = 0x1B else: gpi[:] = gpivalues - setattr(gpi, 'long_name', 'Grid point index') - setattr(gpi, 'units', '') - setattr(gpi, 'valid_range', [np.min(gpivalues), np.max(gpivalues)]) - gpidirect = 0x0b - - latitude = ncfile.createVariable('lat', np.dtype('float64').char, - dim[0], - shuffle=shuffle, - zlib=zlib, complevel=complevel) + setattr(gpi, "long_name", "Grid point index") + setattr(gpi, "units", "") + setattr(gpi, "valid_range", [np.min(gpivalues), np.max(gpivalues)]) + gpidirect = 0x0B + + latitude = ncfile.createVariable( + "lat", + np.dtype("float64").char, + dim[0], + shuffle=shuffle, + zlib=zlib, + complevel=complevel, + ) latitude[:] = arrlat_store - setattr(latitude, 'long_name', 'Latitude') - setattr(latitude, 'units', 'degree_north') - setattr(latitude, 'standard_name', 'latitude') - setattr(latitude, 'valid_range', [-90.0, 90.0]) + setattr(latitude, "long_name", "Latitude") + setattr(latitude, "units", "degree_north") + setattr(latitude, "standard_name", "latitude") + setattr(latitude, "valid_range", [-90.0, 90.0]) if len(dim) == 2: londim = dim[1] else: londim = dim[0] - longitude = ncfile.createVariable('lon', np.dtype('float64').char, - londim, - shuffle=shuffle, - zlib=zlib, complevel=complevel) + longitude = ncfile.createVariable( + "lon", + np.dtype("float64").char, + londim, + shuffle=shuffle, + zlib=zlib, + complevel=complevel, + ) longitude[:] = arrlon_store - setattr(longitude, 'long_name', 'Longitude') - setattr(longitude, 'units', 'degree_east') - setattr(longitude, 'standard_name', 'longitude') - setattr(longitude, 'valid_range', [-180.0, 180.0]) + setattr(longitude, "long_name", "Longitude") + setattr(longitude, "units", "degree_east") + setattr(longitude, "standard_name", "longitude") + setattr(longitude, "valid_range", [-180.0, 180.0]) if arrcell is not None: - cell = ncfile.createVariable('cell', np.dtype('int32').char, - dim, - shuffle=shuffle, - zlib=zlib, complevel=complevel) + cell = ncfile.createVariable( + "cell", + np.dtype("int32").char, + dim, + shuffle=shuffle, + zlib=zlib, + complevel=complevel, + ) if len(dim) == 2: - arrcell = arrcell.reshape(latsize, - lonsize) + arrcell = arrcell.reshape(latsize, lonsize) _, _, arrcell = sort_for_netcdf(lons, lats, arrcell) cell[:] = arrcell - setattr(cell, 'long_name', 'Cell') - setattr(cell, 'units', '') - setattr(cell, 'valid_range', [np.min(arrcell), np.max(arrcell)]) + setattr(cell, "long_name", "Cell") + setattr(cell, "units", "") + setattr(cell, "valid_range", [np.min(arrcell), np.max(arrcell)]) if 
subsets: for subset_name in subsets.keys(): - flag = ncfile.createVariable(subset_name, np.dtype('int8').char, - dim, - shuffle=shuffle, - zlib=zlib, complevel=complevel) + flag = ncfile.createVariable( + subset_name, + np.dtype("int8").char, + dim, + shuffle=shuffle, + zlib=zlib, + complevel=complevel, + ) # create flag array based on shape of data lf = np.zeros_like(gpivalues) if len(dim) == 2: lf = lf.flatten() - value = subsets[subset_name]['value'] - lf[subsets[subset_name]['points']] = value + value = subsets[subset_name]["value"] + lf[subsets[subset_name]["points"]] = value if len(dim) == 2: lf = lf.reshape(latsize, lonsize) _, _, lf = sort_for_netcdf(lons, lats, lf) flag[:] = lf - setattr(flag, 'long_name', subset_name) - setattr(flag, 'units', '') - setattr(flag, 'coordinates', 'lat lon') - setattr(flag, 'flag_values', np.arange(2, dtype=np.int8)) - setattr(flag, 'flag_meanings', subsets[subset_name]['meaning']) - setattr(flag, 'valid_range', [0, value]) + setattr(flag, "long_name", subset_name) + setattr(flag, "units", "") + setattr(flag, "coordinates", "lat lon") + setattr(flag, "flag_values", np.arange(2, dtype=np.int8)) + setattr(flag, "flag_meanings", subsets[subset_name]["meaning"]) + setattr(flag, "valid_range", [0, value]) s = "%Y-%m-%d %H:%M:%S" date_created = datetime.now().strftime(s) - attr = {'Conventions': 'CF-1.6', - 'id': os.path.split(filename)[1], # file name - 'date_created': date_created, - 'geospatial_lat_min': np.round(np.min(arrlat), 4), - 'geospatial_lat_max': np.round(np.max(arrlat), 4), - 'geospatial_lon_min': np.round(np.min(arrlon), 4), - 'geospatial_lon_max': np.round(np.max(arrlon), 4), - 'gpidirect': gpidirect - } + attr = { + "Conventions": "CF-1.6", + "id": os.path.split(filename)[1], # file name + "date_created": date_created, + "geospatial_lat_min": np.round(np.min(arrlat), 4), + "geospatial_lat_max": np.round(np.max(arrlat), 4), + "geospatial_lon_min": np.round(np.min(arrlon), 4), + "geospatial_lon_max": np.round(np.max(arrlon), 4), + "gpidirect": gpidirect, + } ncfile.setncatts(attr) @@ -261,24 +296,31 @@ def sort_for_netcdf(lons, lats, values): arrlon = lons.flatten() arrval = values.flatten() idxlatsrt = np.argsort(arrlat)[::-1] - idxlat = np.argsort(arrlat[idxlatsrt]. - reshape(lats.shape), - axis=0)[::-1] - idxlon = np.argsort(arrlon[idxlatsrt]. 
- reshape(lons.shape) - [idxlat, np.arange(lons.shape[1])], axis=1) - - values = arrval[idxlatsrt].reshape( - *lons.shape)[idxlat, np.arange(lons.shape[1])][np.arange(lons.shape[0])[:, None], idxlon] - lons = arrlon[idxlatsrt].reshape( - *lons.shape)[idxlat, np.arange(lons.shape[1])][np.arange(lons.shape[0])[:, None], idxlon] - lats = arrlat[idxlatsrt].reshape( - *lons.shape)[idxlat, np.arange(lons.shape[1])][np.arange(lons.shape[0])[:, None], idxlon] + idxlat = np.argsort(arrlat[idxlatsrt].reshape(lats.shape), axis=0)[::-1] + idxlon = np.argsort( + arrlon[idxlatsrt].reshape(lons.shape)[idxlat, np.arange(lons.shape[1])], axis=1 + ) + + values = arrval[idxlatsrt].reshape(*lons.shape)[idxlat, np.arange(lons.shape[1])][ + np.arange(lons.shape[0])[:, None], idxlon + ] + lons = arrlon[idxlatsrt].reshape(*lons.shape)[idxlat, np.arange(lons.shape[1])][ + np.arange(lons.shape[0])[:, None], idxlon + ] + lats = arrlat[idxlatsrt].reshape(*lons.shape)[idxlat, np.arange(lons.shape[1])][ + np.arange(lons.shape[0])[:, None], idxlon + ] return lons, lats, values -def save_grid(filename, grid, subset_name='subset_flag', subset_value=1., - subset_meaning='water land', global_attrs=None): +def save_grid( + filename, + grid, + subset_name="subset_flag", + subset_value=1.0, + subset_meaning="water land", + global_attrs=None, +): """ save a BasicGrid or CellGrid to netCDF it is assumed that a subset should be used as land_points @@ -309,21 +351,39 @@ def save_grid(filename, grid, subset_name='subset_flag', subset_value=1., if grid.shape is not None: if global_attrs is None: global_attrs = {} - global_attrs['shape'] = grid.shape + global_attrs["shape"] = grid.shape if grid.subset is not None: - subsets = {subset_name: { - 'points': grid.subset, 'meaning': subset_meaning, 'value': subset_value}} + subsets = { + subset_name: { + "points": grid.subset, + "meaning": subset_meaning, + "value": subset_value, + } + } else: subsets = None - save_lonlat(filename, grid.arrlon, grid.arrlat, grid.geodatum, - arrcell=arrcell, gpis=gpis, subsets=subsets, zlib=True, - global_attrs=global_attrs) - - -def load_grid(filename, subset_flag='subset_flag', subset_value=1, - location_var_name='gpi', **grid_kwargs): + save_lonlat( + filename, + grid.arrlon, + grid.arrlat, + grid.geodatum, + arrcell=arrcell, + gpis=gpis, + subsets=subsets, + zlib=True, + global_attrs=global_attrs, + ) + + +def load_grid( + filename, + subset_flag="subset_flag", + subset_value=1, + location_var_name="gpi", + **grid_kwargs +): """ load a grid from netCDF file @@ -346,16 +406,16 @@ def load_grid(filename, subset_flag='subset_flag', subset_value=1, grid instance initialized with the loaded data """ - with Dataset(filename, 'r') as nc_data: + with Dataset(filename, "r") as nc_data: # determine if it is a cell grid or a basic grid arrcell = None - if 'cell' in nc_data.variables.keys(): - arrcell = np.array(nc_data.variables['cell'][:].flatten()) + if "cell" in nc_data.variables.keys(): + arrcell = np.array(nc_data.variables["cell"][:].flatten()) gpis = np.array(nc_data.variables[location_var_name][:].flatten()) shape = None - if hasattr(nc_data, 'shape'): + if hasattr(nc_data, "shape"): try: shape = tuple(nc_data.shape) except TypeError as e: @@ -372,51 +432,61 @@ def load_grid(filename, subset_flag='subset_flag', subset_value=1, # some old grid do not have a shape attribute # this meant that they had shape of len 1 if shape is None: - shape = tuple([len(nc_data.variables['lon'][:])]) + shape = tuple([len(nc_data.variables["lon"][:])]) # check if grid has 
regular shape if len(shape) == 2: - lons, lats = np.meshgrid(np.array(nc_data.variables['lon'][:]), - np.array(nc_data.variables['lat'][:])) + lons, lats = np.meshgrid( + np.array(nc_data.variables["lon"][:]), + np.array(nc_data.variables["lat"][:]), + ) lons = lons.flatten() lats = lats.flatten() if subset_flag in nc_data.variables.keys(): subset = np.where( - np.isin(nc_data.variables[subset_flag][:].flatten(), subset_value))[0] + np.isin(nc_data.variables[subset_flag][:].flatten(), subset_value) + )[0] elif len(shape) == 1: - lons = np.array(nc_data.variables['lon'][:]) - lats = np.array(nc_data.variables['lat'][:]) + lons = np.array(nc_data.variables["lon"][:]) + lats = np.array(nc_data.variables["lat"][:]) # determine if it has a subset if subset_flag in nc_data.variables.keys(): subset = np.where( - np.isin(np.array(nc_data.variables[subset_flag][:].flatten()), - subset_value))[0] - - if 'crs' in nc_data.variables: - geodatumName = nc_data.variables['crs'].getncattr('ellipsoid_name') + np.isin( + np.array(nc_data.variables[subset_flag][:].flatten()), + subset_value, + ) + )[0] + + if "crs" in nc_data.variables: + geodatumName = nc_data.variables["crs"].getncattr("ellipsoid_name") else: # ellipsoid information is missing, use WGS84 by default - geodatumName = 'WGS84' + geodatumName = "WGS84" if arrcell is None: # BasicGrid - return BasicGrid(lons, - lats, - gpis=gpis, - geodatum=geodatumName, - subset=subset, - shape=shape, - **grid_kwargs) + return BasicGrid( + lons, + lats, + gpis=gpis, + geodatum=geodatumName, + subset=subset, + shape=shape, + **grid_kwargs + ) else: # CellGrid - return CellGrid(lons, - lats, - arrcell, - gpis=gpis, - geodatum=geodatumName, - subset=subset, - shape=shape, - **grid_kwargs) + return CellGrid( + lons, + lats, + arrcell, + gpis=gpis, + geodatum=geodatumName, + subset=subset, + shape=shape, + **grid_kwargs + ) diff --git a/src/pygeogrids/plotting.py b/src/pygeogrids/plotting.py index cc5744f..acd5d0c 100644 --- a/src/pygeogrids/plotting.py +++ b/src/pygeogrids/plotting.py @@ -26,6 +26,7 @@ # ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import warnings + try: import matplotlib.pyplot as plt import matplotlib as mp @@ -39,10 +40,9 @@ import pygeogrids.grids as grids -def plot_cell_grid_partitioning(output, - cellsize_lon=5., - cellsize_lat=5.0, - figsize=(12, 6)): +def plot_cell_grid_partitioning( + output, cellsize_lon=5.0, cellsize_lat=5.0, figsize=(12, 6) +): """ Plot an overview of a global cell partitioning. 
@@ -51,28 +51,50 @@ def plot_cell_grid_partitioning(output, output: string output file name """ - mp.rcParams['font.size'] = 10 - mp.rcParams['text.usetex'] = True + mp.rcParams["font.size"] = 10 + mp.rcParams["text.usetex"] = True plt.figure(figsize=figsize, dpi=300) ax = plt.axes([0, 0, 1, 1]) - map = Basemap(projection="cyl", llcrnrlat=-90, urcrnrlat=90, - llcrnrlon=-180, urcrnrlon=180, ax=ax) - map.drawparallels(np.arange(-90, 90, cellsize_lat), labels=[1, 0, 0, 0], - linewidth=0.5) - map.drawmeridians(np.arange(-180, 180, cellsize_lon), - labels=[0, 0, 0, 1], rotation='vertical', linewidth=0.5) + map = Basemap( + projection="cyl", + llcrnrlat=-90, + urcrnrlat=90, + llcrnrlon=-180, + urcrnrlon=180, + ax=ax, + ) + map.drawparallels( + np.arange(-90, 90, cellsize_lat), labels=[1, 0, 0, 0], linewidth=0.5 + ) + map.drawmeridians( + np.arange(-180, 180, cellsize_lon), + labels=[0, 0, 0, 1], + rotation="vertical", + linewidth=0.5, + ) # fill continents 'coral' (with zorder=0), color wet areas 'aqua' - map.drawmapboundary(fill_color='aqua') - map.fillcontinents(color='0.6', lake_color='aqua') - label_lats = np.arange(-90 + cellsize_lat / 2., 90, cellsize_lat) - label_lons = np.arange(-180 + cellsize_lon / 2., 180, cellsize_lon) + map.drawmapboundary(fill_color="aqua") + map.fillcontinents(color="0.6", lake_color="aqua") + label_lats = np.arange(-90 + cellsize_lat / 2.0, 90, cellsize_lat) + label_lons = np.arange(-180 + cellsize_lon / 2.0, 180, cellsize_lon) lons, lats = np.meshgrid(label_lons, label_lats) x, y = map(lons.flatten(), lats.flatten()) - cells = grids.lonlat2cell(lons.flatten(), lats.flatten(), - cellsize_lon=cellsize_lon, cellsize_lat=cellsize_lat) + cells = grids.lonlat2cell( + lons.flatten(), + lats.flatten(), + cellsize_lon=cellsize_lon, + cellsize_lat=cellsize_lat, + ) for xt, yt, cell in zip(x, y, cells): - plt.text(xt, yt, "{:}".format(cell), fontsize=4, - va="center", ha="center", weight="bold") - plt.savefig(output, format='png', dpi=300) + plt.text( + xt, + yt, + "{:}".format(cell), + fontsize=4, + va="center", + ha="center", + weight="bold", + ) + plt.savefig(output, format="png", dpi=300) plt.close() diff --git a/src/pygeogrids/shapefile.py b/src/pygeogrids/shapefile.py index 47cb341..af1c8a3 100644 --- a/src/pygeogrids/shapefile.py +++ b/src/pygeogrids/shapefile.py @@ -28,9 +28,11 @@ """ Module for extracting grid points from global administrative areas """ +import os try: from osgeo import ogr + ogr_installed = True except ImportError: ogr_installed = False @@ -70,8 +72,8 @@ def get_gad_grid_points(grid, gadm_shp_path, level, name=None, oid=None): ImportError: If gdal or osgeo are not installed """ if ogr_installed: - drv = ogr.GetDriverByName('ESRI Shapefile') - ds_in = drv.Open(gadm_shp_path + 'gadm28_adm{:}.shp'.format(level)) + drv = ogr.GetDriverByName("ESRI Shapefile") + ds_in = drv.Open(os.path.join(gadm_shp_path, f"gadm28_adm{level}.shp")) lyr_in = ds_in.GetLayer(0) if name: if level == 0: @@ -89,5 +91,6 @@ def get_gad_grid_points(grid, gadm_shp_path, level, name=None, oid=None): raise ValueError("Requested Object not found in shapefile.") else: - raise ImportError("No supported implementation installed." - "Please install gdal and osgeo.") + raise ImportError( + "No supported implementation installed." "Please install gdal and osgeo." + )
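
Note for reviewers: a minimal smoke test of the reformatted API is sketched below. It is not part of the patch; it only calls functions whose signatures appear unchanged in the hunks above (gridfromdims, get_bbox_grid_points, find_k_nearest_gpi, to_cell_grid, save_grid, load_grid). The output file name and the query coordinates are invented for illustration.

    import numpy as np
    import pygeogrids.grids as grids
    from pygeogrids.netcdf import save_grid, load_grid

    # Regular 1x1 degree grid built from its dimension vectors;
    # origin="top" keeps the first row at the northernmost latitude.
    londim = np.arange(-179.5, 180, 1.0)
    latdim = np.arange(89.5, -90, -1.0)
    grid = grids.gridfromdims(londim, latdim, origin="top")

    # Bounding-box search: grid point indices inside a lat/lon box.
    box_gpis = grid.get_bbox_grid_points(latmin=45, latmax=50, lonmin=10, lonmax=15)

    # k nearest neighbours of a single location (k=1 behaves like find_nearest_gpi).
    ngpis, dists = grid.find_k_nearest_gpi(16.4, 48.2, k=4)

    # Group the points into 5 degree cells and round-trip the grid through netCDF.
    cell_grid = grid.to_cell_grid(cellsize=5.0)
    save_grid("example_grid.nc", cell_grid)
    print(load_grid("example_grid.nc") == cell_grid)  # round-tripped grid should compare equal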