diff --git a/.travis.yml b/.travis.yml
index b61f83b7..033c2d97 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,40 +1,31 @@
language: python
-sudo: false
+
python:
-- '2.7'
-- '3.5'
-addons:
- apt:
- sources:
- - ubuntu-toolchain-r-test
- packages:
- - g++-4.9
+ - '2.7'
+ - '3.5'
+ - '3.6'
+
+# This section can be removed when Python 3.7 is more cleanly supported in Travis
+matrix:
+ include:
+ - python: '3.7'
+ dist: xenial
+ sudo: true
+
install:
-- if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
- wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh;
- else
- wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
- fi
-- bash miniconda.sh -b -p $HOME/miniconda
-- export PATH="$HOME/miniconda/bin:$PATH"
-- hash -r
-- conda config --set always_yes yes --set changeps1 no
-- conda update -q conda
-- conda info -a
-- |
- conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION basemap matplotlib numpy pandas pip pytables requests cython scikit-learn "pytest<4.0"
-- source activate test-environment
-- pip install pytest-cov coveralls pycodestyle osmnet
-- CC=gcc-4.9 CXX=g++-4.9 python setup.py install
+ - pip install .
+ - pip install -r requirements-dev.txt
+ - pip list
+ - pip show pandana
+
script:
-- pycodestyle pandana
-- python setup.py test --pytest-args "--cov pandana --cov-report term-missing"
+ - pycodestyle pandana
+ - python setup.py test --pytest-args "--cov pandana --cov-report term-missing"
+
after_success:
-- coveralls
-- bin/build_docs.sh
-notifications:
- slack:
- secure: a6RjANmfIyE0s3iAz4LPy2wS0bOd+ijGlhh7CJf4bRwVnQPuihDTwzQiT92Uje1rHZVUTY0r5A7QzBcg7QcACs/b3hLQ6nYQ0kIm/beC5DfZUqlyHQAuRl6eK76cEg9Le7bX8OXrjWyfTs9jgH7Z2mRGutMieNXVYQG5wMlEKlU=
+ - coveralls
+ - bin/build_docs.sh
+
env:
global:
secure: CMG0rjBgDBNy5FdfXawaaCCJm9ChzHk7e21ywVhIc1jbVS6lMn6bqwKJUnLaJAyjDhhZuxXTcHy+SALJgbzqLrH4GM5hOOL+8Rf4Jf9ESZzTBryvypRecVnUnk63SpJiq2Ki8maNrOcK1IBUAoFhFzptSgE4MDkxZ0LjsDAums8=
diff --git a/HISTORY.rst b/CHANGELOG.md
similarity index 80%
rename from HISTORY.rst
rename to CHANGELOG.md
index dfa62deb..f81c576b 100644
--- a/HISTORY.rst
+++ b/CHANGELOG.md
@@ -1,3 +1,14 @@
+v0.4.2
+======
+
+2019/8/8
+
+* Speed of network aggregations is improved.
+* Support for aggregating integer values is restored.
+* Thread count and contraction hierarchy status messages are restored.
+* Code written for v0.3 will continue to run, now raising deprecation warnings instead of errors.
+* Compilation improvements for Mac.
+
v0.4.1
======
diff --git a/LICENSE b/LICENSE.txt
similarity index 100%
rename from LICENSE
rename to LICENSE.txt
diff --git a/MANIFEST.in b/MANIFEST.in
index 1c813315..6396b68b 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,3 +1,9 @@
-include ez_setup.py
-include README.rst
+# files to include in the source distribution on pypi (setup.py and README.md are included automatically)
+
+include CHANGELOG.md
+include LICENSE.txt
+include requirements-dev.txt
+include setup.cfg
+
+recursive-include examples *.ipynb *.py
recursive-include src *.h *.cpp
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..b1ee12b9
--- /dev/null
+++ b/README.md
@@ -0,0 +1,46 @@
+[![Build Status](https://travis-ci.org/UDST/pandana.svg?branch=master)](https://travis-ci.org/UDST/pandana)
+[![Coverage Status](https://coveralls.io/repos/github/UDST/pandana/badge.svg?branch=master)](https://coveralls.io/github/UDST/pandana?branch=master)
+
+# Pandana
+
+Pandana is a Python package that uses [contraction hierarchies](https://en.wikipedia.org/wiki/Contraction_hierarchies) to perform rapid network calculations including shortest paths and accessibility buffers. The computations are parallelized for use on multi-core machines using an underlying C/C++ library. Pandana is tested on Mac, Linux, and Windows with Python 2.7, 3.6, and 3.7.
+
+Documentation: http://udst.github.io/pandana
+
+
+### Installation
+
+The easiest way to install Pandana is using the [Anaconda](https://www.anaconda.com/distribution/) package manager. Pandana's Anaconda distributions are pre-compiled and include multi-threading support on all platforms.
+
+`conda install pandana --channel conda-forge`
+
+See the documentation for information about other [installation options](http://udst.github.io/pandana/installation.html).
+
+
+### Demo
+
+[Example.ipynb](https://github.com/UDST/pandana/blob/master/examples/Example.ipynb)
+
+The image below shows the distance to the _second_ nearest restaurant from each street intersection in the city of San Francisco. Pandana can calculate this in about half a second of computation time.
+
+
+
+
+## Acknowledgments
+
+None of this would be possible without the help of Dennis Luxen and
+his [OSRM](https://github.com/DennisOSRM/Project-OSRM) project. Thank you Dennis!
+
+
+### Academic Literature
+
+A [complete description of the
+methodology](http://onlinepubs.trb.org/onlinepubs/conferences/2012/4thITM/Papers-A/0117-000062.pdf)
+was presented at the Transportation Research Board Annual Conference in 2012. Please cite this paper when referring
+to the methodology implemented by this library.
+
+
+### Related UDST libraries
+
+- [OSMnet](https://github.com/udst/osmnet)
+- [UrbanAccess](https://github.com/udst/urbanaccess)
diff --git a/README.rst b/README.rst
deleted file mode 100644
index 6263f149..00000000
--- a/README.rst
+++ /dev/null
@@ -1,76 +0,0 @@
-Pandana
-=======
-
-.. image:: https://travis-ci.org/UDST/pandana.svg?branch=master
- :alt: Build Status
- :target: https://travis-ci.org/UDST/pandana
-
-.. image:: https://coveralls.io/repos/UDST/pandana/badge.svg?branch=master&service=github
- :alt: Coverage Status
- :target: https://coveralls.io/r/UDST/pandana
-
-
-In this case, a picture is worth a thousand words. The image below shows
-the distance to the *2nd* nearest restaurant (rendered by matplotlib)
-for the city of San Francisco. With only a few lines of code, you can
-grab a network from OpenStreetMap, take the restaurants that users of
-OpenStreetMap have recorded, and in about half a second of compute time
-you can get back a Pandas Series of node\_ids and computed values of
-various measures of access to destinations on the street network.
-
-.. figure:: https://raw.githubusercontent.com/udst/pandana/master/docs/img/distance_to_restaurants.png
- :alt: Distance to Restaurants
- :width: 800
-
- Distance to Restaurants
-
-Beyond simple access to destination queries, this library also
-implements more general aggregations along the street network (or any
-network). For a given region, this produces hundreds of thousands of
-overlapping buffer queries (still performed in less than a second) that
-can be used to characterize the local neighborhood around each street
-intersection. The result can then be mapped, or assigned to parcel and
-building records, or used in statistical models as we commonly do with
-`UrbanSim `__. This is in stark
-contrast to the arbitrary non-overlapping geographies ubiquitous in GIS.
-Although there are advantages to the GIS approach, we think network
-queries are a more accurate representation of how people interact with
-their environment.
-
-We look forward to creative uses of a general library like this - please
-let us know if you think you have a great use case by tweeting us at
-``@urbansim`` or post on the UrbanSim `forum`_.
-
-Docs
-----
-
-`Documentation `__ for Pandana is
-now available.
-
-Thorough `API
-documentation `__ for
-Pandana is also available.
-
-Acknowledgments
----------------
-
-None of this would be possible without the help of Dennis Luxen and
-his OSRM (https://github.com/DennisOSRM/Project-OSRM). Thank you Dennis!
-
-Academic Literature
--------------------
-
-A `complete description of the
-methodology `__
-was presented at the Transportation Research Board Annual Conference in 2012. Please cite this paper when referring
-to the methodology implemented by this library.
-
-Related UDST libraries
-----------------------
-
-- `OSMnet`_
-- `UrbanAccess`_
-
-.. _forum: http://discussion.urbansim.com/
-.. _OSMnet: https://github.com/udst/osmnet
-.. _UrbanAccess: https://github.com/UDST/urbanaccess
diff --git a/bin/build_docs.sh b/bin/build_docs.sh
index 2f66469a..7eb5c233 100755
--- a/bin/build_docs.sh
+++ b/bin/build_docs.sh
@@ -33,10 +33,6 @@ if [ "$TRAVIS_REPO_SLUG" == "UDST/pandana" ] && \
[ "$TRAVIS_PULL_REQUEST" == "false" ] && \
[ "$ACTUAL_TRAVIS_JOB_NUMBER" == "1" ]; then
- echo "Installing dependencies"
- conda install --yes --quiet sphinx numpydoc
- pip install sphinx_rtd_theme
-
echo "Building docs"
cd docs
make clean
diff --git a/docs/conf.py b/docs/conf.py
index 4bf080ae..34c5cc9b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -50,16 +50,16 @@
# General information about the project.
project = 'pandana'
-copyright = '2017, UrbanSim Inc.'
+copyright = '2019, UrbanSim Inc.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
-version = '0.4.1'
+version = '0.4.2'
# The full version, including alpha/beta/rc tags.
-release = '0.4.1'
+release = '0.4.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/docs/index.rst b/docs/index.rst
index daaede70..eba19485 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -10,7 +10,7 @@ Pandana is a neologism representing Pandas Network Analysis.
Pandana performs hundreds of thousands of network queries in under a second
(for walking-scale distances) using a Pandas-like API. The computations are
-parallelized for use on multi-core computers using an underlying C
+parallelized for use on multi-core computers using an underlying C/C++
library.
Contents
diff --git a/docs/installation.rst b/docs/installation.rst
index bc1a02e0..ee7f2467 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -1,118 +1,81 @@
Installation
============
-Pandana depends on a number of libraries from the scientific Python stack.
-The easiest way to get these is to use the `Anaconda`_ python distribution,
-and the instructions below will assume you are using Anaconda.
+Pandana is a Python package that includes a C/C++ extension. Pandana is tested on Mac, Linux, and Windows with Python 2.7, 3.6, and 3.7.
-Dependencies
-------------
+The easiest way to install Pandana is using the `Anaconda`_ package manager. Pandana's Anaconda distributions are pre-compiled and include multi-threading support on all platforms.
-Pandana depends on the following libraries, most of which are in Anaconda:
+If you install Pandana from Pip or from the source code on GitHub, you'll need to compile the C/C++ extension locally. This is automatic, but won't work unless the right build tools are present. See full instructions below.
-* `brewer2mpl`_ >= 1.4
-* `matplotlib`_ >= 1.3.1
-* `numpy`_ >= 1.8.0
-* `pandas`_ >= 0.17.0
-* `tables`_ >= 3.1.0
-* `osmnet`_ >= 0.1.0
-Install the latest release
---------------------------
+Anaconda (recommended)
+----------------------
-.. note::
- Installing via conda or pip on a Mac will install Pandana without
- multithreading support.
- See instructions below for installing on a Mac with multithreading
- support.
+Pandana is hosted on Conda Forge::
-conda
-~~~~~
+ conda install pandana --channel conda-forge
-Pandana is hosted on
-`UDST's Anaconda repository `__. Other dependencies
-can be installed through the ``conda-forge`` channel.
-To add these as default installation channels for conda, run this code
-in a terminal::
- conda config --add channels udst
- conda config --add channels conda-forge
+Pip (requires local compilation)
+--------------------------------
-Then you can install pandana::
+Pandana is also hosted on PyPI::
- conda install pandana
+ pip install pandana
-To update pandana to a new release, run::
+Pandana's C/C++ extension will compile automatically if the right tools are present. See below for troubleshooting.
- conda update pandana
-pip
-~~~
+GitHub source code
+------------------
+
+If you'll be modifying the code, you can install Pandana from the `GitHub source `_::
-Pandana is available on PyPI and can be installed with::
+ git clone https://github.com/udst/pandana.git
+ cd pandana
+ python setup.py develop
- pip install -U pandana
+Pandana's C/C++ extension will compile automatically if the right tools are present. See below for troubleshooting.
-On Windows and Mac this will install binary builds, assuming you are using
-a recent version of `pip`_. On Linux it will perform a source install.
-Development Installation
-------------------------
+Compiling locally
+-----------------
-* Clone the `pandana repo `__
-* Run ``python setup.py develop``
+Building Pandana from source requires C/C++ compilers. On Linux and Mac these are usually already present, but read on for more information.
-(This is a C extension so requires C/C++ compilers, but should compile on
-Linux, Windows, and Mac.)
+Pandana uses OpenMP to parallelize computations --- compiling without OpenMP support will still work but won't allow multi-threading.
+
+.. note::
+ Pandana's C/C++ code references some libraries from NumPy, so it's helpful to have NumPy fully installed before running Pandana's setup script.
+
+Linux
+~~~~~
-Multithreaded Installation on Mac
----------------------------------
+Pandana's setup script expects GCC with support for the C++11 standard and OpenMP. If you run into problems, try doing a fresh install of the core build tools::
-The default compilers on Mac do not support `OpenMP`_ (which we use to
-parallelize the computations).
-To get multithreaded Pandana on Mac you'll need to install `GNU GCC`_
-and then compile Pandana from source.
-The easiest way to get GCC is via `Homebrew`_ or `MacPorts`_:
+ sudo apt-get install --reinstall build-essential
-* Homebrew: ``brew install gcc``
-* MacPorts: ``port install gcc``
+Windows
+~~~~~~~
-Then you must specify the GCC compilers for use during compilation
-via environment variables and tell the ``setup.py`` script explicitly
-to build with OpenMP::
+Compilation is automatic but requires that `Microsoft Visual C++ Build Tools `_ are installed.
- export CC=gcc-4.9
- export CXX=g++-4.9
- export USEOPENMP=1
+Certain older machines may need the `Microsoft Visual C++ 2008 SP1 Redistributable Package (x64) `_ or something similar in order to use Pandana. This provides runtime components of the Visual C++ libraries.
+
+
+Mac
+~~~
+
+The default OS X compilers don't support OpenMP multi-threading. Use these commands to confirm that Xcode Command Line Tools are present and to install some newer compilers from Anaconda::
+
+ xcode-select --install
+ conda install llvm-openmp clang
+
+After installing Pandana, running :code:`examples/simple_example.py` will print out the number of threads that are being used.
.. note::
+ If you get a compilation error like ``'wchar.h' file not found``, you can resolve it in macOS 10.14 by installing some additional header files::
+
+ open /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg
- The value of the variables you set will depend on the
- exact version of GCC installed.
-
-To install the latest release from source using `pip`_::
-
- pip install -U --no-use-wheel pandana
-
-Our you can get the `development repository `__
-and run ``python setup.py install``.
-
-After installation, executing :code:`examples/simple_example.py` will print out the
-number of threads that are being utilized. If Pandana says it is using 1
-thread, and your computer has multiple cores, Pandana is not installed
-correctly. Check the compile output for the gcc compiler you specified
-with :code:`CC` and :code:`CXX` - you might need to change the name slightly depending
-on your platform - for instance :code:`g++-mp-4.9` or :code:`g++-4.8`.
-
-.. _Anaconda: http://docs.continuum.io/anaconda/
-.. _pip: https://pip.pypa.io/en/latest/
-.. _OpenMP: http://openmp.org/wp/
-.. _GNU GCC: https://gcc.gnu.org/
-.. _Homebrew: http://brew.sh/
-.. _MacPorts: https://www.macports.org/
-.. _brewer2mpl: https://github.com/jiffyclub/brewer2mpl/wiki
-.. _matplotlib: http://matplotlib.org/
-.. _numpy: http://www.numpy.org/
-.. _pandas: http://pandas.pydata.org/
-.. _tables: http://www.pytables.org/
-.. _osmnet: http://github.com/udst/osmnet
+.. _Anaconda: https://www.anaconda.com/distribution/
diff --git a/ez_setup.py b/ez_setup.py
deleted file mode 100644
index 90b93f68..00000000
--- a/ez_setup.py
+++ /dev/null
@@ -1,352 +0,0 @@
-#!/usr/bin/env python
-"""Bootstrap setuptools installation
-
-To use setuptools in your package's setup.py, include this
-file in the same directory and add this to the top of your setup.py::
-
- from ez_setup import use_setuptools
- use_setuptools()
-
-To require a specific version of setuptools, set a download
-mirror, or use an alternate download directory, simply supply
-the appropriate options to ``use_setuptools()``.
-
-This file can also be run as a script to install or upgrade setuptools.
-"""
-import os
-import shutil
-import sys
-import tempfile
-import zipfile
-import optparse
-import subprocess
-import platform
-import textwrap
-import contextlib
-
-from distutils import log
-
-try:
- from urllib.request import urlopen
-except ImportError:
- from urllib2 import urlopen
-
-try:
- from site import USER_SITE
-except ImportError:
- USER_SITE = None
-
-DEFAULT_VERSION = "5.7"
-DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
-
-
-def _python_cmd(*args):
- """
- Return True if the command succeeded.
- """
- args = (sys.executable,) + args
- return subprocess.call(args) == 0
-
-
-def _install(archive_filename, install_args=()):
- with archive_context(archive_filename):
- # installing
- log.warn('Installing Setuptools')
- if not _python_cmd('setup.py', 'install', *install_args):
- log.warn('Something went wrong during the installation.')
- log.warn('See the error message above.')
- # exitcode will be 2
- return 2
-
-
-def _build_egg(egg, archive_filename, to_dir):
- with archive_context(archive_filename):
- # building an egg
- log.warn('Building a Setuptools egg in %s', to_dir)
- _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
- # returning the result
- log.warn(egg)
- if not os.path.exists(egg):
- raise IOError('Could not build the egg.')
-
-
-class ContextualZipFile(zipfile.ZipFile):
- """
- Supplement ZipFile class to support context manager for Python 2.6
- """
-
- def __enter__(self):
- return self
-
- def __exit__(self, type, value, traceback):
- self.close()
-
- def __new__(cls, *args, **kwargs):
- """
- Construct a ZipFile or ContextualZipFile as appropriate
- """
- if hasattr(zipfile.ZipFile, '__exit__'):
- return zipfile.ZipFile(*args, **kwargs)
- return super(ContextualZipFile, cls).__new__(cls)
-
-
-@contextlib.contextmanager
-def archive_context(filename):
- # extracting the archive
- tmpdir = tempfile.mkdtemp()
- log.warn('Extracting in %s', tmpdir)
- old_wd = os.getcwd()
- try:
- os.chdir(tmpdir)
- with ContextualZipFile(filename) as archive:
- archive.extractall()
-
- # going in the directory
- subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
- os.chdir(subdir)
- log.warn('Now working in %s', subdir)
- yield
-
- finally:
- os.chdir(old_wd)
- shutil.rmtree(tmpdir)
-
-
-def _do_download(version, download_base, to_dir, download_delay):
- egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
- % (version, sys.version_info[0], sys.version_info[1]))
- if not os.path.exists(egg):
- archive = download_setuptools(version, download_base,
- to_dir, download_delay)
- _build_egg(egg, archive, to_dir)
- sys.path.insert(0, egg)
-
- # Remove previously-imported pkg_resources if present (see
- # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
- if 'pkg_resources' in sys.modules:
- del sys.modules['pkg_resources']
-
- import setuptools
- setuptools.bootstrap_install_from = egg
-
-
-def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
- to_dir=os.curdir, download_delay=15):
- to_dir = os.path.abspath(to_dir)
- rep_modules = 'pkg_resources', 'setuptools'
- imported = set(sys.modules).intersection(rep_modules)
- try:
- import pkg_resources
- except ImportError:
- return _do_download(version, download_base, to_dir, download_delay)
- try:
- pkg_resources.require("setuptools>=" + version)
- return
- except pkg_resources.DistributionNotFound:
- return _do_download(version, download_base, to_dir, download_delay)
- except pkg_resources.VersionConflict as VC_err:
- if imported:
- msg = textwrap.dedent("""
- The required version of setuptools (>={version}) is not available,
- and can't be installed while this script is running. Please
- install a more recent version first, using
- 'easy_install -U setuptools'.
-
- (Currently using {VC_err.args[0]!r})
- """).format(VC_err=VC_err, version=version)
- sys.stderr.write(msg)
- sys.exit(2)
-
- # otherwise, reload ok
- del pkg_resources, sys.modules['pkg_resources']
- return _do_download(version, download_base, to_dir, download_delay)
-
-
-def _clean_check(cmd, target):
- """
- Run the command to download target. If the command fails, clean up before
- re-raising the error.
- """
- try:
- subprocess.check_call(cmd)
- except subprocess.CalledProcessError:
- if os.access(target, os.F_OK):
- os.unlink(target)
- raise
-
-
-def download_file_powershell(url, target):
- """
- Download the file at url to target using Powershell (which will validate
- trust). Raise an exception if the command cannot complete.
- """
- target = os.path.abspath(target)
- ps_cmd = (
- "[System.Net.WebRequest]::DefaultWebProxy.Credentials = "
- "[System.Net.CredentialCache]::DefaultCredentials; "
- "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)"
- % vars()
- )
- cmd = [
- 'powershell',
- '-Command',
- ps_cmd,
- ]
- _clean_check(cmd, target)
-
-
-def has_powershell():
- if platform.system() != 'Windows':
- return False
- cmd = ['powershell', '-Command', 'echo test']
- with open(os.path.devnull, 'wb') as devnull:
- try:
- subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
- except Exception:
- return False
- return True
-
-
-download_file_powershell.viable = has_powershell
-
-
-def download_file_curl(url, target):
- cmd = ['curl', url, '--silent', '--output', target]
- _clean_check(cmd, target)
-
-
-def has_curl():
- cmd = ['curl', '--version']
- with open(os.path.devnull, 'wb') as devnull:
- try:
- subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
- except Exception:
- return False
- return True
-
-
-download_file_curl.viable = has_curl
-
-
-def download_file_wget(url, target):
- cmd = ['wget', url, '--quiet', '--output-document', target]
- _clean_check(cmd, target)
-
-
-def has_wget():
- cmd = ['wget', '--version']
- with open(os.path.devnull, 'wb') as devnull:
- try:
- subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
- except Exception:
- return False
- return True
-
-
-download_file_wget.viable = has_wget
-
-
-def download_file_insecure(url, target):
- """
- Use Python to download the file, even though it cannot authenticate the
- connection.
- """
- src = urlopen(url)
- try:
- # Read all the data in one block.
- data = src.read()
- finally:
- src.close()
-
- # Write all the data in one block to avoid creating a partial file.
- with open(target, "wb") as dst:
- dst.write(data)
-
-
-download_file_insecure.viable = lambda: True
-
-
-def get_best_downloader():
- downloaders = (
- download_file_powershell,
- download_file_curl,
- download_file_wget,
- download_file_insecure,
- )
- viable_downloaders = (dl for dl in downloaders if dl.viable())
- return next(viable_downloaders, None)
-
-
-def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
- to_dir=os.curdir, delay=15,
- downloader_factory=get_best_downloader):
- """
- Download setuptools from a specified location and return its filename
-
- `version` should be a valid setuptools version number that is available
- as an sdist for download under the `download_base` URL (which should end
- with a '/'). `to_dir` is the directory where the egg will be downloaded.
- `delay` is the number of seconds to pause before an actual download
- attempt.
-
- ``downloader_factory`` should be a function taking no arguments and
- returning a function for downloading a URL to a target.
- """
- # making sure we use the absolute path
- to_dir = os.path.abspath(to_dir)
- zip_name = "setuptools-%s.zip" % version
- url = download_base + zip_name
- saveto = os.path.join(to_dir, zip_name)
- if not os.path.exists(saveto): # Avoid repeated downloads
- log.warn("Downloading %s", url)
- downloader = downloader_factory()
- downloader(url, saveto)
- return os.path.realpath(saveto)
-
-
-def _build_install_args(options):
- """
- Build the arguments to 'python setup.py install' on the setuptools package
- """
- return ['--user'] if options.user_install else []
-
-
-def _parse_args():
- """
- Parse the command line for options
- """
- parser = optparse.OptionParser()
- parser.add_option(
- '--user', dest='user_install', action='store_true', default=False,
- help='install in user site package (requires Python 2.6 or later)')
- parser.add_option(
- '--download-base', dest='download_base', metavar="URL",
- default=DEFAULT_URL,
- help='alternative URL from where to download the setuptools package')
- parser.add_option(
- '--insecure', dest='downloader_factory', action='store_const',
- const=lambda: download_file_insecure, default=get_best_downloader,
- help='Use internal, non-validating downloader'
- )
- parser.add_option(
- '--version', help="Specify which version to download",
- default=DEFAULT_VERSION,
- )
- options, args = parser.parse_args()
- # positional arguments are ignored
- return options
-
-
-def main():
- """Install or upgrade setuptools and EasyInstall"""
- options = _parse_args()
- archive = download_setuptools(
- version=options.version,
- download_base=options.download_base,
- downloader_factory=options.downloader_factory,
- )
- return _install(archive, _build_install_args(options))
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/pandana/__init__.py b/pandana/__init__.py
index c6bc977e..f5378715 100644
--- a/pandana/__init__.py
+++ b/pandana/__init__.py
@@ -1,3 +1,3 @@
from .network import Network
-version = __version__ = '0.4.1'
+version = __version__ = '0.4.2'
diff --git a/pandana/network.py b/pandana/network.py
index e06f689e..7f712ad9 100644
--- a/pandana/network.py
+++ b/pandana/network.py
@@ -10,10 +10,27 @@
from .cyaccess import cyaccess
from .loaders import pandash5 as ph5
+import warnings
def reserve_num_graphs(num):
- raise Exception("reserve_num_graphs is no longer required - remove from your code")
+ """
+ This function was previously used to reserve memory space for multiple
+ graphs. It is no longer needed in Pandana 0.4+, and will be removed in a
+ future version.
+
+ Parameters
+ ----------
+ num : int
+        Number of graphs to be reserved in memory
+
+ """
+ warnings.warn(
+ "Function reserve_num_graphs() is no longer needed in Pandana 0.4+\
+ and will be removed in a future version",
+ DeprecationWarning
+ )
+ return None
class Network:
@@ -54,7 +71,6 @@ class Network:
def __init__(self, node_x, node_y, edge_from, edge_to, edge_weights,
twoway=True):
-
nodes_df = pd.DataFrame({'x': node_x, 'y': node_y})
edges_df = pd.DataFrame({'from': edge_from, 'to': edge_to}).\
join(edge_weights)
@@ -76,19 +92,19 @@ def __init__(self, node_x, node_y, edge_from, edge_to, edge_weights,
index=nodes_df.index)
edges = pd.concat([self._node_indexes(edges_df["from"]),
- self._node_indexes(edges_df["to"])], axis=1)
+ self._node_indexes(edges_df["to"])], axis=1)
self.net = cyaccess(self.node_idx.values,
- nodes_df.astype('double').as_matrix(),
- edges.as_matrix(),
+ nodes_df.astype('double').values,
+ edges.values,
edges_df[edge_weights.columns].transpose()
.astype('double')
- .as_matrix(),
+ .values,
twoway)
self._twoway = twoway
- self.kdtree = KDTree(nodes_df.as_matrix())
+ self.kdtree = KDTree(nodes_df.values)
@classmethod
def from_hdf5(cls, filename):
@@ -172,6 +188,7 @@ def shortest_path(self, node_a, node_b, imp_name=None):
-------
A numpy array of the nodes that are traversed in the shortest
path between the two nodes
+
"""
# map to internal node indexes
node_idx = self._node_indexes(pd.Series([node_a, node_b]))
@@ -219,8 +236,8 @@ def set(self, node_ids, variable=None, name="tmp"):
Returns
-------
Nothing
- """
+ """
if variable is None:
variable = pd.Series(np.ones(len(node_ids)), index=node_ids.index)
@@ -368,7 +385,7 @@ def get_node_ids(self, x_col, y_col, mapping_distance=None):
"""
xys = pd.DataFrame({'x': x_col, 'y': y_col})
- distances, indexes = self.kdtree.query(xys.as_matrix())
+ distances, indexes = self.kdtree.query(xys.values)
indexes = np.transpose(indexes)[0]
distances = np.transpose(distances)[0]
@@ -455,7 +472,35 @@ def plot(
return bmap, fig, ax
- def set_pois(self, category, maxdist, maxitems, x_col, y_col):
+ def init_pois(self, num_categories, max_dist, max_pois):
+ """
+ Initialize the point of interest infrastructure. This is no longer
+ needed in Pandana 0.4+ and will be removed in a future version.
+
+ Parameters
+ ----------
+ num_categories : int
+ Number of categories of POIs
+ max_dist : float
+ Maximum distance that will be tested to nearest POIs. This will
+ usually be a distance unit in meters however if you have
+ customized the impedance this could be in other
+ units such as utility or time etc.
+ max_pois :
+ Maximum number of POIs to return in the nearest query
+
+ """
+ self.num_categories = num_categories
+ self.max_dist = max_dist
+ self.max_pois = max_pois
+ warnings.warn(
+ "Method init_pois() is no longer needed in Pandana 0.4+ and will be removed in a \
+ future version; maxdist and maxitems should now be passed to set_pois()",
+ DeprecationWarning
+ )
+ return None
+
+ def set_pois(self, category=None, maxdist=None, maxitems=None, x_col=None, y_col=None):
"""
Set the location of all the pois of this category. The pois are
connected to the closest node in the Pandana network which assumes
@@ -478,7 +523,26 @@ def set_pois(self, category, maxdist, maxitems, x_col, y_col):
Returns
-------
Nothing
+
"""
+        # detect missing arguments when the caller used the v0.3 keyword-argument form of set_pois()
+ if maxitems is None:
+ print('Reading parameters from init_pois()')
+ maxitems = self.max_pois
+
+        # detect the v0.3 positional-argument form of set_pois()
+ elif isinstance(maxitems, type(pd.Series())):
+ y_col = maxitems
+ maxitems = self.max_pois
+
+ if maxdist is None:
+ print('Reading parameters from init_pois()')
+ maxdist = self.max_dist
+
+ elif isinstance(maxdist, type(pd.Series())):
+ x_col = maxdist
+ maxdist = self.max_dist
+
if category not in self.poi_category_names:
self.poi_category_names.append(category)
diff --git a/pandana/tests/test_pandana.py b/pandana/tests/test_pandana.py
index 526288e6..a5da7159 100644
--- a/pandana/tests/test_pandana.py
+++ b/pandana/tests/test_pandana.py
@@ -76,11 +76,6 @@ def random_x_y(sample_osm, ssize):
return x, y
-def test_reserve_num_graphs_raises(sample_osm):
- with pytest.raises(Exception):
- pdna.reserve_num_graphs(1)
-
-
def test_agg_variables_accuracy(sample_osm):
net = sample_osm
@@ -318,7 +313,40 @@ def test_pois2(second_sample_osm):
net2.nearest_pois(2000, "restaurants", num_pois=10)
+def test_pois_pandana3(second_sample_osm):
+ net2 = second_sample_osm
+
+ ssize = 50
+ np.random.seed(0)
+ x, y = random_x_y(second_sample_osm, ssize)
+ pdna.reserve_num_graphs(1)
+
+ net2.init_pois(num_categories=1, max_dist=2000, max_pois=10)
+
+ # make sure poi searches work on second graph
+ net2.set_pois(category="restaurants", x_col=x, y_col=y)
+
+ net2.nearest_pois(2000, "restaurants", num_pois=10)
+
+
+def test_pois_pandana3_pos_args(second_sample_osm):
+ net2 = second_sample_osm
+
+ ssize = 50
+ np.random.seed(0)
+ x, y = random_x_y(second_sample_osm, ssize)
+ pdna.reserve_num_graphs(1)
+
+ net2.init_pois(1, 2000, 10)
+
+ # make sure poi searches work on second graph
+ net2.set_pois("restaurants", x, y)
+
+ net2.nearest_pois(2000, "restaurants", num_pois=10)
+
# test items are sorted
+
+
def test_sorted_pois(sample_osm):
net = sample_osm
diff --git a/requirements-dev.txt b/requirements-dev.txt
new file mode 100644
index 00000000..b29c3742
--- /dev/null
+++ b/requirements-dev.txt
@@ -0,0 +1,9 @@
+# requirements for development and testing
+
+coveralls
+numpydoc
+pycodestyle
+pytest>=3.6,<4.0
+pytest-cov
+sphinx
+sphinx_rtd_theme
\ No newline at end of file
diff --git a/setup.py b/setup.py
index c223d61c..0c1b8f30 100644
--- a/setup.py
+++ b/setup.py
@@ -3,15 +3,16 @@
import sys
import sysconfig
-from ez_setup import use_setuptools
-use_setuptools()
-
from setuptools import find_packages
from distutils.core import setup, Extension
from setuptools.command.test import test as TestCommand
from setuptools.command.build_ext import build_ext
+###############################################
+# Invoking tests
+###############################################
+
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
@@ -34,8 +35,8 @@ def run_tests(self):
class Lint(TestCommand):
def run(self):
os.system("cpplint --filter=-build/include_subdir,-legal/copyright,-runtime/references,-runtime/int src/accessibility.* src/graphalg.*")
- os.system("pep8 src/cyaccess.pyx")
- os.system("pep8 pandana")
+ os.system("pycodestyle src/cyaccess.pyx")
+ os.system("pycodestyle pandana")
class CustomBuildExtCommand(build_ext):
@@ -46,55 +47,67 @@ def run(self):
build_ext.run(self)
-include_dirs = [
- '.'
-]
+###############################################
+# Building the C++ extension
+###############################################
-packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"])
+extra_compile_args = ['-w', '-std=c++11', '-O3']
+extra_link_args = []
+
+# Mac compilation: flags are for the llvm compilers included with recent
+# versions of Xcode Command Line Tools, or newer versions installed separately
-source_files = [
- 'src/accessibility.cpp',
- 'src/graphalg.cpp',
- "src/cyaccess.pyx",
- 'src/contraction_hierarchies/src/libch.cpp'
-]
-
-extra_compile_args = [
- '-w',
- '-std=c++0x',
- '-O3',
- '-fpic',
- '-g',
-]
-extra_link_args = None
-
-# separate compiler options for Windows
-if sys.platform.startswith('win'):
+if sys.platform.startswith('darwin'): # Mac
+
+ # This environment variable sets the earliest OS version that the compiled
+ # code will be compatible with. In certain contexts the default is too old
+ # to allow using libc++; supporting OS X 10.9 and later seems safe
+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.9'
+
+ extra_compile_args += ['-D NO_TR1_MEMORY', '-stdlib=libc++']
+ extra_link_args += ['-stdlib=libc++']
+
+ # This checks if the user has replaced the default clang compiler (this does
+ # not confirm there's OpenMP support, but is the best we could come up with)
+ if os.popen('which clang').read() != '/usr/bin/clang':
+ os.environ['CC'] = 'clang'
+ extra_compile_args += ['-fopenmp']
+
+# Windows compilation: flags are for Visual C++
+
+elif sys.platform.startswith('win'): # Windows
extra_compile_args = ['/w', '/openmp']
-# Use OpenMP if directed or not on a Mac
-elif os.environ.get('USEOPENMP') or not sys.platform.startswith('darwin'):
+
+# Linux compilation: flags are for gcc 4.8 and later
+
+else: # Linux
extra_compile_args += ['-fopenmp']
- extra_link_args = [
- '-lgomp'
- ]
-
-# recent versions of the OS X SDK don't have the tr1 namespace
-# and we need to flag that during compilation.
-# here we need to check what version of OS X is being targeted
-# for the installation.
-# this is potentially different than the version of OS X on the system.
-if platform.system() == 'Darwin':
- mac_ver = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
- if mac_ver:
- mac_ver = [int(x) for x in mac_ver.split('.')]
- if mac_ver >= [10, 7]:
- extra_compile_args += ['-D NO_TR1_MEMORY']
- extra_compile_args += ['-stdlib=libc++']
-
-version = '0.4.1'
+ extra_link_args += ['-lgomp']
+
+
+cyaccess = Extension(
+ name='pandana.cyaccess',
+ sources=[
+ 'src/accessibility.cpp',
+ 'src/graphalg.cpp',
+ 'src/cyaccess.pyx',
+ 'src/contraction_hierarchies/src/libch.cpp'],
+ language='c++',
+ include_dirs=['.'],
+ extra_compile_args=extra_compile_args,
+ extra_link_args=extra_link_args)
+
+
+###############################################
+# Standard setup
+###############################################
+
+version = '0.4.2'
+
+packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"])
# read long description from README
-with open('README.rst', 'r') as f:
+with open('README.md', 'r') as f:
long_description = f.read()
setup(
@@ -107,34 +120,32 @@ def run(self):
'dataframes of network queries, quickly'),
long_description=long_description,
url='https://udst.github.io/pandana/',
- ext_modules=[Extension(
- 'pandana.cyaccess',
- source_files,
- language="c++",
- include_dirs=include_dirs,
- extra_compile_args=extra_compile_args,
- extra_link_args=extra_link_args,
- )],
+ ext_modules=[cyaccess],
install_requires=[
- 'matplotlib>=1.3.1',
- 'numpy>=1.8.0',
- 'pandas>=0.17.0',
- 'requests>=2.0',
- 'tables>=3.1.0',
- 'osmnet>=0.1.2',
- 'cython>=0.25.2',
- 'scikit-learn>=0.18.1'
+ 'cython >=0.25.2',
+ 'matplotlib >=1.3.1',
+ 'numpy >=1.8.0',
+ 'osmnet >=0.1.2',
+ 'pandas >=0.17.0',
+ 'requests >=2.0',
+ 'scikit-learn >=0.18.1',
+ 'tables >=3.1.0'
+ ],
+ tests_require=[
+ 'pycodestyle',
+ 'pytest'
],
- tests_require=['pytest'],
cmdclass={
'test': PyTest,
'lint': Lint,
'build_ext': CustomBuildExtCommand,
},
classifiers=[
- 'Development Status :: 3 - Alpha',
+ 'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
'License :: OSI Approved :: GNU Affero General Public License v3'
],
)
diff --git a/src/accessibility.cpp b/src/accessibility.cpp
index 348cc383..fc915714 100644
--- a/src/accessibility.cpp
+++ b/src/accessibility.cpp
@@ -349,6 +349,18 @@ Accessibility::aggregateAccessibilityVariable(
double sum = 0.0;
double sumsq = 0.0;
+ std::function<double(const double &, const float &, const float &)> sum_function;
+
+ if(decay == "exp")
+ sum_function = [](const double &distance, const float &radius, const float &var)
+ { return exp(-1*distance/radius) * var; };
+ if(decay == "linear")
+ sum_function = [](const double &distance, const float &radius, const float &var)
+ { return (1.0-distance/radius) * var; };
+ if(decay == "flat")
+ sum_function = [](const double &distance, const float &radius, const float &var)
+ { return var; };
+
for (int i = 0 ; i < distances.size() ; i++) {
int nodeid = distances[i].first;
double distance = distances[i].second;
@@ -358,19 +370,7 @@ Accessibility::aggregateAccessibilityVariable(
for (int j = 0 ; j < vars[nodeid].size() ; j++) {
cnt++; // count items
-
- if (decay == "exp") {
- sum += exp(-1*distance/radius) * vars[nodeid][j];
-
- } else if (decay == "linear") {
- sum += (1.0-distance/radius) * vars[nodeid][j];
-
- } else if (decay == "flat") {
- sum += vars[nodeid][j];
-
- } else {
- assert(0);
- }
+ sum += sum_function(distance, radius, vars[nodeid][j]);
// stddev is always flat
sumsq += vars[nodeid][j] * vars[nodeid][j];
diff --git a/src/contraction_hierarchies/src/DataStructures/Percent.h b/src/contraction_hierarchies/src/DataStructures/Percent.h
index 168b5152..fee38543 100644
--- a/src/contraction_hierarchies/src/DataStructures/Percent.h
+++ b/src/contraction_hierarchies/src/DataStructures/Percent.h
@@ -54,8 +54,8 @@ class Percent
_nextThreshold += _intervalPercent;
printPercent( currentValue / (double)_maxValue * 100 );
}
- // if (currentValue + 1 == _maxValue)
- // std::cout << " 100%" << std::endl;
+ if (currentValue + 1 == _maxValue)
+ std::cout << " 100%" << std::endl;
}
void printIncrement()
@@ -77,12 +77,12 @@ class Percent
while (percent >= _lastPercent+_step) {
_lastPercent+=_step;
if (_lastPercent % 10 == 0) {
- // std::cout << " " << _lastPercent << "% ";
+ std::cout << " " << _lastPercent << "% ";
}
else {
- // std::cout << ".";
+ std::cout << ".";
}
- // std::cout.flush();
+ std::cout.flush();
}
}
};
diff --git a/src/contraction_hierarchies/src/libch.cpp b/src/contraction_hierarchies/src/libch.cpp
index b86a9314..8dad3b88 100644
--- a/src/contraction_hierarchies/src/libch.cpp
+++ b/src/contraction_hierarchies/src/libch.cpp
@@ -202,6 +202,10 @@ inline ostream& operator<< (ostream& os, const Edge& e) {
}
}
}
+
+ FILE_LOG(logINFO) << "Range graph removed " << edges.size() - edge
+ << " edges of " << edges.size() << "\n";
+
//INFO("Range graph removed " << edges.size() - edge << " edges of " << edges.size());
assert(edge <= edges.size());
edges.resize( edge );
diff --git a/src/contraction_hierarchies/src/libch.h b/src/contraction_hierarchies/src/libch.h
index 121a350b..d12d3852 100644
--- a/src/contraction_hierarchies/src/libch.h
+++ b/src/contraction_hierarchies/src/libch.h
@@ -34,6 +34,8 @@ or see http://www.gnu.org/licenses/agpl.txt.
#include "DataStructures/StaticGraph.h"
#include "POIIndex/POIIndex.h"
+#define FILE_LOG(logINFO) (std::cout)
+
struct _HeapData {
NodeID parent;
_HeapData( NodeID p ) : parent(p) { }
diff --git a/src/graphalg.cpp b/src/graphalg.cpp
index dd339576..13ec13f5 100644
--- a/src/graphalg.cpp
+++ b/src/graphalg.cpp
@@ -9,6 +9,10 @@ Graphalg::Graphalg(
this->numnodes = numnodes;
int num = omp_get_max_threads();
+
+ FILE_LOG(logINFO) << "Generating contraction hierarchies with "
+ << num << " threads.\n";
+
ch = CH::ContractionHierarchies(num);
vector nv;
@@ -20,6 +24,9 @@ Graphalg::Graphalg(
nv.push_back(n);
}
+ FILE_LOG(logINFO) << "Setting CH node vector of size "
+ << nv.size() << "\n";
+
ch.SetNodeVector(nv);
vector ev;
@@ -30,6 +37,9 @@ Graphalg::Graphalg(
ev.push_back(e);
}
+ FILE_LOG(logINFO) << "Setting CH edge vector of size "
+ << ev.size() << "\n";
+
ch.SetEdgeVector(ev);
ch.RunPreprocessing();
}
diff --git a/src/shared.h b/src/shared.h
index c6d9a0f4..61ea4bd7 100644
--- a/src/shared.h
+++ b/src/shared.h
@@ -8,4 +8,5 @@
#endif
#ifdef _OPENMP
#include <omp.h>
-#endif
\ No newline at end of file
+#endif
+#define FILE_LOG(logINFO) (std::cout)