Merge remote-tracking branch 'upstream/master'

This commit is contained in:
Nat Wilson
2016-04-22 16:04:58 -07:00
87 changed files with 6238 additions and 12271 deletions

2
.coveragerc Normal file
View File

@@ -0,0 +1,2 @@
[report]
omit = *third_party*

7
.gitignore vendored
View File

@@ -5,6 +5,7 @@
*.egg-info
build/
dist/
*.eggs
MANIFEST
.DS_Store
.idea
@@ -12,6 +13,12 @@ MANIFEST
.coverage
*flymake.py
.settings
.vagrant
.pypirc
metadata.csv
txt.txt
eva.py
test.sh
_build
htmlcov

54
.travis.yml Normal file
View File

@@ -0,0 +1,54 @@
language: python
sudo: false
python:
- '2.7'
- '3.5'
cache:
directories:
- ~/.cache/pip
addons:
apt:
packages:
- libgdal1h
- gdal-bin
- libproj-dev
- libhdf5-serial-dev
- libpng-dev
- libgdal-dev
- libatlas-dev
- libatlas-base-dev
- gfortran
env:
global:
- secure: QsF7ignSAbH/WCyO6v9bw1exmCWDQR0DqmHkwJ5swc9N44OOOzbWGsaMSYB5y9h+d70fz4arbxQDhsk2KvX4Zd1/2YIMOrIsbgDYeegpkhVPgyQNPKmVqiX+Tb47t1C/TgkC7A07tiPpuefYcLNMZ8gzz7oKhh1UKapYftqzZ+g=
- secure: HxjeKWSROBQYy9NuNkgQeaK1ubTF8vH5FcR8nUTSAYxxw/qOzKpqkiq4BcJSRcIwTbkvaBf4MshLGVOxPjMeyJFe06UD/6LvTUGS3bwdya+m0RFjHe5/3wzS8/MxLbTlvgzmuGLLKOsJjXCi9eQQchKfHv+QuhGxhYVLQpnbU9E=
- secure: Zq0Z2UA2A7/ieXX8XoMweClJTp8hiVBxoQ1ylJYNd7qsRSk0QvZhn62db5/x48L9S1kELk0sG64q5Pf96/RPLpdjkBUAdEkS7qF+QOvRvAv2woNEHutjlMUvP6jwYGbug+AORg76btZ57OwMOi3aTkagQMMKnokfo7KGbffy0Jo=
- PIP_WHEEL_DIR=$HOME/.cache/pip/wheels
- PIP_FIND_LINKS=file://$HOME/.cache/pip/wheels
before_install:
- pip install -U pip
- pip install wheel
install:
- pip install -r requirements-dev.txt
- pip install -e .
script:
- python setup.py test
deploy:
provider: pypi
user: devseed
password:
secure: WtawFW/999XYszmZfj1Qk82l00OSyP2JBVOOGCERrW1gVO7MYtYsgP31HKRSzNTCTHJNVDpdK4WZWY6zPQqC3l2UfWYYsvRn0hCoI8AJxE5VCUEg6Ccpe6fMJuhp1pq6Zy7yrfBSZcOB9aqSHLBJsunD2o3mNlTC8WV8vNK74ck=
on:
repo: developmentseed/landsat-util
branch:
- master

10
AUTHORS.txt Normal file
View File

@@ -0,0 +1,10 @@
Authors
=======
Scisco https://github.com/scisco
Marc Farra https://github.com/kamicut
Drew Bollinger https://github.com/drewbo
Sean Gillies https://github.com/sgillies
Alex Kappel https://twitter.com/alex_kappel
See also https://github.com/developmentseed/landsat-util/graphs/contributors.

114
CHANGES.txt Normal file
View File

@@ -0,0 +1,114 @@
Changes
=======
0.13.0 (2016-03-25)
------------------
- Python 3.5 support
0.12.2 (2016-03-24)
------------------
- Fix for #167
- Fix for #145
0.12.0 (2016-02-18)
------------------
- Add USGS download fallback closes #89
0.11.0 (2016-01-12)
------------------
- a hotfix for search command not showing outputs #137
- add support for geojson outputs #68
0.10.0 (2016-01-05)
------------------
- add support for bare json output
- faster travis tests
- add street search
- apply ndvigrey to process and download
- fixes #127
- update commands help file
- apply pansharpen and ndvi only if -p is used
- download zip if bands are not specified
- better handle url joins to a fix error on Windows
- other small bug fixes
0.9.1 (2015-10-26)
------------------
- Add missing package (polyline) to setup.py
0.9.0 (2015-10-23)
------------------
- Improved pansharpening
- Use BQA bands for cloud/snow coverage and use in color correction
- Fix a bug in NDVI process where novalues appeared in the image
- Add support for different NDVI color maps (three included)
- Add support for image clipping using the new `--clip` flag
- Multiple bug fixes
0.8.0 (2015-09-22)
------------------
- Improved docs
- Add `--ndvi` flag
- Handle downloading new bands (10, 11, QA)
- Improved color correction
- Remove noise in pansharpened image processing
0.7.0 (2015-05-29)
------------------
- New documentation
- Deployed to readthedocs
- Automate deployment to pypi
- Adds docker support
- skip unzipping if images already unzipped
- add force-unzip flag
- fix a bug where multiple downloads was not followed by multiple process #81
- fix a bug where if scenes was downloaded from google instead of aws, process failed #84
- download band 8 when pansharpen fixes #73
0.6.3 (2015-04-29)
------------------
- adjust lower rescaling bound, closes #66 for now
- better pixel math for #71
- update destination transform calculation for reprojection in polar regions
0.6.2 (2015-04-24)
------------------
- Updated readme
- removed gamma correction from image process
0.6.1 (2015-04-23)
------------------
- Updated tests that run faster
- New upload command for uploading final image to S3
- New command for continuous upload, process and upload
- Updated image processing that produces better color correction
- Other bug fixes
0.5.1 (2015-04-08)
------------------
- Updated Rasterio and Requests versions
0.5.0 (2015-03-10)
------------------
- Fewer dependencies
- Uses wheel on OSX
- New image processing algorithm which is faster and require less storage
- Updated API
- Includes longitude latitude search
- Improved console output
- Accepts bands combinations for processing images
- Includes a new faster downloader
- More comprehensive tests (96% coverage)
- Added to pypi pip install landsat-util
- Brew formula removed
0.2.0 (2014-08-28)
------------------
- New image processing
- New command-line syntax
- A lot of other new additions
- Lots of bug fixes
0.1.0 (2014-08-08)
------------------
- Pre-release version.

15
Dockerfile Normal file
View File

@@ -0,0 +1,15 @@
FROM ubuntu:14.04
RUN apt-get -y update
RUN apt-get install --yes git-core python-pip python-scipy libgdal-dev libatlas-base-dev gfortran libfreetype6-dev libglib2.0-dev zlib1g-dev python-pycurl
ADD . /landsat
RUN pip install setuptools
RUN pip install -U pip
RUN pip install wheel
RUN pip install https://s3-us-west-2.amazonaws.com/ds-satellite-projects/landsat-util/numpy-1.10.4-cp27-cp27mu-linux_x86_64.whl
RUN pip install https://s3-us-west-2.amazonaws.com/ds-satellite-projects/landsat-util/Pillow-3.1.1-cp27-cp27mu-linux_x86_64.whl
RUN pip install https://s3-us-west-2.amazonaws.com/ds-satellite-projects/landsat-util/scikit_image-0.12.3-cp27-cp27mu-manylinux1_x86_64.whl
RUN cd /landsat && pip install -r requirements-dev.txt
RUN sed -i 's/numpy.*//g' /landsat/requirements.txt
RUN sed -i 's/scipy.*//g' /landsat/requirements.txt
RUN sed -i 's/scikit-image.*//g' /landsat/requirements.txt
RUN cd /landsat && pip install -e .

View File

@@ -1,22 +0,0 @@
require "formula"
class LandsatUtil < Formula
homepage "http://www.developmentseed.org"
url "https://github.com/developmentseed/landsat-util/archive/v0.2.0.tar.gz"
sha1 "00e994e82eccec4f951b66c0d30f29af41bf4bb7"
head "https://github.com/developmentseed/landsat-util.git"
depends_on "gdal"
depends_on "libtiff"
depends_on "imagemagick" => "with-libtiff"
depends_on "https://raw.githubusercontent.com/OSGeo/homebrew-osgeo4mac/master/Formula/orfeo-40.rb"
def install
minor = `python -c 'import sys; print(sys.version_info[1])'`.chomp
ENV.prepend_create_path "PYTHONPATH", libexec/"lib/python2.#{minor}/site-packages"
system "python", "setup.py", "install",
"--prefix=#{libexec}",
"--install-scripts=#{bin}"
bin.env_script_all_files(libexec+"bin", :PYTHONPATH => ENV["PYTHONPATH"])
end
end

View File

@@ -1,6 +1,12 @@
include README.md
include .gitignore
include AUTHORS.txt
include LICENSE
recursive-include landsat/assests *.prj *.sbn *.sbx
recursive-include landsat/assests *.shp *.xml *.shx *.html *.txt *.dbf
recursive-include doc *.html
include README.md
include requirements.txt
include requirements-dev.txt
recursive-include tests *
recursive-exclude * __pycache__
recursive-exclude * *.py[co]
recursive-include docs *.rst conf.py Makefile make.bat
recursive-include landsat/maps *.txt

74
Makefile Normal file
View File

@@ -0,0 +1,74 @@
.PHONY: clean-pyc clean-build docs clean
help:
@echo "clean - remove all build, test, coverage and Python artifacts"
@echo "clean-build - remove build artifacts"
@echo "clean-pyc - remove Python file artifacts"
@echo "clean-test - remove test and coverage artifacts"
@echo "lint - check style with flake8"
@echo "test - run tests quickly with the default Python"
@echo "test-all - run tests on every Python version with tox"
@echo "coverage - check code coverage quickly with the default Python"
@echo "docs - generate Sphinx HTML documentation, including API docs"
@echo "release - package and upload a release"
@echo "dist - package"
@echo "install - install the package to the active Python's site-packages"
clean: clean-build clean-pyc clean-test
clean-build:
rm -fr build/
rm -fr dist/
rm -fr .eggs/
find . -name '*.egg-info' -exec rm -fr {} +
find . -name '*.egg' -exec rm -f {} +
clean-pyc:
find . -name '*.pyc' -exec rm -f {} +
find . -name '*.pyo' -exec rm -f {} +
find . -name '*~' -exec rm -f {} +
find . -name '__pycache__' -exec rm -fr {} +
clean-test:
rm -fr .tox/
rm -f .coverage
rm -fr htmlcov/
test:
nosetests
test-all:
tox
coverage:
coverage run --source landsat setup.py test
coverage report -m
coverage html
open htmlcov/index.html
docs:
rm -f docs/landsat.rst
rm -f docs/modules.rst
sphinx-apidoc -o docs/ landsat
$(MAKE) -C docs clean
$(MAKE) -C docs html
open docs/_build/html/index.html
docs-test:
python setup.py check --restructuredtext
test-release: clean
python setup.py sdist upload -r pypitest
python setup.py bdist_wheel upload -r pypitest
release: clean
python setup.py sdist upload
python setup.py bdist_wheel upload
dist: clean
python setup.py sdist
python setup.py bdist_wheel
ls -l dist
install: clean
python setup.py install

View File

@@ -1,159 +1,44 @@
Landsat-util
===============
.. image:: https://travis-ci.org/developmentseed/landsat-util.svg?branch=master
:target: https://travis-ci.org/developmentseed/landsat-util
.. image:: https://badge.fury.io/py/landsat-util.svg
:target: http://badge.fury.io/py/landsat-util
.. image:: https://img.shields.io/pypi/dm/landsat-util.svg
:target: https://pypi.python.org/pypi/landsat-util/
:alt: Downloads
.. image:: https://img.shields.io/pypi/l/landsat-util.svg
:target: https://pypi.python.org/pypi/landsat-util/
:alt: License
Landsat-util is a command line utility that makes it easy to search, download, and process Landsat imagery.
This tool uses Development Seed's `API for Landsat Metadata <https://github.com/developmentseed/landsat-api>`_.
Docs
+++++
This API is accessible here: http://api.developmentseed.com:8000/landsat
For full documentation visit: https://pythonhosted.org/landsat-util/
You can also run your own API and connect it to this tool.
To run the documentation locally::
Installation
============
**On Mac**
Use brew to install landsat-util:
.. code-block:: console
$: brew install https://raw.githubusercontent.com/developmentseed/landsat-util/master/Formula/landsat-util.rb
For the dev version try:
.. code-block:: console
$: brew install https://raw.githubusercontent.com/developmentseed/landsat-util/master/Formula/landsat-util.rb --HEAD
**On Ubuntu**
Use pip to install landsat-util:
.. code-block:: console
$: sudo apt-add-repository ppa:ubuntugis/ubuntugis-unstable
$: sudo apt-get update
$: sudo apt-get install git python-pip build-essential libssl-dev libffi-dev python-dev python-gdal libgdal1-dev gdal-bin imagemagick geotiff-bin -y
$: sudo pip install -U git+git://github.com/developmentseed/landsat-util.git
**On Other systems**
Make sure you have these dependencies:
- GDAL
- ImageMagick
- Orfeo-40
Then Run:
.. code-block:: console
$: pip install -U git+git://github.com/developmentseed/landsat-util.git
Alternatively, you can also download the package and run:
.. code-block:: console
$: python setup.py install
Overview: What can landsat-util do?
============
Landsat-util has three main functions:
- **Search** for landsat tiles based on several search parameters.
- **Download** landsat images.
- **Image processing** and pan sharpening on landsat images.
These three functions can be performed separately or all at once.
**Help**: Type ``landsat -h`` for detailed usage parameters.
Step 1: Search
============
Search returns information about all landsat tiles that match your criteria. This includes a link to an unprocessed preview of the tile. The most important result is the tile's *sceneID*, which you will need to download the tile (see step 2 below).
Search for landsat tiles in a given geographical region, using any of the following:
- **Paths and rows**: If you know the paths and rows you want to search for.
- **Country name**: If you know what country you want imagery for.
- **Custom shapefile**: Use a tool such as http://geojson.io/ to generate custom shapefiles bounding your geographical region of interest. Landsat-util will download tiles within this shapefile.
Additionally filter your search using the following parameters:
- **Start and end dates** for when imagery was taken
- **Maximum percent cloud cover** (default is 20%)
**Examples of search**:
Search by path and row:
``$: landsat search --cloud 4 --start "january 1 2014" --end "january 10 2014" pr 009 045``
Search by country (The full list of countries is http://goo.gl/8H9wuq):
``$: landsat search --cloud 4 --start "january 1 2014" --end "August 25 2014" country 'Isle of Man'``
Search by custom shapefile:
``$: landsat search --cloud 6 --start "july 01 2014" --end "august 1 2014" shapefile path/to/shapefile.shp``
Step 2: Download
============
You can download tiles using their unique sceneID, which you get from landsat search.
**Examples of download**:
Download images by their custom sceneID, which you get from landsat search:
``$: landsat download LC80090452014008LGN00``
Search and download tiles all at once with the --download flag:
``$: landsat search --download --cloud 4 --start "january 01 2014" --end "january 10 2014" pr 009 045``
Step 3: Image processing
============
You can process your downloaded tiles with our custom image processing algorithms. In addition, you can choose to pansharpen your images.
**Examples of image processing**:
Process images that are already downloaded. Remember, the program only accepts zip files:
``$: landsat process path/to/LC80090452014008LGN00.tar.bz``
Process *and* pansharpen a downloaded image:
``$: landsat process --pansharpen path/to/LC80090452014008LGN00.tar.bz``
Search, download, and process images all at once using the --imageprocess flag:
``$: landsat search --imageprocess --cloud 6 --start "january 01 2014" --end "january 10 2014" shapefile path/to/shapefile.shp``
$ pip install -r requirements/dev.txt
$ cd docs
$ make html
Important Notes
===============
Recently Added Features
+++++++++++++++++++++++
- All downloaded and processed images are stored at your home directory in landsat forlder: ``~/landsat``
- Improved pansharpening
- Use BQA bands for cloud/snow coverage and use in color correction
- Add support for different NDVI color maps (three included)
- Add support for image clipping using the new `--clip` flag
- If you are not sure what images you are looking for, make sure to use ``--onlysearch`` flag to view the results first. The image thumbnail web address that is included in the results can be used to make sure that clouds are not obscuring the subject of interest. Run the search again if you need to narrow down your result and then start downloading images. Each image is usually more than 700mb and it might takes a very long time if there are too many images to download
- Image processing is a very heavy and resource consuming task. Each process takes about 20-30 mins. We recommend that you run the processes in smaller badges. Pansharpening, while increasing image resolution 2x, substantially increases processing time.
- Country based search queries can return a large number of images; for countries that return large search results we recommend selecting best imagery based on thumbnails and then using the download tool to install specific imagery based on Landsat scene ID.
To Do List
Change Log
++++++++++
- Add longitude latitude search
- Add Sphinx Documentation
- Improve console output
- Add more color options such as false color, true color, etc.
- Add capacity for NDVI output
- Add alternative projections (currently only option is default web-mercator; EPSG: 3857)
- Connect search to Google Address API
- Include 16-bit image variant in output
- Add support for color correct looping over multiple compressed inputs (currently just 1)
See `CHANGES.txt <CHANGES.txt>`_.

View File

@@ -1,10 +1,5 @@
#!/usr/bin/env python
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco
#
# Landsat Util
# License: CC0 1.0 Universal
import landsat.landsat

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

192
docs/Makefile Normal file
View File

@@ -0,0 +1,192 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
clean:
rm -rf $(BUILDDIR)/*
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Landsat-util.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Landsat-util.qhc"
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/Landsat-util"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Landsat-util"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

58
docs/api.rst Normal file
View File

@@ -0,0 +1,58 @@
Module Index
============
downloader.py
+++++++++++++++++++++++++
.. automodule:: landsat.downloader
:members:
:undoc-members:
:show-inheritance:
uploader.py
+++++++++++++++++++++++++
.. automodule:: landsat.uploader
:members:
:undoc-members:
:show-inheritance:
image.py
+++++++++++++++++++++++++
.. automodule:: landsat.image
:members:
:undoc-members:
:show-inheritance:
landsat.py
+++++++++++++++++++++++++
.. automodule:: landsat.landsat
:members:
:undoc-members:
:show-inheritance:
mixins.py
++++++++++++++++++++++
.. automodule:: landsat.mixins
:members:
:undoc-members:
:show-inheritance:
search.py
++++++++++++++++++++++
.. automodule:: landsat.search
:members:
:undoc-members:
:show-inheritance:
utils.py
+++++++++++++++++++++
.. automodule:: landsat.utils
:members:
:undoc-members:
:show-inheritance:

138
docs/commands.rst Normal file
View File

@@ -0,0 +1,138 @@
Commands
========
::
usage: landsat [-h] [--version] {search,download,process} ...
Landsat-util is a command line utility that makes it easy to
search, download, and process Landsat imagery.
Commands:
Search:
landsat.py search [-p --pathrow] [--lat] [--lon] [-l LIMIT] [-s START] [-e END] [-c CLOUD] [-h]
optional arguments:
-p, --pathrow Paths and Rows in order separated by comma. Use quotes "001,003".
Example: path,row,path,row 001,001,190,204
--lat Latitude
--lon Longitude
--address Street address
-l LIMIT, --limit LIMIT
Search return results limit default is 10
-s START, --start START
Start Date - Most formats are accepted e.g.
Jun 12 2014 OR 06/12/2014
-e END, --end END End Date - Most formats are accepted e.g.
Jun 12 2014 OR 06/12/2014
--latest N Returns the N latest images within the last 365 days.
-c CLOUD, --cloud CLOUD
Maximum cloud percentage. Default: 20 perct
--json Returns a bare JSON response
--geojson Returns a geojson response
-h, --help Show this help message and exit
Download:
landsat download sceneID [sceneID ...] [-h] [-b --bands]
positional arguments:
sceneID Provide Full sceneIDs. You can add as many sceneIDs as you wish
Example: landast download LC81660392014196LGN00
optional arguments:
-b --bands If you specify bands, landsat-util will try to download the band from S3.
If the band does not exist, an error is returned
-h, --help Show this help message and exit
-d, --dest Destination path
-p, --process Process the image after download
--pansharpen Whether to also pansharpen the processed image.
Pansharpening requires larger memory
--ndvi Calculates NDVI and produce a RGB GTiff with seperate colorbar.
--ndvigrey Calculates NDVI and produce a greyscale GTiff.
--clip Clip the image with the bounding box provided. Values must be in WGS84 datum,
and with longitude and latitude units of decimal degrees separated by comma.
Example: --clip=-346.06658935546875,49.93531194616915,-345.4595947265625,50.2682767372753
-u --upload Upload to S3 after the image processing completed
--key Amazon S3 Access Key (You can also be set AWS_ACCESS_KEY_ID as
Environment Variables)
--secret Amazon S3 Secret Key (You can also be set AWS_SECRET_ACCESS_KEY as
Environment Variables)
--bucket Bucket name (required if uploading to s3)
--region URL to S3 region e.g. s3-us-west-2.amazonaws.com
--force-unzip Force unzip tar file
Process:
landsat.py process path [-h] [-b --bands] [-p --pansharpen]
positional arguments:
path Path to the landsat image folder or zip file
optional arguments:
-b --bands Specify bands. The bands should be written in sequence with no spaces
Default: Natural colors (432)
Example --bands 432
--pansharpen Whether to also pansharpen the process image.
Pansharpening requires larger memory
--ndvi Calculates NDVI and produce a RGB GTiff with seperate colorbar.
--ndvigrey Calculates NDVI and produce a greyscale GTiff.
--clip Clip the image with the bounding box provided. Values must be in WGS84 datum,
and with longitude and latitude units of decimal degrees separated by comma.
Example: --clip=-346.06658935546875,49.93531194616915,-345.4595947265625,50.2682767372753
-v, --verbose Show verbose output
-h, --help Show this help message and exit
-u --upload Upload to S3 after the image processing completed
--key Amazon S3 Access Key (You can also be set AWS_ACCESS_KEY_ID as
Environment Variables)
--secret Amazon S3 Secret Key (You can also be set AWS_SECRET_ACCESS_KEY as
Environment Variables)
--bucket Bucket name (required if uploading to s3)
--region URL to S3 region e.g. s3-us-west-2.amazonaws.com
--force-unzip Force unzip tar file
positional arguments:
{search,download,process}
Landsat Utility
search Search Landsat metdata
download Download images from Google Storage
process Process Landsat imagery
optional arguments:
-h, --help show this help message and exit
--version show program's version number and exit

311
docs/conf.py Normal file
View File

@@ -0,0 +1,311 @@
# -*- coding: utf-8 -*-
#
# Landsat-util documentation build configuration file, created by
# sphinx-quickstart on Thu May 28 17:52:10 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
from mock import Mock as MagicMock
class Mock(MagicMock):
@classmethod
def __getattr__(cls, name):
return Mock()
MOCK_MODULES = ['numpy', 'rasterio', 'scipy', 'scikit-image', 'homura', 'boto',
'termcolor', 'requests', 'python-dateutil']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
import os
import sphinx_rtd_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
sys.path.insert(0, project_root)
print project_root
import landsat
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# -- Sphinx configuration fragment for the landsat-util documentation ------
# NOTE(review): this excerpt starts mid-file; the names `landsat` and
# `sphinx_rtd_theme` referenced below are imported earlier in conf.py,
# outside this view -- confirm against the full file.
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',    # pull API documentation from docstrings
    'sphinx.ext.doctest',    # run doctest snippets embedded in the docs
    'sphinx.ext.coverage',   # report undocumented objects
    'sphinx.ext.ifconfig',   # conditional documentation content
    'sphinx.ext.viewcode',   # link documentation pages to highlighted source
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'landsat-util'
copyright = u'2015, Development Seed'
author = u'Development Seed'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = landsat.__version__  # single-sourced from the landsat package
# The full version, including alpha/beta/rc tags.
release = landsat.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
# Locate the Read the Docs theme installed as a Python package.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Landsat-utildoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
    # Latex figure (float) alignment
    #'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'Landsat-util.tex', u'Landsat-util Documentation',
     u'Development Seed', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'landsat-util', u'Landsat-util Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Landsat-util', u'Landsat-util Documentation',
     author, 'Landsat-util', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

39
docs/index.rst Normal file
View File

@@ -0,0 +1,39 @@
landsat-util
============
.. image:: https://travis-ci.org/developmentseed/landsat-util.svg?branch=v0.5
:target: https://travis-ci.org/developmentseed/landsat-util
.. image:: https://badge.fury.io/py/landsat-util.svg
:target: http://badge.fury.io/py/landsat-util
.. image:: https://img.shields.io/pypi/dm/landsat-util.svg
:target: https://pypi.python.org/pypi/landsat-util/
:alt: Downloads
.. image:: https://img.shields.io/pypi/l/landsat-util.svg
:target: https://pypi.python.org/pypi/landsat-util/
:alt: License
Landsat-util is a command line utility that makes it easy to search, download, and process Landsat imagery.
This tool uses Development Seed's `API for Landsat Metadata <https://github.com/developmentseed/landsat-api>`_.
This API is accessible here: https://api.developmentseed.org/landsat
You can also run your own API and connect it to this tool.
**Table of Contents:**
.. toctree::
:maxdepth: 3
installation
overview
commands
todo
api
notes

68
docs/installation.rst Normal file
View File

@@ -0,0 +1,68 @@
Installation
===============
Mac OSX
++++++++
::
$: pip install landsat-util
Ubuntu 14.04
++++++++++++
Use pip to install landsat-util. If you are not using virtualenv, you might have to run ``pip`` as ``sudo``::
$: sudo apt-get update
$: sudo apt-get install python-pip python-numpy python-scipy libgdal-dev libatlas-base-dev gfortran libfreetype6-dev
$: pip install landsat-util
Other systems
+++++++++++++
Make sure Python setuptools is installed::
$: python setup.py numpy six
$: python setup.py install
Docker
++++++
If you have docker installed, you can use landsat-util image on docker::
$: docker pull developmentseed/landsat-util
$: docker run -it developmentseed/landsat-util:latest /bin/sh -c "landsat -h"
To use docker version run::
$: docker run -it -v ~/landsat:/root/landsat developmentseed/landsat-util:latest landsat -h
Example commands::
$: docker run -it -v ~/landsat:/root/landsat developmentseed/landsat-util:latest landsat search --cloud 4 --start "january 1 2014" --end "january 10 2014" -p 009,045
$: docker run -it -v ~/landsat:/root/landsat developmentseed/landsat-util:latest landsat download LC80090452014008LGN00 --bands 432
This command mounts the ``landsat`` folder in your home directory to ``/root/landsat`` in docker. All downloaded and processed images are stored in the ``~/landsat`` folder of your computer.
If you are using Windows replace ``~/landsat`` with ``/c/Users/<path>``.
Upgrade
+++++++
::
$: pip install -U landsat-util
If you have installed previous version of landsat using brew, first run::
$: brew uninstall landsat-util
Running Tests
+++++++++++++
::
$: pip install -r requirements-dev.txt
$: python setup.py test

94
docs/landsat.rst Normal file
View File

@@ -0,0 +1,94 @@
landsat package
===============
Submodules
----------
landsat.decorators module
-------------------------
.. automodule:: landsat.decorators
:members:
:undoc-members:
:show-inheritance:
landsat.downloader module
-------------------------
.. automodule:: landsat.downloader
:members:
:undoc-members:
:show-inheritance:
landsat.image module
--------------------
.. automodule:: landsat.image
:members:
:undoc-members:
:show-inheritance:
landsat.landsat module
----------------------
.. automodule:: landsat.landsat
:members:
:undoc-members:
:show-inheritance:
landsat.mixins module
---------------------
.. automodule:: landsat.mixins
:members:
:undoc-members:
:show-inheritance:
landsat.ndvi module
-------------------
.. automodule:: landsat.ndvi
:members:
:undoc-members:
:show-inheritance:
landsat.search module
---------------------
.. automodule:: landsat.search
:members:
:undoc-members:
:show-inheritance:
landsat.settings module
-----------------------
.. automodule:: landsat.settings
:members:
:undoc-members:
:show-inheritance:
landsat.uploader module
-----------------------
.. automodule:: landsat.uploader
:members:
:undoc-members:
:show-inheritance:
landsat.utils module
--------------------
.. automodule:: landsat.utils
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: landsat
:members:
:undoc-members:
:show-inheritance:

263
docs/make.bat Normal file
View File

@@ -0,0 +1,263 @@
@ECHO OFF
REM Command file for Sphinx documentation
REM Usage: make.bat ^<target^>  -- run "make.bat help" for the target list.
REM Honors the SPHINXBUILD, SPHINXOPTS and PAPER environment variables.

REM Default to the sphinx-build executable on PATH when not set by the caller.
if "%SPHINXBUILD%" == "" (
    set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
REM PAPER selects the LaTeX paper size (a4 or letter).
if NOT "%PAPER%" == "" (
    set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
    set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)

REM No target given: fall through to the help text.
if "%1" == "" goto help

if "%1" == "help" (
    :help
    echo.Please use `make ^<target^>` where ^<target^> is one of
    echo. html to make standalone HTML files
    echo. dirhtml to make HTML files named index.html in directories
    echo. singlehtml to make a single large HTML file
    echo. pickle to make pickle files
    echo. json to make JSON files
    echo. htmlhelp to make HTML files and a HTML help project
    echo. qthelp to make HTML files and a qthelp project
    echo. devhelp to make HTML files and a Devhelp project
    echo. epub to make an epub
    echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
    echo. text to make text files
    echo. man to make manual pages
    echo. texinfo to make Texinfo files
    echo. gettext to make PO message catalogs
    echo. changes to make an overview over all changed/added/deprecated items
    echo. xml to make Docutils-native XML files
    echo. pseudoxml to make pseudoxml-XML files for display purposes
    echo. linkcheck to check all external links for integrity
    echo. doctest to run all doctests embedded in the documentation if enabled
    echo. coverage to run coverage check of the documentation if enabled
    goto end
)

REM "clean": wipe everything under the build directory.
if "%1" == "clean" (
    for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
    del /q /s %BUILDDIR%\*
    goto end
)

REM Check if sphinx-build is available and fallback to Python version if any
REM (exit code 9009 means "command not found" on Windows).
%SPHINXBUILD% 2> nul
if errorlevel 9009 goto sphinx_python
goto sphinx_ok

:sphinx_python
REM Fallback: invoke Sphinx as a module via the python launcher.
set SPHINXBUILD=python -m sphinx.__init__
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
    echo.
    echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
    echo.installed, then set the SPHINXBUILD environment variable to point
    echo.to the full path of the 'sphinx-build' executable. Alternatively you
    echo.may add the Sphinx directory to PATH.
    echo.
    echo.If you don't have Sphinx installed, grab it from
    echo.http://sphinx-doc.org/
    exit /b 1
)

:sphinx_ok
REM One branch per builder target; each runs sphinx-build with the matching
REM -b flag and writes into its own subdirectory of %BUILDDIR%.
if "%1" == "html" (
    %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished. The HTML pages are in %BUILDDIR%/html.
    goto end
)
if "%1" == "dirhtml" (
    %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
    goto end
)
if "%1" == "singlehtml" (
    %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
    goto end
)
if "%1" == "pickle" (
    %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished; now you can process the pickle files.
    goto end
)
if "%1" == "json" (
    %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished; now you can process the JSON files.
    goto end
)
if "%1" == "htmlhelp" (
    %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
    goto end
)
if "%1" == "qthelp" (
    %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
    echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Landsat-util.qhcp
    echo.To view the help file:
    echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Landsat-util.ghc
    goto end
)
if "%1" == "devhelp" (
    %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished.
    goto end
)
if "%1" == "epub" (
    %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished. The epub file is in %BUILDDIR%/epub.
    goto end
)
if "%1" == "latex" (
    %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
    goto end
)
if "%1" == "latexpdf" (
    %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
    cd %BUILDDIR%/latex
    make all-pdf
    cd %~dp0
    echo.
    echo.Build finished; the PDF files are in %BUILDDIR%/latex.
    goto end
)
if "%1" == "latexpdfja" (
    %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
    cd %BUILDDIR%/latex
    make all-pdf-ja
    cd %~dp0
    echo.
    echo.Build finished; the PDF files are in %BUILDDIR%/latex.
    goto end
)
if "%1" == "text" (
    %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished. The text files are in %BUILDDIR%/text.
    goto end
)
if "%1" == "man" (
    %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished. The manual pages are in %BUILDDIR%/man.
    goto end
)
if "%1" == "texinfo" (
    %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
    goto end
)
if "%1" == "gettext" (
    %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
    goto end
)
if "%1" == "changes" (
    %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
    if errorlevel 1 exit /b 1
    echo.
    echo.The overview file is in %BUILDDIR%/changes.
    goto end
)
if "%1" == "linkcheck" (
    %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
    if errorlevel 1 exit /b 1
    echo.
    echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
    goto end
)
if "%1" == "doctest" (
    %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
    if errorlevel 1 exit /b 1
    echo.
    echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
    goto end
)
if "%1" == "coverage" (
    %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
    if errorlevel 1 exit /b 1
    echo.
    echo.Testing of coverage in the sources finished, look at the ^
results in %BUILDDIR%/coverage/python.txt.
    goto end
)
if "%1" == "xml" (
    %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished. The XML files are in %BUILDDIR%/xml.
    goto end
)
if "%1" == "pseudoxml" (
    %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
    if errorlevel 1 exit /b 1
    echo.
    echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
    goto end
)

:end

7
docs/modules.rst Normal file
View File

@@ -0,0 +1,7 @@
landsat
=======
.. toctree::
:maxdepth: 4
landsat

12
docs/notes.rst Normal file
View File

@@ -0,0 +1,12 @@
Important Notes
===============
- All downloaded and processed images are stored in the ``landsat`` folder of your home directory: ``~/landsat``
- The image thumbnail web address that is included in the results can be used to make sure that clouds are not obscuring the subject of interest. Run the search again if you need to narrow down your result and then start downloading images. Each image is usually more than 700mb and it might takes a very long time if there are too many images to download
- Image processing is a very heavy and resource consuming task. Each process takes about 5-10 mins. We recommend that you run the processes in smaller batches. Pansharpening, while increasing image resolution 2x, substantially increases processing time.
- Landsat-util requires at least 2GB of Memory (RAM).
- Make sure to read over the `section on returned products <overview.html#a-note-on-returned-products>`_ as it is different depending on scene acquisition date.

100
docs/overview.rst Normal file
View File

@@ -0,0 +1,100 @@
Overview: What can landsat-util do?
====================================
Landsat-util has three main functions:
- **Search** for landsat tiles based on several search parameters.
- **Download** landsat images.
- **Image processing** and pan sharpening on landsat images.
These three functions have to be performed separately.
**Help**: Type ``landsat -h`` for detailed usage parameters.
Search
++++++
Search returns information about all landsat tiles that match your criteria. This includes a link to an unprocessed preview of the tile. The most important result is the tile's *sceneID*, which you will need to download the tile (see step 2 below).
Search for landsat tiles in a given geographical region, using any of the following:
- **Paths and rows**: If you know the paths and rows you want to search for.
- **Latitude and Longitude**: If you know the latitude and longitude of the point you want to search for.
Additionally filter your search using the following parameters:
- **Start and end dates** for when imagery was taken
- **Maximum percent cloud cover** (default is 20%)
**Examples of search**:
Search by path and row::
$: landsat search --cloud 4 --start "january 1 2014" --end "january 10 2014" -p 009,045
Search by latitude and longitude::
$: landsat search --lat 38.9004204 --lon -77.0237117
Search by latitude and longitude with pure json output (you should install geojsonio-cli first)::
$: landsat search --lat 38.9004204 --lon -77.0237117 --geojson | geojsonio
Show search output on geojsonio::
$: landsat search
Download
++++++++
You can download tiles using their unique sceneID, which you get from landsat search.
Landsat-util will download a zip file that includes all the bands. You have the option of specifying the bands you want to download. In this case, landsat-util only downloads those bands if they are available online.
**Examples of download**:
Download images by their custom sceneID, which you get from landsat search::
$: landsat download LC80090452014008LGN00
By default landsat-util downloads the full zip file from Google Storage unless you specify the bands or run an image processing right after download. For example to download only band 4, 3 and 2 for a particular sceneID run::
$: landsat download LC80090452014008LGN00 --bands 432
Download multiple sceneIDs::
$: landsat download LC80090452014008LGN00 LC80090452015008LGN00 LC80090452013008LGN00
Image processing
++++++++++++++++
You can process your downloaded tiles with our custom image processing algorithms. In addition, you can choose to pansharpen your images and specify which bands to process.
**Examples of image processing**:
Process images that are already downloaded. Remember, the program accepts both zip files and unzipped folders::
$: landsat process path/to/LC80090452014008LGN00.tar.bz
If unzipped::
$: landsat process path/to/LC80090452014008LGN00
Specify bands 3, 5 and 1::
$: landsat process path/to/LC80090452014008LGN00 --bands 351
Process *and* pansharpen a downloaded image::
$: landsat process path/to/LC80090452014008LGN00.tar.bz --pansharpen
Clip an image before processing (the coordinates shown below cover Prague)::
$: landsat process path/to/LC81920252015157LGN00.tar.bz --pansharpen --clip=-346.06658935546875,49.93531194616915,-345.4595947265625,50.2682767372753
Note: Values must be in WGS84 datum, and with longitude and latitude units of decimal degrees separated by comma.
A note on returned products
+++++++++++++++++++++++++++
Scenes acquired after 2015 will be downloaded from `AWS Public Data Sets <http://aws.amazon.com/public-data-sets/landsat/>`_ while scenes acquired before 2015 will be downloaded from `Google Earth Engine <https://earthengine.google.org/>`_. AWS provides the bands separately and so landsat-util will also pass along the bands individually if requested. In the case of Google Earth Engine, only the full, compressed image bundle is available (including all bands and metadata) and will be downloaded no matter what bands are requested.

6
docs/todo.rst Normal file
View File

@@ -0,0 +1,6 @@
To Do List
++++++++++
- Add alternative projections (currently only option is default web-mercator; EPSG: 3857)
- Include 16-bit image variant in output
- Add support for color correct looping over multiple compressed inputs (currently just 1)

View File

@@ -0,0 +1,22 @@
{
"build_systems":
[
{
"file_regex": "^[ ]*File \"(...*?)\", line ([0-9]*)",
"name": "Anaconda Python Builder",
"selector": "source.python",
"shell_cmd": "~/.virtualenvs/landsat-util/bin/python -u \"$file\""
}
],
"folders":
[
{
"follow_symlinks": true,
"path": "."
}
],
"settings":
{
"python_interpreter": "~/.virtualenvs/landsat-util/bin/python"
}
}

View File

@@ -1,5 +1 @@
import settings
import sys
if not settings.DEBUG:
sys.tracebacklimit = 0
__version__ = '0.13.0'

View File

@@ -1,300 +0,0 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" dir="ltr" lang="en-US">
<head profile="http://gmpg.org/xfn/11">
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<title>Admin 0 &#8211; Countries | Natural Earth</title>
<link rel="shortcut icon" href="favicon.ico" type="image/x-icon">
<link rel="alternate" type="application/rss+xml" title="Natural Earth RSS Feed" href="http://www.naturalearthdata.com/feed/" />
<link rel="pingback" href="http://www.naturalearthdata.com/xmlrpc.php" />
<script type="text/javascript" src="http://www.naturalearthdata.com/wp-content/themes/NEV/includes/js/suckerfish.js"></script>
<!--[if lt IE 7]>
<script src="http://ie7-js.googlecode.com/svn/version/2.0(beta3)/IE7.js" type="text/javascript"></script>
<script defer="defer" type="text/javascript" src="http://www.naturalearthdata.com/wp-content/themes/NEV/includes/js/pngfix.js"></script>
<![endif]-->
<link rel="stylesheet" href="http://www.naturalearthdata.com/wp-content/themes/NEV/style.css" type="text/css" media="screen" />
<meta name='Admin Management Xtended WordPress plugin' content='2.1.1' />
<link rel="alternate" type="application/rss+xml" title="Natural Earth &raquo; Admin 0 &#8211; Countries Comments Feed" href="http://www.naturalearthdata.com/downloads/50m-cultural-vectors/50m-admin-0-countries-2/feed/" />
<link rel='stylesheet' id='sociable-front-css-css' href='http://www.naturalearthdata.com/wp-content/plugins/sociable/sociable.css?ver=2.9.2' type='text/css' media='' />
<link rel="EditURI" type="application/rsd+xml" title="RSD" href="http://www.naturalearthdata.com/xmlrpc.php?rsd" />
<link rel="wlwmanifest" type="application/wlwmanifest+xml" href="http://www.naturalearthdata.com/wp-includes/wlwmanifest.xml" />
<link rel='index' title='Natural Earth' href='http://www.naturalearthdata.com' />
<link rel='start' title='Welcome to the Natural Earth Blog' href='http://www.naturalearthdata.com/blog/miscellaneous/test/' />
<link rel='prev' title='Admin 0 &#8211; Details' href='http://www.naturalearthdata.com/downloads/50m-cultural-vectors/50m-admin-0-details/' />
<link rel='next' title='Lakes + Reservoirs' href='http://www.naturalearthdata.com/downloads/110m-physical-vectors/110mlakes-reservoirs/' />
<meta name="generator" content="WordPress 2.9.2" />
<!-- All in One SEO Pack 1.6.10.2 by Michael Torbert of Semper Fi Web Design[309,457] -->
<meta name="description" content="There are 247 countries in the world. Greenland as separate from Denmark. Most users will want this file instead of sovereign" />
<link rel="canonical" href="http://www.naturalearthdata.com/downloads/50m-cultural-vectors/50m-admin-0-countries-2/" />
<!-- /all in one seo pack -->
<!-- begin gallery scripts -->
<link rel="stylesheet" href="http://www.naturalearthdata.com/wp-content/plugins/featured-content-gallery/css/jd.gallery.css.php" type="text/css" media="screen" charset="utf-8"/>
<link rel="stylesheet" href="http://www.naturalearthdata.com/wp-content/plugins/featured-content-gallery/css/jd.gallery.css" type="text/css" media="screen" charset="utf-8"/>
<script type="text/javascript" src="http://www.naturalearthdata.com/wp-content/plugins/featured-content-gallery/scripts/mootools.v1.11.js"></script>
<script type="text/javascript" src="http://www.naturalearthdata.com/wp-content/plugins/featured-content-gallery/scripts/jd.gallery.js.php"></script>
<script type="text/javascript" src="http://www.naturalearthdata.com/wp-content/plugins/featured-content-gallery/scripts/jd.gallery.transitions.js"></script>
<!-- end gallery scripts -->
<style type="text/css">.broken_link, a.broken_link {
text-decoration: line-through;
}</style><link href="http://www.naturalearthdata.com/wp-content/themes/NEV/css/default.css" rel="stylesheet" type="text/css" />
<style type="text/css">.recentcomments a{display:inline !important;padding:0 !important;margin:0 !important;}</style>
<!--[if lte IE 7]>
<link rel="stylesheet" type="text/css" href="http://www.naturalearthdata.com/wp-content/themes/NEV/ie.css" />
<![endif]-->
<script src="http://www.naturalearthdata.com/wp-content/themes/NEV/js/jquery-1.2.6.min.js" type="text/javascript" charset="utf-8"></script>
<script>
jQuery.noConflict();
</script>
<script type="text/javascript" charset="utf-8">
$(function(){
var tabContainers = $('div#maintabdiv > div');
tabContainers.hide().filter('#comments').show();
$('div#maintabdiv ul#tabnav a').click(function () {
tabContainers.hide();
tabContainers.filter(this.hash).show();
$('div#maintabdiv ul#tabnav a').removeClass('current');
$(this).addClass('current');
return false;
}).filter('#comments').click();
});
</script>
<script type="text/javascript" language="javascript" src="http://www.naturalearthdata.com/dataTables/media/js/jquery.dataTables.js"></script>
<script type="text/javascript" charset="utf-8">
$(document).ready(function() {
$('#ne_table').dataTable();
} );
</script>
</head>
<body>
<div id="page">
<div id="header">
<div id="headerimg">
<h1><a href="http://www.naturalearthdata.com/"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/nev_logo.png" alt="Natural Earth" title="Natural Earth" /></a></h1>
<div class="description">Free vector and raster map data at 1:10m, 1:50m, and 1:110m scales</div>
<div class="header_search"><form method="get" id="searchform" action="http://www.naturalearthdata.com/">
<label class="hidden" for="s">Search for:</label>
<div><input type="text" value="" name="s" id="s" />
<input type="submit" id="searchsubmit" value="Search" />
</div>
</form>
</div>
<!--<div class="translate_panel" style="align:top; margin-left:650px; top:50px;">
<div id="google_translate_element" style="float:left;"></div>
<script>
function googleTranslateElementInit() {
new google.translate.TranslateElement({
pageLanguage: 'en'
}, 'google_translate_element');
}
</script>
<script src="http://translate.google.com/translate_a/element.js?cb=googleTranslateElementInit"></script>
</div>-->
</div>
</div>
<div id="pagemenu" style="align:bottom;">
<ul id="page-list" class="clearfix"><li class="page_item page-item-4"><a href="http://www.naturalearthdata.com" title="Home">Home</a></li>
<li class="page_item page-item-10"><a href="http://www.naturalearthdata.com/features/" title="Features">Features</a></li>
<li class="page_item page-item-12"><a href="http://www.naturalearthdata.com/downloads/" title="Downloads">Downloads</a></li>
<li class="page_item page-item-6 current_page_parent"><a href="http://www.naturalearthdata.com/blog/" title="Blog">Blog</a></li>
<li class="page_item page-item-14"><a href="http://www.naturalearthdata.com/forums" title="Forums">Forums</a></li>
<li class="page_item page-item-366"><a href="http://www.naturalearthdata.com/corrections" title="Corrections">Corrections</a></li>
<li class="page_item page-item-16"><a href="http://www.naturalearthdata.com/about/" title="About">About</a></li>
</ul>
</div>
<hr /> <div id="main">
<div id="content" class="narrowcolumn">
&laquo; <a href="http://www.naturalearthdata.com/downloads/50m-cultural-vectors/">1:50m Cultural Vectors</a>&nbsp;
<div class="post" id="post-1541">
<h2>Admin 0 &#8211; Countries</h2>
<div class="entry">
<div class="downloadPromoBlock">
<div style="float: left; width: 170px;"><img class="alignleft size-thumbnail wp-image-92" title="home_image_3" src="http://www.naturalearthdata.com/wp-content/uploads/2009/09/countries_thumnail.png" alt="countries_thumb" width="150" height="97" /></div>
<div style="float: left; width: 410px;"><em>There are 247 countries in the world. Greenland as separate from Denmark. Most users will want this file instead of sovereign states.</em></p>
<div class="download-link-div">
<a class="download-link" rel="nofollow" title="Downloaded 7306 times (Shapefile, geoDB, or TIFF format)" onclick="if (window.urchinTracker) urchinTracker ('http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/50m/cultural/ne_50m_admin_0_countries.zip');" href="http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/50m/cultural/ne_50m_admin_0_countries.zip" onclick="javascript:pageTracker._trackPageview('/downloads/http///download/50m/cultural/ne_50m_admin_0_countries.zip');">Download countries</a> <span class="download-link-span">(798.39 KB) version 2.0.0</span>
</div>
<div class="download-link-div">
<a class="download-link" rel="nofollow" title="Downloaded 3 times (Shapefile, geoDB, or TIFF format)" onclick="if (window.urchinTracker) urchinTracker ('http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/50m/cultural/ne_50m_admin_0_countries_lakes.zip');" href="http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/50m/cultural/ne_50m_admin_0_countries_lakes.zip" onclick="javascript:pageTracker._trackPageview('/downloads/http///download/50m/cultural/ne_50m_admin_0_countries_lakes.zip');">Download without boundary lakes</a> <span class="download-link-span">(854.08 KB) version 2.0.0</span>
</div>
<p><span id="more-1541"></span></div>
</div>
<div class="downloadMainBlock">
<p><img class="alignnone size-full wp-image-1896" title="countries_banner" src="http://www.naturalearthdata.com/wp-content/uploads/2009/09/countries_banner1.png" alt="countries_banner" width="580" height="150" /></p>
<p><strong>About</strong></p>
<p>Countries distinguish between metropolitan (homeland) and independent and semi-independent portions of sovereign states. If you want to see the dependent overseas regions broken out (like in ISO codes, see France for example), use <a href="http://www.naturalearthdata.com/downloads/10m-political-vectors/10m-admin-0-nitty-gritty/" >map units</a> instead.</p>
<p>Each country is coded with a world region that roughly follows the <a href="http://unstats.un.org/unsd/methods/m49/m49regin.htm" onclick="javascript:pageTracker._trackPageview('/outbound/article/http://unstats.un.org/unsd/methods/m49/m49regin.htm');">United Nations setup</a>.</p>
<p>Includes some thematic data from the United Nations, U.S. Central Intelligence Agency, and elsewhere.</p>
<p><strong>Disclaimer</strong></p>
<p>Natural Earth Vector draws boundaries of countries according to defacto status. We show who actually controls the situation on the ground. Please feel free to mashup our disputed areas (link) theme to match your particular political outlook.</p>
<p><strong>Known Problems</strong></p>
<p>None.</p>
<p><strong>Version History</strong></p>
<ul>
<li>
<a rel="nofollow" title="Download version 2.0.0 of ne_50m_admin_0_countries.zip" href="http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/50m/cultural/ne_50m_admin_0_countries.zip" onclick="javascript:pageTracker._trackPageview('/downloads/http///download/50m/cultural/ne_50m_admin_0_countries.zip');">2.0.0</a>
</li>
<li>
1.4.0
</li>
<li>
1.3.0
</li>
<li>
1.1.0
</li>
<li>
1.0.0
</li>
</ul>
<p><a href="https://github.com/nvkelso/natural-earth-vector/blob/master/CHANGELOG" onclick="javascript:pageTracker._trackPageview('/outbound/article/https://github.com/nvkelso/natural-earth-vector/blob/master/CHANGELOG');">The master changelog is available on Github »</a>
</div>
<div class="sociable">
<div class="sociable_tagline">
<strong>Share and Enjoy:</strong>
</div>
<ul>
<li class="sociablefirst"><a rel="nofollow" target="_blank" href="http://twitter.com/home?status=Admin%200%20-%20Countries%20-%20http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F" onclick="javascript:pageTracker._trackPageview('/outbound/article/http://twitter.com/home?status=Admin%200%20-%20Countries%20-%20http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F');" title="Twitter"><img src="http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.gif" title="Twitter" alt="Twitter" style="width: 16px; height: 16px; background: transparent url(http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.png) no-repeat; background-position:-343px -55px" class="sociable-hovers" /></a></li>
<li><a rel="nofollow" target="_blank" href="http://www.facebook.com/share.php?u=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;t=Admin%200%20-%20Countries" onclick="javascript:pageTracker._trackPageview('/outbound/article/http://www.facebook.com/share.php?u=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;t=Admin%200%20-%20Countries');" title="Facebook"><img src="http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.gif" title="Facebook" alt="Facebook" style="width: 16px; height: 16px; background: transparent url(http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.png) no-repeat; background-position:-343px -1px" class="sociable-hovers" /></a></li>
<li><a rel="nofollow" target="_blank" href="http://digg.com/submit?phase=2&amp;url=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;title=Admin%200%20-%20Countries&amp;bodytext=%0D%0A%0D%0AThere%20are%20247%20countries%20in%20the%20world.%20Greenland%20as%20separate%20from%20Denmark.%20Most%20users%20will%20want%20this%20file%20instead%20of%20sovereign%20states.%0D%0A%0D%0A%5Bdrain%20file%20114%20show%20nev_download%5D%0D%0A%0D%0A%5Bdrain%20file%20350%20show%20nev_download%5D%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0AAbout%0D%0A%0D%0ACountries%20di" onclick="javascript:pageTracker._trackPageview('/outbound/article/http://digg.com/submit?phase=2&amp;url=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;title=Admin%200%20-%20Countries&amp;bodytext=%0D%0A%0D%0AThere%20are%20247%20countries%20in%20the%20world.%20Greenland%20as%20separate%20from%20Denmark.%20Most%20users%20will%20want%20this%20file%20instead%20of%20sovereign%20states.%0D%0A%0D%0A%5Bdrain%20file%20114%20show%20nev_download%5D%0D%0A%0D%0A%5Bdrain%20file%20350%20show%20nev_download%5D%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0AAbout%0D%0A%0D%0ACountries%20di');" title="Digg"><img src="http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.gif" title="Digg" alt="Digg" style="width: 16px; height: 16px; background: transparent url(http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.png) no-repeat; background-position:-235px -1px" class="sociable-hovers" /></a></li>
<li><a rel="nofollow" target="_blank" href="http://delicious.com/post?url=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;title=Admin%200%20-%20Countries&amp;notes=%0D%0A%0D%0AThere%20are%20247%20countries%20in%20the%20world.%20Greenland%20as%20separate%20from%20Denmark.%20Most%20users%20will%20want%20this%20file%20instead%20of%20sovereign%20states.%0D%0A%0D%0A%5Bdrain%20file%20114%20show%20nev_download%5D%0D%0A%0D%0A%5Bdrain%20file%20350%20show%20nev_download%5D%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0AAbout%0D%0A%0D%0ACountries%20di" onclick="javascript:pageTracker._trackPageview('/outbound/article/http://delicious.com/post?url=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;title=Admin%200%20-%20Countries&amp;notes=%0D%0A%0D%0AThere%20are%20247%20countries%20in%20the%20world.%20Greenland%20as%20separate%20from%20Denmark.%20Most%20users%20will%20want%20this%20file%20instead%20of%20sovereign%20states.%0D%0A%0D%0A%5Bdrain%20file%20114%20show%20nev_download%5D%0D%0A%0D%0A%5Bdrain%20file%20350%20show%20nev_download%5D%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0AAbout%0D%0A%0D%0ACountries%20di');" title="del.icio.us"><img src="http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.gif" title="del.icio.us" alt="del.icio.us" style="width: 16px; height: 16px; background: transparent url(http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.png) no-repeat; background-position:-199px -1px" class="sociable-hovers" /></a></li>
<li><a rel="nofollow" target="_blank" href="http://www.google.com/bookmarks/mark?op=edit&amp;bkmk=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;title=Admin%200%20-%20Countries&amp;annotation=%0D%0A%0D%0AThere%20are%20247%20countries%20in%20the%20world.%20Greenland%20as%20separate%20from%20Denmark.%20Most%20users%20will%20want%20this%20file%20instead%20of%20sovereign%20states.%0D%0A%0D%0A%5Bdrain%20file%20114%20show%20nev_download%5D%0D%0A%0D%0A%5Bdrain%20file%20350%20show%20nev_download%5D%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0AAbout%0D%0A%0D%0ACountries%20di" onclick="javascript:pageTracker._trackPageview('/outbound/article/http://www.google.com/bookmarks/mark?op=edit&amp;bkmk=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;title=Admin%200%20-%20Countries&amp;annotation=%0D%0A%0D%0AThere%20are%20247%20countries%20in%20the%20world.%20Greenland%20as%20separate%20from%20Denmark.%20Most%20users%20will%20want%20this%20file%20instead%20of%20sovereign%20states.%0D%0A%0D%0A%5Bdrain%20file%20114%20show%20nev_download%5D%0D%0A%0D%0A%5Bdrain%20file%20350%20show%20nev_download%5D%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0AAbout%0D%0A%0D%0ACountries%20di');" title="Google Bookmarks"><img src="http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.gif" title="Google Bookmarks" alt="Google Bookmarks" style="width: 16px; height: 16px; background: transparent url(http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.png) no-repeat; background-position:-91px -19px" class="sociable-hovers" /></a></li>
<li><a rel="nofollow" target="_blank" href="http://slashdot.org/bookmark.pl?title=Admin%200%20-%20Countries&amp;url=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F" onclick="javascript:pageTracker._trackPageview('/outbound/article/http://slashdot.org/bookmark.pl?title=Admin%200%20-%20Countries&amp;url=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F');" title="Slashdot"><img src="http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.gif" title="Slashdot" alt="Slashdot" style="width: 16px; height: 16px; background: transparent url(http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.png) no-repeat; background-position:-145px -55px" class="sociable-hovers" /></a></li>
<li><a rel="nofollow" target="_blank" href="http://www.stumbleupon.com/submit?url=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;title=Admin%200%20-%20Countries" onclick="javascript:pageTracker._trackPageview('/outbound/article/http://www.stumbleupon.com/submit?url=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;title=Admin%200%20-%20Countries');" title="StumbleUpon"><img src="http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.gif" title="StumbleUpon" alt="StumbleUpon" style="width: 16px; height: 16px; background: transparent url(http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.png) no-repeat; background-position:-217px -55px" class="sociable-hovers" /></a></li>
<li><a rel="nofollow" target="_blank" href="mailto:?subject=Admin%200%20-%20Countries&amp;body=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F" title="email"><img src="http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.gif" title="email" alt="email" style="width: 16px; height: 16px; background: transparent url(http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.png) no-repeat; background-position:-325px -1px" class="sociable-hovers" /></a></li>
<li><a rel="nofollow" target="_blank" href="http://www.linkedin.com/shareArticle?mini=true&amp;url=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;title=Admin%200%20-%20Countries&amp;source=Natural+Earth+Free+vector+and+raster+map+data+at+1%3A10m%2C+1%3A50m%2C+and+1%3A110m+scales&amp;summary=%0D%0A%0D%0AThere%20are%20247%20countries%20in%20the%20world.%20Greenland%20as%20separate%20from%20Denmark.%20Most%20users%20will%20want%20this%20file%20instead%20of%20sovereign%20states.%0D%0A%0D%0A%5Bdrain%20file%20114%20show%20nev_download%5D%0D%0A%0D%0A%5Bdrain%20file%20350%20show%20nev_download%5D%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0AAbout%0D%0A%0D%0ACountries%20di" onclick="javascript:pageTracker._trackPageview('/outbound/article/http://www.linkedin.com/shareArticle?mini=true&amp;url=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;title=Admin%200%20-%20Countries&amp;source=Natural+Earth+Free+vector+and+raster+map+data+at+1%3A10m%2C+1%3A50m%2C+and+1%3A110m+scales&amp;summary=%0D%0A%0D%0AThere%20are%20247%20countries%20in%20the%20world.%20Greenland%20as%20separate%20from%20Denmark.%20Most%20users%20will%20want%20this%20file%20instead%20of%20sovereign%20states.%0D%0A%0D%0A%5Bdrain%20file%20114%20show%20nev_download%5D%0D%0A%0D%0A%5Bdrain%20file%20350%20show%20nev_download%5D%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0A%0D%0AAbout%0D%0A%0D%0ACountries%20di');" title="LinkedIn"><img src="http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.gif" title="LinkedIn" alt="LinkedIn" style="width: 16px; height: 16px; background: transparent url(http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.png) no-repeat; background-position:-1px -37px" class="sociable-hovers" /></a></li>
<li class="sociablelast"><a rel="nofollow" target="_blank" href="http://reddit.com/submit?url=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;title=Admin%200%20-%20Countries" onclick="javascript:pageTracker._trackPageview('/outbound/article/http://reddit.com/submit?url=http%3A%2F%2Fwww.naturalearthdata.com%2Fdownloads%2F50m-cultural-vectors%2F50m-admin-0-countries-2%2F&amp;title=Admin%200%20-%20Countries');" title="Reddit"><img src="http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.gif" title="Reddit" alt="Reddit" style="width: 16px; height: 16px; background: transparent url(http://www.naturalearthdata.com/wp-content/plugins/sociable/images/services-sprite.png) no-repeat; background-position:-55px -55px" class="sociable-hovers" /></a></li>
</ul>
</div>
</div>
</div>
</div>
<div id="sidebar">
<ul><li id='text-5' class='widget widget_text'><h2 class="widgettitle">Stay up to Date</h2>
<div class="textwidget"> Know when a new version of Natural Earth is released by subscribing to our <a href="http://www.naturalearthdata.com/updates/" class="up-to-date-link" >announcement list</a>.</div>
</li></ul><ul><li id='text-2' class='widget widget_text'><h2 class="widgettitle">Find a Problem?</h2>
<div class="textwidget"><div>
<div style="float:left; width:65px;"><a href="/corrections/index.php?a=add"><img class="alignleft" title="New Ticket" src="http://www.naturalearthdata.com/corrections/img/newticket.png" alt="" width="60" height="60" /></a></div><div class="textwidget" style="float:left;width:120px; font-size:1.2em; font-size-adjust:none; font-style:normal;
font-variant:normal; font-weight:normal; line-height:normal;">Submit suggestions and bug reports via our <a href="/corrections/index.php?a=add">correction system</a> and track the progress of your edits.</div>
</div></div>
</li></ul><ul><li id='text-3' class='widget widget_text'><h2 class="widgettitle">Join Our Community</h2>
<div class="textwidget"><div>
<div style="float:left; width:65px;"><a href="/forums/"><img src="http://www.naturalearthdata.com/wp-content/uploads/2009/08/green_globe_chat_bubble_562e.png" alt="forums" title="Chat in the forum!" width="50" height="50" /></a></div><div class="textwidget" style="float:left;width:120px; font-size:1.2em; font-size-adjust:none; font-style:normal;
font-variant:normal; font-weight:normal; line-height:normal;">Talk back and discuss Natural Earth in the <a href="/forums/">Forums</a>.</div>
</div></div>
</li></ul><ul><li id='text-4' class='widget widget_text'><h2 class="widgettitle">Thank You</h2>
<div class="textwidget">Our data downloads are generously hosted by Florida State University.</div>
</li></ul> </div>
</div>
<hr />
<div id="footer">
<div id="footerarea">
<div id="footerlogos">
<p>Supported by:</p>
<div class="footer-ad-box">
<a href="http://www.nacis.org" target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/nacis.png" alt="NACIS" /></a>
</div>
<div class="footer-ad-box">
<a href="http://www.cartotalk.com" target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/cartotalk_ad.png" alt="Cartotalk" /></a>
</div>
<div class="footer-ad-box">
<a href="http://www.mapgiving.org" target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/mapgiving.png" alt="Mapgiving" /></a>
</div>
<div class="footer-ad-box">
<a href="http://www.geography.wisc.edu/cartography/" target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/wisconsin.png" alt="University of Wisconsin Madison - Cartography Dept." /></a>
</div>
<div class="footer-ad-box">
<a href="http://www.shadedrelief.com" target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/shaded_relief.png" alt="Shaded Relief" /></a>
</div>
<div class="footer-ad-box">
<a href="http://www.xnrproductions.com " target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/xnr.png" alt="XNR Productions" /></a>
</div>
<p style="clear:both;"></p>
<div class="footer-ad-box">
<a href="http://www.freac.fsu.edu" target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/fsu.png" alt="Florida State University - FREAC" /></a>
</div>
<div class="footer-ad-box">
<a href="http://www.springercartographics.com" target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/scllc.png" alt="Springer Cartographics LLC" /></a>
</div>
<div class="footer-ad-box">
<a href="http://www.washingtonpost.com" target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/wpost.png" alt="Washington Post" /></a>
</div>
<div class="footer-ad-box">
<a href="http://www.redgeographics.com" target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/redgeo.png" alt="Red Geographics" /></a>
</div>
<div class="footer-ad-box">
<a href="http://kelsocartography.com/blog " target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/kelso.png" alt="Kelso Cartography" /></a>
</div>
<p style="clear:both;"></p>
<div class="footer-ad-box">
<a href="http://www.avenza.com" target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/avenza.png" alt="Avenza Systems Inc." /></a>
</div>
<div class="footer-ad-box">
<a href="http://www.stamen.com" target="_blank"><img src="http://www.naturalearthdata.com/wp-content/themes/NEV/images/stamen_ne_logo.png" alt="Stamen Design" /></a>
</div>
</div>
<p style="clear:both;"></p>
<span id="footerleft">
&copy; 2012. Natural Earth. All rights reserved.
</span>
<span id="footerright">
<!-- Please help promote WordPress and simpleX. Do not remove -->
<div>Powered by <a href="http://wordpress.org/">WordPress</a></div>
<div><a href="http://www.naturalearthdata.com/wp-admin">Staff Login &raquo;</a></div>
</span>
</div>
</div>
<!-- Google Analytics for WordPress | http://yoast.com/wordpress/google-analytics/ -->
<script type="text/javascript">
var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
document.write(unescape("%3Cscript src='" + gaJsHost + "google-analytics.com/ga.js' type='text/javascript'%3E%3C/script%3E"));
</script>
<script type="text/javascript">
try {
var pageTracker = _gat._getTracker("UA-10168306-1");
} catch(err) {}
</script>
<script src="http://www.naturalearthdata.com/wp-content/plugins/google-analytics-for-wordpress/custom_se.js" type="text/javascript"></script>
<script type="text/javascript">
try {
// Cookied already:
pageTracker._trackPageview();
} catch(err) {}
</script>
<!-- End of Google Analytics code -->
</body>
</html>

View File

@@ -1 +0,0 @@
GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]

View File

@@ -1 +0,0 @@
GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]

File diff suppressed because one or more lines are too long

View File

@@ -1,166 +0,0 @@
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco, KAPPS-
#
# License: CC0 1.0 Universal
import os
import re
import errno
import shutil
from tempfile import mkdtemp
import ogr2ogr
import settings
import ogrinfo
from general_helper import (Capturing, check_create_folder, three_digit,
get_file, create_paired_list)
# Clips the Landsat WRS-2 descending tile shapefile against either a user
# supplied shapefile or a country outline, then derives the matching
# (path, row) pairs. Legacy Python 2 module (print statements).
class Clipper(object):
def __init__(self):
# Directory with the bundled shapefile assets (the 'assests' spelling
# mirrors settings.ASSESTS_DIR).
self.assests_dir = settings.ASSESTS_DIR
# Temporary working directory for intermediate shapefiles.
self.shapefile_output = mkdtemp()
def shapefile(self, file):
"""
Clip the shapefiles and provide rows and paths
Attributes:
file - a string containing the full path to the shapefile
Return:
paired list of paths and rows on success, False on failure
"""
try:
# Apply assigned ('a') then target ('t') SRS adjustments to
# EPSG:4326 before clipping; each helper returns True on success.
if self.__srs_adjustment(file, 'a'):
if self.__srs_adjustment(file, 't'):
if self.__clip_shapefile(file):
rps = self.__generate_path_row('landsat-tiles.shp',
'landsat-tiles')
self._cleanup()
return rps
return False
except ogr2ogr.OgrError:
# Any ogr2ogr failure is reported to the caller as False.
return False
def country(self, name):
"""
Provide the rows and paths of the country name provided:
Attributes:
name - string of the country name or country alpha-3 code. For
the full list consult: http://goo.gl/8H9wuq
Return:
paired tuples of paths and rows. e.g. [('145', u'057'),
('145', u'058')]
"""
try:
self.__extract_country(name)
self.__clip_shapefile('country.shp')
rps = self.__generate_path_row('landsat-tiles.shp', 'landsat-tiles')
self._cleanup()
return rps
except ogr2ogr.OgrError:
return False
def _cleanup(self):
""" Remove temp folder """
try:
shutil.rmtree(self.shapefile_output)
except OSError as exc:
# Ignore "does not exist"; re-raise any other OS error.
if exc.errno != errno.ENOENT:
raise
def __srs_adjustment(self, file, load='a', type='EPSG:4326'):
""" Run SRS adjustments
Attributes:
file - full path to the shapefile
load - load key, consult ogr2ogr documentation
type - type key, consult ogr2ogr documentation
"""
print "Executing SRS adjustments"
output = '%s/%s' % (self.shapefile_output, get_file(file))
argv = ['', '-%s_srs' % load, type, os.path.dirname(output), file]
if os.path.isfile(output):
# NOTE(review): 'input' is assigned here but never used afterwards.
input = output
argv.insert(1, '-overwrite')
ogr2ogr.main(argv)
return True
def __extract_country(self, name):
""" Create a new country shapefile with rows and paths
Attributes:
name - name of the country shapefile name e.g. country.shp
"""
print "Extracting the country: %s" % name
input = '%s/ne_50m_admin_0_countries/ne_50m_admin_0_countries.shp' % \
self.assests_dir
output = '%s/country.shp' % self.shapefile_output
# Match either the admin name or the alpha-3 country code.
argv = ['', '-where', 'admin like "%s" or adm0_a3 like "%s"' %
(name, name), output, input]
if os.path.isfile(output):
argv.insert(1, '-overwrite')
ogr2ogr.main(argv)
return True
def __clip_shapefile(self, file):
""" Create a new shapefile with rows and paths added to it """
print "Clipping the shapefile: %s" % get_file(file)
clipper = '%s/%s' % (self.shapefile_output, get_file(file))
output = '%s/landsat-tiles.shp' % self.shapefile_output
input = '%s/wrs2_descending/wrs2_descending.shp' % self.assests_dir
argv = ['', '-clipsrc', clipper, output, input]
if os.path.isfile(output):
argv.insert(1, '-overwrite')
ogr2ogr.main(argv)
return True
def __generate_path_row(self, source, layer=''):
""" Filter rows and paths based on the clipped shapefile """
print "Generating paths and rows"
source = self.shapefile_output + '/' + source
# Capture ogrinfo's stdout so the PATH/ROW values can be parsed below.
with Capturing() as output:
ogrinfo.main(
['',
'-sql',
'SELECT PATH, ROW FROM "%s"' % layer, source, layer
])
# Convert the above output into a list with rows and paths
rp = [re.sub(r'([A-Z]|[a-z]|\s|\(|\)|\'|\"|=|,|:|\.0|\.)', '', a)
for a in str(output).split(',') if ('ROW' in a or 'PATH' in a)
and '(3.0)' not in a]
# Zero-pad every value to three digits (e.g. '57' -> '057').
for k, v in enumerate(rp):
rp[k] = three_digit(v)
s = open('%s/rows_paths.txt' % (self.shapefile_output), 'w')
s.write(','.join(rp))
s.close()
print 'The paths and rows are: "%s"' % ','.join(rp)
return create_paired_list(rp)

13
landsat/decorators.py Normal file
View File

@@ -0,0 +1,13 @@
import warnings
import rasterio
def rasterio_decorator(func):
    """Decorate ``func`` so it runs inside a rasterio driver context with
    Python warnings silenced.

    :param func:
        the callable to wrap
    :returns:
        a wrapper with the same call signature (and, via functools.wraps,
        the same ``__name__``/``__doc__``) as ``func``
    """
    import functools

    # Preserve the wrapped callable's metadata; the original wrapper
    # reported itself as 'wrapped_f' in tracebacks and introspection.
    @functools.wraps(func)
    def wrapped_f(*args, **kwargs):
        with warnings.catch_warnings():
            # rasterio operations commonly emit noisy warnings; ignore them
            # only for the duration of this call.
            warnings.simplefilter("ignore")
            with rasterio.drivers():
                return func(*args, **kwargs)
    return wrapped_f

314
landsat/downloader.py Normal file
View File

@@ -0,0 +1,314 @@
# Landsat Util
# License: CC0 1.0 Universal
from __future__ import print_function, division, absolute_import
from xml.etree import ElementTree
from os.path import join, exists, getsize
import requests
from usgs import api, USGSError
from homura import download as fetch
from .utils import check_create_folder, url_builder
from .mixins import VerbosityMixin
from . import settings
class RemoteFileDoesntExist(Exception):
    """Signals that a file requested from a remote source is absent."""
class IncorrectSceneId(Exception):
    """Signals that a supplied Landsat scene id is malformed."""
class USGSInventoryAccessMissing(Exception):
    """Signals that the user lacks USGS Inventory Service access."""
class Downloader(VerbosityMixin):
""" The downloader class.

Downloads Landsat scenes, trying AWS S3 first (when bands are given),
then Google Storage, then USGS EarthExplorer.
"""
def __init__(self, verbose=False, download_dir=None, usgs_user=None, usgs_pass=None):
# NOTE(review): 'verbose' is accepted but not stored on the instance;
# presumably VerbosityMixin supplies a default - confirm before relying
# on it.
self.download_dir = download_dir if download_dir else settings.DOWNLOAD_DIR
self.google = settings.GOOGLE_STORAGE
self.s3 = settings.S3_LANDSAT
self.usgs_user = usgs_user
self.usgs_pass = usgs_pass
# Make sure the download directory exists
check_create_folder(self.download_dir)
def download(self, scenes, bands=None):
"""
Download scenes from Google Storage or Amazon S3 if bands are provided
:param scenes:
A list of scene IDs
:type scenes:
List
:param bands:
A list of bands. Default value is None.
:type bands:
List
:returns:
(List) includes downloaded scenes as key and source as value (aws or google)
"""
if isinstance(scenes, list):
files = []
for scene in scenes:
# for all scenes if bands provided, first check AWS, if the bands exist
# download them, otherwise use Google and then USGS.
try:
# if bands are not provided, directly go to Google and then USGS
if not isinstance(bands, list):
raise RemoteFileDoesntExist
files.append(self.amazon_s3(scene, bands))
except RemoteFileDoesntExist:
try:
files.append(self.google_storage(scene, self.download_dir))
except RemoteFileDoesntExist:
# Last resort: USGS EarthExplorer (needs credentials).
files.append(self.usgs_eros(scene, self.download_dir))
return files
else:
raise Exception('Expected sceneIDs list')
def usgs_eros(self, scene, path):
""" Downloads the image from USGS.

:param scene:
The scene ID
:type scene:
String
:param path:
The directory path to where the image should be stored
:type path:
String
:raises RemoteFileDoesntExist:
when the scene cannot be obtained from any source
:raises USGSInventoryAccessMissing:
when login succeeds but inventory access is denied
"""
# download from usgs if login information is provided
if self.usgs_user and self.usgs_pass:
try:
api_key = api.login(self.usgs_user, self.usgs_pass)
except USGSError as e:
# Extract the human-readable fault string from the SOAP error body.
error_tree = ElementTree.fromstring(str(e.message))
error_text = error_tree.find("SOAP-ENV:Body/SOAP-ENV:Fault/faultstring", api.NAMESPACES).text
raise USGSInventoryAccessMissing(error_text)
download_url = api.download('LANDSAT_8', 'EE', [scene], api_key=api_key)
if download_url:
self.output('Source: USGS EarthExplorer', normal=True, arrow=True)
return self.fetch(download_url[0], path)
raise RemoteFileDoesntExist('%s is not available on AWS S3, Google or USGS Earth Explorer' % scene)
# Without credentials USGS cannot be tried at all.
raise RemoteFileDoesntExist('%s is not available on AWS S3 or Google Storage' % scene)
def google_storage(self, scene, path):
    """
    Google Storage Downloader.

    :param scene:
        The scene id
    :type scene:
        String
    :param path:
        The directory path to where the image should be stored
    :type path:
        String
    :returns:
        the value returned by fetch for the scene archive
    :raises RemoteFileDoesntExist:
        when the scene archive is not hosted on Google Storage
    """
    scene_info = self.scene_interpreter(scene)
    storage_url = self.google_storage_url(scene_info)
    # Raises RemoteFileDoesntExist when the archive is absent.
    self.remote_file_exists(storage_url)
    self.output('Source: Google Storage', normal=True, arrow=True)
    return self.fetch(storage_url, path)
def amazon_s3(self, scene, bands):
    """
    Amazon S3 downloader.

    :param scene:
        The scene ID
    :type scene:
        String
    :param bands:
        A list of band identifiers to download
    :type bands:
        List
    :returns:
        (String) the folder path the band files were stored in
    :raises RemoteFileDoesntExist:
        when any requested band is not available on S3
    """
    sat = self.scene_interpreter(scene)
    # Work on a copy so the caller's list is not mutated. The original
    # appended to the shared list, so downloading several scenes kept
    # re-appending 'QA' (the membership test checks 'BQA', not 'QA'),
    # duplicating band URLs on every scene after the first.
    bands = list(bands)
    # Always grab the MTL metadata and the QA band when bands are specified.
    # Note: 'QA' becomes the '_BQA.TIF' filename via amazon_s3_url.
    if 'BQA' not in bands:
        bands.append('QA')
    if 'MTL' not in bands:
        bands.append('MTL')
    urls = []
    for band in bands:
        # get url for the band
        url = self.amazon_s3_url(sat, band)
        # Raises RemoteFileDoesntExist when the band is missing on S3.
        self.remote_file_exists(url)
        urls.append(url)
    # Create the destination folder <download_dir>/<scene>.
    path = check_create_folder(join(self.download_dir, scene))
    self.output('Source: AWS S3', normal=True, arrow=True)
    for url in urls:
        self.fetch(url, path)
    return path
def fetch(self, url, path):
""" Downloads the given url.
:param url:
The url to be downloaded.
:type url:
String
:param path:
The directory path to where the image should be stored
:type path:
String
:returns:
(String) the full path of the (possibly pre-existing) local file
"""
segments = url.split('/')
filename = segments[-1]
# remove query parameters from the filename
filename = filename.split('?')[0]
self.output('Downloading: %s' % filename, normal=True, arrow=True)
# print(join(path, filename))
# raise Exception
if exists(join(path, filename)):
size = getsize(join(path, filename))
if size == self.get_remote_file_size(url):
# Skip the download when a complete local copy already exists.
# NOTE(review): when a partial file exists with a mismatched size
# it is apparently neither re-downloaded nor reported - confirm
# this is intended.
self.output('%s already exists on your system' % filename, normal=True, color='green', indent=1)
else:
# 'fetch' here resolves to homura's module-level download function,
# not this method (method names live on the class namespace).
fetch(url, path)
self.output('stored at %s' % path, normal=True, color='green', indent=1)
return join(path, filename)
def google_storage_url(self, sat):
"""
Returns a google storage url the contains the scene provided.
:param sat:
Expects an object created by scene_interpreter method
:type sat:
dict
:returns:
(String) The URL to a google storage file
"""
filename = sat['scene'] + '.tar.bz'
return url_builder([self.google, sat['sat'], sat['path'], sat['row'], filename])
def amazon_s3_url(self, sat, band):
"""
Return an amazon s3 url the contains the scene and band provided.
:param sat:
Expects an object created by scene_interpreter method
:type sat:
dict
:param filename:
The filename that has to be downloaded from Amazon
:type filename:
String
:returns:
(String) The URL to a S3 file
"""
if band != 'MTL':
filename = '%s_B%s.TIF' % (sat['scene'], band)
else:
filename = '%s_%s.txt' % (sat['scene'], band)
return url_builder([self.s3, sat['sat'], sat['path'], sat['row'], sat['scene'], filename])
    def remote_file_exists(self, url):
        """ Checks whether the remote file exists.

        :param url:
            The url that has to be checked.
        :type url:
            String

        :returns:
            None when the remote file exists (HTTP HEAD returns 200).

        :raises RemoteFileDoesntExist:
            when the HEAD request returns any status other than 200
            (redirects included).
        """
        status = requests.head(url).status_code

        if status != 200:
            raise RemoteFileDoesntExist
def get_remote_file_size(self, url):
""" Gets the filesize of a remote file.
:param url:
The url that has to be checked.
:type url:
String
:returns:
int
"""
headers = requests.head(url).headers
return int(headers['content-length'])
def scene_interpreter(self, scene):
""" Conver sceneID to rows, paths and dates.
:param scene:
The scene ID.
:type scene:
String
:returns:
dict
:Example output:
>>> anatomy = {
'path': None,
'row': None,
'sat': None,
'scene': scene
}
"""
anatomy = {
'path': None,
'row': None,
'sat': None,
'scene': scene
}
if isinstance(scene, str) and len(scene) == 21:
anatomy['path'] = scene[3:6]
anatomy['row'] = scene[6:9]
anatomy['sat'] = 'L' + scene[2:3]
return anatomy
else:
raise IncorrectSceneId('Received incorrect scene')
if __name__ == '__main__':
    # Ad-hoc manual smoke test; the commented call shows example scene ids.
    d = Downloader()
    # d.download(['LC81990242015046LGN00', 'LC80030172015001LGN00'])

View File

@@ -1,131 +0,0 @@
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco, KAPPS-
#
# License: CC0 1.0 Universal
import os
import sys
from cStringIO import StringIO
from datetime import datetime
class Capturing(list):
    """Context manager that captures stdout into this list, one line per item.

    Usage::

        with Capturing() as lines:
            print 'something'
        # lines == ['something']
    """

    def __enter__(self):
        # Swap stdout for an in-memory buffer, remembering the original
        self._stdout = sys.stdout
        self._stringio = StringIO()
        sys.stdout = self._stringio
        return self

    def __exit__(self, *args):
        # Store the captured text as individual lines, then restore stdout
        captured = self._stringio.getvalue()
        self.extend(captured.splitlines())
        sys.stdout = self._stdout
def exit(message):
    """ Print ``message`` and terminate the process via sys.exit().

    Note: intentionally shadows the ``exit`` builtin (existing callers rely
    on this name). Uses the print() function form, which is valid on both
    Python 2 and Python 3 — the original ``print message`` statement is a
    SyntaxError under Python 3.
    """
    print(message)
    sys.exit()
def create_paired_list(i):
    """ Create a list of paired items from a string

    Arguments:
        i - the format must be 003,003,004,004 (commas with no space)

    Returns:
        [['003','003'], ['004', '004']]

    Lists are paired in place; already-paired lists and any other type are
    returned unchanged. An odd number of items raises ValueError.
    """
    if isinstance(i, str):
        items = i.split(',')
    elif isinstance(i, list):
        # Already paired? Return as-is.
        if isinstance(i[0], (list, tuple)):
            return i
        items = i
    else:
        return i

    # Pairing only makes sense for an even number of elements
    if len(items) % 2 != 0:
        raise ValueError('The string should include pairs and be formated. '
                         'The format must be 003,003,004,004 (commas with '
                         'no space)')

    return [list(items[k:k + 2]) for k in range(0, len(items), 2)]
def check_create_folder(folder_path):
    """ Check whether a folder exists, if not the folder is created.

    Always returns ``folder_path``.

    Uses the print() function form, valid on both Python 2 and 3 — the
    original ``print "..."`` statement breaks under Python 3.
    """
    if not os.path.exists(folder_path):
        os.makedirs(folder_path)
        print('%s folder created' % folder_path)

    return folder_path
def get_file(path):
    """ Return the final path component (file or folder name).

    Example: /path/to/file ---> file
    """
    name = os.path.basename(path)
    return name
def get_filename(path):
    """ Return the filename without extension. e.g. index.html --> index """
    # basename then splitext — same result as get_file(path) + splitext
    base = os.path.basename(path)
    return os.path.splitext(base)[0]
def three_digit(number):
    """ Left-pad ``number`` with zeros to a minimum width of 3.

    For example: 1 --> 001 | 02 --> 002 | st --> 0st

    (Doc fix: the original docstring claimed ``02 --> 020``, but the code
    pads on the left, producing ``002``.)
    """
    number = str(number)
    if len(number) >= 3:
        return number
    # u'' prefix kept for parity with the original's unicode returns
    return u'0' * (3 - len(number)) + number
def georgian_day(date):
    """ Returns the number of days passed since the start of the year.

    Accepted format: %m/%d/%Y

    Returns 0 for unparseable or non-string input.
    """
    try:
        parsed = datetime.strptime(date, '%m/%d/%Y')
    except (ValueError, TypeError):
        return 0
    return parsed.timetuple().tm_yday
def year(date):
    """ Returns the year of the given date string.

    Accepted format: %m/%d/%Y

    Returns 0 for unparseable input. TypeError is now handled as well so
    that non-string input (e.g. None) returns 0, consistent with
    georgian_day() above, instead of raising.
    """
    try:
        fmt = '%m/%d/%Y'
        return datetime.strptime(date, fmt).timetuple().tm_year
    except (ValueError, TypeError):
        return 0
def reformat_date(date, new_fmt='%Y-%m-%d'):
    """ Return reformated date. Example: 01/28/2014 & %d/%m/%Y -> 28/01/2014

    Accepted date format: %m/%d/%Y

    datetime instances are formatted directly; strings are parsed first.
    Unparseable input is returned unchanged.

    Fixes: ``type(date) is datetime`` rejected datetime subclasses — use
    isinstance; TypeError is handled like ValueError so non-string,
    non-datetime input is returned unchanged instead of raising,
    consistent with the sibling helpers above.
    """
    try:
        if isinstance(date, datetime):
            return date.strftime(new_fmt)
        fmt = '%m/%d/%Y'
        return datetime.strptime(date, fmt).strftime(new_fmt)
    except (ValueError, TypeError):
        return date

View File

@@ -1,236 +0,0 @@
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco, KAPPS-
#
# License: CC0 1.0 Universal
import os
import subprocess
from zipfile import ZipFile
import tarfile
from dateutil.parser import parse
from general_helper import check_create_folder, create_paired_list, exit
import settings
class GsHelper(object):
    """ Search and download Landsat scenes hosted on Google Storage.

    NOTE(review): Python 2 only (``except X, e`` and print statements), and
    depends on the external ``gsutil`` command being on the PATH plus the
    legacy ``settings`` module for every location used.
    """

    def __init__(self):
        # All locations come from settings so they can be overridden in
        # one place.
        self.scene_file_url = settings.SCENE_FILE_URL
        self.download_dir = settings.DOWNLOAD_DIR
        self.zip_dir = settings.ZIP_DIR
        self.unzip_dir = settings.UNZIP_DIR
        self.scene_file = settings.SCENE_FILE
        self.source_url = settings.SOURCE_URL

        # Make sure download directory exist
        check_create_folder(self.download_dir)

    def search(self, rows_paths, start=None, end=None):
        """ Search in Landsat's scene_list file. The file is stored as .zip on
        Google Storage and includes the gs url of all available images.

        Example of file information on scene_list.zip:
        gs://earthengine-public/landsat/L8/232/096/LT82320962013127LGN01.tar.bz

        Arguments:
            rows_paths - a string of paired values. e.g. '003,003,001,001'
            start - a string containing the start date. e.g. 12/23/2014
            end - a string containing the end date. e.g. 12/24/2014

        Return:
            a list containing download urls. e.g.:
            ['gs://earthengine-public/landsat/L8/232/094/LT82320942013127LGN01.tar.bz',
             'gs://earthengine-public/landsat/L8/232/093/LT82320932013127LGN01.tar.bz']
        """
        # Turning rows and paths to paired tuples
        try:
            paired = create_paired_list(rows_paths)
        except ValueError, e:
            exit('Error: %s' % e.args[0])

        files = []
        self._fetch_gs_scence_list()
        # NOTE(review): handle shadows the 'file' builtin and is never closed
        file = open(self.scene_file, 'r')
        files.extend(self._search_scene_list(scene=file,
                                             query=paired,
                                             start=parse(start),
                                             end=parse(end)))
        return files

    def single_download(self, row, path, name, sat_type='L8'):
        url = '%s/%s/%s/%s/%s.tar.bz' % (self.source_url,
                                         sat_type,
                                         path,
                                         row,
                                         name)
        """ Download single image from Landsat on Google Storage

        Arguments:
            row - string in this format xxx, e.g. 003
            path - string in this format xxx, e.g. 003
            name - zip file name without .tar.bz e.g. LT81360082013127LGN01
            sat_type - e.g. L7, L8, ...
        """
        # NOTE(review): the string above is a misplaced docstring — not the
        # first statement, so it is evaluated as a no-op expression.
        try:
            # gsutil cp -n skips files already present at the destination
            subprocess.check_call(
                ["gsutil", "cp", "-n", url, "%s/%s" % (self.zip_dir,
                                                       '%s.tar.bz' % name)])
            return True
        except subprocess.CalledProcessError:
            return False

    def batch_download(self, image_list):
        """
        Download batch group of images

        Arguments:
            image_list - a list of google storage urls e.g.
                ['gs://earthengine-public/landsat/L8/136/008/'
                 'LT81360082013127LGN01.tar.bz',
                 'gs://earthengine-public/landsat/L8/136/008/'
                 'LT81360082013127LGN01.tar.bz']

        Returns True on success, False when any gsutil copy fails.
        """
        try:
            self._download_images(image_list)
            return True
        except subprocess.CalledProcessError:
            return False

    def extract_row_path(self, scene_name):
        # Scene ids embed path at chars 3-5 and row at chars 6-8
        return [scene_name[3:6], scene_name[6:9]]

    def unzip(self):
        """
        Unzip all files stored at settings.ZIP_DIR and save them in
        settings.UNZIP_DIR
        """
        return self._unzip_images()

    #################
    # Private Methods
    #################

    def _fetch_gs_scence_list(self):
        # Download the scene list only once; it is cached on disk.
        if not os.path.isfile(self.scene_file):
            # Download the file
            subprocess.call(
                ["gsutil", "cp", "-n",
                 self.scene_file_url, "%s.zip" % self.scene_file])

            # Unzip the file
            zip = ZipFile('%s.zip' % self.scene_file, 'r')
            zip.extractall(path=self.download_dir)
            zip.close()
            print "scene_file unziped"

        # return open(self.scene_file, 'r')

    def _search_scene_list(self, scene, query, start=None, end=None):
        """
        Search scene_list for the provided rows, paths and date range.

        Arguments:
            scene - an open file handle over the scene list
            query - a list of paired tuples e.g.[('003', '003'),('003', '004')]
            start - a datetime object
            end - a datetime object
        """
        file_list = []
        found = 0

        # Query date range
        # NOTE(review): these lines raise AttributeError when start or end
        # is None, despite the defaults suggesting they are optional.
        start_year = start.timetuple().tm_year
        end_year = end.timetuple().tm_year
        start_jd = start.timetuple().tm_yday
        end_jd = end.timetuple().tm_yday

        if start and end:
            print ('Searching for images from %s to %s'
                   % (start.strftime('%b %d, %Y'),
                      end.strftime('%b %d, %Y')))

        print 'Rows and Paths searched: '
        print query

        scene.seek(0)
        for line in scene:
            url = line.split('/')
            file_name = url[len(url) - 1]
            f_query = (file_name[3:6], file_name[6:9])
            jd = int(file_name[13:16].lstrip('0'))  # Julian Day
            year = int(file_name[9:13])
            if f_query in query:
                if start and end:
                    # Keep scenes whose (year, julian day) lies in range:
                    # same-year window, boundary years, and full years
                    # strictly in between are handled separately.
                    if year == start_year and year == end_year:
                        if jd >= start_jd and jd <= end_jd:
                            file_list.append(line.replace('\n', ''))
                            found += 1
                    elif year == start_year:
                        if jd >= start_jd:
                            file_list.append(line.replace('\n', ''))
                            found += 1
                    elif year == end_year:
                        if jd <= end_jd:
                            file_list.append(line.replace('\n', ''))
                            found += 1
                    elif (year > start_year and year < end_year):
                        file_list.append(line.replace('\n', ''))
                        found += 1
                else:
                    file_list.append(line.replace('\n', ''))
                    found += 1

        print "Search completed! %s images found." % found

        return file_list

    def _download_images(self, files):
        # Ensure the zip folder exists, then copy each url with gsutil
        check_create_folder(self.zip_dir)
        print "Downloading %s files from Google Storage..." % len(files)
        for url in files:
            url_brk = url.split('/')
            image_name = url_brk[len(url_brk) - 1]
            subprocess.check_call(
                ["gsutil", "cp", "-n", url,
                 "%s/%s" % (self.zip_dir, image_name)])

    def _unzip_images(self):
        images = os.listdir(self.zip_dir)
        check_create_folder(self.unzip_dir)
        for image in images:
            # Get the image name for creating folder
            image_name = image.split('.')
            if image_name[0] and self._check_if_not_unzipped(image_name[0]):
                # Create folder
                check_create_folder('%s/%s' % (self.unzip_dir, image_name[0]))
                print "Unzipping %s ...be patient!" % image

                # Unzip
                tar = tarfile.open('%s/%s' % (self.zip_dir, image))
                tar.extractall(path='%s/%s' % (self.unzip_dir, image_name[0]))
                tar.close()
                # NOTE(review): returning here means only the first not-yet-
                # unzipped archive is extracted per call — confirm intent.
                return True
        return False

    def _check_if_not_unzipped(self, folder_name):
        # True when the target folder has not been extracted yet
        if os.path.exists('%s/%s' % (self.unzip_dir, folder_name)):
            print "%s is already unzipped" % folder_name
            return False
        else:
            return True

535
landsat/image.py Normal file
View File

@@ -0,0 +1,535 @@
# Pansharpened Image Process using Rasterio
# Landsat Util
# License: CC0 1.0 Universal
from __future__ import print_function, division, absolute_import
import os
import tarfile
import glob
from copy import copy
import subprocess
from shutil import copyfile
from os.path import join, isdir
import numpy
import rasterio
from rasterio.coords import disjoint_bounds
from rasterio.warp import reproject, RESAMPLING, transform, transform_bounds
from skimage import transform as sktransform
from skimage.util import img_as_ubyte
from skimage.exposure import rescale_intensity
from polyline.codec import PolylineCodec
from .mixins import VerbosityMixin
from .utils import get_file, check_create_folder, exit, adjust_bounding_box
from .decorators import rasterio_decorator
class FileDoesNotExist(Exception):
    """ Raised when an expected band/scene file does not exist on disk. """
    pass
class BoundsDoNotOverlap(Exception):
    """ Exception for when bounds do not overlap with the image.

    NOTE(review): not raised anywhere in this chunk — clip() adjusts
    disjoint bounds instead; confirm whether this is still used.
    """
    pass
class BaseProcess(VerbosityMixin):
    """
    Image processing base class (shared by Simple and PanSharpen).

    To initiate the following parameters must be passed:

    :param path:
        Path of the image.
    :type path:
        String
    :param bands:
        The band sequence for the final image. Must be a python list. (optional)
    :type bands:
        List
    :param dst_path:
        Path to the folder where the image should be stored. (optional)
    :type dst_path:
        String
    :param verbose:
        Whether the output should be verbose. Default is False.
    :type verbose:
        boolean
    :param force_unzip:
        Whether to force unzip the tar file. Default is False
    :type force_unzip:
        boolean
    """

    def __init__(self, path, bands=None, dst_path=None, verbose=False, force_unzip=False, bounds=None):
        # Output projection is EPSG:3857 (web mercator)
        self.projection = {'init': 'epsg:3857'}
        self.dst_crs = {'init': u'epsg:3857'}
        # Scene id = source file name without its (compound) extension
        self.scene = get_file(path).split('.')[0]
        # Default band combination is 4-3-2
        self.bands = bands if isinstance(bands, list) else [4, 3, 2]
        self.clipped = False

        # Landsat source path
        self.src_path = path.replace(get_file(path), '')

        # Build destination folder if doesn't exist
        self.dst_path = dst_path if dst_path else os.getcwd()
        self.dst_path = check_create_folder(join(self.dst_path, self.scene))
        self.verbose = verbose

        # Path to the unzipped folder
        self.scene_path = join(self.src_path, self.scene)

        # Unzip files
        if self._check_if_zipped(path):
            self._unzip(join(self.src_path, get_file(path)), join(self.src_path, self.scene), self.scene, force_unzip)

        # When bounds are given, clip first and process the clipped copies
        if (bounds):
            self.bounds = bounds
            self.scene_path = self.clip()
            self.clipped = True

        self.bands_path = []
        for band in self.bands:
            self.bands_path.append(join(self.scene_path, self._get_full_filename(band)))

    def _get_boundaries(self, src, shape):
        """ Compute the destination affine (GDAL-style 6-tuple) for warping
        ``src`` into ``self.projection``, sized for ``shape`` (rows, cols).
        """

        self.output("Getting boundaries", normal=True, arrow=True)
        output = {'ul': {'x': [0, 0], 'y': [0, 0]},  # ul: upper left
                  'ur': {'x': [0, 0], 'y': [0, 0]},  # ur: upper right
                  'll': {'x': [0, 0], 'y': [0, 0]},  # ll: lower left
                  'lr': {'x': [0, 0], 'y': [0, 0]}}  # lr: lower right

        # Source-CRS corner coordinates derived from the affine and shape;
        # index [0] holds source coords, [1] the reprojected ones
        output['ul']['x'][0] = src['affine'][2]
        output['ul']['y'][0] = src['affine'][5]
        output['ur']['x'][0] = output['ul']['x'][0] + self.pixel * src['shape'][1]
        output['ur']['y'][0] = output['ul']['y'][0]
        output['ll']['x'][0] = output['ul']['x'][0]
        output['ll']['y'][0] = output['ul']['y'][0] - self.pixel * src['shape'][0]
        output['lr']['x'][0] = output['ul']['x'][0] + self.pixel * src['shape'][1]
        output['lr']['y'][0] = output['ul']['y'][0] - self.pixel * src['shape'][0]

        # Reproject each corner into the destination projection
        output['ul']['x'][1], output['ul']['y'][1] = transform(src['crs'], self.projection,
                                                               [output['ul']['x'][0]],
                                                               [output['ul']['y'][0]])
        output['ur']['x'][1], output['ur']['y'][1] = transform(src['crs'], self.projection,
                                                               [output['ur']['x'][0]],
                                                               [output['ur']['y'][0]])
        output['ll']['x'][1], output['ll']['y'][1] = transform(src['crs'], self.projection,
                                                               [output['ll']['x'][0]],
                                                               [output['ll']['y'][0]])
        output['lr']['x'][1], output['lr']['y'][1] = transform(src['crs'], self.projection,
                                                               [output['lr']['x'][0]],
                                                               [output['lr']['y'][0]])

        # Destination pixel sizes from the reprojected bounding box
        dst_corner_ys = [output[k]['y'][1][0] for k in output.keys()]
        dst_corner_xs = [output[k]['x'][1][0] for k in output.keys()]
        y_pixel = abs(max(dst_corner_ys) - min(dst_corner_ys)) / shape[0]
        x_pixel = abs(max(dst_corner_xs) - min(dst_corner_xs)) / shape[1]

        return (min(dst_corner_xs), x_pixel, 0.0, max(dst_corner_ys), 0.0, -y_pixel)

    def _read_bands(self):
        """ Reads a band with rasterio """
        bands = []

        try:
            for i, band in enumerate(self.bands):
                bands.append(rasterio.open(self.bands_path[i]).read_band(1))
        except IOError as e:
            # NOTE(review): e.message is Python-2 only; confirm before
            # porting this module
            exit(e.message, 1)

        return bands

    def _warp(self, proj_data, bands, new_bands):
        """ Reproject each source band into ``self.dst_crs``, writing into
        the pre-allocated ``new_bands`` arrays. """
        self.output("Projecting", normal=True, arrow=True)
        for i, band in enumerate(bands):
            self.output("band %s" % self.bands[i], normal=True, color='green', indent=1)
            reproject(band, new_bands[i], src_transform=proj_data['transform'], src_crs=proj_data['crs'],
                      dst_transform=proj_data['dst_transform'], dst_crs=self.dst_crs, resampling=RESAMPLING.nearest,
                      num_threads=2)

    def _unzip(self, src, dst, scene, force_unzip=False):
        """ Unzip tar files """
        self.output("Unzipping %s - It might take some time" % scene, normal=True, arrow=True)

        try:
            # check if file is already unzipped, skip
            if isdir(dst) and not force_unzip:
                self.output('%s is already unzipped.' % scene, normal=True, color='green', indent=1)
                return
            else:
                tar = tarfile.open(src, 'r')
                tar.extractall(path=dst)
                tar.close()
        except tarfile.ReadError:
            # Fall back to the system tar for archives the tarfile module
            # cannot read
            check_create_folder(dst)
            subprocess.check_call(['tar', '-xf', src, '-C', dst])

    def _get_full_filename(self, band):
        """ Resolve the on-disk file name matching ``<scene>_B<band>.*``.

        :raises FileDoesNotExist: when no matching file is found.
        """

        base_file = '%s_B%s.*' % (self.scene, band)
        try:
            return glob.glob(join(self.scene_path, base_file))[0].split('/')[-1]
        except IndexError:
            raise FileDoesNotExist('%s does not exist' % '%s_B%s.*' % (self.scene, band))

    def _check_if_zipped(self, path):
        """ Checks if the filename shows a tar/zip file """
        filename = get_file(path).split('.')

        if filename[-1] in ['bz', 'bz2', 'gz']:
            return True

        return False

    def _read_metadata(self):
        """ Pull reflectance constants and cloud cover from the scene's MTL
        metadata file. Returns whatever was parsed (possibly {}) even when
        the file cannot be opened. """
        output = {}

        try:
            with open(self.scene_path + '/' + self.scene + '_MTL.txt', 'rU') as mtl:
                lines = mtl.readlines()
                for line in lines:
                    if 'REFLECTANCE_ADD_BAND_3' in line:
                        output['REFLECTANCE_ADD_BAND_3'] = float(line.replace('REFLECTANCE_ADD_BAND_3 = ', ''))
                    if 'REFLECTANCE_MULT_BAND_3' in line:
                        output['REFLECTANCE_MULT_BAND_3'] = float(line.replace('REFLECTANCE_MULT_BAND_3 = ', ''))
                    if 'REFLECTANCE_ADD_BAND_4' in line:
                        output['REFLECTANCE_ADD_BAND_4'] = float(line.replace('REFLECTANCE_ADD_BAND_4 = ', ''))
                    if 'REFLECTANCE_MULT_BAND_4' in line:
                        output['REFLECTANCE_MULT_BAND_4'] = float(line.replace('REFLECTANCE_MULT_BAND_4 = ', ''))
                    if 'CLOUD_COVER' in line:
                        output['CLOUD_COVER'] = float(line.replace('CLOUD_COVER = ', ''))
            return output
        except IOError:
            # Best effort: missing MTL just yields an empty dict
            return output

    def _get_image_data(self):
        """ Collect transform/crs/shape from the last band on disk and
        precompute the destination transform. """
        src = rasterio.open(self.bands_path[-1])

        # Get pixel size from source
        self.pixel = src.affine[0]

        # Only collect src data that is needed and delete the rest
        image_data = {
            'transform': src.transform,
            'crs': src.crs,
            'affine': src.affine,
            'shape': src.shape,
            'dst_transform': None
        }

        image_data['dst_transform'] = self._get_boundaries(image_data, image_data['shape'])

        return image_data

    def _generate_new_bands(self, shape):
        # Pre-allocate three uint16 destination arrays for the warped bands
        new_bands = []
        for i in range(0, 3):
            new_bands.append(numpy.empty(shape, dtype=numpy.uint16))

        return new_bands

    @rasterio_decorator
    def _write_to_file(self, new_bands, **kwargs):
        """ Color-correct the warped bands and write the final GTiff.

        :returns: (String) the output file path
        """

        # Read coverage from QBA
        coverage = self._calculate_cloud_ice_perc()

        self.output("Final Steps", normal=True, arrow=True)

        suffix = 'bands_%s' % "".join(map(str, self.bands))
        output_file = join(self.dst_path, self._filename(suffix=suffix))

        output = rasterio.open(output_file, 'w', **kwargs)

        for i, band in enumerate(new_bands):
            # Color Correction
            band = self._color_correction(band, self.bands[i], 0, coverage)
            output.write_band(i + 1, img_as_ubyte(band))

            # Free the array as soon as it has been written
            new_bands[i] = None

        self.output("Writing to file", normal=True, color='green', indent=1)
        return output_file

    def _color_correction(self, band, band_id, low, coverage):
        """ Stretch intensities while keeping cloud/snow pixels from washing
        out the rest of the image. Band combination [4, 5] is passed
        through untouched. """
        if self.bands == [4, 5]:
            return band
        else:
            self.output("Color correcting band %s" % band_id, normal=True, color='green', indent=1)
            # The upper percentile shrinks as cloud coverage grows
            p_low, cloud_cut_low = self._percent_cut(band, low, 100 - (coverage * 3 / 4))
            temp = numpy.zeros(numpy.shape(band), dtype=numpy.uint16)
            cloud_divide = 65000 - coverage * 100
            mask = numpy.logical_and(band < cloud_cut_low, band > 0)
            temp[mask] = rescale_intensity(band[mask], in_range=(p_low, cloud_cut_low), out_range=(256, cloud_divide))
            temp[band >= cloud_cut_low] = rescale_intensity(band[band >= cloud_cut_low],
                                                            out_range=(cloud_divide, 65535))
            return temp

    def _percent_cut(self, color, low, high):
        # Percentiles over non-zero, non-saturated pixels only
        return numpy.percentile(color[numpy.logical_and(color > 0, color < 65535)], (low, high))

    def _calculate_cloud_ice_perc(self):
        """ Estimate cloud/snow coverage (percent) from the QA band.

        Snow pixels are double-weighted. NOTE(review): the magic values are
        assumed to be Landsat 8 QA band codes — confirm against the USGS QA
        band table.
        """
        self.output('Calculating cloud and snow coverage from QA band', normal=True, arrow=True)
        a = rasterio.open(join(self.scene_path, self._get_full_filename('QA'))).read_band(1)
        count = 0

        snow = [56320, 39936, 31744, 28590, 26656, 23552]
        cloud = [61440, 59424, 57344, 53248, 28672, 36896, 36864, 24576]

        for item in cloud:
            count += numpy.extract(a == item, a).size

        for item in snow:
            count += numpy.extract(a == item, a).size * 2

        perc = numpy.true_divide(count, a.size) * 100

        self.output('cloud/snow coverage: %s' % round(perc, 2), indent=1, normal=True, color='green')
        return perc

    def _filename(self, name=None, suffix=None, prefix=None):
        """ File name generator for processed images """

        filename = ''

        if prefix:
            filename += str(prefix) + '_'

        if name:
            filename += str(name)
        else:
            filename += str(self.scene)

        if suffix:
            filename += '_' + str(suffix)

        if self.clipped:
            # Encode the clip bounds into the name so different clips of
            # the same scene do not collide
            bounds = [tuple(self.bounds[0:2]), tuple(self.bounds[2:4])]
            polyline = PolylineCodec().encode(bounds)
            filename += '_clipped_' + polyline

        filename += '.TIF'

        return filename

    @rasterio_decorator
    def clip(self):
        """ Clip images based on bounds provided
        Implementation is borrowed from
        https://github.com/brendan-ward/rasterio/blob/e3687ce0ccf8ad92844c16d913a6482d5142cf48/rasterio/rio/convert.py
        """

        self.output("Clipping", normal=True)

        # create new folder for clipped images
        path = check_create_folder(join(self.scene_path, 'clipped'))

        try:
            temp_bands = copy(self.bands)
            temp_bands.append('QA')
            for i, band in enumerate(temp_bands):
                band_name = self._get_full_filename(band)
                band_path = join(self.scene_path, band_name)

                self.output("Band %s" % band, normal=True, color='green', indent=1)
                with rasterio.open(band_path) as src:
                    # self.bounds is lon/lat; convert into the band's CRS
                    bounds = transform_bounds(
                        {
                            'proj': 'longlat',
                            'ellps': 'WGS84',
                            'datum': 'WGS84',
                            'no_defs': True
                        },
                        src.crs,
                        *self.bounds
                    )

                    # NOTE(review): disjoint bounds are adjusted instead of
                    # raising BoundsDoNotOverlap — confirm intent.
                    if disjoint_bounds(bounds, src.bounds):
                        bounds = adjust_bounding_box(src.bounds, bounds)

                    window = src.window(*bounds)

                    out_kwargs = src.meta.copy()
                    out_kwargs.update({
                        'driver': 'GTiff',
                        'height': window[0][1] - window[0][0],
                        'width': window[1][1] - window[1][0],
                        'transform': src.window_transform(window)
                    })

                    with rasterio.open(join(path, band_name), 'w', **out_kwargs) as out:
                        out.write(src.read(window=window))

            # Copy MTL to the clipped folder
            copyfile(join(self.scene_path, self.scene + '_MTL.txt'), join(path, self.scene + '_MTL.txt'))

            return path

        except IOError as e:
            # NOTE(review): e.message is Python-2 only
            exit(e.message, 1)
class Simple(BaseProcess):
    """ Produces a plain (non-pansharpened) composite of the chosen bands. """

    @rasterio_decorator
    def run(self):
        """ Executes the image processing.

        :returns:
            (String) the path to the processed image
        """
        self.output('Image processing started for bands %s' % '-'.join(map(str, self.bands)), normal=True, arrow=True)

        bands = self._read_bands()
        image_data = self._get_image_data()

        new_bands = self._generate_new_bands(image_data['shape'])

        self._warp(image_data, bands, new_bands)

        # Bands are no longer needed
        del bands

        # GTiff creation options for the final 8-bit RGB output
        rasterio_options = {
            'driver': 'GTiff',
            'width': image_data['shape'][1],
            'height': image_data['shape'][0],
            'count': 3,
            'dtype': numpy.uint8,
            'nodata': 0,
            'transform': image_data['dst_transform'],
            'photometric': 'RGB',
            'crs': self.dst_crs
        }

        return self._write_to_file(new_bands, **rasterio_options)
class PanSharpen(BaseProcess):
    """ Pansharpened processing: blends the panchromatic band 8 into the
    (upscaled) color bands. """

    def __init__(self, path, bands=None, **kwargs):
        # Band 8 (pan) is always appended to the requested color bands
        if bands:
            bands.append(8)
        else:
            bands = [4, 3, 2, 8]
        # Remember where band 8 sits so it can be removed again later
        self.band8 = bands.index(8)
        super(PanSharpen, self).__init__(path, bands, **kwargs)

    @rasterio_decorator
    def run(self):
        """ Executes the pansharpen image processing.

        :returns:
            (String) the path to the processed image
        """

        self.output('PanSharpened Image processing started for bands %s' % '-'.join(map(str, self.bands)),
                    normal=True, arrow=True)

        bands = self._read_bands()
        image_data = self._get_image_data()

        new_bands = self._generate_new_bands(image_data['shape'])

        # Upscale the color bands 2x to the pan band's resolution and add
        # a fourth destination array for the warped pan band
        bands[:3] = self._rescale(bands[:3])
        new_bands.append(numpy.empty(image_data['shape'], dtype=numpy.uint16))

        self._warp(image_data, bands, new_bands)

        # Bands are no longer needed
        del bands

        # Calculate pan band
        pan = self._pansize(new_bands)
        del self.bands[self.band8]
        del new_bands[self.band8]

        rasterio_options = {
            'driver': 'GTiff',
            'width': image_data['shape'][1],
            'height': image_data['shape'][0],
            'count': 3,
            'dtype': numpy.uint8,
            'nodata': 0,
            'transform': image_data['dst_transform'],
            'photometric': 'RGB',
            'crs': self.dst_crs
        }

        return self._write_to_file(new_bands, pan, **rasterio_options)

    @rasterio_decorator
    def _write_to_file(self, new_bands, pan, **kwargs):
        """ Multiply each band by the pan ratio, color-correct, and write
        the final GTiff. Returns the output file path. """

        # Read coverage from QBA
        coverage = self._calculate_cloud_ice_perc()

        self.output("Final Steps", normal=True, arrow=True)

        suffix = 'bands_%s_pan' % "".join(map(str, self.bands))
        output_file = join(self.dst_path, self._filename(suffix=suffix))

        output = rasterio.open(output_file, 'w', **kwargs)

        for i, band in enumerate(new_bands):
            # Color Correction
            band = numpy.multiply(band, pan)
            band = self._color_correction(band, self.bands[i], 0, coverage)
            output.write_band(i + 1, img_as_ubyte(band))

            # Free the array as soon as it has been written
            new_bands[i] = None

        self.output("Writing to file", normal=True, color='green', indent=1)
        return output_file

    def _pansize(self, bands):
        """ Pan ratio: band8 / (b0 + b1 + b2); divide-by-zero results are
        mapped to 0 via nan_to_num. """
        self.output('Calculating Pan Ratio', normal=True, arrow=True)
        m = numpy.add(bands[0], bands[1])
        m = numpy.add(m, bands[2])
        pan = numpy.multiply(numpy.nan_to_num(numpy.true_divide(1, m)), bands[self.band8])
        return pan

    def _rescale(self, bands):
        """ Rescale bands """
        self.output("Rescaling", normal=True, arrow=True)

        for key, band in enumerate(bands):
            self.output("band %s" % self.bands[key], normal=True, color='green', indent=1)
            bands[key] = sktransform.rescale(band, 2)
            # skimage rescale returns floats in [0, 1]; restore uint16 range
            bands[key] = (bands[key] * 65535).astype('uint16')

        return bands
if __name__ == '__main__':
    # Ad-hoc manual test with a developer-specific path; the real entry
    # point is the landsat CLI.
    p = PanSharpen('/Users/ajdevseed/Desktop/LC81950282014159LGN00')
    p.run()

View File

@@ -1,438 +0,0 @@
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco, KAPPS-
#
# License: CC0 1.0 Universal
import os
import subprocess
import errno
import shutil
import tarfile
from tempfile import mkdtemp
import numpy
from osgeo import gdal
try:
import cv2
except ImportError:
pass
import settings
from general_helper import check_create_folder, get_file
def gdalwarp(src, dst, t_srs=None):
    """ A subprocess wrapper for the gdalwarp command-line tool.

    Arguments:
        src - source raster path
        dst - destination raster path
        t_srs - optional target SRS passed through as -t_srs

    Returns the value of subprocess.check_call (0 on success); raises
    CalledProcessError when gdalwarp exits non-zero.
    """
    cmd = ['gdalwarp']
    if t_srs:
        cmd.extend(['-t_srs', t_srs])
    cmd.extend(['-overwrite', src, dst])
    return subprocess.check_call(cmd)
def gdal_translate(src, dst, **kwargs):
    """ A subprocess wrapper for gdal_translate.

    Keyword arguments become '-<key> <value>' options; list values are
    expanded into multiple arguments (e.g. scale=[0, 255] -> -scale 0 255).

    Fix: dict.iteritems() is Python-2 only; items() behaves identically on
    both Python 2 and 3.
    """
    argv = ['gdal_translate']

    for key, value in kwargs.items():
        argv.append('-%s' % key)
        if isinstance(value, list):
            for item in value:
                argv.append(str(item))
        else:
            argv.append(str(value))

    argv.append(src)
    argv.append(dst)

    return subprocess.check_call(argv)
class Process(object):
    """ Full image processing class

    Steps needed for a full process
    1) _wrap()
    2) _scale_pan()
    3) _combine()
    4) _image_correction()
    5) _final_conversions()

    NOTE(review): step 1 is listed as _wrap() but full() calls _warp(),
    which is not visible in this chunk — confirm the method name.
    """
    def __init__(self, zip_image, bands=[4, 3, 2], path=None):
        """ Initating the Process class

        Arguments:
            zip_image - path of the zipped scene archive,
                        e.g. .../LC80030032014158LGN00.tar.bz
            bands - a list of desired bands. Default is for True color
            path - Path to where the image folder is located

        NOTE(review): the mutable default ``bands=[4, 3, 2]`` is shared
        across calls — safe only while it is never mutated.
        """
        self.image = get_file(zip_image).split('.')[0]
        self.destination = settings.PROCESSED_IMAGE
        self.bands = bands
        # Histogram percent-cut applied at both ends
        self.btm_prct = 2
        self.top_prct = 2
        if path:
            self.path = path

        # All intermediate products live under a temp dir that _cleanup()
        # removes at the end of a run
        self.temp = mkdtemp()
        self.src_image_path = self.temp + '/' + self.image
        self.warp_path = self.temp + '/' + self.image + '/warp'
        self.scaled_path = self.temp + '/' + self.image + '/scaled'
        self.final_path = self.temp + '/' + self.image + '/final'
        self.delivery_path = self.destination + '/' + self.image

        check_create_folder(self.src_image_path)
        check_create_folder(self.warp_path)
        check_create_folder(self.scaled_path)
        check_create_folder(self.final_path)
        check_create_folder(self.delivery_path)

        self._unzip(zip_image, self.src_image_path)
    def full(self):
        """ Conducts the full image processing: warp, scale, combine,
        correct, convert, mask, then deliver and clean up. """
        self._warp()
        self._scale_pan()
        self._combine()
        self._image_correction()
        self._final_conversions()
        final_image = self._create_mask()
        # Copy the finished product out of the temp dir before removing it
        shutil.copy(final_image, self.delivery_path)
        self._cleanup()
        return
    def full_with_pansharpening(self):
        """ Same pipeline as full(), additionally delivering a
        pansharpened product. """
        self._warp()
        self._scale_pan()
        self._combine()
        self._image_correction()
        self._final_conversions()
        final_image = self._create_mask()
        shutil.copy(final_image, self.delivery_path)
        shutil.copy(self._pansharpen(), self.delivery_path)
        self._cleanup()
        return
    def _cleanup(self):
        """ Remove temp folder """
        try:
            shutil.rmtree(self.temp)
        except OSError as exc:
            # Already-removed is fine; anything else is re-raised
            if exc.errno != errno.ENOENT:
                raise
    def _pansharpen(self):
        """ Pansharpen the composite with band 8 using Orfeo Toolbox
        (otbcli_BundleToPerfectSensor) plus ImageMagick/GDAL tools.
        Returns the path of the pansharpened TIF. """
        # Reuse band 4's world file for the composite, then stamp the SRS
        shutil.copy('%s/%s_B4.tfw' % (self.warp_path, self.image),
                    '%s/comp.tfw' % self.final_path)
        argv = ['gdal_edit.py', '-a_srs', 'EPSG:3857',
                '%s/comp.TIF' % self.final_path]
        subprocess.check_call(argv)
        argv = ['otbcli_BundleToPerfectSensor',
                # '-ram', '6500',
                '-inp', '%s/%s_B8.TIF' % (self.warp_path, self.image),
                '-inxs', '%s/comp.TIF' % self.final_path,
                '-out', '%s/pan.TIF' % self.final_path,
                'uint16']
        subprocess.check_call(argv)
        # Split the pansharpened image into single-band files...
        for i in range(1, 4):
            gdal_translate('%s/pan.TIF' % self.final_path,
                           '%s/pan-%s.TIF' % (self.final_path, i),
                           b=i)
        # ...then recombine them with ImageMagick
        argv = ['convert', '-combine']
        for i in range(1, 4):
            argv.append('%s/pan-%s.TIF' % (self.final_path, i))
        argv.append('%s/pan.TIF' % self.final_path)
        subprocess.check_call(argv)
        argv = ['convert', '-depth', '8',
                '%s/pan.TIF' % self.final_path,
                '%s/final-pan.TIF' % self.final_path]
        subprocess.check_call(argv)
        # Restore georeferencing lost by ImageMagick via the world file
        argv = ['listgeo', '-tfw',
                '%s/%s_B8.TIF' % (self.warp_path, self.image)]
        subprocess.check_call(argv)
        shutil.copy('%s/%s_B8.tfw' % (self.warp_path, self.image),
                    '%s/final-pan.tfw' % self.final_path)
        argv = ['gdal_edit.py', '-a_srs', 'EPSG:3857',
                '%s/final-pan.TIF' % self.final_path]
        subprocess.check_call(argv)
        return '%s/final-pan.TIF' % self.final_path
    def _create_mask(self):
        """ Mask out nodata pixels using band 2 as a 0/1 mask, recombine
        the masked bands and restore georeferencing. Returns the path of
        the final TIF. """
        # Build a 0/1 mask from band 2 (1 wherever the pixel is non-zero)
        argv = ['gdal_calc.py',
                '-A', '%s/%s_B2.TIF' % (self.warp_path, self.image),
                '--outfile=%s/band-mask.TIF' % self.final_path,
                '--calc=1*(A>0)',
                '--type=UInt16']
        subprocess.check_call(argv)
        # Split the color-corrected composite into single bands
        for i in range(1, 4):
            gdal_translate('%s/final-color.TIF' % self.final_path,
                           '%s/band-%s.TIF' % (self.final_path, i),
                           b=i)
        # Apply the mask to each band ('maksed' typo is baked into the
        # intermediate filenames; used consistently below)
        for i in range(1, 4):
            argv = ['gdal_calc.py',
                    '-A', '%s/band-%s.TIF' % (self.final_path, i),
                    '-B', '%s/band-mask.TIF' % (self.final_path),
                    '--outfile=%s/maksed-final-%s.TIF' % (self.final_path, i),
                    '--calc=A*B',
                    '--type=UInt16']
            subprocess.check_call(argv)
        # Recombine the masked bands with ImageMagick
        argv = ['convert', '-combine']
        for i in range(1, 4):
            argv.append('%s/maksed-final-%s.TIF' % (self.final_path, i))
        argv.append('%s/comp.TIF' % self.final_path)
        subprocess.check_call(argv)
        argv = ['convert', '-depth', '8',
                '%s/comp.TIF' % self.final_path,
                '%s/final.TIF' % self.final_path]
        subprocess.check_call(argv)
        # Restore georeferencing from band 4's world file
        argv = ['listgeo', '-tfw',
                '%s/%s_B4.TIF' % (self.warp_path, self.image)]
        subprocess.check_call(argv)
        shutil.copy('%s/%s_B4.tfw' % (self.warp_path, self.image),
                    '%s/final.tfw' % self.final_path)
        argv = ['gdal_edit.py', '-a_srs', 'EPSG:3857',
                '%s/final.TIF' % self.final_path]
        subprocess.check_call(argv)
        return '%s/final.TIF' % self.final_path
    def _final_conversions(self):
        """ Final color conversions. Return final image temp path """
        print 'Convertin image tweaks'
        # First conversion: gamma tweaks + sigmoidal contrast on the
        # unscaled composite
        argv = ['convert',
                '-channel', 'B', '-gamma', '0.97',
                '-channel', 'R', '-gamma', '1.04',
                '-channel', 'RGB', '-sigmoidal-contrast', '40x15%',
                '%s/rgb-null.TIF' % self.final_path,
                '%s/rgb-sig.TIF' % self.final_path]
        subprocess.check_call(argv)
        # Second conversion: same gamma tweaks on the scaled composite
        argv = ['convert',
                '-channel', 'B', '-gamma', '0.97',
                '-channel', 'R', '-gamma', '1.04',
                '%s/rgb-scaled.TIF' % self.final_path,
                '%s/rgb-scaled-cc.TIF' % self.final_path]
        subprocess.check_call(argv)
        print 'Convert: averaging'
        # Fourth conversion: average the two variants into the final color
        # image (no "third" conversion exists in this version)
        argv = ['convert',
                '%s/rgb-sig.TIF' % self.final_path,
                '%s/rgb-scaled-cc.TIF' % self.final_path,
                '-evaluate-sequence', 'mean',
                '%s/final-color.TIF' % self.final_path]
        subprocess.check_call(argv)
    def _image_correction(self):
        """ Gamma-correct and CLAHE-equalize the warped bands with OpenCV.

        If cv2 was not importable (see the guarded import at the top of the
        file), the NameError is caught and the step is skipped.
        """
        try:
            corrected_list = []
            # Per-band gamma exponents: band 2 -> 0.97, band 4 -> 1.04
            band_correction = [[2, 0.97], [4, 1.04]]
            for band in self.bands:
                print 'Starting the image processing'
                file_path = ('%s/%s_B%s.TIF' % (self.warp_path,
                                                self.image, band))
                img = cv2.imread(file_path, 0)  #-1 if the next package is released and includes (https://github.com/Itseez/opencv/pull/3033)
                # Gamma Correction
                for c in band_correction:
                    if c[0] == band:
                        img = img ** c[1]
                # adding color corrected band back to list
                corrected_list.append(img.astype(numpy.uint8))
            # combining bands in list into a bgr img (opencv format for true color)
            b, g, r = corrected_list[2], corrected_list[1], corrected_list[0]
            img_comp = cv2.merge((b, g, r))
            # converting bgr to ycrcb
            imgy = cv2.cvtColor(img_comp, cv2.COLOR_BGR2YCR_CB)
            # extracting y
            y, cr, cb = cv2.split(imgy)
            # equalizing y with CLAHE
            clahe = cv2.createCLAHE(clipLimit=1.0, tileGridSize=(950, 950))
            y = clahe.apply(y)
            # merging equalized y with cr and cb
            imgy = cv2.merge((y, cr, cb))
            # converting ycrcb back to bgr
            img = cv2.cvtColor(imgy, cv2.COLOR_YCR_CB2BGR)
            # writing final equalized file
            cv2.imwrite('%s/eq-hist.tif' % self.final_path, img)
        except NameError, e:
            print e.args[0]
            print "Skipping Image Correction using OpenCV"
def _combine(self):
    """ Build two RGB composites with ImageMagick ``convert``:
    one from the warped bands (rgb-null.TIF) and one from the
    scaled bands (rgb-scaled.TIF), both written to final_path.
    """
    jobs = [(self.warp_path, 'rgb-null.TIF'),
            (self.scaled_path, 'rgb-scaled.TIF')]
    for source_dir, output_name in jobs:
        command = ['convert', '-identify', '-combine']
        command.extend('%s/%s_B%s.TIF' % (source_dir, self.image, band)
                       for band in self.bands)
        command.append('%s/%s' % (self.final_path, output_name))
        subprocess.check_call(command)
def _scale_pan(self):
    """ Scale each warped band to byte range using the min/max
    computed with a 2 percent cut, writing results into scaled_path.
    """
    min_max = self._calculate_min_max()
    # min_max = [6247, 32888]
    # gdal_translate scale expects [src_min, src_max, dst_min, dst_max]
    min_max.extend([1, 255])
    for band in self.bands:
        # '%%' renders as a literal '%' under %-formatting;
        # print() call form runs identically on Python 2 and 3
        print('scaling pan to min max with 2%% cut for band %s' % band)
        gdal_translate('%s/%s_B%s.TIF' % (self.warp_path,
                                          self.image, band),
                       '%s/%s_B%s.TIF' % (self.scaled_path,
                                          self.image, band),
                       ot='byte', scale=min_max
                       )
def _calculate_min_max(self):
    """ Calculate Min/Max values with 2 percent cut.

    For every band file present in warp_path, read the raster,
    drop zero (nodata) pixels, trim ``btm_prct`` percent from the
    bottom and ``top_prct`` percent from the top of the sorted
    values, and collect the trimmed extremes.

    :returns:
        [overall_min, overall_max] across all processed bands.
        Raises ValueError if no band file exists (empty list).
    """
    min_max_list = []
    for band in self.bands:
        file_path = ('%s/%s_B%s.TIF' % (self.warp_path,
                                        self.image, band))
        if os.path.exists(file_path):
            print('Starting the Min/Max process with designated -percent '
                  'cut- for band %s of %s' % (band, self.image))
            print('...')
            # Open images in the warp folder
            ds = gdal.Open(file_path)
            # converting raster to numpy array
            values = numpy.array(ds.GetRasterBand(1).ReadAsArray())
            to_list = values.tolist()
            full_list = [item for sublist in to_list for item in sublist]
            # removing zeros -- list comprehension instead of filter():
            # on Python 3 filter() returns an iterator, which would break
            # the len() and .sort() calls below
            value_list = [v for v in full_list if v != 0]
            list_len = len(value_list)
            value_list.sort()
            # determining number of integers to cut from bottom of list
            cut_value_bottom = int(float(self.btm_prct) /
                                   float(100) * float(list_len))
            # determining number of integers to cut from top of list
            cut_value_top = int(float(self.top_prct) /
                                float(100) * float(list_len))
            # establishing new min and max with percent cut
            cut_list = value_list[
                (cut_value_bottom + 1):(list_len - cut_value_top)]
            # adding min and max with percent cut values to list
            min_max_list.extend([cut_list[0], cut_list[-1]])
            print('Finished processing band %s of %s' % (band, self.image))
    return [min(min_max_list), max(min_max_list)]
def _warp(self):
    """ Reproject each requested band, plus the panchromatic band 8,
    from src_image_path into warp_path using EPSG:3857 (web mercator).
    """
    # Band 8 is always appended to whatever bands were requested
    for band in list(self.bands) + [8]:
        source = '%s/%s_B%s.TIF' % (self.src_image_path, self.image, band)
        target = '%s/%s_B%s.TIF' % (self.warp_path, self.image, band)
        gdalwarp(source, target, t_srs='EPSG:3857')
def _unzip(self, src, dst):
print "Unzipping %s - It might take some time" % self.image
tar = tarfile.open(src)
tar.extractall(path=dst)
tar.close()

View File

@@ -1,28 +1,35 @@
#!/usr/bin/env python
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributor: scisco, KAPPS-
#
# Landsat Util
# License: CC0 1.0 Universal
from __future__ import print_function
import sys
import subprocess
from __future__ import print_function, division, absolute_import
import argparse
import textwrap
import json
from os.path import join
try:
from urllib.request import URLError
except ImportError:
from urllib2 import URLError
from datetime import datetime
from dateutil.relativedelta import relativedelta
from dateutil.parser import parse
import pycurl
from boto.exception import NoAuthHandlerFound
from gs_helper import GsHelper
from clipper_helper import Clipper
from search_helper import Search
from general_helper import reformat_date
from image_helper import Process
import settings
from .downloader import Downloader, IncorrectSceneId, RemoteFileDoesntExist, USGSInventoryAccessMissing
from .search import Search
from .uploader import Uploader
from .utils import reformat_date, convert_to_integer_list, timer, exit, get_file, convert_to_float_list
from .mixins import VerbosityMixin
from .image import Simple, PanSharpen, FileDoesNotExist
from .ndvi import NDVIWithManualColorMap, NDVI
from .__init__ import __version__
from . import settings
DESCRIPTION = """Landsat-util is a command line utility that makes it easy to
@@ -30,248 +37,446 @@ search, download, and process Landsat imagery.
Commands:
Search:
landsat.py search [-h] [-l LIMIT] [-s START] [-e END] [-c CLOUD]
[--onlysearch] [--imageprocess]
{pr,shapefile,country}
positional arguments:
{pr,shapefile,country}
Search commands
pr Activate paths and rows
shapefile Activate Shapefile
country Activate country
landsat.py search [-p --pathrow] [--lat] [--lon] [--address] [-l LIMIT] [-s START] [-e END] [-c CLOUD]
[-h]
optional arguments:
-h, --help show this help message and exit
-l LIMIT, --limit LIMIT
Search return results limit default is 100
-p, --pathrow Paths and Rows in order separated by comma. Use quotes "001,003".
Example: path,row,path,row 001,001,190,204
-s START, --start START
Start Date - Most formats are accepted e.g.
Jun 12 2014 OR 06/12/2014
--lat Latitude
-e END, --end END End Date - Most formats are accepted e.g.
Jun 12 2014 OR 06/12/2014
--lon Longitude
-c CLOUD, --cloud CLOUD
Maximum cloud percentage default is 20 perct
--address Street address
-d, --download Use this flag to download found images
-l LIMIT, --limit LIMIT
Search return results limit default is 10
--imageprocess If this flag is used, the images are downloaded
and process. Be cautious as it might take a
long time to both download and process large
batches of images
-s START, --start START
Start Date - Most formats are accepted e.g.
Jun 12 2014 OR 06/12/2014
--pansharpen Whether to also pansharpen the process image.
Pansharpening takes a long time
-e END, --end END End Date - Most formats are accepted e.g.
Jun 12 2014 OR 06/12/2014
--latest N Returns the N latest images within the last 365 days.
-c CLOUD, --cloud CLOUD
Maximum cloud percentage. Default: 20 perct
--json Returns a bare JSON response
--geojson Returns a geojson response
-h, --help Show this help message and exit
Download:
landsat download [-h] sceneID [sceneID ...]
landsat download sceneID [sceneID ...] [-h] [-b --bands]
positional arguments:
sceneID Provide Full sceneID, e.g. LC81660392014196LGN00
positional arguments:
sceneID Provide Full sceneIDs. You can add as many sceneIDs as you wish
Example: landast download LC81660392014196LGN00
optional arguments:
-b --bands If you specify bands, landsat-util will try to download the band from S3.
If the band does not exist, an error is returned
-h, --help Show this help message and exit
-d, --dest Destination path
-p, --process Process the image after download
--pansharpen Whether to also pansharpen the processed image.
Pansharpening requires larger memory
--ndvi Calculates NDVI and produce a RGB GTiff with seperate colorbar.
--ndvigrey Calculates NDVI and produce a greyscale GTiff.
--clip Clip the image with the bounding box provided. Values must be in WGS84 datum,
and with longitude and latitude units of decimal degrees separated by comma.
Example: --clip=-346.06658935546875,49.93531194616915,-345.4595947265625,
50.2682767372753
-u --upload Upload to S3 after the image processing completed
--key Amazon S3 Access Key (You can also be set AWS_ACCESS_KEY_ID as
Environment Variables)
--secret Amazon S3 Secret Key (You can also be set AWS_SECRET_ACCESS_KEY as
Environment Variables)
--bucket Bucket name (required if uploading to s3)
--region URL to S3 region e.g. s3-us-west-2.amazonaws.com
--force-unzip Force unzip tar file
--username USGS Eros account Username (only works if the account has special
inventory access). Username and password as a fallback if the image
is not found on AWS S3 or Google Storage
--password USGS Eros account Password
Process:
landsat.py process [-h] [--pansharpen] path
landsat.py process path [-h] [-b --bands] [-p --pansharpen]
positional arguments:
path Path to the compressed image file
positional arguments:
path Path to the landsat image folder or zip file
optional arguments:
--pansharpen Whether to also pansharpen the process image.
Pansharpening takes a long time
optional arguments:
-b --bands Specify bands. The bands should be written in sequence with no spaces
Default: Natural colors (432)
Example --bands 432
--pansharpen Whether to also pansharpen the process image.
Pansharpening requires larger memory
--ndvi Calculates NDVI and produce a RGB GTiff with seperate colorbar.
--ndvigrey Calculates NDVI and produce a greyscale GTiff.
--clip Clip the image with the bounding box provided. Values must be in WGS84 datum,
and with longitude and latitude units of decimal degrees separated by comma.
Example: --clip=-346.06658935546875,49.93531194616915,-345.4595947265625,
50.2682767372753
-v, --verbose Show verbose output
-h, --help Show this help message and exit
-u --upload Upload to S3 after the image processing completed
--key Amazon S3 Access Key (You can also be set AWS_ACCESS_KEY_ID as
Environment Variables)
--secret Amazon S3 Secret Key (You can also be set AWS_SECRET_ACCESS_KEY as
Environment Variables)
--bucket Bucket name (required if uploading to s3)
--region URL to S3 region e.g. s3-us-west-2.amazonaws.com
--force-unzip Force unzip tar file
"""
def args_options():
""" Generates an argument parser.
:returns:
Parser object
"""
parser = argparse.ArgumentParser(prog='landsat',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent(DESCRIPTION))
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent(DESCRIPTION))
subparsers = parser.add_subparsers(help='Landsat Utility',
dest='subs')
parser.add_argument('--version', action='version', version='%(prog)s version ' + __version__)
# Search Logic
parser_search = subparsers.add_parser('search',
help='Search Landsat metdata')
# Global search options
parser_search.add_argument('-l', '--limit', default=100, type=int,
parser_search.add_argument('-l', '--limit', default=10, type=int,
help='Search return results limit\n'
'default is 100')
'default is 10')
parser_search.add_argument('-s', '--start',
help='Start Date - Most formats are accepted '
'e.g. Jun 12 2014 OR 06/12/2014')
parser_search.add_argument('-e', '--end',
help='End Date - Most formats are accepted '
'e.g. Jun 12 2014 OR 06/12/2014')
parser_search.add_argument('-c', '--cloud', type=float, default=20.0,
parser_search.add_argument('--latest', default=-1, type=int,
help='returns the N latest images within the last 365 days')
parser_search.add_argument('-c', '--cloud', type=float, default=100.0,
help='Maximum cloud percentage '
'default is 20 perct')
parser_search.add_argument('-d', '--download', action='store_true',
help='Use this flag to download found images')
parser_search.add_argument('--imageprocess', action='store_true',
help='If this flag is used, the images are '
'downloaded and process. Be cautious as it '
'might take a long time to both download and '
'process large batches of images')
parser_search.add_argument('--pansharpen', action='store_true',
help='Whether to also pansharpen the process '
'image. Pan sharpening takes a long time')
search_subparsers = parser_search.add_subparsers(help='Search commands',
dest='search_subs')
search_pr = search_subparsers.add_parser('pr',
help="Activate paths and rows")
search_pr.add_argument('paths_rows',
metavar='path_row',
type=int,
nargs="+",
help="Provide paths and rows")
search_shapefile = search_subparsers.add_parser('shapefile',
help="Activate Shapefile")
search_shapefile.add_argument('path',
help="Path to shapefile")
search_country = search_subparsers.add_parser('country',
help="Activate country")
search_country.add_argument('name', help="Country name e.g. ARE")
'default is 100 perct')
parser_search.add_argument('-p', '--pathrow',
help='Paths and Rows in order separated by comma. Use quotes ("001").'
'Example: path,row,path,row 001,001,190,204')
parser_search.add_argument('--lat', type=float, help='The latitude')
parser_search.add_argument('--lon', type=float, help='The longitude')
parser_search.add_argument('--address', type=str, help='The address')
parser_search.add_argument('--json', action='store_true', help='Returns a bare JSON response')
parser_search.add_argument('--geojson', action='store_true', help='Returns a geojson response')
parser_download = subparsers.add_parser('download',
help='Download images from Google Storage')
parser_download.add_argument('scenes',
metavar='sceneID',
nargs="+",
help="Provide Full sceneID, e.g. "
"LC81660392014196LGN00")
help="Provide Full sceneID, e.g. LC81660392014196LGN00")
parser_process = subparsers.add_parser('process',
help='Process Landsat imagery')
parser_download.add_argument('-b', '--bands', help='If you specify bands, landsat-util will try to download '
'the band from S3. If the band does not exist, an error is returned', default='')
parser_download.add_argument('-d', '--dest', help='Destination path')
parser_download.add_argument('-p', '--process', help='Process the image after download', action='store_true')
parser_download.add_argument('--pansharpen', action='store_true',
help='Whether to also pansharpen the process '
'image. Pansharpening requires larger memory')
parser_download.add_argument('--ndvi', action='store_true',
help='Whether to run the NDVI process. If used, bands parameter is disregarded')
parser_download.add_argument('--ndvigrey', action='store_true', help='Create an NDVI map in grayscale (grey)')
parser_download.add_argument('--clip', help='Clip the image with the bounding box provided. Values must be in ' +
'WGS84 datum, and with longitude and latitude units of decimal degrees ' +
'separated by comma.' +
'Example: --clip=-346.06658935546875,49.93531194616915,-345.4595947265625,' +
'50.2682767372753')
parser_download.add_argument('-u', '--upload', action='store_true',
help='Upload to S3 after the image processing completed')
parser_download.add_argument('--username', help='USGS Eros account Username (only works if the account has' +
' special inventory access). Username and password as a fallback if the image' +
'is not found on AWS S3 or Google Storage')
parser_download.add_argument('--password', help='USGS Eros username, used as a fallback')
parser_download.add_argument('--key', help='Amazon S3 Access Key (You can also be set AWS_ACCESS_KEY_ID as '
'Environment Variables)')
parser_download.add_argument('--secret', help='Amazon S3 Secret Key (You can also be set AWS_SECRET_ACCESS_KEY '
'as Environment Variables)')
parser_download.add_argument('--bucket', help='Bucket name (required if uploading to s3)')
parser_download.add_argument('--region', help='URL to S3 region e.g. s3-us-west-2.amazonaws.com')
parser_download.add_argument('--force-unzip', help='Force unzip tar file', action='store_true')
parser_process = subparsers.add_parser('process', help='Process Landsat imagery')
parser_process.add_argument('path',
help='Path to the compressed image file')
parser_process.add_argument('--pansharpen', action='store_true',
help='Whether to also pansharpen the process '
'image. Pan sharpening takes a long time')
'image. Pansharpening requires larger memory')
parser_process.add_argument('--ndvi', action='store_true', help='Create an NDVI map in color.')
parser_process.add_argument('--ndvigrey', action='store_true', help='Create an NDVI map in grayscale (grey)')
parser_process.add_argument('--clip', help='Clip the image with the bounding box provided. Values must be in ' +
'WGS84 datum, and with longitude and latitude units of decimal degrees ' +
'separated by comma.' +
'Example: --clip=-346.06658935546875,49.93531194616915,-345.4595947265625,' +
'50.2682767372753')
parser_process.add_argument('-b', '--bands', help='specify band combinations. Default is 432'
'Example: --bands 321', default='432')
parser_process.add_argument('-v', '--verbose', action='store_true',
help='Turn on verbosity')
parser_process.add_argument('-u', '--upload', action='store_true',
help='Upload to S3 after the image processing completed')
parser_process.add_argument('--key', help='Amazon S3 Access Key (You can also be set AWS_ACCESS_KEY_ID as '
'Environment Variables)')
parser_process.add_argument('--secret', help='Amazon S3 Secret Key (You can also be set AWS_SECRET_ACCESS_KEY '
'as Environment Variables)')
parser_process.add_argument('--bucket', help='Bucket name (required if uploading to s3)')
parser_process.add_argument('--region', help='URL to S3 region e.g. s3-us-west-2.amazonaws.com')
parser_process.add_argument('--force-unzip', help='Force unzip tar file', action='store_true')
return parser
def main(args):
"""
Main function - launches the program
Main function - launches the program.
:param args:
The Parser arguments
:type args:
Parser object
:returns:
List
:example:
>>> ["The latitude and longitude values must be valid numbers", 1]
"""
if args:
if args.subs == 'process':
p = Process(args.path)
if args.pansharpen:
p.full_with_pansharpening()
else:
p.full()
v = VerbosityMixin()
exit("The output is stored at %s." % settings.PROCESSED_IMAGE)
if args:
if 'clip' in args:
bounds = convert_to_float_list(args.clip)
else:
bounds = None
if args.subs == 'process':
verbose = True if args.verbose else False
force_unzip = True if args.force_unzip else False
stored = process_image(args.path, args.bands, verbose, args.pansharpen, args.ndvi, force_unzip,
args.ndvigrey, bounds)
if args.upload:
u = Uploader(args.key, args.secret, args.region)
u.run(args.bucket, get_file(stored), stored)
return ["The output is stored at %s" % stored]
elif args.subs == 'search':
try:
if args.start:
args.start = reformat_date(parse(args.start))
if args.end:
args.end = reformat_date(parse(args.end))
except TypeError:
exit("You date format is incorrect. Please try again!", 1)
if args.latest > 0:
args.limit = 25
end = datetime.now()
start = end - relativedelta(days=+365)
args.end = end.strftime("%Y-%m-%d")
args.start = start.strftime("%Y-%m-%d")
except (TypeError, ValueError):
return ["Your date format is incorrect. Please try again!", 1]
s = Search()
if args.search_subs == 'pr':
result = s.search(row_paths=args.paths_rows,
limit=args.limit,
start_date=args.start,
end_date=args.end,
cloud_max=args.cloud)
elif args.search_subs == 'shapefile':
clipper = Clipper()
result = s.search(clipper.shapefile(args.path),
limit=args.limit,
start_date=args.start,
end_date=args.end,
cloud_max=args.cloud)
elif args.search_subs == 'country':
clipper = Clipper()
prs = clipper.country(args.name)
if prs:
result = s.search(prs,
limit=args.limit,
start_date=args.start,
end_date=args.end,
cloud_max=args.cloud)
try:
if args.lat is not None:
lat = float(args.lat)
else:
lat = None
if args.lon is not None:
lon = float(args.lon)
else:
lon = None
except ValueError:
return ["The latitude and longitude values must be valid numbers", 1]
address = args.address
if address and (lat and lon):
return ["Cannot specify both address and latitude-longitude"]
result = s.search(paths_rows=args.pathrow,
lat=lat,
lon=lon,
address=address,
limit=args.limit,
start_date=args.start,
end_date=args.end,
cloud_max=args.cloud,
geojson=args.geojson)
if 'status' in result:
if result['status'] == 'SUCCESS':
print('%s items were found' % result['total'])
if args.json:
return json.dumps(result)
if args.latest > 0:
datelist = []
for i in range(0, result['total_returned']):
datelist.append((result['results'][i]['date'], result['results'][i]))
datelist.sort(key=lambda tup: tup[0], reverse=True)
datelist = datelist[:args.latest]
result['results'] = []
for i in range(0, len(datelist)):
result['results'].append(datelist[i][1])
result['total_returned'] = len(datelist)
else:
v.output('%s items were found' % result['total'], normal=True, arrow=True)
if result['total'] > 100:
exit('Too many results. Please narrow your search')
return ['Over 100 results. Please narrow your search', 1]
else:
print(json.dumps(result, sort_keys=True, indent=4))
# If only search
if args.download:
gs = GsHelper()
print('Starting the download:')
for item in result['results']:
gs.single_download(row=item['row'],
path=item['path'],
name=item['sceneID'])
print("%s images were downloaded"
% result['total_returned'])
if args.imageprocess:
for item in result['results']:
p = Process('%s/%s.tar.bz' % (gs.zip_dir,
item['sceneID']))
if args.pansharpen:
p.full_with_pansharpening()
else:
p.full()
else:
exit("The downloaded images are located here: %s" %
gs.zip_dir)
else:
exit('Done!')
v.output(json.dumps(result, sort_keys=True, indent=4), normal=True, color='green')
return ['Search completed!']
elif result['status'] == 'error':
exit(result['message'])
except KeyError:
exit('Too Many API queries. You can only query DevSeed\'s '
'API 5 times per minute', 1)
return [result['message'], 1]
if args.geojson:
return json.dumps(result)
elif args.subs == 'download':
gs = GsHelper()
print('Starting the download:')
for scene in args.scenes:
gs.single_download(row=gs.extract_row_path(scene)[1],
path=gs.extract_row_path(scene)[0],
name=scene)
exit("The downloaded images are located here: %s" % gs.zip_dir)
d = Downloader(download_dir=args.dest, usgs_user=args.username, usgs_pass=args.password)
try:
bands = convert_to_integer_list(args.bands)
if args.process:
if args.pansharpen:
bands.append(8)
if args.ndvi or args.ndvigrey:
bands = [4, 5]
if not args.bands:
bands = [4, 3, 2]
files = d.download(args.scenes, bands)
if args.process:
if not args.bands:
args.bands = '432'
force_unzip = True if args.force_unzip else False
for f in files:
stored = process_image(f, args.bands, False, args.pansharpen, args.ndvi, force_unzip,
args.ndvigrey, bounds=bounds)
if args.upload:
try:
u = Uploader(args.key, args.secret, args.region)
except NoAuthHandlerFound:
return ["Could not authenticate with AWS", 1]
except URLError:
return ["Connection timeout. Probably the region parameter is incorrect", 1]
u.run(args.bucket, get_file(stored), stored)
return ['The output is stored at %s' % stored, 0]
else:
return ['Download Completed', 0]
except IncorrectSceneId:
return ['The SceneID provided was incorrect', 1]
except (RemoteFileDoesntExist, USGSInventoryAccessMissing) as e:
return [e.message, 1]
def exit(message, code=0):
print(message)
sys.exit(code)
def process_image(path, bands=None, verbose=False, pansharpen=False, ndvi=False, force_unzip=None,
ndvigrey=False, bounds=None):
""" Handles constructing and image process.
:param path:
The path to the image that has to be processed
:type path:
String
:param bands:
List of bands that has to be processed. (optional)
:type bands:
List
:param verbose:
Sets the level of verbosity. Default is False.
:type verbose:
boolean
:param pansharpen:
Whether to pansharpen the image. Default is False.
:type pansharpen:
boolean
def package_installed(package):
"""
Check if a package is installed on the machine
:returns:
(String) path to the processed image
"""
try:
bands = convert_to_integer_list(bands)
if pansharpen:
p = PanSharpen(path, bands=bands, dst_path=settings.PROCESSED_IMAGE,
verbose=verbose, force_unzip=force_unzip, bounds=bounds)
elif ndvigrey:
p = NDVI(path, verbose=verbose, dst_path=settings.PROCESSED_IMAGE, force_unzip=force_unzip, bounds=bounds)
elif ndvi:
p = NDVIWithManualColorMap(path, dst_path=settings.PROCESSED_IMAGE,
verbose=verbose, force_unzip=force_unzip, bounds=bounds)
else:
p = Simple(path, bands=bands, dst_path=settings.PROCESSED_IMAGE, verbose=verbose, force_unzip=force_unzip,
bounds=bounds)
print("Checking if %s is installed on the system" % package)
installed = not subprocess.call(["which", package])
if installed:
print("%s is installed" % package)
return True
else:
print("You have to install %s first!" % package)
return False
except IOError as err:
exit(str(err), 1)
except FileDoesNotExist as err:
exit(str(err), 1)
return p.run()
def __main__():
@@ -279,7 +484,14 @@ def __main__():
global parser
parser = args_options()
args = parser.parse_args()
main(args)
if args.subs == 'search' and (hasattr(args, 'json') or hasattr(args, 'geojson')):
print(main(args))
else:
with timer():
exit(*main(args))
if __name__ == "__main__":
__main__()
try:
__main__()
except (KeyboardInterrupt, pycurl.error):
exit('Received Ctrl + C... Exiting! Bye.', 1)

View File

@@ -0,0 +1,275 @@
# Thu Jul 23 2015 10:14:15 GMT+0200
# ---------------------------------------------
# R/G/B cubehelix colour scheme
#
# see http://www.mrao.cam.ac.uk/~dag/CUBEHELIX/
#----------------------------------------------
# see Green (2011), BASI, 39, 289.
#
# start............: 1.0
# rotations........: -0.8
# hue..............: 1.6
# gamma............: 1.0
# number of levels.: 256
#----------------------------------------------
# Dave Green: dag@mrao.cam.ac.uk
#----------------------------------------------
# The value "mode" is the maximum value of the color intensity, e.g. 1 or 255
mode = 1
0 0 0
0.009 0.002 0.001
0.018 0.004 0.002
0.027 0.005 0.004
0.036 0.007 0.005
0.046 0.009 0.007
0.055 0.01 0.009
0.064 0.012 0.012
0.073 0.013 0.014
0.082 0.015 0.017
0.092 0.016 0.02
0.101 0.018 0.023
0.11 0.019 0.027
0.119 0.02 0.031
0.128 0.021 0.035
0.137 0.023 0.039
0.146 0.024 0.044
0.155 0.025 0.048
0.164 0.026 0.053
0.173 0.027 0.058
0.182 0.029 0.064
0.19 0.03 0.069
0.199 0.031 0.075
0.208 0.032 0.081
0.216 0.033 0.087
0.224 0.035 0.094
0.233 0.036 0.1
0.241 0.037 0.107
0.249 0.038 0.114
0.257 0.04 0.121
0.265 0.041 0.129
0.272 0.042 0.136
0.28 0.044 0.144
0.287 0.045 0.152
0.294 0.046 0.16
0.301 0.048 0.168
0.308 0.05 0.177
0.315 0.051 0.185
0.322 0.053 0.194
0.328 0.054 0.203
0.335 0.056 0.212
0.341 0.058 0.221
0.347 0.06 0.23
0.353 0.062 0.239
0.359 0.064 0.248
0.364 0.066 0.258
0.369 0.068 0.268
0.375 0.07 0.277
0.38 0.073 0.287
0.384 0.075 0.297
0.389 0.077 0.307
0.393 0.08 0.317
0.398 0.082 0.327
0.402 0.085 0.337
0.406 0.088 0.347
0.409 0.091 0.358
0.413 0.094 0.368
0.416 0.097 0.378
0.419 0.1 0.389
0.422 0.103 0.399
0.425 0.106 0.409
0.428 0.11 0.42
0.43 0.113 0.43
0.432 0.117 0.44
0.434 0.121 0.451
0.436 0.124 0.461
0.438 0.128 0.471
0.439 0.132 0.481
0.441 0.136 0.492
0.442 0.14 0.502
0.443 0.145 0.512
0.444 0.149 0.522
0.444 0.154 0.532
0.445 0.158 0.542
0.445 0.163 0.552
0.445 0.168 0.562
0.445 0.172 0.571
0.445 0.177 0.581
0.444 0.182 0.59
0.444 0.188 0.6
0.443 0.193 0.609
0.443 0.198 0.618
0.442 0.203 0.627
0.441 0.209 0.636
0.439 0.215 0.645
0.438 0.22 0.654
0.437 0.226 0.662
0.435 0.232 0.671
0.433 0.238 0.679
0.432 0.244 0.687
0.43 0.25 0.695
0.428 0.256 0.703
0.426 0.263 0.711
0.423 0.269 0.718
0.421 0.275 0.725
0.419 0.282 0.732
0.416 0.289 0.739
0.414 0.295 0.746
0.411 0.302 0.753
0.409 0.309 0.759
0.406 0.316 0.766
0.403 0.323 0.772
0.4 0.33 0.777
0.397 0.337 0.783
0.394 0.344 0.789
0.391 0.351 0.794
0.389 0.358 0.799
0.386 0.365 0.804
0.383 0.373 0.809
0.379 0.38 0.813
0.376 0.387 0.817
0.373 0.395 0.822
0.37 0.402 0.825
0.367 0.41 0.829
0.364 0.417 0.833
0.361 0.425 0.836
0.358 0.432 0.839
0.355 0.44 0.842
0.353 0.448 0.845
0.35 0.455 0.847
0.347 0.463 0.849
0.344 0.47 0.852
0.341 0.478 0.853
0.339 0.486 0.855
0.336 0.494 0.857
0.334 0.501 0.858
0.331 0.509 0.859
0.329 0.517 0.86
0.327 0.524 0.861
0.325 0.532 0.861
0.322 0.539 0.862
0.32 0.547 0.862
0.319 0.555 0.862
0.317 0.562 0.862
0.315 0.57 0.862
0.314 0.577 0.861
0.312 0.585 0.861
0.311 0.592 0.86
0.31 0.6 0.859
0.309 0.607 0.858
0.308 0.614 0.856
0.307 0.622 0.855
0.307 0.629 0.854
0.306 0.636 0.852
0.306 0.643 0.85
0.306 0.65 0.848
0.306 0.657 0.846
0.306 0.664 0.844
0.306 0.671 0.842
0.307 0.678 0.839
0.307 0.685 0.837
0.308 0.691 0.834
0.309 0.698 0.832
0.31 0.705 0.829
0.311 0.711 0.826
0.313 0.718 0.823
0.315 0.724 0.82
0.316 0.73 0.817
0.318 0.736 0.814
0.321 0.743 0.811
0.323 0.749 0.808
0.326 0.754 0.805
0.328 0.76 0.802
0.331 0.766 0.798
0.335 0.772 0.795
0.338 0.777 0.792
0.341 0.783 0.788
0.345 0.788 0.785
0.349 0.794 0.782
0.353 0.799 0.778
0.357 0.804 0.775
0.361 0.809 0.772
0.366 0.814 0.769
0.371 0.819 0.765
0.376 0.823 0.762
0.381 0.828 0.759
0.386 0.833 0.756
0.392 0.837 0.753
0.397 0.841 0.75
0.403 0.846 0.747
0.409 0.85 0.744
0.415 0.854 0.741
0.421 0.858 0.739
0.428 0.862 0.736
0.434 0.865 0.734
0.441 0.869 0.731
0.448 0.873 0.729
0.455 0.876 0.727
0.462 0.879 0.725
0.469 0.883 0.723
0.477 0.886 0.721
0.484 0.889 0.719
0.492 0.892 0.718
0.5 0.895 0.717
0.508 0.898 0.715
0.516 0.901 0.714
0.524 0.903 0.713
0.532 0.906 0.713
0.541 0.908 0.712
0.549 0.911 0.712
0.558 0.913 0.711
0.566 0.915 0.711
0.575 0.918 0.711
0.584 0.92 0.712
0.593 0.922 0.712
0.602 0.924 0.713
0.611 0.926 0.713
0.62 0.928 0.714
0.629 0.929 0.716
0.638 0.931 0.717
0.647 0.933 0.719
0.656 0.934 0.721
0.665 0.936 0.723
0.675 0.938 0.725
0.684 0.939 0.727
0.693 0.94 0.73
0.702 0.942 0.733
0.712 0.943 0.736
0.721 0.945 0.739
0.73 0.946 0.743
0.739 0.947 0.746
0.748 0.948 0.75
0.758 0.95 0.755
0.767 0.951 0.759
0.776 0.952 0.764
0.785 0.953 0.768
0.794 0.954 0.773
0.802 0.956 0.779
0.811 0.957 0.784
0.82 0.958 0.79
0.829 0.959 0.795
0.837 0.96 0.801
0.845 0.962 0.808
0.854 0.963 0.814
0.862 0.964 0.821
0.87 0.965 0.828
0.878 0.967 0.835
0.886 0.968 0.842
0.894 0.969 0.849
0.901 0.971 0.857
0.909 0.972 0.865
0.916 0.973 0.873
0.923 0.975 0.881
0.93 0.976 0.889
0.937 0.978 0.898
0.944 0.98 0.906
0.95 0.981 0.915
0.956 0.983 0.924
0.963 0.985 0.933
0.969 0.987 0.942
0.974 0.989 0.951
0.98 0.991 0.961
0.985 0.993 0.971
0.99 0.995 0.98
0.995 0.998 0.99
1.000 1.000 1.000

View File

@@ -0,0 +1,262 @@
# Tue Jul 14 2015 14:40:26 GMT+0200
# ---------------------------------------------
# Intuitive Colormap for NDVI mapping. Not suited for grayscale print or colorblind people though...
# The value "mode" is the maximum value of the color intensity, e.g. 1 or 255
mode = 1
0 0 0
0.006299213 0.006299213 0.006299213
0.012598425 0.012598425 0.012598425
0.018897638 0.018897638 0.018897638
0.02519685 0.02519685 0.02519685
0.031496063 0.031496063 0.031496063
0.037795275 0.037795275 0.037795275
0.044094488 0.044094488 0.044094488
0.050393701 0.050393701 0.050393701
0.056692913 0.056692913 0.056692913
0.062992126 0.062992126 0.062992126
0.069291338 0.069291338 0.069291338
0.075590551 0.075590551 0.075590551
0.081889763 0.081889763 0.081889763
0.088188976 0.088188976 0.088188976
0.094488189 0.094488189 0.094488189
0.100787401 0.100787401 0.100787401
0.107086614 0.107086614 0.107086614
0.113385826 0.113385826 0.113385826
0.119685039 0.119685039 0.119685039
0.125984251 0.125984251 0.125984251
0.132283464 0.132283464 0.132283464
0.138582677 0.138582677 0.138582677
0.144881889 0.144881889 0.144881889
0.151181102 0.151181102 0.151181102
0.157480314 0.157480314 0.157480314
0.163779527 0.163779527 0.163779527
0.17007874 0.17007874 0.17007874
0.176377952 0.176377952 0.176377952
0.182677165 0.182677165 0.182677165
0.188976377 0.188976377 0.188976377
0.19527559 0.19527559 0.19527559
0.201574802 0.201574802 0.201574802
0.207874015 0.207874015 0.207874015
0.214173228 0.214173228 0.214173228
0.22047244 0.22047244 0.22047244
0.226771653 0.226771653 0.226771653
0.233070865 0.233070865 0.233070865
0.239370078 0.239370078 0.239370078
0.24566929 0.24566929 0.24566929
0.251968503 0.251968503 0.251968503
0.25826773 0.25826773 0.25826773
0.264566928 0.264566928 0.264566928
0.270866156 0.270866156 0.270866156
0.277165353 0.277165353 0.277165353
0.283464581 0.283464581 0.283464581
0.289763778 0.289763778 0.289763778
0.296063006 0.296063006 0.296063006
0.302362204 0.302362204 0.302362204
0.308661431 0.308661431 0.308661431
0.314960629 0.314960629 0.314960629
0.321259856 0.321259856 0.321259856
0.327559054 0.327559054 0.327559054
0.333858281 0.333858281 0.333858281
0.340157479 0.340157479 0.340157479
0.346456707 0.346456707 0.346456707
0.352755904 0.352755904 0.352755904
0.359055132 0.359055132 0.359055132
0.365354329 0.365354329 0.365354329
0.371653557 0.371653557 0.371653557
0.377952754 0.377952754 0.377952754
0.384251982 0.384251982 0.384251982
0.39055118 0.39055118 0.39055118
0.396850407 0.396850407 0.396850407
0.403149605 0.403149605 0.403149605
0.409448832 0.409448832 0.409448832
0.41574803 0.41574803 0.41574803
0.422047257 0.422047257 0.422047257
0.428346455 0.428346455 0.428346455
0.434645683 0.434645683 0.434645683
0.44094488 0.44094488 0.44094488
0.447244108 0.447244108 0.447244108
0.453543305 0.453543305 0.453543305
0.459842533 0.459842533 0.459842533
0.466141731 0.466141731 0.466141731
0.472440958 0.472440958 0.472440958
0.478740156 0.478740156 0.478740156
0.485039383 0.485039383 0.485039383
0.491338581 0.491338581 0.491338581
0.497637808 0.497637808 0.497637808
0.503937006 0.503937006 0.503937006
0.510236204 0.510236204 0.510236204
0.516535461 0.516535461 0.516535461
0.522834659 0.522834659 0.522834659
0.529133856 0.529133856 0.529133856
0.535433054 0.535433054 0.535433054
0.541732311 0.541732311 0.541732311
0.548031509 0.548031509 0.548031509
0.554330707 0.554330707 0.554330707
0.560629904 0.560629904 0.560629904
0.566929162 0.566929162 0.566929162
0.573228359 0.573228359 0.573228359
0.579527557 0.579527557 0.579527557
0.585826755 0.585826755 0.585826755
0.592126012 0.592126012 0.592126012
0.59842521 0.59842521 0.59842521
0.604724407 0.604724407 0.604724407
0.611023605 0.611023605 0.611023605
0.617322862 0.617322862 0.617322862
0.62362206 0.62362206 0.62362206
0.629921257 0.629921257 0.629921257
0.636220455 0.636220455 0.636220455
0.642519712 0.642519712 0.642519712
0.64881891 0.64881891 0.64881891
0.655118108 0.655118108 0.655118108
0.661417305 0.661417305 0.661417305
0.667716563 0.667716563 0.667716563
0.67401576 0.67401576 0.67401576
0.680314958 0.680314958 0.680314958
0.686614156 0.686614156 0.686614156
0.692913413 0.692913413 0.692913413
0.699212611 0.699212611 0.699212611
0.705511808 0.705511808 0.705511808
0.711811006 0.711811006 0.711811006
0.718110263 0.718110263 0.718110263
0.724409461 0.724409461 0.724409461
0.730708659 0.730708659 0.730708659
0.737007856 0.737007856 0.737007856
0.743307114 0.743307114 0.743307114
0.749606311 0.749606311 0.749606311
0.755905509 0.755905509 0.755905509
0.762204707 0.762204707 0.762204707
0.768503964 0.768503964 0.768503964
0.774803162 0.774803162 0.774803162
0.781102359 0.781102359 0.781102359
0.787401557 0.787401557 0.787401557
0.787401557 0.787401557 0.787401557
0.768627465 0.368627459 0.168627456
0.768627465 0.368627459 0.168627456
0.768929541 0.369346976 0.16918768
0.769231617 0.370066464 0.169747904
0.769533694 0.370785981 0.170308128
0.76983577 0.371505469 0.170868352
0.770137846 0.372224987 0.171428576
0.770439982 0.372944474 0.1719888
0.770742059 0.373663992 0.172549024
0.771044135 0.374383479 0.173109248
0.771346211 0.375102997 0.173669472
0.771648288 0.375822484 0.174229696
0.771950364 0.376542002 0.17478992
0.77225244 0.37726149 0.175350145
0.772554517 0.377981007 0.175910369
0.772856593 0.378700495 0.176470593
0.773158669 0.379420012 0.177030817
0.773460805 0.3801395 0.177591041
0.773762882 0.380859017 0.178151265
0.774064958 0.381578505 0.178711489
0.774367034 0.382298023 0.179271713
0.774669111 0.38301751 0.179831937
0.774971187 0.383737028 0.180392161
0.780972838 0.400119334 0.194455713
0.786974549 0.416501611 0.20851928
0.792976201 0.432883918 0.222582832
0.798977852 0.449266195 0.236646384
0.804979563 0.465648502 0.250709951
0.810981214 0.482030779 0.264773518
0.816982865 0.498413086 0.278837055
0.822984517 0.514795363 0.292900622
0.828986228 0.53117764 0.306964189
0.834987879 0.547559977 0.321027726
0.84098953 0.563942254 0.335091293
0.846991241 0.580324531 0.34915486
0.852992892 0.596706867 0.363218397
0.858994544 0.613089144 0.377281964
0.864996254 0.629471421 0.391345531
0.870997906 0.645853698 0.405409068
0.876999557 0.662236035 0.419472635
0.883001268 0.678618312 0.433536202
0.889002919 0.695000589 0.447599739
0.89500457 0.711382926 0.461663306
0.901006281 0.727765203 0.475726873
0.907007933 0.74414748 0.48979041
0.913009584 0.760529757 0.503853977
0.919011235 0.776912093 0.517917514
0.925012946 0.79329437 0.531981111
0.931014597 0.809676647 0.546044648
0.937016249 0.826058924 0.560108185
0.94301796 0.842441261 0.574171782
0.949019611 0.858823538 0.588235319
0.913870752 0.858823538 0.566448808
0.878721833 0.858823538 0.544662356
0.843572974 0.858823538 0.522875845
0.808424115 0.858823538 0.501089334
0.773275256 0.858823538 0.479302853
0.738126338 0.858823538 0.457516372
0.702977479 0.858823538 0.435729861
0.667828619 0.858823538 0.41394338
0.63267976 0.858823538 0.392156869
0.597530842 0.858823538 0.370370388
0.562381983 0.858823538 0.348583907
0.527233124 0.858823538 0.326797396
0.492084235 0.858823538 0.305010915
0.456935376 0.858823538 0.283224404
0.421786487 0.858823538 0.261437923
0.386637628 0.858823538 0.239651427
0.351488739 0.858823538 0.217864931
0.31633988 0.858823538 0.196078435
0.281190991 0.858823538 0.174291953
0.246042117 0.858823538 0.152505457
0.210893244 0.858823538 0.130718961
0.17574437 0.858823538 0.108932465
0.140595496 0.858823538 0.087145977
0.105446622 0.858823538 0.065359481
0.070297748 0.858823538 0.043572988
0.035148874 0.858823538 0.021786494
0 0.858823538 0
0 0.846087515 0
0 0.833351493 0
0 0.82061547 0
0 0.807879448 0
0 0.795143425 0
0 0.782407403 0
0 0.769671381 0
0 0.756935358 0
0 0.744199336 0
0 0.731463313 0
0 0.718727291 0
0 0.705991268 0
0 0.693255246 0
0 0.680519283 0
0 0.66778326 0
0 0.655047238 0
0 0.642311215 0
0 0.629575193 0
0 0.61683917 0
0 0.604103148 0
0 0.591367126 0
0 0.578631103 0
0 0.565895081 0
0 0.553159058 0
0 0.540423036 0
0 0.527687013 0
0 0.514950991 0
0 0.514215708 0
0 0.513480425 0
0 0.512745082 0
0 0.512009799 0
0 0.511274517 0
0 0.510539234 0
0 0.509803951 0
0 0.509068608 0
0 0.508333325 0
0 0.507598042 0
0 0.50686276 0
0 0.506127477 0
0 0.505392134 0
0 0.504656851 0
0 0.503921568 0
0 0.503186285 0
0 0.502451003 0
0 0.50171566 0
0 0.500980377 0
0 0.500245094 0
0 0.499509811 0
0 0.498774499 0
0 0.498039216 0

View File

@@ -0,0 +1,263 @@
# Oct2 2015 14:40:26 GMT+0200
# ---------------------------------------------
# Colormap built by cfastie
# Source: http://publiclab.org/notes/cfastie/08-26-2014/new-ndvi-colormap
mode = 255
0 0 0
255 255 255
250 250 250
246 246 246
242 242 242
238 238 238
233 233 233
229 229 229
225 225 225
221 221 221
216 216 216
212 212 212
208 208 208
204 204 204
200 200 200
195 195 195
191 191 191
187 187 187
183 183 183
178 178 178
174 174 174
170 170 170
166 166 166
161 161 161
157 157 157
153 153 153
149 149 149
145 145 145
140 140 140
136 136 136
132 132 132
128 128 128
123 123 123
119 119 119
115 115 115
111 111 111
106 106 106
102 102 102
98 98 98
94 94 94
90 90 90
85 85 85
81 81 81
77 77 77
73 73 73
68 68 68
64 64 64
60 60 60
56 56 56
52 52 52
56 56 56
60 60 60
64 64 64
68 68 68
73 73 73
77 77 77
81 81 81
85 85 85
90 90 90
94 94 94
98 98 98
102 102 102
106 106 106
111 111 111
115 115 115
119 119 119
123 123 123
128 128 128
132 132 132
136 136 136
140 140 140
145 145 145
149 149 149
153 153 153
157 157 157
161 161 161
166 166 166
170 170 170
174 174 174
178 178 178
183 183 183
187 187 187
191 191 191
195 195 195
200 200 200
204 204 204
208 208 208
212 212 212
216 216 216
221 221 221
225 225 225
229 229 229
233 233 233
238 238 238
242 242 242
246 246 246
250 250 250
255 255 255
250 250 250
245 245 245
240 240 240
235 235 235
230 230 230
225 225 225
220 220 220
215 215 215
210 210 210
205 205 205
200 200 200
195 195 195
190 190 190
185 185 185
180 180 180
175 175 175
170 170 170
165 165 165
160 160 160
155 155 155
151 151 151
146 146 146
141 141 141
136 136 136
131 131 131
126 126 126
121 121 121
116 116 116
111 111 111
106 106 106
101 101 101
96 96 96
91 91 91
86 86 86
81 81 81
76 76 76
71 71 71
66 66 66
61 61 61
56 56 56
66 66 80
77 77 105
87 87 130
98 98 155
108 108 180
119 119 205
129 129 230
140 140 255
131 147 239
122 154 223
113 161 207
105 168 191
96 175 175
87 183 159
78 190 143
70 197 127
61 204 111
52 211 95
43 219 79
35 226 63
26 233 47
17 240 31
8 247 15
1 255 1
7 255 1
15 255 1
23 255 1
31 255 1
39 255 1
47 255 1
55 255 1
63 255 1
71 255 1
79 255 1
87 255 1
95 255 1
103 255 1
111 255 1
119 255 1
127 255 1
135 255 1
143 255 1
151 255 1
159 255 1
167 255 1
175 255 1
183 255 1
191 255 1
199 255 1
207 255 1
215 255 1
223 255 1
231 255 1
239 255 1
247 255 1
255 255 1
255 249 1
255 244 1
255 239 1
255 233 1
255 228 1
255 223 1
255 217 1
255 212 1
255 207 1
255 201 1
255 196 1
255 191 1
255 185 1
255 180 1
255 175 1
255 170 1
255 164 1
255 159 1
255 154 1
255 148 1
255 143 1
255 138 1
255 132 1
255 127 1
255 122 1
255 116 1
255 111 1
255 106 1
255 100 1
255 95 1
255 90 1
255 85 1
255 79 1
255 74 1
255 69 1
255 63 1
255 58 1
255 53 1
255 47 1
255 42 1
255 37 1
255 31 1
255 26 1
255 21 1
255 15 1
255 10 1
255 5 1
255 1 1
255 1 15
255 1 31
255 1 47
255 1 63
255 1 79
255 1 95
255 1 111
255 1 127
255 1 143
255 1 159
255 1 175
255 1 191
255 1 207
255 1 223
255 1 239

View File

@@ -1,154 +0,0 @@
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco, KAPPS-
#
# License: CC0 1.0 Universal
#
# This is intended to populate an Elastic Search instance.
# For this file to work, you must make sure that you have a running instnace
# of Elastic Search and it is setup in the settings.py
from __future__ import print_function
import sys
import json
from urllib2 import urlopen, HTTPError, URLError
from elasticsearch import Elasticsearch
from elasticsearch.exceptions import ConnectionError
import settings
class Metadata(object):
def __init__(self):
self.l8_metadata_filename = settings.L8_METADATA_FILENAME
self.l8_metadata_url = settings.L8_METADATA_URL
self.assests_dir = settings.ASSESTS_DIR
self.es_url = settings.ES_URL
self.es_main_index = settings.ES_MAIN_INDEX
self.es_main_type = settings.ES_MAIN_TYPE
def populate(self):
if self.download():
es = Elasticsearch(self.es_url)
f = open('%s/%s' % (self.assests_dir, self.l8_metadata_filename),
'r')
# Read the first line for all the headers
headers = f.readline().split(',')
# Read the rest of the document
rows = f.readlines()
added_counter = 0
skipped_counter = 0
for row in rows:
fields = row.split(',')
obj = {}
for header in headers:
try:
obj[header.replace('\n', '')] = float(fields[
headers.index(header)].replace('\n', ''))
except ValueError:
obj[header.replace('\n', '')] = fields[
headers.index(header)].replace('\n', '')
try:
if not es.exists(
index=self.es_main_index,
doc_type=self.es_main_type,
id=obj['sceneID']):
es.create(
index=self.es_main_index,
doc_type=self.es_main_type,
id=obj['sceneID'],
body=json.dumps(obj),
ignore=409)
# print('%s-%s created' % (counter, obj['sceneID']))
added_counter += 1
print('%s new records added' % added_counter,
end='\r')
else:
skipped_counter += 1
# New meta data is added to the top of the document.
# When the script starts to see existing records, it means
# that all new records are added and it's safe to break
# the loop.
if skipped_counter > 10:
break
return True
except ConnectionError:
print('There was a connection error. Check your Elastic' +
' Search setting and make sure Elastic Search is' +
'running.')
return False
except:
print('An expected error: %s' % (sys.exc_info()[0]))
return False
print('The update is completed. %s new records were added.' %
added_counter)
def download(self):
# Open the url
try:
f = urlopen(self.l8_metadata_url)
if self.file_is_csv(f):
print("downloading " + self.l8_metadata_url)
CHUNK = 800 * 1024
counter = 0
total_size = self.get_url_file_size(f)
# Open our local file for writing
with open('%s/%s' % (self.assests_dir,
self.l8_metadata_filename),
"wb") as meta_file:
while True:
chunk = f.read(CHUNK)
if not chunk:
break
meta_file.write(chunk)
counter += 1
chunk_sum = float(counter * CHUNK)
perct = chunk_sum / total_size
print('==> download progress: {:.2%}'.format(perct),
end='\r')
sys.stdout.flush()
print('==> Download completed')
return True
else:
print('The URL provided doesn\'t include a CSV file')
return False
# handle errors
except HTTPError, e:
print("HTTP Error:", e.code, self.l8_metadata_url)
except URLError, e:
print("URL Error:", e.reason, self.l8_metadata_url)
return False
def get_url_file_size(self, remote_file):
"""gets filesize of remote file"""
size = remote_file.headers.get('content-length')
return float(size)
def file_is_csv(self, remote_file):
"""Checks whether the file is CSV"""
if 'csv' in remote_file.headers.get('content-type'):
return True
else:
return False

115
landsat/mixins.py Normal file
View File

@@ -0,0 +1,115 @@
# Pansharpened Image Process using Rasterio
# Landsat Util
# License: CC0 1.0 Universal
from __future__ import print_function, division, absolute_import
import sys
import subprocess
from termcolor import colored
class VerbosityMixin(object):
"""
Verbosity Mixin that generates beautiful stdout outputs.
"""
verbose = False
def output(self, value, normal=False, color=None, error=False,
arrow=False, indent=None):
""" Handles verbosity of this calls.
if priority is set to 1, the value is printed
if class instance verbose is True, the value is printed
:param value:
a string representing the message to be printed
:type value:
String
:param normal:
if set to true the message is always printed, otherwise it is only shown if verbosity is set
:type normal:
boolean
:param color:
The color of the message, choices: 'red', 'green', 'blue'
:type normal:
String
:param error:
if set to true the message appears in red
:type error:
Boolean
:param arrow:
if set to true an arrow appears before the message
:type arrow:
Boolean
:param indent:
indents the message based on the number provided
:type indent:
Boolean
:returns:
void
"""
if error and value and (normal or self.verbose):
return self._print(value, color='red', indent=indent)
if self.verbose or normal:
return self._print(value, color, arrow, indent)
return
def subprocess(self, argv):
"""
Execute subprocess commands with proper ouput.
This is no longer used in landsat-util
:param argv:
A list of subprocess arguments
:type argv:
List
:returns:
void
"""
if self.verbose:
proc = subprocess.Popen(argv, stderr=subprocess.PIPE)
else:
proc = subprocess.Popen(argv, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.output(proc.stderr.read(), error=True)
return
def exit(self, message):
""" outputs an exit message and exits
:param message:
The message to be outputed
:type message:
String
:returns:
void
"""
self.output(message, normal=True, color="green")
sys.exit()
def _print(self, msg, color=None, arrow=False, indent=None):
""" Print the msg with the color provided. """
if color:
msg = colored(msg, color)
if arrow:
msg = colored('===> ', 'blue') + msg
if indent:
msg = (' ' * indent) + msg
print(msg)
return msg

129
landsat/ndvi.py Normal file
View File

@@ -0,0 +1,129 @@
from __future__ import print_function, division, absolute_import
from os.path import join
import rasterio
import numpy
from . import settings
from .decorators import rasterio_decorator
from .image import BaseProcess
class NDVI(BaseProcess):
def __init__(self, path, bands=None, **kwargs):
bands = [4, 5]
self._read_cmap()
super(NDVI, self).__init__(path, bands, **kwargs)
def _read_cmap(self):
"""
reads the colormap from a text file given in settings.py.
See colormap_cubehelix.txt. File must contain 256 RGB values
"""
try:
i = 0
colormap = {0: (0, 0, 0)}
with open(settings.COLORMAP) as cmap:
lines = cmap.readlines()
for line in lines:
if i == 0 and 'mode = ' in line:
i = 1
maxval = float(line.replace('mode = ', ''))
elif i > 0:
str = line.split()
if str == []: # when there are empty lines at the end of the file
break
colormap.update(
{
i: (int(round(float(str[0]) * 255 / maxval)),
int(round(float(str[1]) * 255 / maxval)),
int(round(float(str[2]) * 255 / maxval)))
}
)
i += 1
except IOError:
pass
self.cmap = {k: v[:4] for k, v in colormap.items()}
@rasterio_decorator
def run(self):
"""
Executes NDVI processing
"""
self.output("* NDVI processing started.", normal=True)
bands = self._read_bands()
image_data = self._get_image_data()
new_bands = []
for i in range(0, 2):
new_bands.append(numpy.empty(image_data['shape'], dtype=numpy.float32))
self._warp(image_data, bands, new_bands)
# Bands are no longer needed
del bands
calc_band = numpy.true_divide((new_bands[1] - new_bands[0]), (new_bands[1] + new_bands[0]))
output_band = numpy.rint((calc_band + 1) * 255 / 2).astype(numpy.uint8)
output_file = join(self.dst_path, self._filename(suffix='NDVI'))
return self.write_band(output_band, output_file, image_data)
def write_band(self, output_band, output_file, image_data):
# from http://publiclab.org/notes/cfastie/08-26-2014/new-ndvi-colormap
with rasterio.open(output_file, 'w', driver='GTiff',
width=image_data['shape'][1],
height=image_data['shape'][0],
count=1,
dtype=numpy.uint8,
nodata=0,
transform=image_data['dst_transform'],
crs=self.dst_crs) as output:
output.write_band(1, output_band)
self.output("Writing to file", normal=True, color='green', indent=1)
return output_file
class NDVIWithManualColorMap(NDVI):
def manual_colormap(self, n, i):
return self.cmap[n][i]
def write_band(self, output_band, output_file, image_data):
# colormaps will overwrite our transparency masks so we will manually
# create three RGB bands
self.output("Applying ColorMap", normal=True, arrow=True)
self.cmap[0] = (0, 0, 0, 255)
v_manual_colormap = numpy.vectorize(self.manual_colormap, otypes=[numpy.uint8])
rgb_bands = []
for i in range(3):
rgb_bands.append(v_manual_colormap(output_band, i))
with rasterio.drivers(GDAL_TIFF_INTERNAL_MASK=True):
with rasterio.open(output_file, 'w', driver='GTiff',
width=image_data['shape'][1],
height=image_data['shape'][0],
count=3,
dtype=numpy.uint8,
nodata=0,
photometric='RGB',
transform=image_data['dst_transform'],
crs=self.dst_crs) as output:
for i in range(3):
output.write_band(i + 1, rgb_bands[i])
self.output("Writing to file", normal=True, color='green', indent=1)
return output_file

File diff suppressed because it is too large Load Diff

View File

@@ -1,532 +0,0 @@
#!/usr/bin/env python
#/******************************************************************************
# * $Id$
# *
# * Project: OpenGIS Simple Features Reference Implementation
# * Purpose: Python port of a simple client for viewing OGR driver data.
# * Author: Even Rouault, <even dot rouault at mines dash paris dot org>
# *
# * Port from ogrinfo.cpp whose author is Frank Warmerdam
# *
# ******************************************************************************
# * Copyright (c) 2010-2013, Even Rouault <even dot rouault at mines-paris dot org>
# * Copyright (c) 1999, Frank Warmerdam
# *
# * Permission is hereby granted, free of charge, to any person obtaining a
# * copy of this software and associated documentation files (the "Software"),
# * to deal in the Software without restriction, including without limitation
# * the rights to use, copy, modify, merge, publish, distribute, sublicense,
# * and/or sell copies of the Software, and to permit persons to whom the
# * Software is furnished to do so, subject to the following conditions:
# *
# * The above copyright notice and this permission notice shall be included
# * in all copies or substantial portions of the Software.
# *
# * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# * DEALINGS IN THE SOFTWARE.
# ****************************************************************************/
# Note : this is the most direct port of ogrinfo.cpp possible
# It could be made much more Python'ish !
import sys
try:
from osgeo import gdal
from osgeo import ogr
except:
import gdal
import ogr
bReadOnly = False
bVerbose = True
bSummaryOnly = False
nFetchFID = ogr.NullFID
papszOptions = None
def EQUAL(a, b):
return a.lower() == b.lower()
#/************************************************************************/
#/* main() */
#/************************************************************************/
def main(argv = None):
global bReadOnly
global bVerbose
global bSummaryOnly
global nFetchFID
global papszOptions
pszWHERE = None
pszDataSource = None
papszLayers = None
poSpatialFilter = None
nRepeatCount = 1
bAllLayers = False
pszSQLStatement = None
pszDialect = None
options = {}
pszGeomField = None
if argv is None:
argv = sys.argv
argv = ogr.GeneralCmdLineProcessor( argv )
#/* -------------------------------------------------------------------- */
#/* Processing command line arguments. */
#/* -------------------------------------------------------------------- */
if argv is None:
return 1
nArgc = len(argv)
iArg = 1
while iArg < nArgc:
if EQUAL(argv[iArg],"--utility_version"):
print("%s is running against GDAL %s" %
(argv[0], gdal.VersionInfo("RELEASE_NAME")))
return 0
elif EQUAL(argv[iArg],"-ro"):
bReadOnly = True
elif EQUAL(argv[iArg],"-q") or EQUAL(argv[iArg],"-quiet"):
bVerbose = False
elif EQUAL(argv[iArg],"-fid") and iArg < nArgc-1:
iArg = iArg + 1
nFetchFID = int(argv[iArg])
elif EQUAL(argv[iArg],"-spat") and iArg + 4 < nArgc:
oRing = ogr.Geometry(ogr.wkbLinearRing)
oRing.AddPoint( float(argv[iArg+1]), float(argv[iArg+2]) )
oRing.AddPoint( float(argv[iArg+1]), float(argv[iArg+4]) )
oRing.AddPoint( float(argv[iArg+3]), float(argv[iArg+4]) )
oRing.AddPoint( float(argv[iArg+3]), float(argv[iArg+2]) )
oRing.AddPoint( float(argv[iArg+1]), float(argv[iArg+2]) )
poSpatialFilter = ogr.Geometry(ogr.wkbPolygon)
poSpatialFilter.AddGeometry(oRing)
iArg = iArg + 4
elif EQUAL(argv[iArg],"-geomfield") and iArg < nArgc-1:
iArg = iArg + 1
pszGeomField = argv[iArg]
elif EQUAL(argv[iArg],"-where") and iArg < nArgc-1:
iArg = iArg + 1
pszWHERE = argv[iArg]
elif EQUAL(argv[iArg],"-sql") and iArg < nArgc-1:
iArg = iArg + 1
pszSQLStatement = argv[iArg]
elif EQUAL(argv[iArg],"-dialect") and iArg < nArgc-1:
iArg = iArg + 1
pszDialect = argv[iArg]
elif EQUAL(argv[iArg],"-rc") and iArg < nArgc-1:
iArg = iArg + 1
nRepeatCount = int(argv[iArg])
elif EQUAL(argv[iArg],"-al"):
bAllLayers = True
elif EQUAL(argv[iArg],"-so") or EQUAL(argv[iArg],"-summary"):
bSummaryOnly = True
elif len(argv[iArg]) > 8 and EQUAL(argv[iArg][0:8],"-fields="):
options['DISPLAY_FIELDS'] = argv[iArg][7:len(argv[iArg])]
elif len(argv[iArg]) > 6 and EQUAL(argv[iArg][0:6],"-geom="):
options['DISPLAY_GEOMETRY'] = argv[iArg][6:len(argv[iArg])]
elif argv[iArg][0] == '-':
return Usage()
elif pszDataSource is None:
pszDataSource = argv[iArg]
else:
if papszLayers is None:
papszLayers = []
papszLayers.append( argv[iArg] )
bAllLayers = False
iArg = iArg + 1
if pszDataSource is None:
return Usage()
#/* -------------------------------------------------------------------- */
#/* Open data source. */
#/* -------------------------------------------------------------------- */
poDS = None
poDriver = None
poDS = ogr.Open( pszDataSource, not bReadOnly )
if poDS is None and not bReadOnly:
poDS = ogr.Open( pszDataSource, False )
if poDS is not None and bVerbose:
print( "Had to open data source read-only." )
bReadOnly = True
#/* -------------------------------------------------------------------- */
#/* Report failure */
#/* -------------------------------------------------------------------- */
if poDS is None:
print( "FAILURE:\n"
"Unable to open datasource `%s' with the following drivers." % pszDataSource )
for iDriver in range(ogr.GetDriverCount()):
print( " -> %s" % ogr.GetDriver(iDriver).GetName() )
return 1
poDriver = poDS.GetDriver()
#/* -------------------------------------------------------------------- */
#/* Some information messages. */
#/* -------------------------------------------------------------------- */
if bVerbose:
print( "INFO: Open of `%s'\n"
" using driver `%s' successful." % (pszDataSource, poDriver.GetName()) )
poDS_Name = poDS.GetName()
if str(type(pszDataSource)) == "<type 'unicode'>" and str(type(poDS_Name)) == "<type 'str'>":
poDS_Name = unicode(poDS_Name, "utf8")
if bVerbose and pszDataSource != poDS_Name:
print( "INFO: Internal data source name `%s'\n"
" different from user name `%s'." % (poDS_Name, pszDataSource ))
#/* -------------------------------------------------------------------- */
#/* Special case for -sql clause. No source layers required. */
#/* -------------------------------------------------------------------- */
if pszSQLStatement is not None:
poResultSet = None
nRepeatCount = 0 #// skip layer reporting.
if papszLayers is not None:
print( "layer names ignored in combination with -sql." )
if pszGeomField is None:
poResultSet = poDS.ExecuteSQL( pszSQLStatement, poSpatialFilter,
pszDialect )
else:
poResultSet = poDS.ExecuteSQL( pszSQLStatement, None, pszDialect )
if poResultSet is not None:
if pszWHERE is not None:
if poResultSet.SetAttributeFilter( pszWHERE ) != 0:
print("FAILURE: SetAttributeFilter(%s) failed." % pszWHERE)
return 1
if pszGeomField is not None:
ReportOnLayer( poResultSet, None, pszGeomField, poSpatialFilter, options )
else:
ReportOnLayer( poResultSet, None, None, None, options )
poDS.ReleaseResultSet( poResultSet )
#gdal.Debug( "OGR", "GetLayerCount() = %d\n", poDS.GetLayerCount() )
for iRepeat in range(nRepeatCount):
if papszLayers is None:
#/* -------------------------------------------------------------------- */
#/* Process each data source layer. */
#/* -------------------------------------------------------------------- */
for iLayer in range(poDS.GetLayerCount()):
poLayer = poDS.GetLayer(iLayer)
if poLayer is None:
print( "FAILURE: Couldn't fetch advertised layer %d!" % iLayer )
return 1
if not bAllLayers:
line = "%d: %s" % (iLayer+1, poLayer.GetLayerDefn().GetName())
nGeomFieldCount = poLayer.GetLayerDefn().GetGeomFieldCount()
if nGeomFieldCount > 1:
line = line + " ("
for iGeom in range(nGeomFieldCount):
if iGeom > 0:
line = line + ", "
poGFldDefn = poLayer.GetLayerDefn().GetGeomFieldDefn(iGeom)
line = line + "%s" % ogr.GeometryTypeToName( poGFldDefn.GetType() )
line = line + ")"
if poLayer.GetLayerDefn().GetGeomType() != ogr.wkbUnknown:
line = line + " (%s)" % ogr.GeometryTypeToName( poLayer.GetLayerDefn().GetGeomType() )
print(line)
else:
if iRepeat != 0:
poLayer.ResetReading()
ReportOnLayer( poLayer, pszWHERE, pszGeomField, poSpatialFilter, options )
else:
#/* -------------------------------------------------------------------- */
#/* Process specified data source layers. */
#/* -------------------------------------------------------------------- */
for papszIter in papszLayers:
poLayer = poDS.GetLayerByName(papszIter)
if poLayer is None:
print( "FAILURE: Couldn't fetch requested layer %s!" % papszIter )
return 1
if iRepeat != 0:
poLayer.ResetReading()
ReportOnLayer( poLayer, pszWHERE, pszGeomField, poSpatialFilter, options )
#/* -------------------------------------------------------------------- */
#/* Close down. */
#/* -------------------------------------------------------------------- */
poDS.Destroy()
return 0
#/************************************************************************/
#/* Usage() */
#/************************************************************************/
def Usage():
print( "Usage: ogrinfo [--help-general] [-ro] [-q] [-where restricted_where]\n"
" [-spat xmin ymin xmax ymax] [-geomfield field] [-fid fid]\n"
" [-sql statement] [-al] [-so] [-fields={YES/NO}]\n"
" [-geom={YES/NO/SUMMARY}][--formats]\n"
" datasource_name [layer [layer ...]]")
return 1
#/************************************************************************/
#/* ReportOnLayer() */
#/************************************************************************/
def ReportOnLayer( poLayer, pszWHERE, pszGeomField, poSpatialFilter, options ):
    """Print a full report on one layer: filters applied, geometry type(s),
    feature count, extent(s), SRS, FID/geometry columns, field schema, and
    (unless summary-only) a dump of every feature.

    NOTE(review): reads the module-level flags ``bVerbose``, ``bSummaryOnly``
    and ``nFetchFID`` set by main(), which are outside this chunk — confirm
    they are defined before this is called.

    :param poLayer: the ogr.Layer to report on.
    :param pszWHERE: attribute filter string, or None.
    :param pszGeomField: name of the geometry field to spatially filter, or None.
    :param poSpatialFilter: ogr.Geometry used as spatial filter, or None.
    :param options: dict of display options forwarded to DumpReadableFeature.
    """
    poDefn = poLayer.GetLayerDefn()
    #/* -------------------------------------------------------------------- */
    #/*      Set filters if provided.                                        */
    #/* -------------------------------------------------------------------- */
    if pszWHERE is not None:
        if poLayer.SetAttributeFilter( pszWHERE ) != 0:
            print("FAILURE: SetAttributeFilter(%s) failed." % pszWHERE)
            return
    if poSpatialFilter is not None:
        if pszGeomField is not None:
            # Spatial filter targets a named geometry field when one was given.
            iGeomField = poLayer.GetLayerDefn().GetGeomFieldIndex(pszGeomField)
            if iGeomField >= 0:
                poLayer.SetSpatialFilter( iGeomField, poSpatialFilter )
            else:
                print("WARNING: Cannot find geometry field %s." % pszGeomField)
        else:
            poLayer.SetSpatialFilter( poSpatialFilter )
    #/* -------------------------------------------------------------------- */
    #/*      Report various overall information.                             */
    #/* -------------------------------------------------------------------- */
    print( "" )
    print( "Layer name: %s" % poDefn.GetName() )
    if bVerbose:
        # Multi-geometry-field layers get one line per geometry field;
        # single-field layers get the classic one-line form.
        nGeomFieldCount = poLayer.GetLayerDefn().GetGeomFieldCount()
        if nGeomFieldCount > 1:
            for iGeom in range(nGeomFieldCount):
                poGFldDefn = poLayer.GetLayerDefn().GetGeomFieldDefn(iGeom)
                print( "Geometry (%s): %s" % (poGFldDefn.GetNameRef(), ogr.GeometryTypeToName( poGFldDefn.GetType() ) ))
        else:
            print( "Geometry: %s" % ogr.GeometryTypeToName( poDefn.GetGeomType() ) )
        print( "Feature Count: %d" % poLayer.GetFeatureCount() )
        # Extent per geometry field (can_return_null avoids bogus extents).
        if nGeomFieldCount > 1:
            for iGeom in range(nGeomFieldCount):
                poGFldDefn = poLayer.GetLayerDefn().GetGeomFieldDefn(iGeom)
                oExt = poLayer.GetExtent(True, geom_field = iGeom, can_return_null = True)
                if oExt is not None:
                    print("Extent (%s): (%f, %f) - (%f, %f)" % (poGFldDefn.GetNameRef(), oExt[0], oExt[2], oExt[1], oExt[3]))
        else:
            oExt = poLayer.GetExtent(True, can_return_null = True)
            if oExt is not None:
                print("Extent: (%f, %f) - (%f, %f)" % (oExt[0], oExt[2], oExt[1], oExt[3]))
        # Spatial reference system(s), pretty-printed WKT.
        if nGeomFieldCount > 1:
            for iGeom in range(nGeomFieldCount):
                poGFldDefn = poLayer.GetLayerDefn().GetGeomFieldDefn(iGeom)
                if poGFldDefn.GetSpatialRef() is None:
                    pszWKT = "(unknown)"
                else:
                    pszWKT = poGFldDefn.GetSpatialRef().ExportToPrettyWkt()
                print( "SRS WKT (%s):\n%s" % (poGFldDefn.GetNameRef(), pszWKT) )
        else:
            if poLayer.GetSpatialRef() is None:
                pszWKT = "(unknown)"
            else:
                pszWKT = poLayer.GetSpatialRef().ExportToPrettyWkt()
            print( "Layer SRS WKT:\n%s" % pszWKT )
        if len(poLayer.GetFIDColumn()) > 0:
            print( "FID Column = %s" % poLayer.GetFIDColumn() )
        if nGeomFieldCount > 1:
            for iGeom in range(nGeomFieldCount):
                poGFldDefn = poLayer.GetLayerDefn().GetGeomFieldDefn(iGeom)
                print( "Geometry Column %d = %s" % (iGeom + 1, poGFldDefn.GetNameRef() ))
        else:
            if len(poLayer.GetGeometryColumn()) > 0:
                print( "Geometry Column = %s" % poLayer.GetGeometryColumn() )
        # Attribute field schema: name, type, width.precision.
        for iAttr in range(poDefn.GetFieldCount()):
            poField = poDefn.GetFieldDefn( iAttr )
            print( "%s: %s (%d.%d)" % ( \
                poField.GetNameRef(), \
                poField.GetFieldTypeName( poField.GetType() ), \
                poField.GetWidth(), \
                poField.GetPrecision() ))
    #/* -------------------------------------------------------------------- */
    #/*      Read, and dump features.                                        */
    #/* -------------------------------------------------------------------- */
    poFeature = None
    if nFetchFID == ogr.NullFID and not bSummaryOnly:
        # No specific FID requested: iterate and dump every feature.
        poFeature = poLayer.GetNextFeature()
        while poFeature is not None:
            DumpReadableFeature(poFeature, options)
            poFeature = poLayer.GetNextFeature()
    elif nFetchFID != ogr.NullFID:
        # A specific FID was requested with -fid.
        poFeature = poLayer.GetFeature( nFetchFID )
        if poFeature is None:
            print( "Unable to locate feature id %d on this layer." % nFetchFID )
        else:
            DumpReadableFeature(poFeature, options)
    return
def DumpReadableFeature( poFeature, options = None ):
    """Print a human-readable dump of one OGR feature: FID, attribute
    fields, style string, and geometry fields.

    :param poFeature: the ogr.Feature to dump.
    :param options: dict of display options (keys DISPLAY_FIELDS,
        DISPLAY_STYLE, DISPLAY_GEOMETRY); a missing key means "yes".
    """
    poDefn = poFeature.GetDefnRef()
    print("OGRFeature(%s):%ld" % (poDefn.GetName(), poFeature.GetFID() ))
    # Bug fix: the parameter defaults to None, but the membership tests
    # below require a dict ('x' not in None raises TypeError).
    if options is None:
        options = {}
    if 'DISPLAY_FIELDS' not in options or EQUAL(options['DISPLAY_FIELDS'], 'yes'):
        for iField in range(poDefn.GetFieldCount()):
            poFDefn = poDefn.GetFieldDefn(iField)
            line = " %s (%s) = " % (poFDefn.GetNameRef(),
                                    ogr.GetFieldTypeName(poFDefn.GetType()))
            if poFeature.IsFieldSet( iField ):
                line = line + "%s" % (poFeature.GetFieldAsString( iField ) )
            else:
                line = line + "(null)"
            print(line)
    if poFeature.GetStyleString() is not None:
        if 'DISPLAY_STYLE' not in options or EQUAL(options['DISPLAY_STYLE'], 'yes'):
            # Bug fix: the original called the bare name GetStyleString(),
            # which is a NameError; the method lives on the feature.
            print(" Style = %s" % poFeature.GetStyleString() )
    nGeomFieldCount = poFeature.GetGeomFieldCount()
    if nGeomFieldCount > 0:
        if 'DISPLAY_GEOMETRY' not in options or not EQUAL(options['DISPLAY_GEOMETRY'], 'no'):
            for iField in range(nGeomFieldCount):
                poGFldDefn = poFeature.GetDefnRef().GetGeomFieldDefn(iField)
                poGeometry = poFeature.GetGeomFieldRef(iField)
                if poGeometry is not None:
                    sys.stdout.write(" ")
                    # Prefix the field name only when there are several
                    # geometry fields to disambiguate.
                    if len(poGFldDefn.GetNameRef()) > 0 and nGeomFieldCount > 1:
                        sys.stdout.write("%s = " % poGFldDefn.GetNameRef() )
                    DumpReadableGeometry( poGeometry, "", options)
    print('')
    return
def DumpReadableGeometry( poGeometry, pszPrefix, options ):
    """Print a readable summary or WKT dump of a geometry, recursing into
    sub-geometries of multi/collection types.

    :param poGeometry: the ogr.Geometry to dump.
    :param pszPrefix: string prepended to each output line (may be None).
    :param options: dict whose DISPLAY_GEOMETRY entry selects SUMMARY
        (counts only) versus yes/WKT (full WKT dump).
    """
    if pszPrefix is None:
        pszPrefix = ""
    # Robustness: the membership tests below require a dict.
    if options is None:
        options = {}
    if 'DISPLAY_GEOMETRY' in options and EQUAL(options['DISPLAY_GEOMETRY'], 'SUMMARY'):
        line = ("%s%s : " % (pszPrefix, poGeometry.GetGeometryName() ))
        eType = poGeometry.GetGeometryType()
        if eType == ogr.wkbLineString or eType == ogr.wkbLineString25D:
            line = line + ("%d points" % poGeometry.GetPointCount())
            print(line)
        elif eType == ogr.wkbPolygon or eType == ogr.wkbPolygon25D:
            # Summarize outer ring point count plus any inner rings.
            nRings = poGeometry.GetGeometryCount()
            if nRings == 0:
                line = line + "empty"
            else:
                poRing = poGeometry.GetGeometryRef(0)
                line = line + ("%d points" % poRing.GetPointCount())
                if nRings > 1:
                    line = line + (", %d inner rings (" % (nRings - 1))
                    for ir in range(0, nRings - 1):
                        if ir > 0:
                            line = line + ", "
                        poRing = poGeometry.GetGeometryRef(ir + 1)
                        line = line + ("%d points" % poRing.GetPointCount())
                    line = line + ")"
            print(line)
        elif eType in (ogr.wkbMultiPoint, ogr.wkbMultiPoint25D,
                       ogr.wkbMultiLineString, ogr.wkbMultiLineString25D,
                       ogr.wkbMultiPolygon, ogr.wkbMultiPolygon25D,
                       ogr.wkbGeometryCollection, ogr.wkbGeometryCollection25D):
            line = line + "%d geometries:" % poGeometry.GetGeometryCount()
            print(line)
            for ig in range(poGeometry.GetGeometryCount()):
                subgeom = poGeometry.GetGeometryRef(ig)
                # Indent each sub-geometry line. Replaces the original
                # exec()-based Python-2/3 print hack with a direct write,
                # which behaves identically on Python 3.
                sys.stdout.write(" ")
                DumpReadableGeometry( subgeom, pszPrefix, options)
        else:
            print(line)
    elif 'DISPLAY_GEOMETRY' not in options or EQUAL(options['DISPLAY_GEOMETRY'], 'yes') \
            or EQUAL(options['DISPLAY_GEOMETRY'], 'WKT'):
        print("%s%s" % (pszPrefix, poGeometry.ExportToWkt() ))
    return
if __name__ == '__main__':
    # ogr.GetFieldTypeName (used by the reporting code above) only exists in
    # the GDAL 1.8.0+ Python bindings, so refuse to run with older bindings.
    version_num = int(gdal.VersionInfo('VERSION_NUM'))
    if version_num < 1800: # because of ogr.GetFieldTypeName
        print('ERROR: Python bindings of GDAL 1.8.0 or later required')
        sys.exit(1)
    # Propagate main()'s return code as the process exit status.
    sys.exit(main( sys.argv ))

324
landsat/search.py Normal file
View File

@@ -0,0 +1,324 @@
# Landsat Util
# License: CC0 1.0 Universal
from __future__ import print_function, division, absolute_import
import json
import time
import requests
from . import settings
from .utils import three_digit, create_paired_list, geocode
class Search(object):
    """Searches Development Seed's Landsat API.

    All query text is assembled with literal ``+`` separators (``+AND+``,
    ``+TO+``) because the API server does not accept URL-encoded queries.
    """

    def __init__(self):
        # Base endpoint, taken from settings (e.g. .../landsat).
        self.api_url = settings.API_URL

    def search(self, paths_rows=None, lat=None, lon=None, address=None, start_date=None, end_date=None, cloud_min=None,
               cloud_max=None, limit=1, geojson=False):
        """
        The main method of Search class. It searches Development Seed's Landsat API.

        :param paths_rows:
            A string in this format: "003,003,004,004". Must be in pairs and separated by comma.
        :type paths_rows:
            String
        :param lat:
            The latitude
        :type lat:
            String, float, integer
        :param lon:
            The longitude
        :type lon:
            String, float, integer
        :param address:
            The address (geocoded to lat/lon; ignored if lat/lon given first)
        :type address:
            String
        :param start_date:
            Date string. format: YYYY-MM-DD
        :type start_date:
            String
        :param end_date:
            Date string. format: YYYY-MM-DD
        :type end_date:
            String
        :param cloud_min:
            float specifying the minimum percentage. e.g. 4.3
        :type cloud_min:
            float
        :param cloud_max:
            float specifying the maximum percentage. e.g. 78.9
        :type cloud_max:
            float
        :param limit:
            integer specifying the maximum results returned.
        :type limit:
            integer
        :param geojson:
            boolean specifying whether to return a geojson object
        :type geojson:
            boolean
        :returns:
            dict

        :example:
            >>> search = Search()
            >>> search.search(paths_rows='003,003', start_date='2014-01-01', end_date='2014-06-01')
            {
                'status': u'SUCCESS',
                'total_returned': 1,
                'total': 1,
                'limit': 1,
                'results': [
                    {
                        'sat_type': u'L8',
                        'sceneID': u'LC80030032014142LGN00',
                        'date': u'2014-05-22',
                        'path': u'003',
                        'thumbnail': u'http://....../landsat_8/2014/003/003/LC80030032014142LGN00.jpg',
                        'cloud': 33.36,
                        'row': u'003'
                    }
                ]
            }
        """
        search_string = self.query_builder(paths_rows, lat, lon, address, start_date, end_date, cloud_min, cloud_max)
        # Have to manually build the URI to bypass requests URI encoding
        # The api server doesn't accept encoded URIs
        r = requests.get('%s?search=%s&limit=%s' % (self.api_url, search_string, limit))
        r_dict = json.loads(r.text)
        result = {}
        if 'error' in r_dict:
            # API-level error: surface the code and message to the caller.
            result['status'] = u'error'
            result['code'] = r_dict['error']['code']
            result['message'] = r_dict['error']['message']
        elif 'meta' in r_dict:
            if geojson:
                # Re-shape each result into a GeoJSON Feature whose polygon
                # is the scene footprint (corner coordinates from the API).
                result = {
                    'type': 'FeatureCollection',
                    'features': []
                }
                for r in r_dict['results']:
                    feature = {
                        'type': 'Feature',
                        'properties': {
                            'sceneID': r['sceneID'],
                            'row': three_digit(r['row']),
                            'path': three_digit(r['path']),
                            'thumbnail': r['browseURL'],
                            'date': r['acquisitionDate'],
                            'cloud': r['cloudCoverFull']
                        },
                        'geometry': {
                            'type': 'Polygon',
                            'coordinates': [
                                [
                                    [r['upperLeftCornerLongitude'], r['upperLeftCornerLatitude']],
                                    [r['lowerLeftCornerLongitude'], r['lowerLeftCornerLatitude']],
                                    [r['lowerRightCornerLongitude'], r['lowerRightCornerLatitude']],
                                    [r['upperRightCornerLongitude'], r['upperRightCornerLatitude']],
                                    [r['upperLeftCornerLongitude'], r['upperLeftCornerLatitude']]
                                ]
                            ]
                        }
                    }
                    result['features'].append(feature)
            else:
                result['status'] = u'SUCCESS'
                result['total'] = r_dict['meta']['results']['total']
                result['limit'] = r_dict['meta']['results']['limit']
                result['total_returned'] = len(r_dict['results'])
                result['results'] = [{'sceneID': i['sceneID'],
                                      'sat_type': u'L8',
                                      'path': three_digit(i['path']),
                                      'row': three_digit(i['row']),
                                      'thumbnail': i['browseURL'],
                                      'date': i['acquisitionDate'],
                                      'cloud': i['cloudCoverFull']}
                                     for i in r_dict['results']]
        return result

    def query_builder(self, paths_rows=None, lat=None, lon=None, address=None, start_date=None, end_date=None,
                      cloud_min=None, cloud_max=None):
        """Builds the proper search syntax (query) for Landsat API.

        Parameters are as for :meth:`search`. Returns the query String.
        """
        query = []
        or_string = ''
        and_string = ''
        search_string = ''
        if paths_rows:
            # Converting rows and paths to paired list
            new_array = create_paired_list(paths_rows)
            paths_rows = ['(%s)' % self.row_path_builder(i[0], i[1]) for i in new_array]
            or_string = '+OR+'.join(map(str, paths_rows))
        if start_date and end_date:
            query.append(self.date_range_builder(start_date, end_date))
        elif start_date:
            query.append(self.date_range_builder(start_date, '2100-01-01'))
        elif end_date:
            query.append(self.date_range_builder('2009-01-01', end_date))
        # Bug fix: compare against None rather than truthiness so that a
        # legitimate cloud percentage of 0 is not silently dropped.
        if cloud_min is not None and cloud_max is not None:
            query.append(self.cloud_cover_prct_range_builder(cloud_min, cloud_max))
        elif cloud_min is not None:
            query.append(self.cloud_cover_prct_range_builder(cloud_min, '100'))
        elif cloud_max is not None:
            query.append(self.cloud_cover_prct_range_builder('-1', cloud_max))
        if address:
            query.append(self.address_builder(address))
        elif (lat is not None) and (lon is not None):
            query.append(self.lat_lon_builder(lat, lon))
        if query:
            and_string = '+AND+'.join(map(str, query))
        if and_string and or_string:
            search_string = and_string + '+AND+(' + or_string + ')'
        else:
            search_string = or_string + and_string
        return search_string

    def row_path_builder(self, path='', row=''):
        """Builds a row/path query fragment.

        :param path: Landsat path, three digits (String).
        :param row: Landsat row, three digits (String).
        :returns: String
        """
        return 'path:%s+AND+row:%s' % (path, row)

    def date_range_builder(self, start='2013-02-11', end=None):
        """Builds a date range query fragment.

        :param start: Date string, format YYYY-MM-DD.
        :param end: Date string, format YYYY-MM-DD; defaults to today.
        :returns: String
        """
        if not end:
            end = time.strftime('%Y-%m-%d')
        return 'acquisitionDate:[%s+TO+%s]' % (start, end)

    def cloud_cover_prct_range_builder(self, min=0, max=100):
        """Builds a cloud cover percentage range query fragment.

        :param min: minimum percentage (float), default 0.
        :param max: maximum percentage (float), default 100.
        :returns: String
        """
        return 'cloudCoverFull:[%s+TO+%s]' % (min, max)

    def address_builder(self, address):
        """Builds a lat/lon query fragment from a geocoded address.

        :param address: The address (String).
        :returns: String
        """
        geocoded = geocode(address)
        return self.lat_lon_builder(**geocoded)

    def lat_lon_builder(self, lat=0, lon=0):
        """Builds a lat/lon containment query fragment.

        :param lat: The latitude (float), default 0.
        :param lon: The longitude (float), default 0.
        :returns: String
        """
        return ('upperLeftCornerLatitude:[%s+TO+1000]+AND+lowerRightCornerLatitude:[-1000+TO+%s]'
                '+AND+lowerLeftCornerLongitude:[-1000+TO+%s]+AND+upperRightCornerLongitude:[%s+TO+1000]'
                % (lat, lat, lon, lon))

View File

@@ -1,167 +0,0 @@
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco, KAPPS-
#
# License: CC0 1.0 Universal
import json
import requests
import settings
from general_helper import three_digit, create_paired_list
class Search(object):
    # NOTE(review): this is the legacy search module being deleted by this
    # merge; it is superseded by landsat/search.py above. Documented for
    # reference only.

    def __init__(self):
        # Base endpoint, taken from settings.
        self.api_url = settings.API_URL

    def search(self,
               row_paths=None,
               start_date=None,
               end_date=None,
               cloud_min=None,
               cloud_max=None,
               limit=1):
        """
        The main method of Search class. It searches the DevSeed Landsat API
        Returns python dictionary

        Arguments:
            row_paths -- A string in this format: "003,003,004,004". Must be in pairs
            start_date -- date string. format: YYYY-MM-DD
            end_date -- date string. format: YYYY-MM-DD
            cloud_min -- float specifying the minimum percentage. e.g. 4.3
            cloud_max -- float specifying the maximum percentage. e.g. 78.9
            limit -- integer specifying the maximum results return.

        Example:
            search('003,003', '2014-01-01', '2014-06-01')

            will return:
            {
                'status': u'SUCCESS',
                'total_returned': 1,
                'total': 1,
                'limit': 1
                'results': [
                    {
                        'sat_type': u'L8',
                        'sceneID': u'LC80030032014142LGN00',
                        'date': u'2014-05-22',
                        'path': u'003',
                        'thumbnail': u'http://earthexplorer.usgs.gov/browse/landsat_8/2014/003/003/LC80030032014142LGN00.jpg',
                        'cloud': 33.36,
                        'row': u'003
                    }
                ]
            }
        """
        search_string = self._query_builder(row_paths,
                                            start_date,
                                            end_date,
                                            cloud_min,
                                            cloud_max)
        # Have to manually build the URI to bypass requests URI encoding
        # The api server doesn't accept encoded URIs
        r = requests.get('%s?search=%s&limit=%s' % (self.api_url,
                                                    search_string,
                                                    limit))
        r_dict = json.loads(r.text)
        result = {}
        if 'error' in r_dict:
            # API-level error: surface code and message.
            result['status'] = u'error'
            result['code'] = r_dict['error']['code']
            result['message'] = r_dict['error']['message']
        elif 'meta' in r_dict:
            # Normalize the raw API results into the documented shape.
            result['status'] = u'SUCCESS'
            result['total'] = r_dict['meta']['results']['total']
            result['limit'] = r_dict['meta']['results']['limit']
            result['total_returned'] = len(r_dict['results'])
            result['results'] = [{'sceneID': i['sceneID'],
                                  'sat_type': u'L8',
                                  'path': three_digit(i['path']),
                                  'row': three_digit(i['row']),
                                  'thumbnail': i['browseURL'],
                                  'date': i['acquisitionDate'],
                                  'cloud': i['cloudCoverFull']}
                                 for i in r_dict['results']]
        return result

    def _query_builder(self,
                       row_paths=None,
                       start_date=None,
                       end_date=None,
                       cloud_min=None,
                       cloud_max=None):
        """ Builds the proper search syntax (query) for Landsat API """
        query = []
        rows_paths = []
        # Coverting rows and paths to paired list
        try:
            new_array = create_paired_list(row_paths)
            rows_paths.extend(['(%s)' % self._row_path_builder(i[0], i[1])
                               for i in new_array])
        except ValueError:
            # Unpairable row/path string: give up and return an empty query.
            return ''
        except TypeError:
            raise Exception('Invalid Argument. Please try again!')
        if start_date and end_date:
            query.append(self._date_range_builder(start_date, end_date))
        elif start_date:
            query.append(self._date_range_builder(start_date, '2100-01-01'))
        elif end_date:
            query.append(self._date_range_builder('2009-01-01', end_date))
        # NOTE(review): truthiness means a cloud_min of 0 is ignored here;
        # the replacement module fixes this.
        if cloud_min and cloud_max:
            query.append(self._cloud_cover_prct_range_builder(cloud_min,
                                                              cloud_max))
        elif cloud_min:
            query.append(self._cloud_cover_prct_range_builder(cloud_min,
                                                              '100'))
        elif cloud_max:
            query.append(self._cloud_cover_prct_range_builder('-1',
                                                              cloud_max))
        search_string = '+AND+'.join(map(str, query))
        if len(search_string) > 0:
            search_string = search_string + '+AND+(' + \
                '+OR+'.join(map(str, rows_paths)) + ')'
        else:
            search_string = '+OR+'.join(map(str, rows_paths))
        return search_string

    def _row_path_builder(self, path, row):
        """ Builds row and path query
        Accepts row and path in XXX format, e.g. 003
        """
        return 'row:%s+AND+path:%s' % (row, path)

    def _date_range_builder(self, start, end):
        """ Builds date range query
        Accepts start and end date in this format YYYY-MM-DD
        """
        return 'acquisitionDate:[%s+TO+%s]' % (start, end)

    def _cloud_cover_prct_range_builder(self, min, max):
        """ Builds cloud cover percentage range query
        Accepts bottom and top range in float, e.g. 1.00
        """
        return 'cloudCoverFull:[%s+TO+%s]' % (min, max)

View File

@@ -1,54 +1,32 @@
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco, KAPPS-
#
# Landsat Util
# License: CC0 1.0 Universal
##
## Main Setting File
# Main Setting File
##
import os
from os import getenv
from os.path import join, expanduser, abspath, dirname
# Google Storage Landsat Config
DEBUG = os.getenv('DEBUG', False)
DEBUG = getenv('DEBUG', False)
SOURCE_URL = 'gs://earthengine-public/landsat'
SCENE_FILE_URL = SOURCE_URL + '/scene_list.zip'
SATELLITE = 'L8'
L8_METADATA_URL = 'http://landsat.usgs.gov/metadata_service/bulk_metadata_files/LANDSAT_8.csv'
# Elastic Search Config
ES_URL = [{
'host': 'localhost',
'port': 9200,
'use_ssl': False
}]
ES_MAIN_INDEX = 'landsat'
ES_MAIN_TYPE = '8'
API_URL = 'http://api.developmentseed.com:8000/landsat'
# Local Forlders Config
GOOGLE_STORAGE = 'http://storage.googleapis.com/earthengine-public/landsat/'
S3_LANDSAT = 'http://landsat-pds.s3.amazonaws.com/'
API_URL = 'https://api.developmentseed.org/landsat'
# User's Home Directory
HOME_DIR = os.path.expanduser('~')
HOME_DIR = expanduser('~')
# Utility's base directory
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
BASE_DIR = abspath(dirname(__file__))
DOWNLOAD_DIR = HOME_DIR + '/landsat'
ZIP_DIR = DOWNLOAD_DIR + '/zip'
UNZIP_DIR = DOWNLOAD_DIR + '/unzip'
PROCESSED_IMAGE = DOWNLOAD_DIR +'/processed'
SCENE_FILE = DOWNLOAD_DIR + '/scene_list'
LANDSAT_DIR = join(HOME_DIR, 'landsat')
DOWNLOAD_DIR = join(LANDSAT_DIR, 'downloads')
PROCESSED_IMAGE = join(LANDSAT_DIR, 'processed')
ASSESTS_DIR = BASE_DIR + '/assests'
L8_METADATA_FILENAME = 'metadata.csv'
SHAPEFILE_INPUT = HOME_DIR + '/landsat/output/shapefiles/input'
SHAPEFILE_OUTPUT = HOME_DIR + '/landsat/output/shapefiles/output'
# Colormap File
COLORMAP = join(abspath(dirname(__file__)), 'maps', 'colormap_ndvi_cfastie.txt')

265
landsat/uploader.py Normal file
View File

@@ -0,0 +1,265 @@
# Boto Uploader
# Landsat Util
# License: CC0 1.0 Universal
# The S3 uploader is a fork of pys3upload (https://github.com/leetreveil/pys3upload)
from __future__ import print_function, division, absolute_import
import os
import sys
import time
import threading
import contextlib
try:
import queue
except:
import Queue as queue
from multiprocessing import pool
try:
from io import BytesIO as StringIO
except ImportError:
try:
from cStringIO import StringIO
except:
from StringIO import StringIO
from boto.s3.connection import S3Connection
from .mixins import VerbosityMixin
STREAM = sys.stderr
class Uploader(VerbosityMixin):
    """
    The Uploader class. Uploads a local file to S3 via boto's multipart API.

    To initiate the following parameters must be passed:

    :param key:
        AWS access key id (optional)
    :type key:
        String
    :param secret:
        AWS access secret key (optional)
    :type secret:
        String
    :param host:
        AWS host, e.g. s3.amazonaws.com (optional)
    :type host:
        String
    """

    # Progress line rewritten in place on stderr (trailing spaces clear
    # leftover characters from a previous, longer line).
    progress_template = \
        'File Size:%(size)4d MB | Uploaded:%(uploaded)4d MB' + ' ' * 8

    def __init__(self, key=None, secret=None, host=None):
        self.key = key
        self.secret = secret
        self.source_size = 0
        self.conn = S3Connection(key, secret, host=host)

    def run(self, bucket_name, filename, path):
        """
        Initiate the upload.

        :param bucket_name:
            Name of the S3 bucket
        :type bucket_name:
            String
        :param filename:
            The filename (S3 key) to create
        :type filename:
            String
        :param path:
            The path to the file that needs to be uploaded
        :type path:
            String

        :returns:
            void
        """
        self.source_size = os.stat(path).st_size
        total_dict = {}

        def cb(part_no, uploaded, total):
            # Track per-part uploaded bytes and redraw the progress line.
            total_dict[part_no] = uploaded
            params = {
                'uploaded': round(sum(total_dict.values()) / 1048576, 0),
                'size': round(self.source_size / 1048576, 0),
            }
            p = (self.progress_template + '\r') % params
            STREAM.write(p)
            STREAM.flush()

        self.output('Uploading to S3', normal=True, arrow=True)
        # Bug fix: open the source file in a context manager so the handle
        # is closed even if the upload raises (it was previously leaked).
        with open(path, 'rb') as f:
            upload(bucket_name, self.key, self.secret,
                   data_collector(iter(f)), filename, cb,
                   threads=10, replace=True, secure=True, connection=self.conn)
        print('\n')
        self.output('Upload Completed', normal=True, arrow=True)
def data_collector(iterable, def_buf_size=5242880):
    """Buffer chunks from *iterable* and yield them re-blocked.

    Once the accumulated buffer reaches ``def_buf_size`` bytes, exactly
    that many bytes are yielded and the remainder is carried over; any
    leftover bytes are yielded as a final (possibly smaller) chunk.

    :param iterable:
        Could be a list, generator or string
    :type iterable:
        List, generator, String
    :returns:
        A generator object
    """
    pending = b''
    for chunk in iterable:
        pending += chunk
        if len(pending) >= def_buf_size:
            ready, pending = pending[:def_buf_size], pending[def_buf_size:]
            yield ready
    if pending:
        yield pending
def upload_part(upload_func, progress_cb, part_no, part_data):
    """Upload a single multipart chunk, retrying up to five times.

    Wraps *part_data* in an in-memory file and hands it to *upload_func*.
    Returns None on success; on persistent failure returns (not raises) a
    threading.ThreadError wrapping the last exception, so the caller's
    completion callback can report it.
    """
    num_retries = 5
    last_exc = None
    for _attempt in range(num_retries):
        try:
            with contextlib.closing(StringIO(part_data)) as part_file:
                part_file.seek(0)
                if progress_cb:
                    cb = lambda c, t: progress_cb(part_no, c, t)
                else:
                    cb = lambda c, t: None
                upload_func(part_file, part_no, cb=cb, num_cb=100)
        except Exception as exc:
            last_exc = exc
            continue
        return None
    return threading.ThreadError(repr(threading.current_thread()) + ' ' + repr(last_exc))
def upload(bucket, aws_access_key, aws_secret_key,
           iterable, key, progress_cb=None,
           threads=5, replace=False, secure=True,
           connection=None):
    """ Upload data to s3 using the s3 multipart upload API.

    :param bucket:
        Name of the S3 bucket
    :type bucket:
        String
    :param aws_access_key:
        AWS access key id (optional)
    :type aws_access_key:
        String
    :param aws_secret_key:
        AWS access secret key (optional)
    :type aws_secret_key:
        String
    :param iterable:
        The data to upload. Each 'part' in the list will be uploaded in parallel. Each part must be at
        least 5242880 bytes (5mb).
    :type iterable:
        An iterable object
    :param key:
        The name of the key (filename) to create in the s3 bucket
    :type key:
        String
    :param progress_cb:
        Progress callback, will be called with (part_no, uploaded, total) each time a progress update
        is available. (optional)
    :type progress_cb:
        function
    :param threads:
        the number of threads to use while uploading. (Default is 5)
    :type threads:
        int
    :param replace:
        will replace the key (filename) on S3 if set to true. (Default is false)
    :type replace:
        boolean
    :param secure:
        Use ssl when talking to s3. (Default is true)
    :type secure:
        boolean
    :param connection:
        Used for testing (optional)
    :type connection:
        S3 connection class
    :returns:
        void
    """
    if not connection:
        from boto.s3.connection import S3Connection as connection
        c = connection(aws_access_key, aws_secret_key, is_secure=secure)
    else:
        c = connection
    b = c.get_bucket(bucket)
    if not replace and b.lookup(key):
        raise Exception('s3 key ' + key + ' already exists')
    multipart_obj = b.initiate_multipart_upload(key)
    err_queue = queue.Queue()
    lock = threading.Lock()
    # Function attribute used as a cross-call counter of in-flight parts;
    # guarded by `lock` when mutated.
    upload.counter = 0
    try:
        tpool = pool.ThreadPool(processes=threads)

        def check_errors():
            # Re-raise the first error reported by a worker, if any.
            try:
                exc = err_queue.get(block=False)
            except queue.Empty:
                pass
            else:
                raise exc

        def waiter():
            # Block until a worker slot frees up, surfacing errors meanwhile.
            while upload.counter >= threads:
                check_errors()
                time.sleep(0.1)

        def cb(err):
            # apply_async completion callback: record a worker error (the
            # upload_part return value) and release the worker slot.
            if err:
                err_queue.put(err)
            with lock:
                upload.counter -= 1

        args = [multipart_obj.upload_part_from_file, progress_cb]
        for part_no, part in enumerate(iterable):
            # S3 part numbers are 1-based.
            part_no += 1
            tpool.apply_async(upload_part, args + [part_no, part], callback=cb)
            with lock:
                upload.counter += 1
            waiter()
        tpool.close()
        tpool.join()
        # Check for thread errors before completing the upload,
        # sometimes an error can be left unchecked until we
        # get to this point.
        check_errors()
        multipart_obj.complete_upload()
    except:
        # Abort the multipart upload so S3 does not keep orphaned parts.
        multipart_obj.cancel_upload()
        tpool.terminate()
        raise

415
landsat/utils.py Normal file
View File

@@ -0,0 +1,415 @@
# Landsat Util
# License: CC0 1.0 Universal
from __future__ import print_function, division, absolute_import
import os
import sys
import time
import re
try:
from io import StringIO
except ImportError:
from cStringIO import StringIO
from datetime import datetime
import geocoder
from .mixins import VerbosityMixin
class Capturing(list):
    """
    Context manager that redirects sys.stdout into an in-memory buffer and,
    on exit, extends itself with the captured output split into lines.

    :Usage:
        >>> with Capturing():
        ...     subprocess(args)
    """

    def __enter__(self):
        self._stdout = sys.stdout
        self._stringio = StringIO()
        sys.stdout = self._stringio
        return self

    def __exit__(self, *args):
        captured = self._stringio.getvalue()
        self.extend(captured.splitlines())
        # Restore the real stdout no matter what was printed.
        sys.stdout = self._stdout
class timer(object):
    """
    Context manager that measures and prints the wall-clock time spent
    inside the ``with`` block.

    :Usage:
        >>> with timer():
        ...     your code
    """

    def __enter__(self):
        self.start = time.time()

    def __exit__(self, type, value, traceback):
        self.end = time.time()
        elapsed = self.end - self.start
        print('Time spent : {0:.2f} seconds'.format(elapsed))
def exit(message, code=0):
    """ output a message to stdout and terminates the process.

    NOTE(review): this shadows the ``exit`` builtin within this module.

    :param message:
        Message to be outputed.
    :type message:
        String
    :param code:
        The termination code. Default is 0
    :type code:
        int

    :returns:
        void
    """
    v = VerbosityMixin()
    if code == 0:
        v.output(message, normal=True, arrow=True)
        v.output('Done!', normal=True, arrow=True)
    else:
        # Non-zero code: report the message as an error instead.
        v.output(message, normal=True, error=True)
    sys.exit(code)
def create_paired_list(value):
    """ Create a list of paired items from a string.

    :param value:
        the format must be 003,003,004,004 (commas with no space); a list
        of strings is also accepted and joined with commas first
    :type value:
        String
    :returns:
        List
    :raises ValueError:
        when the digit groups cannot be split into pairs

    :example:
        >>> create_paired_list('003,003,004,004')
        [['003','003'], ['004', '004']]
    """
    if isinstance(value, list):
        value = ",".join(value)
    parts = re.split(r'\D+', value)
    # Make sure the elements in the list are even and pairable
    if len(parts) % 2 != 0:
        raise ValueError('The string should include pairs and be formated. '
                         'The format must be 003,003,004,004 (commas with '
                         'no space)')
    return [list(parts[i:i + 2]) for i in range(0, len(parts), 2)]
def check_create_folder(folder_path):
    """ Check whether a folder exists, if not the folder is created.

    :param folder_path:
        Path to the folder
    :type folder_path:
        String
    :returns:
        (String) the path to the folder
    """
    if os.path.exists(folder_path):
        return folder_path
    os.makedirs(folder_path)
    return folder_path
def get_file(path):
    """ Separate the name of the file or folder from the path and return it.

    :param path:
        Path to the folder
    :type path:
        String
    :returns:
        (String) the filename

    :example:
        >>> get_file('/path/to/file.jpg')
        'file.jpg'
    """
    head, tail = os.path.split(path)
    return tail
def get_filename(path):
    """ Return the filename without extension.

    :param path:
        Path to the folder
    :type path:
        String
    :returns:
        (String) the filename without extension

    :example:
        >>> get_filename('/path/to/file.jpg')
        'file'
    """
    base = os.path.basename(path)
    name, _ext = os.path.splitext(base)
    return name
def three_digit(number):
    """ Left-pad one- and two-character values with zeros to three characters.

    :param number:
        The number to convert
    :type number:
        int
    :returns:
        String

    :example:
        >>> three_digit(1)
        '001'
    """
    text = str(number)
    length = len(text)
    if length == 1:
        return u'00%s' % text
    if length == 2:
        return u'0%s' % text
    return text
def georgian_day(date):
    """ Returns the number of days passed since the start of the year.

    :param date:
        The string date with this format %m/%d/%Y
    :type date:
        String
    :returns:
        int (0 when the input cannot be parsed)

    :example:
        >>> georgian_day('05/1/2015')
        121
    """
    try:
        parsed = datetime.strptime(date, '%m/%d/%Y')
    except (ValueError, TypeError):
        return 0
    return parsed.timetuple().tm_yday
def year(date):
    """ Returns the year.

    :param date:
        The string date with this format %m/%d/%Y
    :type date:
        String
    :returns:
        int (0 when the input cannot be parsed)

    :example:
        >>> year('05/1/2015')
        2015
    """
    try:
        fmt = '%m/%d/%Y'
        return datetime.strptime(date, fmt).timetuple().tm_year
    except (ValueError, TypeError):
        # Also catch TypeError (e.g. year(None)) for consistency with
        # georgian_day(), which already returns 0 for non-string input.
        return 0
def reformat_date(date, new_fmt='%Y-%m-%d'):
    """ Returns a reformatted date string.

    :param date:
        A string date in %m/%d/%Y format, or a datetime instance
    :type date:
        String or datetime
    :param new_fmt:
        date format string. Default is '%Y-%m-%d'
    :type new_fmt:
        String
    :returns:
        String (the input unchanged if it cannot be parsed)

    :example:
        >>> reformat_date('05/1/2015', '%d/%m/%Y')
        '01/05/2015'
    """
    try:
        if isinstance(date, datetime):
            return date.strftime(new_fmt)
        parsed = datetime.strptime(date, '%m/%d/%Y')
        return parsed.strftime(new_fmt)
    except ValueError:
        # Unparseable string: hand it back unchanged.
        return date
def convert_to_integer_list(value):
    """ Converts a band-list string into a list of ints (non-numeric
    tokens such as 'QA' are kept as strings).

    :param value:
        a comma separated string of band identifiers, e.g. '8,11,QA'
    :type value:
        String
    :returns:
        List (the input unchanged when it is already a list or None)

    :example:
        >>> convert_to_integer_list('8,11,QA')
        [8, 11, 'QA']
    """
    if value is None or isinstance(value, list):
        return value
    tokens = re.findall('(10|11|QA|[0-9])', value)
    converted = []
    for token in tokens:
        try:
            converted.append(int(token))
        except ValueError:
            # Non-numeric tokens (e.g. 'QA') pass through unchanged.
            converted.append(token)
    return converted
# Geocoding confidence scores, from https://github.com/DenisCarriere/geocoder/blob/master/docs/features/Confidence%20Score.md
# Maps a geocoder confidence level (10 = most confident) to an approximate
# uncertainty radius in kilometres; 1 is effectively "unusable".
geocode_confidences = {
    10: 0.25,
    9: 0.5,
    8: 1.,
    7: 5.,
    6: 7.5,
    5: 10.,
    4: 15.,
    3: 20.,
    2: 25.,
    1: 99999.,
    # 0: unable to locate at all
}


def geocode(address, required_precision_km=1.):
    """ Identifies the coordinates of an address

    :param address:
        the address to be geocoded
    :type value:
        String
    :param required_precision_km:
        the maximum permissible geographic uncertainty for the geocoding
    :type required_precision_km:
        float
    :returns:
        dict
    :raises ValueError:
        when the geocoder's uncertainty exceeds required_precision_km

    :example:
        >>> geocode('1600 Pennsylvania Ave NW, Washington, DC 20500')
        {'lat': 38.89767579999999, 'lon': -77.0364827}
    """
    geocoded = geocoder.google(address)
    # Translate the provider's confidence level into kilometres.
    # NOTE(review): a confidence of 0 ("unable to locate") has no table
    # entry and would raise KeyError here — confirm that is intended.
    precision_km = geocode_confidences[geocoded.confidence]

    if precision_km <= required_precision_km:
        # geocoder returns GeoJSON-style (lon, lat) ordering; swap to a
        # lat/lon dict for lat_lon_builder(**geocoded).
        (lon, lat) = geocoded.geometry['coordinates']
        return {'lat': lat, 'lon': lon}
    else:
        raise ValueError("Address could not be precisely located")
def convert_to_float_list(value):
    """ Converts a comma separated string of numbers to a list of floats.

    :param value:
        the format must be 1.2,-3.5 (commas with no space)
    :type value:
        String
    :returns:
        List (the input unchanged when it is already a list or None)

    :example:
        >>> convert_to_float_list('1.2,-3.5')
        [1.2, -3.5]
    """
    if value is None or isinstance(value, list):
        return value
    tokens = re.findall('([-+]?\d*\.\d+|\d+|[-+]?\d+)', value)
    result = []
    for token in tokens:
        try:
            result.append(float(token))
        except ValueError:
            # Leave any unconvertible token untouched.
            result.append(token)
    return result
def adjust_bounding_box(bounds1, bounds2):
    """ If the bounds 2 corners are outside of bounds1, they will be adjusted to bounds1 corners

    @params
    bounds1 - The source bounding box
    bounds2 - The target bounding box that has to be within bounds1

    @return
    A bounding box tuple in (y1, x1, y2, x2) format

    NOTE(review): the comparisons below assume bounds are ordered with
    index 0 >= index 2 on the y axis (i.e. north > south) and index 1 <=
    index 3 on the x axis (west < east) — confirm against the callers.
    """
    # out of bound check
    # If it is completely outside of target bounds, return target bounds
    if ((bounds2[0] > bounds1[0] and bounds2[2] > bounds1[0]) or
            (bounds2[2] < bounds1[2] and bounds2[2] < bounds1[0])):
        return bounds1

    if ((bounds2[1] < bounds1[1] and bounds2[3] < bounds1[1]) or
            (bounds2[3] > bounds1[3] and bounds2[1] > bounds1[3])):
        return bounds1

    new_bounds = list(bounds2)

    # Adjust Y axis (Longitude)
    # Clamp each y corner that falls outside bounds1's y range.
    if (bounds2[0] > bounds1[0] or bounds2[0] < bounds1[3]):
        new_bounds[0] = bounds1[0]
    if (bounds2[2] < bounds1[2] or bounds2[2] > bounds1[0]):
        new_bounds[2] = bounds1[2]

    # Adjust X axis (Latitude)
    # Clamp each x corner that falls outside bounds1's x range.
    if (bounds2[1] < bounds1[1] or bounds2[1] > bounds1[3]):
        new_bounds[1] = bounds1[1]
    if (bounds2[3] > bounds1[3] or bounds2[3] < bounds1[1]):
        new_bounds[3] = bounds1[3]

    return tuple(new_bounds)
def remove_slash(value):
    """ Strip at most one leading and one trailing '/' from a string. """
    assert isinstance(value, str)
    trimmed = value
    if trimmed.startswith('/'):
        trimmed = trimmed[1:]
    if trimmed.endswith('/'):
        trimmed = trimmed[:-1]
    return trimmed
def url_builder(segments):
    """ Join URL path segments with '/', trimming a slash off each end of every segment. """
    # Only accept list or tuple
    assert isinstance(segments, (list, tuple))
    cleaned = [remove_slash(segment) for segment in segments]
    return "/".join(cleaned)

7
requirements-dev.txt Normal file
View File

@@ -0,0 +1,7 @@
pdoc>=0.3.1
nose>=1.3.7
coverage>=4.0
Sphinx>=1.3.1
wheel>=0.26.0
mock>=1.3.0
jsonschema==2.5.1

View File

@@ -1,8 +1,14 @@
GDAL==1.11.0
elasticsearch==1.1.1
gsutil==4.4
requests==2.3.0
python-dateutil==2.2
nose==1.3.3
pdoc==0.2.4
numpy
usgs==0.1.9
requests==2.7.0
python-dateutil==2.5.1
numpy==1.10.4
termcolor==1.1.0
rasterio==0.32.0
six==1.8.0
scipy==0.17.0
scikit-image==0.12.3
homura==0.1.3
boto==2.39.0
polyline==1.1
geocoder==1.9.0
matplotlib==1.5.1

2
setup.cfg Normal file
View File

@@ -0,0 +1,2 @@
[wheel]
universal = 1

View File

@@ -1,61 +1,41 @@
#!/usr/bin/env python
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco
#
# Landsat Util
# License: CC0 1.0 Universal
import sys
import subprocess
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Check if gdal-config is installed
if subprocess.call(['which', 'gdal-config']):
error = """Error: gdal-config is not installed on this machine.
This installation requires gdal-config to proceed.
If you are on Mac OSX, you can installed gdal-config by running:
brew install gdal
On Ubuntu you should run:
sudo apt-get install libgdal1-dev
Exiting the setup now!"""
print error
sys.exit(1)
from landsat import __version__
def readme():
with open("README.rst") as f:
with open('README.rst') as f:
return f.read()
setup(name="landsat",
version='0.2.0',
description="A utility to search, download and process Landsat 8" +
" satellite imagery",
long_description=readme(),
author="Scisco",
author_email="alireza@developmentseed.org",
scripts=["bin/landsat"],
url="https://github.com/developmentseed/landsat-util",
packages=["landsat"],
include_package_data=True,
license="CCO",
platforms="Posix; MacOS X; Windows",
install_requires=[
"GDAL>=1.10.0",
"elasticsearch==1.1.1",
"gsutil==4.4",
"requests==2.3.0",
"python-dateutil==2.2",
"numpy"
],
)
with open('requirements.txt') as fid:
INSTALL_REQUIRES = [l.strip() for l in fid.readlines() if l]
with open('requirements-dev.txt') as fid:
TEST_REQUIRES = [l.strip() for l in fid.readlines() if l]
setup(
name='landsat-util',
version=__version__,
description='A utility to search, download and process Landsat 8' +
' satellite imagery',
long_description=readme(),
author='Development Seed',
author_email='info@developmentseed.org',
scripts=['bin/landsat'],
url='https://github.com/developmentseed/landsat-util',
packages=['landsat'],
include_package_data=True,
license='CCO',
platforms='Posix; MacOS X; Windows',
install_requires=INSTALL_REQUIRES,
test_suite='nose.collector',
tests_require=TEST_REQUIRES
)

View File

@@ -0,0 +1,149 @@
geojson_schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"id": "http://json-schema.org/geojson/geojson.json#",
"title": "Geo JSON object",
"description": "Schema for a Geo JSON object",
"type": "object",
"required": ["type"],
"properties": {
"crs": {"$ref": "http://json-schema.org/geojson/crs.json#"},
"bbox": {"$ref": "http://json-schema.org/geojson/bbox.json#"}
},
"oneOf": [
{"$ref": "#/definitions/geometry"},
{"$ref": "#/definitions/geometryCollection"},
{"$ref": "#/definitions/feature"},
{"$ref": "#/definitions/featureCollection"}
],
"definitions": {
"position": {
"description": "A single position",
"type": "array",
"minItems": 2,
"items": [ {"type": "number"}, {"type": "number"} ],
"additionalItems": False
},
"positionArray": {
"description": "An array of positions",
"type": "array",
"items": {"$ref": "#/definitions/position"}
},
"lineString": {
"description": "An array of two or more positions",
"allOf": [
{"$ref": "#/definitions/positionArray"},
{"minItems": 2}
]
},
"linearRing": {
"description": "An array of four positions where the first equals the last",
"allOf": [
{"$ref": "#/definitions/positionArray"},
{"minItems": 4}
]
},
"polygon": {
"description": "An array of linear rings",
"type": "array",
"items": {"$ref": "#/definitions/linearRing"}
},
"geometry": {
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "geometry",
"description": "One geometry as defined by GeoJSON",
"type": "object",
"required": ["type", "coordinates"],
"oneOf": [
{
"title": "Point",
"properties": {
"type": {"enum": ["Point"]},
"coordinates": {"$ref": "#/definitions/position"}
}
},
{
"title": "MultiPoint",
"properties": {
"type": {"enum": ["MultiPoint"]},
"coordinates": {"$ref": "#/definitions/positionArray"}
}
},
{
"title": "LineString",
"properties": {
"type": {"enum": ["LineString"]},
"coordinates": {"$ref": "#/definitions/lineString"}
}
},
{
"title": "MultiLineString",
"properties": {
"type": {"enum": [ "MultiLineString" ]},
"coordinates": {
"type": "array",
"items": {"$ref": "#/definitions/lineString"}
}
}
},
{
"title": "Polygon",
"properties": {
"type": {"enum": [ "Polygon" ]},
"coordinates": {"$ref": "#/definitions/polygon"}
}
},
{
"title": "MultiPolygon",
"properties": {
"type": {"enum": ["MultiPolygon"]},
"coordinates": {
"type": "array",
"items": {"$ref": "#/definitions/polygon"}
}
}
}
]
},
"geometryCollection": {
"title": "GeometryCollection",
"description": "A collection of geometry objects",
"required": [ "geometries" ],
"properties": {
"type": {"enum": [ "GeometryCollection" ]},
"geometries": {
"type": "array",
"items": {"$ref": "#/definitions/geometry"}
}
}
},
"feature": {
"title": "Feature",
"description": "A Geo JSON feature object",
"required": [ "geometry", "properties" ],
"properties": {
"type": {"enum": [ "Feature" ]},
"geometry": {
"oneOf": [
{"type": "null"},
{"$ref": "#/definitions/geometry"}
]
},
"properties": {"type": [ "object", "null" ]},
"id": {"FIXME": "may be there, type not known (string? number?)"}
}
},
"featureCollection": {
"title": "FeatureCollection",
"description": "A Geo JSON feature collection",
"required": [ "features" ],
"properties": {
"type": {"enum": [ "FeatureCollection" ]},
"features": {
"type": "array",
"items": {"$ref": "#/definitions/feature"}
}
}
}
}
}

31
tests/mocks.py Normal file
View File

@@ -0,0 +1,31 @@
# Shared mutable state that test fixtures populate before the mocks are used.
state = {}


class MockBotoS3MultipartUpload():
    """ In-memory stand-in for boto's S3 multipart-upload handle. """

    def __init__(self):
        # Tests must place the destination list under this key in `state`
        # before constructing the mock; uploaded parts accumulate there.
        self.data = state['mock_boto_s3_multipart_upload_data']

    def upload_part_from_file(self, f, part_no, cb=None, num_cb=None):
        # Record the part's bytes instead of transferring them; part_no and
        # the callback arguments are accepted for API compatibility only.
        self.data.append(f.read())

    def complete_upload(self):
        # No-op: nothing to finalize for the in-memory mock.
        pass

    def cancel_upload(self):
        # No-op: nothing to roll back for the in-memory mock.
        pass
class MockBotoS3Bucket():
    """ In-memory stand-in for a boto S3 bucket. """

    def lookup(self, key):
        # Always returns None, i.e. behaves as if the key does not exist.
        pass

    def initiate_multipart_upload(self, key):
        # Hands back the in-memory upload mock regardless of the key.
        return MockBotoS3MultipartUpload()
class S3Connection():
    """ In-memory stand-in for boto's S3Connection; performs no network I/O. """

    def __init__(self, key, secret, is_secure=None, host=None):
        # Credentials and connection options are accepted and ignored.
        pass

    def get_bucket(self, bucket_name):
        # Every bucket name resolves to the same mock bucket.
        return MockBotoS3Bucket()

View File

@@ -0,0 +1 @@
111111

BIN
tests/samples/test.tar.bz2 Normal file

Binary file not shown.

Binary file not shown.

View File

@@ -1 +0,0 @@
GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]

Binary file not shown.

Binary file not shown.

View File

@@ -1,45 +0,0 @@
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco
#
# License: CC0 1.0 Universal
"""Tests for clipper_helper"""
import os
import sys
import unittest
try:
from landsat.clipper_helper import Clipper
except ImportError:
sys.path.append(os.path
.abspath(os.path
.join(os.path.dirname(__file__),
'../landsat')))
from clipper_helper import Clipper
class TestClipperHelper(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.c = Clipper()
cls.base_dir = os.path.abspath(os.path.dirname(__file__))
cls.shapefile = cls.base_dir + '/samples/test_shapefile.shp'
def test_shapefile(self):
# Test with correct shapefile
self.assertEqual([[u'009', u'045'], [u'008', u'045']],
self.c.shapefile(self.shapefile))
def test_country(self):
# Test output of a known country
self.assertEqual([['145', u'057'], ['145', u'058']],
self.c.country('Maldives'))
if __name__ == '__main__':
unittest.main()

165
tests/test_download.py Normal file
View File

@@ -0,0 +1,165 @@
# Landsat Util
# License: CC0 1.0 Universal
"""Tests for downloader"""
import os
import errno
import shutil
import unittest
from tempfile import mkdtemp
import mock
from landsat.downloader import Downloader, RemoteFileDoesntExist, IncorrectSceneId
from landsat.settings import GOOGLE_STORAGE, S3_LANDSAT
class TestDownloader(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.temp_folder = mkdtemp()
cls.d = Downloader(download_dir=cls.temp_folder)
cls.scene = 'LT81360082013127LGN01'
cls.scene_2 = 'LC82050312014229LGN00'
cls.scene_s3 = 'LC80010092015051LGN00'
cls.scene_s3_2 = 'LC82050312015136LGN00'
cls.scene_size = 59204484
@classmethod
def tearDownClass(cls):
try:
shutil.rmtree(cls.temp_folder)
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
def assertSize(self, url, path):
remote_size = self.d.get_remote_file_size(url)
download_size = os.path.getsize(path)
self.assertEqual(remote_size, download_size)
@mock.patch('landsat.downloader.fetch')
def test_download(self, mock_fetch):
mock_fetch.return_value = True
# download one scene
self.d.download([self.scene])
paths = self.d.download([self.scene])
self.assertTrue(isinstance(paths, list))
self.assertEqual([self.temp_folder + '/' + self.scene + '.tar.bz'],
paths)
# download multiple scenes
paths = self.d.download([self.scene, self.scene_2])
test_paths = [self.temp_folder + '/' + self.scene + '.tar.bz',
self.temp_folder + '/' + self.scene_2 + '.tar.bz']
self.assertTrue(isinstance(paths, list))
self.assertEqual(test_paths, paths)
# Test if error is raised when passing scene as string instead of list
self.assertRaises(Exception, self.d.download, self.scene)
# Test when passing band list along with sceneID
paths = self.d.download([self.scene_s3, self.scene_s3_2], bands=[11])
test_paths = [self.temp_folder + '/' + self.scene_s3,
self.temp_folder + '/' + self.scene_s3_2]
self.assertEqual(test_paths, paths)
# When passing scene as string, google storage download should be triggered
paths = self.d.download([self.scene], bands=4)
test_paths = [self.temp_folder + '/' + self.scene + '.tar.bz']
self.assertEqual(test_paths, paths)
@mock.patch('landsat.downloader.Downloader.google_storage')
def test_download_google_when_amazon_is_unavailable(self, fake_google):
""" Test whether google or amazon are correctly selected based on input """
fake_google.return_value = False
# Test if google is used when an image from 2014 is passed even if bands are provided
self.d.download([self.scene], bands=[432])
fake_google.assert_called_with(self.scene, self.d.download_dir)
@mock.patch('landsat.downloader.fetch')
def test_download_amazon_when_available(self, mock_fetch):
""" Test whether google or amazon are correctly selected based on input """
mock_fetch.return_value = True
# Test if amazon is used when an image from 2015 is passed with bands
paths = self.d.download([self.scene_s3], bands=[4, 3, 2])
test_paths = [self.temp_folder + '/' + self.scene_s3]
self.assertEqual(test_paths, paths)
@mock.patch('landsat.downloader.fetch')
def test_fetch(self, mock_fetch):
mock_fetch.return_value = True
sat = self.d.scene_interpreter(self.scene)
url = self.d.google_storage_url(sat)
self.assertTrue(self.d.fetch(url, self.temp_folder))
def test_remote_file_size(self):
url = self.d.google_storage_url(self.d.scene_interpreter(self.scene))
size = self.d.get_remote_file_size(url)
self.assertEqual(self.scene_size, size)
def test_google_storage_url(self):
sat = self.d.scene_interpreter(self.scene)
string = self.d.google_storage_url(sat)
expect = os.path.join(GOOGLE_STORAGE, 'L8/136/008/LT81360082013127LGN01.tar.bz')
self.assertEqual(expect, string)
def test_amazon_s3_url(self):
sat = self.d.scene_interpreter(self.scene)
string = self.d.amazon_s3_url(sat, 11)
expect = os.path.join(S3_LANDSAT, 'L8/136/008/LT81360082013127LGN01/LT81360082013127LGN01_B11.TIF')
self.assertEqual(expect, string)
def test_remote_file_exist(self):
# Exists and should return None
self.assertIsNone(self.d.remote_file_exists(os.path.join(S3_LANDSAT, 'L8/003/017/LC80030172015001L'
'GN00/LC80030172015001LGN00_B6.TIF')))
# Doesn't exist and should raise errror
with self.assertRaises(RemoteFileDoesntExist):
self.d.remote_file_exists(
os.path.join(
S3_LANDSAT,
'L8/003/017/LC80030172015001LGN00/LC80030172015001LGN00_B34.TIF'
)
)
# Doesn't exist and should raise errror
with self.assertRaises(RemoteFileDoesntExist):
self.d.remote_file_exists(
os.path.join(
GOOGLE_STORAGE,
'L8/003/017/LC80030172015001LGN00/LC80030172015001LGN00_B6.TIF'
)
)
# Exist and shouldn't raise error
self.assertIsNone(self.d.remote_file_exists(os.path.join(GOOGLE_STORAGE,
'L8/003/017/LC80030172015001LGN00.tar.bz')))
def test_scene_interpreter(self):
# Test with correct input
scene = 'LC80030172015001LGN00'
ouput = self.d.scene_interpreter(scene)
self.assertEqual({'path': '003', 'row': '017', 'sat': 'L8', 'scene': scene}, ouput)
# Test with incorrect input
self.assertRaises(Exception, self.d.scene_interpreter, 'LC80030172015001LGN')
if __name__ == '__main__':
unittest.main()

View File

@@ -1,109 +0,0 @@
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco
#
# License: CC0 1.0 Universal
"""Tests for general_helper"""
import os
import sys
import errno
import shutil
import unittest
from tempfile import mkdtemp, mkstemp
try:
import landsat.general_helper as g
except ImportError:
sys.path.append(os.path
.abspath(os.path
.join(os.path.dirname(__file__),
'../landsat')))
import general_helper as g
class TestGeneralHelper(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.temp_folder_base = mkdtemp()
cls.temp_folder_test = cls.temp_folder_base + '/test'
cls.temp_file = mkstemp(dir=cls.temp_folder_base)
@classmethod
def tearDownClass(cls):
try:
shutil.rmtree(cls.temp_folder_base)
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
def test_create_paired_list(self):
# Test correct input (string)
output = g.create_paired_list('003,003,004,004')
self.assertEqual([['003', '003'], ['004', '004']], output)
# Test correct input (list)
output = g.create_paired_list(['003', '003', '004', '004'])
self.assertEqual([['003', '003'], ['004', '004']], output)
# Test incorrect input
self.assertRaises(ValueError, g.create_paired_list, '003,003,004')
self.assertRaises(ValueError, g.create_paired_list, '')
# Test with paired list
output = g.create_paired_list([['003', '003'], ['004', '004']])
self.assertEqual([['003', '003'], ['004', '004']], output)
#Test with paired tupile
output = g.create_paired_list([('003', '003'), ('004', '004')])
self.assertEqual([('003', '003'), ('004', '004')], output)
def test_check_create_folder(self):
new_path = g.check_create_folder(self.temp_folder_test)
self.assertEqual(new_path, self.temp_folder_test)
def test_get_file(self):
f = g.get_file(self.temp_folder_test)
self.assertEqual('test', f)
def test_get_filename(self):
# Easy filename
f = g.get_filename('%s/filename.html' % self.temp_folder_base)
self.assertEqual('filename', f)
# Dificult filename
f = g.get_filename('%s/filename.test.html' % self.temp_folder_base)
self.assertEqual('filename.test', f)
def test_three_digit(self):
self.assertEqual('009', g.three_digit(9))
self.assertEqual('010', g.three_digit(10))
self.assertEqual('100', g.three_digit(100))
self.assertEqual('string', g.three_digit('string'))
def test_georgian_day(self):
self.assertEqual(28, g.georgian_day('01/28/2014'))
self.assertEqual(79, g.georgian_day('03/20/2014'))
self.assertEqual(0, g.georgian_day('random text'))
self.assertEqual(0, g.georgian_day(9876))
def test_year(self):
self.assertEqual(2014, g.year('01/28/2014'))
self.assertEqual(0, g.year('2014'))
def test_reformat_date(self):
self.assertEqual('28/01/2014', g.reformat_date('01/28/2014',
'%d/%m/%Y'))
self.assertEqual('2014', g.reformat_date('2014', '%d/%m/%Y'))
self.assertEqual('2014', g.reformat_date('2014', 'juberish'))
if __name__ == '__main__':
unittest.main()

View File

@@ -1,92 +0,0 @@
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco
#
# License: CC0 1.0 Universal
"""Tests for gs_helper"""
import os
import sys
import errno
import shutil
import unittest
from tempfile import mkdtemp, mkstemp
try:
from landsat.gs_helper import GsHelper
except ImportError:
sys.path.append(os.path
.abspath(os.path
.join(os.path.dirname(__file__),
'../landsat')))
from gs_helper import GsHelper
class TestGsHelper(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.g = GsHelper()
cls.temp_folder = mkdtemp()
cls.g.download_dir = cls.temp_folder + '/download'
cls.g.zip_dir = cls.g.download_dir + '/zip'
cls.g.unzip_dir = cls.g.download_dir + '/unzip'
cls.g.scene_file = cls.g.download_dir + '/scene_list'
@classmethod
def tearDownClass(cls):
try:
shutil.rmtree(cls.temp_folder)
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
def test_init(self):
self.assertIsInstance(self.g, GsHelper)
# @unittest.skip("demonstrating skipping")
def test_search(self):
# test wrong query
self.assertRaises(SystemExit, self.g.search, '334555')
# test a search with known result
query = '003,003'
start = '01/01/2014'
end = '01/06/2014'
self.assertEqual(1, len(self.g.search(query, start, end)))
# test a search with unconvential date range
query = '003,003'
start = 'jan 1 2014'
end = 'june 1 2014'
self.assertEqual(1, len(self.g.search(query, start, end)))
# @unittest.skip("demonstrating skipping")
def test_signle_download(self):
# Downloading this image: LT81360082013127LGN01.tar.bz since the size
# is very small: 56.46MB
self.assertTrue(self.g.single_download('008',
'136',
'LT81360082013127LGN01'))
# @unittest.skip("demonstrating skipping")
def test_batch_download(self):
image_list = ['gs://earthengine-public/landsat/L8/136/008/'
'LT81360082013127LGN01.tar.bz',
'gs://earthengine-public/landsat/L8/136/008/'
'LT81360082013127LGN01.tar.bz']
self.assertTrue(self.g.batch_download(image_list))
def test_unzip(self):
self.assertTrue(self.g.unzip())
if __name__ == '__main__':
unittest.main()

132
tests/test_image.py Normal file
View File

@@ -0,0 +1,132 @@
# Landsat Util
# License: CC0 1.0 Universal
"""Tests for image processing"""
from os.path import join, abspath, dirname, exists
import errno
import shutil
import unittest
from tempfile import mkdtemp
import rasterio
from rasterio.warp import transform_bounds
from landsat.image import Simple, PanSharpen
from landsat.ndvi import NDVI, NDVIWithManualColorMap
def get_bounds(path):
""" Retrun bounds in WGS84 system """
with rasterio.drivers():
src = rasterio.open(path)
return transform_bounds(
src.crs,
{'init': 'EPSG:4326'},
*src.bounds)
class TestProcess(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.base_dir = abspath(dirname(__file__))
cls.temp_folder = mkdtemp()
cls.landsat_image = join(cls.base_dir, 'samples/test.tar.bz2')
@classmethod
def tearDownClass(cls):
try:
shutil.rmtree(cls.temp_folder)
shutil.rmtree(join(cls.base_dir, 'samples', 'test'))
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
def test_simple_no_bands(self):
p = Simple(path=self.landsat_image, dst_path=self.temp_folder)
self.assertTrue(exists(p.run()))
def test_simple_with_bands(self):
p = Simple(path=self.landsat_image, bands=[1, 2, 3], dst_path=self.temp_folder)
self.assertTrue(exists(p.run()))
def test_simple_with_clip(self):
bounds = [-87.48138427734375, 30.700515832683923, -87.43331909179688, 30.739475058679485]
p = Simple(path=self.landsat_image, bands=[1, 2, 3], dst_path=self.temp_folder,
bounds=bounds)
path = p.run()
self.assertTrue(exists(path))
for val, exp in zip(get_bounds(path), bounds):
self.assertAlmostEqual(val, exp, 2)
def test_simple_with_intersecting_bounds_clip(self):
bounds = [-87.520515832683923, 30.700515832683923, -87.43331909179688, 30.739475058679485]
expected_bounds = [-87.49691403528307, 30.700515832683923, -87.43331909179688, 30.739475058679485]
p = Simple(path=self.landsat_image, bands=[1, 2, 3], dst_path=self.temp_folder,
bounds=bounds)
path = p.run()
self.assertTrue(exists(path))
for val, exp in zip(get_bounds(path), expected_bounds):
self.assertAlmostEqual(val, exp, 2)
def test_simple_with_out_of_bounds_clip(self):
bounds = [-87.66197204589844, 30.732392734006083, -87.57545471191406, 30.806731169315675]
expected_bounds = [-87.49691403528307, 30.646646570857722, -87.29976764207227, 30.810617911193567]
p = Simple(path=self.landsat_image, bands=[1, 2, 3], dst_path=self.temp_folder,
bounds=bounds)
path = p.run()
self.assertTrue(exists(path))
for val, exp in zip(get_bounds(path), expected_bounds):
self.assertAlmostEqual(val, exp, 2)
def test_simple_with_zip_file(self):
p = Simple(path=self.landsat_image, dst_path=self.temp_folder)
# test from an unzip file
self.path = join(self.base_dir, 'samples', 'test')
self.assertTrue(exists(p.run()))
def test_pansharpen(self):
p = PanSharpen(path=self.landsat_image, bands=[4, 3, 2], dst_path=self.temp_folder)
self.assertTrue(exists(p.run()))
def test_pansharpen_with_clip(self):
""" test with pansharpen and clipping """
bounds = [-87.48138427734375, 30.700515832683923, -87.43331909179688, 30.739475058679485]
p = PanSharpen(path=self.landsat_image, bands=[4, 3, 2], dst_path=self.temp_folder,
bounds=bounds)
path = p.run()
self.assertTrue(exists(path))
for val, exp in zip(get_bounds(path), bounds):
self.assertAlmostEqual(val, exp, 2)
def test_ndvi(self):
p = NDVI(path=self.landsat_image, dst_path=self.temp_folder)
self.assertTrue(exists(p.run()))
def test_ndvi_with_clip(self):
bounds = [-87.48138427734375, 30.700515832683923, -87.43331909179688, 30.739475058679485]
p = NDVI(path=self.landsat_image, dst_path=self.temp_folder,
bounds=bounds)
path = p.run()
self.assertTrue(exists(path))
for val, exp in zip(get_bounds(path), bounds):
self.assertAlmostEqual(val, exp, 2)
def test_ndvi_with_manual_colormap(self):
p = NDVIWithManualColorMap(path=self.landsat_image, dst_path=self.temp_folder)
self.assertTrue(exists(p.run()))

View File

@@ -1,71 +1,284 @@
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributer: scisco
#
# Landsat Util
# License: CC0 1.0 Universal
"""Tests for landsat"""
import os
import sys
import json
import unittest
import subprocess
import errno
import shutil
from os.path import join
try:
import landsat.landsat as landsat
except ImportError:
sys.path.append(os.path
.abspath(os.path
.join(os.path.dirname(__file__),
'../landsat')))
import landsat
from jsonschema import validate
import mock
import landsat.landsat as landsat
from tests import geojson_schema
class TestLandsat(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.base_dir = os.path.abspath(os.path.dirname(__file__))
cls.shapefile = cls.base_dir + '/samples/test_shapefile.shp'
cls.parser = landsat.args_options()
cls.mock_path = 'path/to/folder'
@classmethod
def tearDownClass(cls):
try:
shutil.rmtree('path')
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
def test_incorrect_date(self):
""" Test search with incorrect date input """
args = ['search', '--start', 'berlin', '--end', 'january 10 2014']
self.assertEquals(landsat.main(self.parser.parse_args(args)),
['Your date format is incorrect. Please try again!', 1])
def test_too_many_results(self):
""" Test when search return too many results """
args = ['search', '--cloud', '100', '-p', '205,022,206,022,204,022']
self.assertEquals(landsat.main(self.parser.parse_args(args)),
['Over 100 results. Please narrow your search', 1])
def test_search_pr_correct(self):
# Correct search
args = ['search', '--onlysearch', 'pr', '008', '008']
"""Test Path Row search with correct input"""
args = ['search', '--start', 'january 1 2013', '--end',
'january 10 2014', '-p', '008,008']
with self.assertRaises(SystemExit) as cm:
landsat.main(self.parser.parse_args(args))
self.assertEquals(landsat.main(self.parser.parse_args(args)),
['Search completed!'])
self.assertEqual(cm.exception.code, 0)
def test_search_lat_lon(self):
"""Test Latitude Longitude search with correct input"""
args = ['search', '--start', 'may 01 2013', '--end', 'may 08 2013',
'--lat', '38.9107203', '--lon', '-77.0290116']
self.assertEquals(landsat.main(self.parser.parse_args(args)),
['Search completed!'])
def test_search_pr_wrong_input(self):
args = ['search', '--onlysearch', 'pr', 'what?']
"""Test Path Row search with incorrect input"""
args = ['search', '-p', 'what?']
with self.assertRaises(SystemExit) as cm:
self.assertEquals(landsat.main(self.parser.parse_args(args)),
['Check your request and try again', 1])
def test_search_json_output(self):
"""Test json output in search"""
args = ['search', '--latest', '10', '--json']
output = landsat.main(self.parser.parse_args(args))
j = json.loads(output)
self.assertEquals(type(j), dict)
def test_search_geojson_output(self):
"""Test json output in search"""
args = ['search', '--latest', '10', '--geojson']
output = landsat.main(self.parser.parse_args(args))
j = json.loads(output)
self.assertIsNone(validate(j, geojson_schema))
self.assertEquals(type(j), dict)
@mock.patch('landsat.landsat.Downloader')
def test_download_correct(self, mock_downloader):
"""Test download command with correct input"""
mock_downloader.download.return_value = True
args = ['download', 'LC80010092015051LGN00', '-b', '11,', '-d', self.mock_path]
output = landsat.main(self.parser.parse_args(args))
mock_downloader.assert_called_with(download_dir=self.mock_path, usgs_pass=None, usgs_user=None)
mock_downloader.return_value.download.assert_called_with(['LC80010092015051LGN00'], [11])
self.assertEquals(output, ['Download Completed', 0])
@mock.patch('landsat.landsat.Downloader')
def test_download_correct_zip(self, mock_downloader):
"""Download command should download zip if no bands are given"""
mock_downloader.download.return_value = True
args = ['download', 'LC80010092015051LGN00', '-d', self.mock_path]
output = landsat.main(self.parser.parse_args(args))
mock_downloader.assert_called_with(download_dir=self.mock_path, usgs_pass=None, usgs_user=None)
mock_downloader.return_value.download.assert_called_with(['LC80010092015051LGN00'], [])
self.assertEquals(output, ['Download Completed', 0])
@mock.patch('landsat.landsat.process_image')
@mock.patch('landsat.landsat.Downloader.download')
def test_download_no_bands_with_process(self, mock_downloader, mock_process):
"""Download command should not download zip if no bands are given but process flag is used"""
mock_downloader.return_value = {'LC80010092015051LGN00': 'aws'}
mock_process.return_value = 'image.TIF'
args = ['download', 'LC80010092015051LGN00', '-p', '-d', self.mock_path]
output = landsat.main(self.parser.parse_args(args))
mock_downloader.assert_called_with(['LC80010092015051LGN00'], [4, 3, 2])
self.assertEquals(output, ["The output is stored at image.TIF", 0])
def test_download_incorrect(self):
"""Test download command with incorrect input"""
args = ['download', 'LT813600']
self.assertEquals(landsat.main(self.parser.parse_args(args)),
['The SceneID provided was incorrect', 1])
@mock.patch('landsat.landsat.process_image')
@mock.patch('landsat.downloader.fetch')
def test_download_process_continuous(self, fetch, mock_process):
"""Test download and process commands together"""
fetch.return_value = True
mock_process.return_value = 'image.TIF'
args = ['download', 'LC80010092015051LGN00', 'LC80470222014354LGN00', '-b', '432', '-d', self.mock_path, '-p']
output = landsat.main(self.parser.parse_args(args))
mock_process.assert_called_with('path/to/folder/LC80470222014354LGN00', '432',
False, False, False, False, False, bounds=None)
self.assertEquals(output, ["The output is stored at image.TIF", 0])
# Call with force unzip flag
args = ['download', 'LC80010092015051LGN00', 'LC80470222014354LGN00', '-b', '432', '-d',
self.mock_path, '-p', '--force-unzip']
output = landsat.main(self.parser.parse_args(args))
mock_process.assert_called_with('path/to/folder/LC80470222014354LGN00', '432', False, False, False,
True, False, bounds=None)
self.assertEquals(output, ["The output is stored at image.TIF", 0])
# Call with pansharpen
args = ['download', 'LC80010092015051LGN00', 'LC80470222014354LGN00', '-b', '432', '-d',
self.mock_path, '-p', '--pansharpen']
output = landsat.main(self.parser.parse_args(args))
mock_process.assert_called_with('path/to/folder/LC80470222014354LGN00', '432', False, True, False,
False, False, bounds=None)
self.assertEquals(output, ["The output is stored at image.TIF", 0])
# Call with pansharpen and clipping
args = ['download', 'LC80010092015051LGN00', 'LC80470222014354LGN00', '-b', '432', '-d',
self.mock_path, '-p', '--pansharpen', '--clip', '"-180,-180,0,0"']
output = landsat.main(self.parser.parse_args(args))
mock_process.assert_called_with('path/to/folder/LC80470222014354LGN00', '432', False, True, False,
False, False, bounds=[-180.0, -180.0, 0.0, 0.0])
self.assertEquals(output, ["The output is stored at image.TIF", 0])
# Call with ndvi
args = ['download', 'LC80010092015051LGN00', 'LC80470222014354LGN00', '-b', '432', '-d',
self.mock_path, '-p', '--ndvi']
output = landsat.main(self.parser.parse_args(args))
mock_process.assert_called_with('path/to/folder/LC80470222014354LGN00', '432', False, False, True,
False, False, bounds=None)
self.assertEquals(output, ["The output is stored at image.TIF", 0])
# Call with ndvigrey
args = ['download', 'LC80010092015051LGN00', 'LC80470222014354LGN00', '-b', '432', '-d',
self.mock_path, '-p', '--ndvigrey']
output = landsat.main(self.parser.parse_args(args))
mock_process.assert_called_with('path/to/folder/LC80470222014354LGN00', '432', False, False, False,
False, True, bounds=None)
self.assertEquals(output, ["The output is stored at image.TIF", 0])
@mock.patch('landsat.landsat.Uploader')
@mock.patch('landsat.landsat.process_image')
@mock.patch('landsat.downloader.fetch')
def test_download_process_continuous_with_upload(self, fetch, mock_process, mock_upload):
"""Test download and process commands together"""
fetch.return_value = True
mock_process.return_value = 'image.TIF'
mock_upload.run.return_value = True
args = ['download', 'LC80010092015051LGN00', '-b', '432', '-d', self.mock_path, '-p',
'-u', '--key', 'somekey', '--secret', 'somesecret', '--bucket', 'mybucket', '--region', 'this']
output = landsat.main(self.parser.parse_args(args))
# mock_downloader.assert_called_with(['LC80010092015051LGN00'], [4, 3, 2])
mock_process.assert_called_with('path/to/folder/LC80010092015051LGN00', '432', False, False, False,
False, False, bounds=None)
mock_upload.assert_called_with('somekey', 'somesecret', 'this')
mock_upload.return_value.run.assert_called_with('mybucket', 'image.TIF', 'image.TIF')
self.assertEquals(output, ['The output is stored at image.TIF', 0])
@mock.patch('landsat.landsat.process_image')
@mock.patch('landsat.downloader.fetch')
def test_download_process_continuous_with_wrong_args(self, fetch, mock_process):
"""Test download and process commands together"""
fetch.return_value = True
mock_process.return_value = 'image.TIF'
args = ['download', 'LC80010092015051LGN00', '-b', '432', '-d', self.mock_path, '-p',
'-u', '--region', 'whatever']
output = landsat.main(self.parser.parse_args(args))
mock_process.assert_called_with('path/to/folder/LC80010092015051LGN00', '432', False, False, False,
False, False, bounds=None)
self.assertEquals(output, ['Could not authenticate with AWS', 1])
@mock.patch('landsat.landsat.process_image')
def test_process_correct(self, mock_process):
    """Process command with a valid scene folder reports the output path."""
    mock_process.return_value = 'image.TIF'
    args = ['process', 'path/to/folder/LC80010092015051LGN00']
    output = landsat.main(self.parser.parse_args(args))
    mock_process.assert_called_with('path/to/folder/LC80010092015051LGN00', '432',
                                    False, False, False, False, False, None)
    # assertEqual: assertEquals is a deprecated alias
    self.assertEqual(output, ["The output is stored at image.TIF"])
@mock.patch('landsat.landsat.process_image')
def test_process_correct_with_clipping(self, mock_process):
    """Process command with --clip forwards the parsed bounding box."""
    mock_process.return_value = 'image.TIF'
    args = ['process', 'path/to/folder/LC80010092015051LGN00', '--clip', '"-180,-180,0,0"']
    output = landsat.main(self.parser.parse_args(args))
    # The quoted CLI string must arrive as a list of floats.
    mock_process.assert_called_with('path/to/folder/LC80010092015051LGN00', '432',
                                    False, False, False, False, False, [-180.0, -180.0, 0.0, 0.0])
    # assertEqual: assertEquals is a deprecated alias
    self.assertEqual(output, ["The output is stored at image.TIF"])
@mock.patch('landsat.landsat.process_image')
def test_process_correct_pansharpen(self, mock_process):
    """Process command with --pansharpen sets the pansharpen flag."""
    mock_process.return_value = 'image.TIF'
    args = ['process', '--pansharpen', 'path/to/folder/LC80010092015051LGN00']
    output = landsat.main(self.parser.parse_args(args))
    mock_process.assert_called_with('path/to/folder/LC80010092015051LGN00', '432', False, True, False, False,
                                    False, None)
    # assertEqual: assertEquals is a deprecated alias
    self.assertEqual(output, ["The output is stored at image.TIF"])
@mock.patch('landsat.landsat.process_image')
def test_process_correct_ndvi(self, mock_process):
    """Process command with --ndvi sets the ndvi flag."""
    mock_process.return_value = 'image.TIF'
    args = ['process', '--ndvi', 'path/to/folder/LC80010092015051LGN00']
    output = landsat.main(self.parser.parse_args(args))
    mock_process.assert_called_with('path/to/folder/LC80010092015051LGN00', '432', False, False, True, False,
                                    False, None)
    # assertEqual: assertEquals is a deprecated alias
    self.assertEqual(output, ["The output is stored at image.TIF"])
def test_process_incorrect(self):
    """Process command with an invalid scene path must exit with code 1.

    Rewritten with assertRaises as a context manager: the original
    try/except referenced an undefined name `cm` (a NameError when the
    except branch ran) and passed silently when no SystemExit was raised.
    """
    args = ['process', 'whatever']
    with self.assertRaises(SystemExit) as cm:
        landsat.main(self.parser.parse_args(args))
    self.assertEqual(cm.exception.code, 1)
    self.assertNotEqual(cm.exception.code, 0)
def test_search_shapefile_correct(self):
    """A shapefile search in only-search mode exits cleanly (code 0)."""
    cli_args = self.parser.parse_args(
        ['search', '--onlysearch', 'shapefile', self.shapefile])
    with self.assertRaises(SystemExit) as cm:
        landsat.main(cli_args)
    self.assertEqual(cm.exception.code, 0)
def test_search_shapefile_incorrect(self):
    """A non-existent shapefile path raises the CLI's invalid-argument error."""
    cli_args = self.parser.parse_args(
        ['search', '--onlysearch', 'shapefile', 'whatever'])
    with self.assertRaises(Exception) as cm:
        landsat.main(cli_args)
    self.assertEqual(cm.exception.args[0],
                     'Invalid Argument. Please try again!')
def check_command_line(self):
    """ Check if the commandline performs correctly """
    # NOTE(review): this name does not start with `test_`, so unittest
    # discovery never runs it — presumably deliberate because it shells
    # out to the real script with a live `python` subprocess; confirm,
    # or rename to `test_command_line` to include it in the suite.
    self.assertEqual(subprocess.call(['python', join(self.base_dir, '../landsat.py'), '-h']), 0)
# Support running this test module directly: `python test_landsat.py`.
if __name__ == '__main__':
unittest.main()

83
tests/test_mixins.py Normal file
View File

@@ -0,0 +1,83 @@
# Landsat Util
# License: CC0 1.0 Universal
"""Tests for mixins"""
from __future__ import absolute_import
import sys
import unittest
try:
from io import StringIO
except:
from cStringIO import StringIO
from contextlib import contextmanager
from landsat.mixins import VerbosityMixin
# Capture function is taken from
# http://schinckel.net/2013/04/15/capture-and-test-sys.stdout-sys.stderr-in-unittest.testcase/
@contextmanager
def capture(command, *args, **kwargs):
    """Run ``command(*args, **kwargs)`` and yield everything it printed.

    Temporarily swaps ``sys.stdout`` for an in-memory buffer. The real
    stream is restored in a ``finally`` block, so a raising command (or a
    raising assertion in the ``with`` body) no longer leaves stdout
    hijacked for the rest of the test run — the original restored it only
    on the success path.
    """
    out, sys.stdout = sys.stdout, StringIO()
    try:
        command(*args, **kwargs)
        sys.stdout.seek(0)
        yield sys.stdout.read()
    finally:
        sys.stdout = out
class TestMixins(unittest.TestCase):
    """Tests for VerbosityMixin's console-output helpers.

    All expected values include the raw ANSI color escape codes the mixin
    emits (\\x1b[34m = blue, \\x1b[31m = red, \\x1b[0m = reset).
    """

    @classmethod
    def setUpClass(cls):
        cls.v = VerbosityMixin()

    def test_output(self):
        # just a value: nothing is printed unless normal=True
        with capture(self.v.output, u'this is a test') as output:
            self.assertEqual("", output)
        # value as normal
        with capture(self.v.output, u'this is a test', normal=True) as output:
            self.assertEqual("this is a test\n", output)
        # value as normal with color
        with capture(self.v.output, u'this is a test', normal=True, color='blue') as output:
            self.assertEqual("\x1b[34mthis is a test\x1b[0m\n", output)
        # value as error (rendered in red)
        with capture(self.v.output, u'this is a test', normal=True, error=True) as output:
            self.assertEqual("\x1b[31mthis is a test\x1b[0m\n", output)
        # value with arrow prefix
        with capture(self.v.output, u'this is a test', normal=True, arrow=True) as output:
            self.assertEqual("\x1b[34m===> \x1b[0mthis is a test\n", output)
        # value with indent
        with capture(self.v.output, u'this is a test', normal=True, indent=1) as output:
            self.assertEqual(" this is a test\n", output)

    def test_exit(self):
        # exit() raises SystemExit; the assertion inside the `with` body
        # is unreachable (kept from the original for parity).
        with self.assertRaises(SystemExit):
            with capture(self.v.exit, u'exit test') as output:
                self.assertEqual('exit test', output)

    def test_print(self):
        # message in blue with arrow
        with capture(self.v._print, msg=u'this is a test', color='blue', arrow=True) as output:
            self.assertEqual("\x1b[34m===> \x1b[0m\x1b[34mthis is a test\x1b[0m\n", output)
        # just a message
        with capture(self.v._print, msg=u'this is a test') as output:
            self.assertEqual("this is a test\n", output)
        # message with color and indent
        with capture(self.v._print, msg=u'this is a test', color='blue', indent=1) as output:
            self.assertEqual(" \x1b[34mthis is a test\x1b[0m\n", output)
# Support running this test module directly: `python test_mixins.py`.
if __name__ == '__main__':
unittest.main()

126
tests/test_search.py Normal file
View File

@@ -0,0 +1,126 @@
# Landsat Util
# License: CC0 1.0 Universal
"""Tests for search"""
import unittest
from jsonschema import validate
from landsat.search import Search
from tests import geojson_schema
class TestSearchHelper(unittest.TestCase):
    """Tests for landsat.search.Search.

    The `test_search*` methods hit the live search API and depend on its
    archive contents; the builder tests are pure string construction.
    """

    @classmethod
    def setUpClass(cls):
        cls.s = Search()

    def test_search(self):
        # A regular search with a known result for paths and rows
        paths_rows = '003,003'
        start_date = '2014-01-01'
        end_date = '2014-06-01'
        result = self.s.search(paths_rows=paths_rows, start_date=start_date, end_date=end_date)
        self.assertEqual('2014-05-22', result['results'][0]['date'])
        # A regular search with a known result for lat and lon
        lat = 38.9107203
        lon = -77.0290116
        start_date = '2015-02-01'
        end_date = '2015-02-20'
        result = self.s.search(lat=lat, lon=lon, start_date=start_date, end_date=end_date)
        self.assertEqual('2015-02-06', result['results'][0]['date'])

    def test_search_zero_lon(self):
        """Zero-valued coordinates must not be treated as missing."""
        # Pass the locals through (the original assigned lon/lat and an
        # unused paths_rows, then re-typed the literals in the call).
        lon = 0.0
        lat = 52.0
        start_date = '2016-01-01'
        end_date = '2016-01-10'
        result = self.s.search(start_date=start_date, end_date=end_date,
                               lon=lon, lat=lat)
        self.assertEqual('2016-01-06', result['results'][0]['date'])

    def test_search_with_geojson(self):
        # The geojson=True variant must validate against the GeoJSON schema
        paths_rows = '003,003'
        start_date = '2014-01-01'
        end_date = '2014-06-01'
        result = self.s.search(paths_rows=paths_rows, start_date=start_date, end_date=end_date, geojson=True)
        # jsonschema.validate returns None on success
        self.assertIsNone(validate(result, geojson_schema))
        self.assertEqual('2014-05-22', result['features'][0]['properties']['date'])

    def test_query_builder(self):
        # test with no input
        string = self.s.query_builder()
        self.assertEqual('', string)
        # just with rows and paths
        string = self.s.query_builder(paths_rows='003,004')
        self.assertEqual('(path:003+AND+row:004)', string)
        # multiple rows and paths
        string = self.s.query_builder(paths_rows='003,004,010,001')
        self.assertEqual('(path:003+AND+row:004)+OR+(path:010+AND+row:001)', string)
        # incomplete rows and paths (odd count) must be rejected
        self.assertRaises(ValueError, self.s.query_builder, paths_rows='003,004,010')
        # full example combining dates, cloud cover, lat/lon and paths/rows
        expected_string = ('acquisitionDate:[2014-01-01+TO+2014-11-12]+AND+cloudCoverFull:[10+TO+28]+AND+upperLeftCo'
                           'rnerLatitude:[23+TO+1000]+AND+lowerRightCornerLatitude:[-1000+TO+23]+AND+lowerLeftCorner'
                           'Longitude:[-1000+TO+21]+AND+upperRightCornerLongitude:[21+TO+1000]+AND+((path:003+AND+ro'
                           'w:004))')
        string = self.s.query_builder(paths_rows='003,004', lat=23, lon=21, start_date='2014-01-01',
                                      end_date='2014-11-12', cloud_min=10, cloud_max=28)
        self.assertEqual(expected_string, string)

    def test_lat_lon_builder(self):
        expected_string = ('upperLeftCornerLatitude:[12.3344+TO+1000]+AND+lowerRightCornerLatitude:[-1000+TO+12.3344]'
                           '+AND+lowerLeftCornerLongitude:[-1000+TO+11.0032]+AND+upperRightCornerLongitude:[11.0032+T'
                           'O+1000]')
        # Test with floats
        string = self.s.lat_lon_builder(12.3344, 11.0032)
        self.assertEqual(expected_string, string)
        # Test with strings
        string = self.s.lat_lon_builder('12.3344', '11.0032')
        self.assertEqual(expected_string, string)

    def test_cloud_cover_prct_range_builder(self):
        # no input: full 0-100 range
        string = self.s.cloud_cover_prct_range_builder()
        self.assertEqual('cloudCoverFull:[0+TO+100]', string)
        # just min
        string = self.s.cloud_cover_prct_range_builder(3)
        self.assertEqual('cloudCoverFull:[3+TO+100]', string)
        # just max
        string = self.s.cloud_cover_prct_range_builder(max=30)
        self.assertEqual('cloudCoverFull:[0+TO+30]', string)
        # both inputs
        string = self.s.cloud_cover_prct_range_builder(7, 10)
        self.assertEqual('cloudCoverFull:[7+TO+10]', string)

    def test_date_range_builder(self):
        string = self.s.date_range_builder('2014-01-01', '2015-01-01')
        self.assertEqual('acquisitionDate:[2014-01-01+TO+2015-01-01]', string)

    def test_row_path_builder(self):
        string = self.s.row_path_builder('003', '004')
        self.assertEqual('path:003+AND+row:004', string)
# Support running this test module directly: `python test_search.py`.
if __name__ == '__main__':
unittest.main()

View File

@@ -1,99 +0,0 @@
# USGS Landsat Imagery Util
#
#
# Author: developmentseed
# Contributor: scisco
#
# License: CC0 1.0 Universal
"""Tests for search_helper"""
import os
import sys
import unittest
try:
from landsat.search_helper import Search
except ImportError:
sys.path.append(os.path
.abspath(os.path
.join(os.path.dirname(__file__),
'../landsat')))
from search_helper import Search
class TestSearchHelper(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.s = Search()
def test_search(self):
# TEST A REGULAR SEARCH WITH KNOWN RESULT
row_paths = '003,003'
start_date = '2014-01-01'
end_date = '2014-06-01'
cloud_min = 0
cloud_max = 100
limit = 10
result = self.s.search(row_paths=row_paths,
start_date=start_date,
end_date=end_date)
self.assertEqual(1, result['total'])
row_path_list = ['003,003,004',
'045503345']
# TEST VARIOUS FORMATS
for i in range(len(row_path_list)):
row_path_list[i]
result = self.s.search(row_paths=row_paths,
start_date=start_date,
end_date=end_date,
cloud_min=cloud_min,
cloud_max=cloud_max,
limit=limit)
self.assertIsInstance(result, dict)
def test_query_builder(self):
q = [{'rp': '003,003',
'start': '2014-01-01',
'end': '2014-06-01',
'min': 0,
'max': 100
},
{'rp': '003,003',
'start': '01',
'end': '2014',
'min': '',
'max': ''
}]
for i in range(len(q)):
output = self.s._query_builder(row_paths=q[i]['rp'],
start_date=q[i]['start'],
end_date=q[i]['end'],
cloud_min=q[i]['min'],
cloud_max=q[i]['max'])
self.assertIsInstance(output, str)
def test_row_path_builder(self):
self.assertEqual('row:003+AND+path:003', self.s
._row_path_builder('003',
'003'))
def test_date_range_builder(self):
self.assertEqual('acquisitionDate:[2013+TO+2014]',
self.s._date_range_builder('2013', '2014'))
def test_cloud_cover_prct_range_builder(self):
self.assertEqual('cloudCoverFull:[1+TO+2]',
self.s._cloud_cover_prct_range_builder('1', '2'))
if __name__ == '__main__':
unittest.main()

92
tests/test_uploader.py Normal file
View File

@@ -0,0 +1,92 @@
# Landsat Util
# License: CC0 1.0 Universal
# Some of the tests are from pys3upload (https://github.com/leetreveil/pys3upload)
"""Tests for uploader"""
import os
import sys
import unittest
import threading
import mock
from landsat.uploader import Uploader, upload, upload_part, data_collector
from .mocks import S3Connection, state
class TestUploader(unittest.TestCase):
    """End-to-end test of Uploader.run against the mocked S3Connection."""

    @mock.patch('landsat.uploader.S3Connection', S3Connection)
    def test_upload_to_s3(self):
        """A local sample file round-trips through the mocked multipart upload."""
        state['mock_boto_s3_multipart_upload_data'] = []
        base_dir = os.path.abspath(os.path.dirname(__file__))
        landsat_image = os.path.join(base_dir, 'samples/mock_upload')
        # Context manager closes the handle deterministically; the original
        # `open(...).readlines()` leaked the file descriptor.
        with open(landsat_image, 'rb') as fh:
            expected = fh.readlines()
        u = Uploader('some_key', 'some_secret')
        u.run('some bucket', 'mock_upload', landsat_image)
        self.assertEqual(state['mock_boto_s3_multipart_upload_data'], expected)
class upload_tests(unittest.TestCase):
    """Tests for the module-level upload() helper."""

    def test_should_be_able_to_upload_data(self):
        """Chunks pushed through upload() land in the mock S3 state."""
        payload = [b'12', b'345']  # renamed from `input`: don't shadow the builtin
        state['mock_boto_s3_multipart_upload_data'] = []
        conn = S3Connection('some_key', 'some_secret', True)
        upload('test_bucket', 'some_key', 'some_secret', payload, 'some_key', connection=conn)
        self.assertEqual(state['mock_boto_s3_multipart_upload_data'], [b'12', b'345'])
# Tests for upload_part, which wraps an upload callable with retry logic.
class upload_part_tests(unittest.TestCase):
# upload_part appears to RETURN (not raise) a threading.ThreadError after
# its retries are exhausted — the `raise` below converts that return value
# into an exception so assertRaises can check its type.
def test_should_return_error_when_upload_func_raises_error(self):
def upload_func(*args, **kwargs):
raise Exception()
with self.assertRaises(threading.ThreadError):
raise upload_part(upload_func, '_', '_', '_')
# Each failed attempt increments the counter; five total attempts expected.
def test_should_retry_upload_five_times(self):
counter = [0]
def upload_func(*args, **kwargs):
counter[0] += 1
raise Exception()
upload_part(upload_func, b'_', b'_', b'_')
self.assertEqual(counter[0], 5)
class doc_collector_tests(unittest.TestCase):
    """Tests for data_collector's re-buffering into def_buf_size pieces."""

    def test_should_be_able_to_read_every_byte_of_data(self):
        chunks = [b'12345']  # renamed from `input`: don't shadow the builtin
        result = list(data_collector(chunks, def_buf_size=3))
        self.assertEqual(result, [b'123', b'45'])

    def test_should_be_able_to_read_single_yield(self):
        chunks = [b'123']
        result = list(data_collector(chunks, def_buf_size=3))
        self.assertEqual(result, [b'123'])

    def test_should_be_able_to_yield_data_less_than_buffer_size(self):
        chunks = [b'123']
        result = list(data_collector(chunks, def_buf_size=6))
        self.assertEqual(result, [b'123'])

    def test_a_single_item_should_still_be_buffered_even_if_it_is_above_the_buffer_size(self):
        chunks = [b'123456']
        result = list(data_collector(chunks, def_buf_size=3))
        self.assertEqual(result, [b'123', b'456'])

    def test_should_return_rest_of_data_on_last_iteration(self):
        # Chunk boundaries in the source iterable do not matter: the
        # collector re-buffers across them.
        chunks = [b'1234', b'56']
        result = list(data_collector(chunks, def_buf_size=3))
        self.assertEqual(result, [b'123', b'456'])
# Support running this test module directly: `python test_uploader.py`.
if __name__ == '__main__':
unittest.main()

190
tests/test_utils.py Normal file
View File

@@ -0,0 +1,190 @@
# Landsat Util
# License: CC0 1.0 Universal
"""Tests for utils"""
from os.path import join
import errno
import shutil
import unittest
from tempfile import mkdtemp, mkstemp
from landsat import utils
class TestUtils(unittest.TestCase):
    """Tests for the helper functions in landsat.utils."""

    @classmethod
    def setUpClass(cls):
        # Scratch area shared by the path-related tests; removed in tearDownClass.
        cls.temp_folder_base = mkdtemp()
        cls.temp_folder_test = join(cls.temp_folder_base, 'test')
        cls.temp_file = mkstemp(dir=cls.temp_folder_base)

    @classmethod
    def tearDownClass(cls):
        try:
            shutil.rmtree(cls.temp_folder_base)
        except OSError as exc:
            # Ignore "already gone"; re-raise anything else.
            if exc.errno != errno.ENOENT:
                raise

    def test_create_paired_list(self):
        # Correct input as a comma-separated string
        output = utils.create_paired_list('003,003,004,004')
        self.assertEqual([['003', '003'], ['004', '004']], output)
        # Correct input as a flat list
        output = utils.create_paired_list(['003', '003', '004', '004'])
        self.assertEqual([['003', '003'], ['004', '004']], output)
        # Odd-length and empty inputs are rejected
        self.assertRaises(ValueError, utils.create_paired_list, '003,003,004')
        self.assertRaises(ValueError, utils.create_paired_list, '')

    def test_check_create_folder(self):
        new_path = utils.check_create_folder(self.temp_folder_test)
        self.assertEqual(new_path, self.temp_folder_test)

    def test_get_file(self):
        f = utils.get_file(self.temp_folder_test)
        self.assertEqual('test', f)

    def test_get_filename(self):
        # Easy filename
        f = utils.get_filename('%s/filename.html' % self.temp_folder_base)
        self.assertEqual('filename', f)
        # Difficult filename (dot inside the stem)
        f = utils.get_filename('%s/filename.test.html' % self.temp_folder_base)
        self.assertEqual('filename.test', f)

    def test_three_digit(self):
        self.assertEqual('009', utils.three_digit(9))
        self.assertEqual('010', utils.three_digit(10))
        self.assertEqual('100', utils.three_digit(100))
        # Non-numeric input is passed through unchanged
        self.assertEqual('string', utils.three_digit('string'))

    def test_georgian_day(self):
        self.assertEqual(28, utils.georgian_day('01/28/2014'))
        self.assertEqual(79, utils.georgian_day('03/20/2014'))
        # Unparseable input yields 0 rather than raising
        self.assertEqual(0, utils.georgian_day('random text'))
        self.assertEqual(0, utils.georgian_day(9876))

    def test_year(self):
        self.assertEqual(2014, utils.year('01/28/2014'))
        self.assertEqual(0, utils.year('2014'))

    def test_reformat_date(self):
        self.assertEqual('2014-02-03', utils.reformat_date('02/03/2014'))
        self.assertEqual('2014-03-02', utils.reformat_date('02/03/2014', '%Y-%d-%m'))
        self.assertEqual('2014', utils.reformat_date('2014', '%d/%m/%Y'))
        self.assertEqual('2014', utils.reformat_date('2014', 'juberish'))
        # Pass the callable and its argument separately: the original wrote
        # assertRaises(TypeError, utils.reformat_date('date')), which calls
        # reformat_date eagerly so assertRaises never guards the call.
        self.assertRaises(TypeError, utils.reformat_date, 'date')

    def test_convert_to_integer_list(self):
        # correct input
        r = utils.convert_to_integer_list('1,2,3')
        self.assertEqual([1, 2, 3], r)
        # try other combinations
        r = utils.convert_to_integer_list('1, 2, 3')
        self.assertEqual([1, 2, 3], r)
        r = utils.convert_to_integer_list('1s,2df,3d/')
        self.assertEqual([1, 2, 3], r)
        r = utils.convert_to_integer_list([1, 3, 4])
        self.assertEqual([1, 3, 4], r)
        r = utils.convert_to_integer_list('1,11,10')
        self.assertEqual([1, 11, 10], r)
        # the QA band name survives as a string
        r = utils.convert_to_integer_list('1,11,10,QA')
        self.assertEqual([1, 11, 10, 'QA'], r)

    def test_geocode(self):
        # NOTE(review): hits a live geocoding service — network dependent.
        loc = utils.geocode('1600 Pennsylvania Ave NW, Washington, DC 20500')
        self.assertEqual(round(loc['lat'], 3), 38.898)
        self.assertEqual(round(loc['lon'], 3), -77.037)
        # A partial address fails at default precision but resolves with a
        # looser parameter.
        self.assertRaises(ValueError, utils.geocode, 'Pennsylvania Ave NW, Washington, DC')
        self.assertEqual({'lat': 38.8987352, 'lon': -77.0350902},
                         utils.geocode('Pennsylvania Ave NW, Washington, DC', 10.))

    def test_convert_to_float_list(self):
        # correct input, negatives included
        r = utils.convert_to_float_list('-1,2,-3')
        self.assertEqual([-1.0, 2.0, -3.0], r)
        # try other combinations
        r = utils.convert_to_float_list('1, 2, 3')
        self.assertEqual([1.0, 2.0, 3.0], r)
        r = utils.convert_to_float_list('1s,2df,3d/')
        self.assertEqual([1.0, 2.0, 3.0], r)
        r = utils.convert_to_float_list([1, 3, 4])
        self.assertEqual([1, 3, 4], r)
        r = utils.convert_to_float_list('1,11,10')
        self.assertEqual([1.0, 11.0, 10.0], r)

    def test_adjust_bounding_box(self):
        # Target fully inside origin: returns the target bounds
        origin = (100, 10, 80, 20)
        target = (90, 15, 91, 15)
        self.assertEqual(utils.adjust_bounding_box(origin, target), target)
        # Target partially intersecting origin: returns the clipped bounds
        origin = (100, 10, 80, 20)
        target = (120, -5, 99, 15)
        expected = (100, 10, 99, 15)
        self.assertEqual(utils.adjust_bounding_box(origin, target), expected)
        # No intersection at all: origin is returned unchanged.
        # Target above origin
        origin = (100, 10, 80, 20)
        target = (120, -5, 110, 9)
        self.assertEqual(utils.adjust_bounding_box(origin, target), origin)
        # Target to the right of origin
        origin = (100, 10, 80, 20)
        target = (82, 23, 91, 26)
        self.assertEqual(utils.adjust_bounding_box(origin, target), origin)
        # Target below origin
        origin = (100, 10, 80, 20)
        target = (70, 11, 60, 18)
        self.assertEqual(utils.adjust_bounding_box(origin, target), origin)
        # Target to the left of origin
        origin = (100, 10, 80, 20)
        target = (80, -20, 79, -19)
        self.assertEqual(utils.adjust_bounding_box(origin, target), origin)

    def test_url_builder(self):
        # Stray slashes around segments are normalized away
        self.assertEqual('http://example.com/segment1/segment2',
                         utils.url_builder(['/http://example.com', 'segment1/', '/segment2']))
        self.assertEqual('http://example.com/segment1/segment2',
                         utils.url_builder(('/http://example.com', 'segment1/', '/segment2',)))
        # A bare string (not a list/tuple) is rejected
        with self.assertRaises(AssertionError):
            utils.url_builder('example.com')
# Support running this test module directly: `python test_utils.py`.
if __name__ == '__main__':
unittest.main()

17
tox.ini Normal file
View File

@@ -0,0 +1,17 @@
[tox]
# Python versions exercised by tox; CI (.travis.yml) only runs 2.7 and 3.5,
# so py34 is local-only coverage.
envlist = py27,py34,py35
[testenv]
# Build prerequisites installed before the project itself.
deps =
wheel
cython>=0.21
pip>=8.1.1
jsonschema
mock>=1.3.0
nose>=1.3.7
pytest
commands =
pip install -r requirements.txt
pip install -e .
python setup.py test
# Legacy pip flags required for dependency links; obsolete on modern pip.
install_command=pip install --process-dependency-links --allow-external --allow-unverified {opts} {packages}