Merged
doc/api/index.rst (1 change: 0 additions & 1 deletion)
@@ -236,7 +236,6 @@ Use :func:`pygmt.datasets.load_sample_data` instead.
:toctree: generated

datasets.load_mars_shape
datasets.load_usgs_quakes

.. automodule:: pygmt.exceptions

pygmt/datasets/__init__.py (7 changes: 1 addition & 6 deletions)
@@ -10,9 +10,4 @@
from pygmt.datasets.earth_vertical_gravity_gradient import (
load_earth_vertical_gravity_gradient,
)
from pygmt.datasets.samples import (
list_sample_data,
load_mars_shape,
load_sample_data,
load_usgs_quakes,
)
from pygmt.datasets.samples import list_sample_data, load_mars_shape, load_sample_data
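With load_usgs_quakes gone from the public pygmt.datasets namespace, the earthquake table is reached only through load_sample_data. A minimal sketch of the replacement usage (the describe() call is just one way to inspect the result):

import pygmt

# Load the USGS earthquake sample table through the unified sample-data API.
# The file is fetched into the GMT cache (usually ~/.gmt/cache) on first use
# and read from there afterwards.
data = pygmt.datasets.load_sample_data(name="usgs_quakes")

# Inspect the available columns and basic statistics.
print(data.describe())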
pygmt/datasets/samples.py (29 changes: 4 additions & 25 deletions)
@@ -71,7 +71,6 @@ def load_sample_data(name):
# Dictionary of public load functions for backwards compatibility
load_func_old = {
"mars_shape": load_mars_shape,
"usgs_quakes": load_usgs_quakes,
}

# Dictionary of private load functions
@@ -85,6 +84,7 @@ def load_sample_data(name):
"notre_dame_topography": _load_notre_dame_topography,
"ocean_ridge_points": _load_ocean_ridge_points,
"rock_compositions": _load_rock_sample_compositions,
"usgs_quakes": _load_usgs_quakes,
Review comment: Need to remove "usgs_quakes": load_usgs_quakes at line 75.

}

if name in load_func_old:
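load_sample_data dispatches on the dataset name through two dictionaries: load_func_old for loaders that are still public but deprecated, and a second dictionary for the private _load_* functions. A hypothetical re-creation of that dispatch pattern is sketched below; the real function in pygmt/datasets/samples.py differs in details such as the warning handling and the exact error message:

from pygmt.datasets.samples import _load_usgs_quakes, load_mars_shape
from pygmt.exceptions import GMTInvalidInput


def _load_by_name(name):
    # Deprecated public loaders kept for backwards compatibility.
    load_func_old = {"mars_shape": load_mars_shape}
    # Private loaders that are only reachable through load_sample_data.
    load_func = {"usgs_quakes": _load_usgs_quakes}

    if name in load_func_old:
        # The deprecated loaders emit their own FutureWarning unless suppressed.
        return load_func_old[name](suppress_warning=True)
    if name in load_func:
        return load_func[name]()
    raise GMTInvalidInput(f"Invalid dataset name '{name}'.")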
@@ -163,39 +163,18 @@ def _load_baja_california_bathymetry():
)


def load_usgs_quakes(**kwargs):
def _load_usgs_quakes():
"""
(Deprecated) Load a table of global earthquakes from the USGS as a
pandas.DataFrame.

.. warning:: Deprecated since v0.6.0. This function has been replaced with
``load_sample_data(name="usgs_quakes")`` and will be removed in
v0.9.0.

This is the ``@usgs_quakes_22.txt`` dataset used in the GMT tutorials.

The data are downloaded to a cache directory (usually ``~/.gmt/cache``) the
first time you invoke this function. Afterwards, it will load the data from
the cache. So you'll need an internet connection the first time around.
Load a table of global earthquakes from the USGS as a pandas.DataFrame.

Returns
-------
data : pandas.DataFrame
The data table. Use ``print(data.describe())`` to see the available
columns.
"""

if "suppress_warning" not in kwargs:
warnings.warn(
"This function has been deprecated since v0.6.0 and will be "
"removed in v0.9.0. Please use "
"load_sample_data(name='usgs_quakes') instead.",
category=FutureWarning,
stacklevel=2,
)
fname = which("@usgs_quakes_22.txt", download="c")
data = pd.read_csv(fname)
return data
return pd.read_csv(fname)


def _load_fractures_compilation():
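The private loader keeps the behaviour described in the old docstring: which(..., download="c") downloads the remote file into the GMT cache (usually ~/.gmt/cache) on the first call and returns the local path afterwards. A small sketch of that mechanism on its own, assuming a working PyGMT installation:

import pandas as pd
import pygmt

# Resolve the remote sample file; download="c" places it in the GMT cache,
# so only the first call needs an internet connection.
fname = pygmt.which("@usgs_quakes_22.txt", download="c")

# Read the cached file exactly as _load_usgs_quakes() does.
quakes = pd.read_csv(fname)
print(quakes.shape)  # the test below expects (1197, 22)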
pygmt/tests/test_datasets_samples.py (56 changes: 51 additions & 5 deletions)
@@ -4,7 +4,7 @@
import numpy.testing as npt
import pandas as pd
import pytest
from pygmt.datasets import load_mars_shape, load_sample_data, load_usgs_quakes
from pygmt.datasets import load_mars_shape, load_sample_data
from pygmt.exceptions import GMTInvalidInput


@@ -58,12 +58,58 @@ def test_sample_bathymetry():

def test_usgs_quakes():
"""
Check that the dataset loads without errors.
Check that the @usgs_quakes_22.txt dataset loads without errors.
"""
with pytest.warns(expected_warning=FutureWarning) as record:
data = load_usgs_quakes()
assert len(record) == 1
data = load_sample_data(name="usgs_quakes")
assert data.shape == (1197, 22)
assert list(data.columns) == [
"time",
"latitude",
"longitude",
"depth",
"mag",
"magType",
"nst",
"gap",
"dmin",
"rms",
"net",
"id",
"updated",
"place",
"type",
"horizontalError",
"depthError",
"magError",
"magNst",
"status",
"locationSource",
"magSource",
]
npt.assert_allclose(data["latitude"].min(), -60.6819)
npt.assert_allclose(data["latitude"].max(), 72.6309)
npt.assert_allclose(data["longitude"].min(), -179.9953)
npt.assert_allclose(data["longitude"].max(), 179.9129)
npt.assert_allclose(data["depth"].min(), -0.21)
npt.assert_allclose(data["depth"].max(), 640.49)
npt.assert_allclose(data["mag"].min(), 3)
npt.assert_allclose(data["mag"].max(), 8.1)
npt.assert_allclose(data["nst"].min(), 3)
npt.assert_allclose(data["nst"].max(), 167)
npt.assert_allclose(data["gap"].min(), 10.0)
npt.assert_allclose(data["gap"].max(), 353.0)
npt.assert_allclose(data["dmin"].min(), 0.006421)
npt.assert_allclose(data["dmin"].max(), 39.455)
npt.assert_allclose(data["rms"].min(), 0.02)
npt.assert_allclose(data["rms"].max(), 1.76)
npt.assert_allclose(data["horizontalError"].min(), 0.09)
npt.assert_allclose(data["horizontalError"].max(), 36.8)
npt.assert_allclose(data["depthError"].min(), 0)
npt.assert_allclose(data["depthError"].max(), 65.06)
npt.assert_allclose(data["magError"].min(), 0.02)
npt.assert_allclose(data["magError"].max(), 0.524)
npt.assert_allclose(data["magNst"].min(), 1)
npt.assert_allclose(data["magNst"].max(), 944)


def test_fractures_compilation():
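The rewritten test pins the table's shape, column names, and value ranges instead of checking for the deprecation warning. To exercise just this test against a development install of PyGMT, one option (assuming pytest is available) is:

import pytest

# Run only the updated earthquake-sample test; -v prints the test name.
pytest.main(["-v", "pygmt/tests/test_datasets_samples.py::test_usgs_quakes"])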