Moved resample out of parsers to its own sub-package #158

Closed · wants to merge 7 commits
23 changes: 23 additions & 0 deletions news/resample-relocation.rst
@@ -0,0 +1,23 @@
**Added:**

* <news item>

**Changed:**

* Moved the resampler out of parsers; the new import path is diffpy.utils.resampler

**Deprecated:**

* <news item>

**Removed:**

* Relative imports in the parsers package's __init__.py

**Fixed:**

* File and function names now follow PEP 8 naming conventions

**Security:**

* <news item>
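For reference, a minimal sketch of how an import changes under the relocation described in this news entry; it assumes both wsinterp (used in tests/test_resample.py below) and resample (previously re-exported from parsers) are exposed by the new module:

```python
# Before this PR the resampler lived inside the parsers sub-package:
# from diffpy.utils.parsers.resample import resample, wsinterp

# After this PR it is its own module directly under diffpy.utils:
from diffpy.utils.resampler import resample, wsinterp
```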
8 changes: 0 additions & 8 deletions src/diffpy/utils/parsers/__init__.py
@@ -16,12 +16,4 @@
"""Various utilities related to data parsing and manipulation.
"""

from .loaddata import loadData
from .resample import resample
from .serialization import deserialize_data, serialize_data

# silence the pyflakes syntax checker
assert loadData or resample or True
assert serialize_data or deserialize_data or True

# End of file
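With the re-exports removed from parsers/__init__.py above, downstream code imports straight from the submodules; a sketch based on the new import lines in the test files below:

```python
# Package-level re-exports are gone, so import from the submodules directly.
from diffpy.utils.parsers.data_loader import load_data
from diffpy.utils.parsers.serializer import deserialize_data, serialize_data
from diffpy.utils.resampler import wsinterp
```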
@@ -16,7 +16,7 @@
import numpy


def loadData(filename, minrows=10, headers=False, hdel="=", hignore=None, **kwargs):
def load_data(filename, minrows=10, headers=False, hdel="=", hignore=None, **kwargs):
"""Find and load data from a text file.

The data block is identified as the first matrix block of at least minrows rows and constant number of columns.
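A hedged usage sketch of the renamed loader; the module path comes from the test imports further down, and the file names and header options mirror tests/test_loaddata.py and are illustrative:

```python
import numpy
from diffpy.utils.parsers.data_loader import load_data

# Default call: returns the first numeric block with at least minrows rows.
data_table = load_data("loaddata01.txt")
assert isinstance(data_table, numpy.ndarray)

# With headers=True the header key/value pairs are parsed as well,
# splitting on ": " and skipping lines that start with the hignore prefixes.
hdata = load_data(
    "loaddatawithheaders.txt",
    headers=True,
    hdel=": ",
    hignore=["# ", "// ", "["],
)
```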
File renamed without changes.
24 changes: 12 additions & 12 deletions src/diffpy/utils/scattering_objects/diffraction_objects.py
@@ -17,7 +17,7 @@
)


class Diffraction_object:
class DiffractionObject:
def __init__(self, name="", wavelength=None):
self.name = name
self.wavelength = wavelength
@@ -29,7 +29,7 @@ def __init__(self, name="", wavelength=None):
self.metadata = {}

def __eq__(self, other):
if not isinstance(other, Diffraction_object):
if not isinstance(other, DiffractionObject):
return NotImplemented
self_attributes = [key for key in self.__dict__ if not key.startswith("_")]
other_attributes = [key for key in other.__dict__ if not key.startswith("_")]
@@ -59,8 +59,8 @@ def __add__(self, other):
if isinstance(other, int) or isinstance(other, float) or isinstance(other, np.ndarray):
summed.on_tth[1] = self.on_tth[1] + other
summed.on_q[1] = self.on_q[1] + other
elif not isinstance(other, Diffraction_object):
raise TypeError("I only know how to sum two Diffraction_object objects")
elif not isinstance(other, DiffractionObject):
raise TypeError("I only know how to sum two DiffractionObject objects")
elif self.on_tth[0].all() != other.on_tth[0].all():
raise RuntimeError(x_grid_emsg)
else:
@@ -73,7 +73,7 @@ def __radd__(self, other):
if isinstance(other, int) or isinstance(other, float) or isinstance(other, np.ndarray):
summed.on_tth[1] = self.on_tth[1] + other
summed.on_q[1] = self.on_q[1] + other
elif not isinstance(other, Diffraction_object):
elif not isinstance(other, DiffractionObject):
raise TypeError("I only know how to sum two Scattering_object objects")
elif self.on_tth[0].all() != other.on_tth[0].all():
raise RuntimeError(x_grid_emsg)
@@ -87,7 +87,7 @@ def __sub__(self, other):
if isinstance(other, int) or isinstance(other, float) or isinstance(other, np.ndarray):
subtracted.on_tth[1] = self.on_tth[1] - other
subtracted.on_q[1] = self.on_q[1] - other
elif not isinstance(other, Diffraction_object):
elif not isinstance(other, DiffractionObject):
raise TypeError("I only know how to subtract two Scattering_object objects")
elif self.on_tth[0].all() != other.on_tth[0].all():
raise RuntimeError(x_grid_emsg)
@@ -101,7 +101,7 @@ def __rsub__(self, other):
if isinstance(other, int) or isinstance(other, float) or isinstance(other, np.ndarray):
subtracted.on_tth[1] = other - self.on_tth[1]
subtracted.on_q[1] = other - self.on_q[1]
elif not isinstance(other, Diffraction_object):
elif not isinstance(other, DiffractionObject):
raise TypeError("I only know how to subtract two Scattering_object objects")
elif self.on_tth[0].all() != other.on_tth[0].all():
raise RuntimeError(x_grid_emsg)
@@ -115,7 +115,7 @@ def __mul__(self, other):
if isinstance(other, int) or isinstance(other, float) or isinstance(other, np.ndarray):
multiplied.on_tth[1] = other * self.on_tth[1]
multiplied.on_q[1] = other * self.on_q[1]
elif not isinstance(other, Diffraction_object):
elif not isinstance(other, DiffractionObject):
raise TypeError("I only know how to multiply two Scattering_object objects")
elif self.on_tth[0].all() != other.on_tth[0].all():
raise RuntimeError(x_grid_emsg)
@@ -141,7 +141,7 @@ def __truediv__(self, other):
if isinstance(other, int) or isinstance(other, float) or isinstance(other, np.ndarray):
divided.on_tth[1] = other / self.on_tth[1]
divided.on_q[1] = other / self.on_q[1]
elif not isinstance(other, Diffraction_object):
elif not isinstance(other, DiffractionObject):
raise TypeError("I only know how to multiply two Scattering_object objects")
elif self.on_tth[0].all() != other.on_tth[0].all():
raise RuntimeError(x_grid_emsg)
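A hedged sketch of the scalar arithmetic shown in the operator hunks above, using the renamed class; the grids and intensities assigned to on_tth and on_q are purely illustrative:

```python
import numpy as np
from diffpy.utils.scattering_objects.diffraction_objects import DiffractionObject

do = DiffractionObject(name="example", wavelength=1.54)
# on_tth and on_q hold [x_array, intensity_array] pairs, per the code above.
do.on_tth = [np.linspace(1.0, 60.0, 60), np.ones(60)]
do.on_q = [np.linspace(0.1, 6.0, 60), np.ones(60)]

shifted = do + 5     # adds 5 to the intensities on both grids
doubled = do * 2     # multiplies the intensities by 2
```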
@@ -389,7 +389,7 @@ def scale_to(self, target_diff_object, xtype=None, xvalue=None):

Parameters
----------
target_diff_object: Diffraction_object
target_diff_object: DiffractionObject
the diffraction object you want to scale the current one onto
xtype: string, optional. Default is Q
the xtype, from {XQUANTITIES}, that you will specify a point from to scale to
@@ -400,7 +400,7 @@ def scale_to(self, target_diff_object, xtype=None, xvalue=None):

Returns
-------
the rescaled Diffraction_object as a new object
the rescaled DiffractionObject as a new object

"""
scaled = deepcopy(self)
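Continuing the DiffractionObject sketch above, a hedged example of scale_to as documented in this hunk; it assumes "q" is among the accepted xtype strings ({XQUANTITIES}) and that target is another DiffractionObject on the same grid:

```python
# `do` is the DiffractionObject from the sketch above; make a second curve on
# the same grid that differs only by an overall scale factor.
target = do * 10

# Rescale `do` onto `target` so the two intensities match at Q = 2.0.
# A new DiffractionObject is returned; `do` and `target` are unchanged.
rescaled = do.scale_to(target, xtype="q", xvalue=2.0)
```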
@@ -454,7 +454,7 @@ def dump(self, filepath, xtype=None):

with open(filepath, "w") as f:
f.write(
f"[Diffraction_object]\nname = {self.name}\nwavelength = {self.wavelength}\n"
f"[DiffractionObject]\nname = {self.name}\nwavelength = {self.wavelength}\n"
f"scat_quantity = {self.scat_quantity}\n"
)
for key, value in self.metadata.items():
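And a hedged sketch of dump with the renamed section header shown above; `do` is the object from the earlier sketch, and the xtype value is an assumption based on the scale_to docstring:

```python
do.scat_quantity = "x-ray"  # set explicitly, as in tests/test_diffraction_objects.py

# Writes a "[DiffractionObject]" header, name/wavelength/scat_quantity,
# the metadata entries, and then the data table to the given path.
do.dump("example_dump.txt", xtype="q")
```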
File renamed without changes.
10 changes: 5 additions & 5 deletions tests/test_diffraction_objects.py
@@ -4,7 +4,7 @@
import pytest
from freezegun import freeze_time

from diffpy.utils.scattering_objects.diffraction_objects import Diffraction_object
from diffpy.utils.scattering_objects.diffraction_objects import DiffractionObject

params = [
( # Default
@@ -222,8 +222,8 @@

@pytest.mark.parametrize("inputs1, inputs2, expected", params)
def test_diffraction_objects_equality(inputs1, inputs2, expected):
diffraction_object1 = Diffraction_object()
diffraction_object2 = Diffraction_object()
diffraction_object1 = DiffractionObject()
diffraction_object2 = DiffractionObject()
diffraction_object1_attributes = [key for key in diffraction_object1.__dict__ if not key.startswith("_")]
for i, attribute in enumerate(diffraction_object1_attributes):
setattr(diffraction_object1, attribute, inputs1[i])
@@ -235,7 +235,7 @@ def test_dump(tmp_path, mocker):
x, y = np.linspace(0, 5, 6), np.linspace(0, 5, 6)
directory = Path(tmp_path)
file = directory / "testfile"
test = Diffraction_object()
test = DiffractionObject()
test.wavelength = 1.54
test.name = "test"
test.scat_quantity = "x-ray"
@@ -251,7 +251,7 @@ def test_dump(tmp_path, mocker):
with open(file, "r") as f:
actual = f.read()
expected = (
"[Diffraction_object]\nname = test\nwavelength = 1.54\nscat_quantity = x-ray\nthing1 = 1\n"
"[DiffractionObject]\nname = test\nwavelength = 1.54\nscat_quantity = x-ray\nthing1 = 1\n"
"thing2 = thing2\npackage_info = {'package2': '3.4.5', 'diffpy.utils': '3.3.0'}\n"
"creation_time = 2012-01-14 00:00:00\n\n"
"#### start data\n0.000000000000000000e+00 0.000000000000000000e+00\n"
20 changes: 10 additions & 10 deletions tests/test_loaddata.py
@@ -8,7 +8,7 @@
import numpy
import pytest

from diffpy.utils.parsers import loadData
from diffpy.utils.parsers.data_loader import load_data


##############################################################################
@@ -21,29 +21,29 @@ def test_loadData_default(self):
"""check loadData() with default options"""
loaddata01 = self.datafile("loaddata01.txt")
d2c = numpy.array([[3, 31], [4, 32], [5, 33]])
self.assertRaises(IOError, loadData, "doesnotexist")
self.assertRaises(IOError, load_data, "doesnotexist")
# the default minrows=10 makes it read from the third line
d = loadData(loaddata01)
d = load_data(loaddata01)
self.assertTrue(numpy.array_equal(d2c, d))
# the usecols=(0, 1) would make it read from the third line
d = loadData(loaddata01, minrows=1, usecols=(0, 1))
d = load_data(loaddata01, minrows=1, usecols=(0, 1))
self.assertTrue(numpy.array_equal(d2c, d))
# check the effect of usecols effect
d = loadData(loaddata01, usecols=(0,))
d = load_data(loaddata01, usecols=(0,))
self.assertTrue(numpy.array_equal(d2c[:, 0], d))
d = loadData(loaddata01, usecols=(1,))
d = load_data(loaddata01, usecols=(1,))
self.assertTrue(numpy.array_equal(d2c[:, 1], d))
return

def test_loadData_1column(self):
"""check loading of one-column data."""
loaddata01 = self.datafile("loaddata01.txt")
d1c = numpy.arange(1, 6)
d = loadData(loaddata01, usecols=[0], minrows=1)
d = load_data(loaddata01, usecols=[0], minrows=1)
self.assertTrue(numpy.array_equal(d1c, d))
d = loadData(loaddata01, usecols=[0], minrows=2)
d = load_data(loaddata01, usecols=[0], minrows=2)
self.assertTrue(numpy.array_equal(d1c, d))
d = loadData(loaddata01, usecols=[0], minrows=3)
d = load_data(loaddata01, usecols=[0], minrows=3)
self.assertFalse(numpy.array_equal(d1c, d))
return

@@ -52,7 +52,7 @@ def test_loadData_headers(self):
loaddatawithheaders = self.datafile("loaddatawithheaders.txt")
hignore = ["# ", "// ", "["] # ignore lines beginning with these strings
delimiter = ": " # what our data should be separated by
hdata = loadData(loaddatawithheaders, headers=True, hdel=delimiter, hignore=hignore)
hdata = load_data(loaddatawithheaders, headers=True, hdel=delimiter, hignore=hignore)
# only fourteen lines of data are formatted properly
assert len(hdata) == 14
# check the following are floats
2 changes: 1 addition & 1 deletion tests/test_resample.py
@@ -1,7 +1,7 @@
import numpy as np
import pytest

from diffpy.utils.parsers.resample import wsinterp
from diffpy.utils.resampler import wsinterp


def test_wsinterp():
19 changes: 10 additions & 9 deletions tests/test_serialization.py
@@ -3,8 +3,9 @@
import numpy
import pytest

from diffpy.utils.parsers import deserialize_data, loadData, serialize_data
from diffpy.utils.parsers.custom_exceptions import ImproperSizeError, UnsupportedTypeError
from diffpy.utils.parsers.data_loader import load_data
from diffpy.utils.parsers.serializer import deserialize_data, serialize_data

tests_dir = os.path.dirname(os.path.abspath(locals().get("__file__", "file.py")))

@@ -20,8 +21,8 @@ def test_load_multiple(tmp_path, datafile):
for hfname in tlm_list:
# gather data using loadData
headerfile = os.path.normpath(os.path.join(tests_dir, "testdata", "dbload", hfname))
hdata = loadData(headerfile, headers=True)
data_table = loadData(headerfile)
hdata = load_data(headerfile, headers=True)
data_table = load_data(headerfile)

# check path extraction
generated_data = serialize_data(headerfile, hdata, data_table, dt_colnames=["r", "gr"], show_path=True)
@@ -50,8 +51,8 @@ def test_exceptions(datafile):
loadfile = datafile("loadfile.txt")
warningfile = datafile("generatewarnings.txt")
nodt = datafile("loaddatawithheaders.txt")
hdata = loadData(loadfile, headers=True)
data_table = loadData(loadfile)
hdata = load_data(loadfile, headers=True)
data_table = load_data(loadfile)

# improper file types
with pytest.raises(UnsupportedTypeError):
@@ -87,15 +88,15 @@ def test_exceptions(datafile):
assert numpy.allclose(r_extract[data_name]["r"], r_list)
assert numpy.allclose(gr_extract[data_name]["gr"], gr_list)
# no datatable
nodt_hdata = loadData(nodt, headers=True)
nodt_dt = loadData(nodt)
nodt_hdata = load_data(nodt, headers=True)
nodt_dt = load_data(nodt)
no_dt = serialize_data(nodt, nodt_hdata, nodt_dt, show_path=False)
nodt_data_name = list(no_dt.keys())[0]
assert numpy.allclose(no_dt[nodt_data_name]["data table"], nodt_dt)

# ensure user is warned when columns are overwritten
hdata = loadData(warningfile, headers=True)
data_table = loadData(warningfile)
hdata = load_data(warningfile, headers=True)
data_table = load_data(warningfile)
with pytest.warns(RuntimeWarning) as record:
serialize_data(
warningfile,
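A hedged round-trip sketch under the new serializer and data_loader paths, mirroring the calls in the test above (the file name is illustrative):

```python
from diffpy.utils.parsers.data_loader import load_data
from diffpy.utils.parsers.serializer import deserialize_data, serialize_data

hdata = load_data("loadfile.txt", headers=True)
data_table = load_data("loadfile.txt")

# Serialize the header metadata together with the data table, keyed by file name.
serialized = serialize_data(
    "loadfile.txt", hdata, data_table, dt_colnames=["r", "gr"], show_path=False
)
# deserialize_data reverses the process (its exact call is not shown in this diff).
```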