diff --git a/trackpy/__init__.py b/trackpy/__init__.py index 71f49172..c0dac059 100644 --- a/trackpy/__init__.py +++ b/trackpy/__init__.py @@ -1,8 +1,13 @@ +# Configure a logger from trackpy. +# This must be done before utils is imported. import logging -FORMAT = "%(name)s.%(funcName)s: %(message)s" -logging.basicConfig(level=logging.WARN, format=FORMAT) +logger = logging.getLogger(__name__) + -from trackpy.api import * from ._version import get_versions __version__ = get_versions()['version'] del get_versions + +from trackpy.api import * + +handle_logging() diff --git a/trackpy/api.py b/trackpy/api.py index 98826e36..4bb75c1a 100644 --- a/trackpy/api.py +++ b/trackpy/api.py @@ -8,7 +8,8 @@ from .feature import * from .preprocessing import bandpass from .framewise_data import * -from . import utils +from . import utils, predict, diag +from .utils import handle_logging, ignore_logging, quiet from .try_numba import try_numba_autojit, enable_numba, disable_numba # Import all of pims top-level for convenience. diff --git a/trackpy/feature.py b/trackpy/feature.py index 7e7cf1a2..506faa3c 100644 --- a/trackpy/feature.py +++ b/trackpy/feature.py @@ -2,6 +2,7 @@ unicode_literals) import six import warnings +import logging import numpy as np import pandas as pd @@ -12,20 +13,20 @@ from . 
import uncertainty from .preprocessing import bandpass, scale_to_gamut -from .utils import record_meta, print_update -from .masks import * +from .utils import record_meta +from .masks import binary_mask, r_squared_mask, cosmask, sinmask import trackpy # to get trackpy.__version__ from .try_numba import try_numba_autojit, NUMBA_AVAILABLE -__all__ = ['locate', 'batch', 'percentile_threshold', 'local_maxima', - 'refine', 'estimate_mass', 'estimate_size'] +logger = logging.getLogger(__name__) +__all__ = ['percentile_threshold', 'local_maxima', 'refine', 'locate', + 'batch', 'estimate_mass', 'estimate_size'] def percentile_threshold(image, percentile): """Find grayscale threshold based on distribution in image.""" - ndim = image.ndim not_black = image[np.nonzero(image)] if len(not_black) == 0: return np.nan @@ -72,7 +73,7 @@ def local_maxima(image, radius, percentile=64, margin=None): # Return coords in as a numpy array shaped so it can be passed directly # to the DataFrame constructor. - return maxima + return maxima def estimate_mass(image, radius, coord): @@ -214,7 +215,7 @@ def _refine(raw_image, image, radius, coords, max_iterations, for iteration in range(max_iterations): off_center = cm_n - radius if walkthrough: - print_update(off_center) + logger.info('%f', off_center) if np.all(np.abs(off_center) < GOOD_ENOUGH_THRESH): break # Accurate enough. 
@@ -747,8 +748,7 @@ def batch(frames, diameter, minmass=100, maxsize=None, separation=None, else: frame_no = i features['frame'] = i # just counting iterations - message = "Frame %d: %d features" % (frame_no, len(features)) - print_update(message) + logger.info("Frame %d: %d features", frame_no, len(features)) if len(features) == 0: continue diff --git a/trackpy/filtering.py b/trackpy/filtering.py index 5ce2e6ef..1d52c8fd 100644 --- a/trackpy/filtering.py +++ b/trackpy/filtering.py @@ -1,9 +1,11 @@ +"""Simple functions that eliminate spurious trajectories +by wrapping pandas group-by and filter capabilities.""" + from __future__ import (absolute_import, division, print_function, unicode_literals) import six -"""Simple functions that eliminate spurrious trajectories -by wrapping pandas group-by and filter capabilities.""" +__all__ = ['filter_stubs', 'filter_clusters', 'filter'] def filter_stubs(tracks, threshold=100): diff --git a/trackpy/framewise_data.py b/trackpy/framewise_data.py index 2e8628b7..13a139e4 100644 --- a/trackpy/framewise_data.py +++ b/trackpy/framewise_data.py @@ -6,10 +6,12 @@ import pandas as pd -from .utils import print_update +import logging -__all__ = ['FramewiseData', 'PandasHDFStore', 'PandasHDFStoreBig', - 'PandasHDFStoreSingleNode'] +logger = logging.getLogger(__name__) + +__all__ = ['PandasHDFStore', 'PandasHDFStoreBig', 'PandasHDFStoreSingleNode', + 'FramewiseData'] class FramewiseData(object): @@ -300,14 +302,14 @@ def frames(self): return frame_nos def _validate_node(self, use_tabular_copy): - # The HDFStore might be non-tabular, which means we cannot select a + # The HDFStore might be non-tabular, which means we cannot select a # subset, and this whole structure will not work. # For convenience, this can rewrite the table into a tabular node. 
if use_tabular_copy: self.key = _make_tabular_copy(self.filename, self.key) pandas_type = getattr(getattr(getattr( - self.store._handle.root, self.key, None), '_v_attrs', None), + self.store._handle.root, self.key, None), '_v_attrs', None), 'pandas_type', None) if not pandas_type == 'frame_table': raise ValueError("This node is not tabular. Call with " @@ -318,6 +320,6 @@ def _make_tabular_copy(store, key): """Copy the contents nontabular node in a pandas HDFStore into a tabular node""" tabular_key = key + '/tabular' - print_update("Making a tabular copy of %s at %s" % (key, tabular_key)) + logger.info("Making a tabular copy of %s at %s" % (key, tabular_key)) store.append(tabular_key, store.get(key), data_columns=True) return tabular_key diff --git a/trackpy/linking.py b/trackpy/linking.py index 90ea1df9..11e3d75e 100644 --- a/trackpy/linking.py +++ b/trackpy/linking.py @@ -2,6 +2,7 @@ unicode_literals) import six from six.moves import zip, range +import logging from copy import copy import itertools, functools from collections import deque, Iterable @@ -10,14 +11,13 @@ from scipy.spatial import cKDTree import pandas as pd -from .utils import print_update from .try_numba import try_numba_autojit, NUMBA_AVAILABLE __all__ = ['HashTable', 'TreeFinder', 'Point', 'PointND', 'IndexedPointND', 'Track', 'DummyTrack', 'UnknownLinkingError', 'SubnetOversizeException', 'link', 'link_df', 'link_iter', 'link_df_iter'] - +logger = logging.getLogger(__name__) class TreeFinder(object): @@ -536,8 +536,7 @@ def link_df(features, search_range, memory=0, "%d".format(frame_no)) features['particle'].update(labels) - msg = "Frame %d: %d trajectories present" % (frame_no, len(labels)) - print_update(msg) + logger.info("Frame %d: %d trajectories present", frame_no, len(labels)) if retain_index: features.index = orig_index @@ -662,8 +661,7 @@ def link_df_iter(features, search_range, memory=0, features.sort('particle', inplace=True) features.reset_index(drop=True, inplace=True) - msg = 
"Frame %d: %d trajectories present" % (frame_no, len(labels)) - print_update(msg) + logger.info("Frame %d: %d trajectories present", frame_no, len(labels)) yield features diff --git a/trackpy/plots.py b/trackpy/plots.py index aa82354b..061c7683 100644 --- a/trackpy/plots.py +++ b/trackpy/plots.py @@ -7,12 +7,11 @@ from collections import Iterable from functools import wraps import warnings +import logging import numpy as np -import pandas as pd -from pandas import DataFrame, Series -from .utils import print_update +logger = logging.getLogger(__name__) __all__ = ['annotate', 'plot_traj', 'ptraj', 'plot_displacements', @@ -132,7 +131,7 @@ def plot_traj(traj, colorby='particle', mpp=None, label=False, x = traj.set_index([t_column, 'particle'])['x'].unstack() y = traj.set_index([t_column, 'particle'])['y'].unstack() color_numbers = traj[t_column].values/float(traj[t_column].max()) - print_update("Drawing multicolor lines takes awhile. " + logger.info("Drawing multicolor lines takes awhile. " "Come back in a minute.") for particle in x: points = np.array( @@ -176,7 +175,7 @@ def annotate(centroids, image, circle_size=None, color=None, ax : matplotlib axes object, defaults to current axes split_category : string, parameter to use to split the data into sections default None - split_thresh : single value or list of ints or floats to split + split_thresh : single value or list of ints or floats to split particles into sections for plotting in multiple colors. List items should be ordered by increasing value. default None @@ -184,7 +183,7 @@ def annotate(centroids, image, circle_size=None, color=None, the `Axes.imshow(...)` command the displays the image plot_style : dictionary of keyword arguments passed through to the `Axes.plot(...)` command that marks the features - + Returns ------ axes @@ -197,7 +196,7 @@ def annotate(centroids, image, circle_size=None, color=None, if 'marker_size' not in plot_style: plot_style['marker_size'] = np.sqrt(circle_size) # area vs. dia. 
else: - raise ValueError("passed in both 'marker_size' and 'circle_size'") + raise ValueError("passed in both 'marker_size' and 'circle_size'") _plot_style = dict(markersize=15, markeredgewidth=2, markerfacecolor='none', markeredgecolor='r', @@ -244,19 +243,19 @@ def pairwise(iterable): "plus 1") low = centroids[split_category] < split_thresh[0] _plot_style.update(markeredgecolor=color[0]) - ax.plot(centroids['x'][low], centroids['y'][low], + ax.plot(centroids['x'][low], centroids['y'][low], **_plot_style) for c, (bot, top) in zip(color[1:-1], pairwise(split_thresh)): indx = ((centroids[split_category] >= bot) & (centroids[split_category] < top)) _plot_style.update(markeredgecolor=c) - ax.plot(centroids['x'][indx], centroids['y'][indx], + ax.plot(centroids['x'][indx], centroids['y'][indx], **_plot_style) high = centroids[split_category] >= split_thresh[-1] _plot_style.update(markeredgecolor=color[-1]) - ax.plot(centroids['x'][high], centroids['y'][high], + ax.plot(centroids['x'][high], centroids['y'][high], **_plot_style) return ax diff --git a/trackpy/preprocessing.py b/trackpy/preprocessing.py index a92191d2..2af009c3 100644 --- a/trackpy/preprocessing.py +++ b/trackpy/preprocessing.py @@ -1,12 +1,14 @@ from __future__ import (absolute_import, division, print_function, unicode_literals) import six +import logging import numpy as np from scipy.ndimage.filters import uniform_filter from scipy.ndimage.fourier import fourier_gaussian -from .utils import print_update +logger = logging.getLogger(__name__) +__all__ = ['bandpass', 'scale_to_gamut'] # When loading module, try to use pyFFTW ("Fastest Fourier Transform in the @@ -25,9 +27,9 @@ def _maybe_align(a): global planned if not planned: - print_update("Note: FFTW is configuring itself. This will take " + - "several seconds, but subsequent calls will run " + - "*much* faster.") + logger.info("Note: FFTW is configuring itself. 
This will take " + + "several seconds, but subsequent calls will run " + + "*much* faster.") planned = True result = pyfftw.n_byte_align(a, a.dtype.alignment) return result diff --git a/trackpy/tests/test_misc.py b/trackpy/tests/test_misc.py index 4afcb71a..71730ef0 100644 --- a/trackpy/tests/test_misc.py +++ b/trackpy/tests/test_misc.py @@ -1,11 +1,35 @@ from __future__ import (absolute_import, division, print_function, unicode_literals) import six +import logging import unittest +import trackpy import trackpy.diag class DiagTests(unittest.TestCase): def test_performance_report(self): trackpy.diag.performance_report() + +class LoggerTests(unittest.TestCase): + def test_heirarchy(self): + self.assertTrue(trackpy.linking.logger.parent is trackpy.logger) + self.assertTrue(trackpy.feature.logger.parent is trackpy.logger) + self.assertTrue(trackpy.preprocessing.logger.parent is trackpy.logger) + + def test_convenience_funcs(self): + trackpy.quiet(True) + self.assertEqual(trackpy.logger.level, logging.WARN) + trackpy.quiet(False) + self.assertEqual(trackpy.logger.level, logging.INFO) + + trackpy.ignore_logging() + self.assertEqual(len(trackpy.logger.handlers), 0) + self.assertEqual(trackpy.logger.level, logging.NOTSET) + self.assertTrue(trackpy.logger.propagate) + + trackpy.handle_logging() + self.assertEqual(len(trackpy.logger.handlers), 1) + self.assertEqual(trackpy.logger.level, logging.INFO) + self.assertEqual(trackpy.logger.propagate, 1) diff --git a/trackpy/uncertainty.py b/trackpy/uncertainty.py index a28f0ee5..9e9a5065 100644 --- a/trackpy/uncertainty.py +++ b/trackpy/uncertainty.py @@ -8,6 +8,9 @@ from .preprocessing import bandpass from .masks import binary_mask +__all__ = ['roi', 'measure_noise', 'static_error'] + + def roi(image, diameter, threshold=1): """Return a mask selecting the neighborhoods of bright regions. See Biophysical journal 88(1) 623-638 Figure C. 
@@ -27,11 +30,13 @@ def roi(image, diameter, threshold=1): signal_mask = morphology.binary_dilation(signal_mask, structure=structure) return signal_mask + def measure_noise(image, diameter, threshold): "Compute the standard deviation of the dark pixels outside the signal." signal_mask = roi(image, diameter, threshold) return image[~signal_mask].mean(), image[~signal_mask].std() + def static_error(features, noise, diameter, noise_size=1): """Compute the uncertainty in particle position ("the static error"). diff --git a/trackpy/utils.py b/trackpy/utils.py index 42b7045c..c22387f1 100644 --- a/trackpy/utils.py +++ b/trackpy/utils.py @@ -1,6 +1,7 @@ from __future__ import (absolute_import, division, print_function, unicode_literals) import six +import logging import collections import functools import re @@ -15,6 +16,8 @@ from scipy import stats import yaml +import trackpy + def fit_powerlaw(data, plot=True, **kwargs): """Fit a powerlaw by doing a linear regression in log space.""" @@ -180,9 +183,85 @@ def record_meta(meta_data, filename): def print_update(message): "Print a message immediately; do not wait for current execution to finish." + warnings.warn("print_update has been deprecated and will be removed in " + "version 0.4", TrackpyDeprecationWarning, stacklevel=1) try: clear_output() except Exception: pass print(message) sys.stdout.flush() + + +class IPythonStreamHandler(logging.StreamHandler): + "A StreamHandler for logging that clears output between entries." 
+ def emit(self, s): + clear_output(wait=True) + print(s.getMessage()) + def flush(self): + sys.stdout.flush() + + +FORMAT = "%(name)s.%(funcName)s: %(message)s" +formatter = logging.Formatter(FORMAT) + +# Check for IPython and use a special logger +use_ipython_handler = False +try: + import IPython +except ImportError: + pass +else: + if IPython.get_ipython() is not None: + use_ipython_handler = True +if use_ipython_handler: + default_handler = IPythonStreamHandler() +else: + default_handler = logging.StreamHandler(sys.stdout) +default_handler.setLevel(logging.INFO) +default_handler.setFormatter(formatter) + + +def handle_logging(): + "Send INFO-level log messages to stdout. Do not propagate." + if use_ipython_handler: + # Avoid double-printing messages to IPython stderr. + trackpy.logger.propagate = False + trackpy.logger.addHandler(default_handler) + trackpy.logger.setLevel(logging.INFO) + + +def ignore_logging(): + "Reset to factory default logging configuration; remove trackpy's handler." + trackpy.logger.removeHandler(default_handler) + trackpy.logger.setLevel(logging.NOTSET) + trackpy.logger.propagate = 1 # default implemented by the logging module + + +def quiet(suppress=True): + """Suppress trackpy information log messages. + + Parameters + ---------- + suppress : boolean + If True, set the logging level to WARN, hiding INFO-level messages. + If False, set level to INFO, showing informational messages. + """ + if suppress: + trackpy.logger.setLevel(logging.WARN) + else: + trackpy.logger.setLevel(logging.INFO) + + +class TrackpyDeprecationWarning(UserWarning): + """ + A class for issuing deprecation warnings for trackpy users. + + In light of the fact that Python builtin DeprecationWarnings are ignored + by default as of Python 2.7 (see link below), this class was put in to + allow for the signaling of deprecation, but via UserWarnings which are not + ignored by default. 
+ + http://docs.python.org/dev/whatsnew/2.7.html#the-future-for-python-2-x + """ + pass