LIC : change license from GPL -> 3 clause BSD
They promised me cookies!
tacaswell committed Feb 12, 2014
1 parent 1658f01 commit cb248f3
Showing 8 changed files with 95 additions and 863 deletions.
702 changes: 28 additions & 674 deletions LICENSE

Large diffs are not rendered by default.
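The LICENSE diff is collapsed, but the file summary tells the story: roughly 674 lines of GPL text are replaced by the 28-line 3-clause BSD license. For reference, the standard 3-clause BSD template reads as follows; the year and holder below are placeholders, and the wording actually committed may differ slightly:

    Copyright (c) <year>, <copyright holder>
    All rights reserved.

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions are met:

    1. Redistributions of source code must retain the above copyright notice,
       this list of conditions and the following disclaimer.
    2. Redistributions in binary form must reproduce the above copyright notice,
       this list of conditions and the following disclaimer in the documentation
       and/or other materials provided with the distribution.
    3. Neither the name of the copyright holder nor the names of its
       contributors may be used to endorse or promote products derived from
       this software without specific prior written permission.

    (The standard all-caps "AS IS" warranty disclaimer follows.)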

17 changes: 0 additions & 17 deletions examples/legacy_link_usage.py
@@ -1,20 +1,3 @@
- #Copyright 2012 Thomas A Caswell
- #[email protected]
- #http://jfi.uchicago.edu/~tcaswell
- #
- #This program is free software; you can redistribute it and/or modify
- #it under the terms of the GNU General Public License as published by
- #the Free Software Foundation; either version 3 of the License, or (at
- #your option) any later version.
- #
- #This program is distributed in the hope that it will be useful, but
- #WITHOUT ANY WARRANTY; without even the implied warranty of
- #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- #General Public License for more details.
- #
- #You should have received a copy of the GNU General Public License
- #along with this program; if not, see <http://www.gnu.org/licenses>.
-
from __future__ import division
import matplotlib
matplotlib.use('qt4agg')
19 changes: 0 additions & 19 deletions trackpy/__init__.py
@@ -1,22 +1,3 @@
- # Copyright 2012 Daniel B. Allan
- # [email protected], [email protected]
- # http://pha.jhu.edu/~dallan
- # http://www.danallan.com
- #
- # This program is free software; you can redistribute it and/or modify
- # it under the terms of the GNU General Public License as published by
- # the Free Software Foundation; either version 3 of the License, or (at
- # your option) any later version.
- #
- # This program is distributed in the hope that it will be useful, but
- # WITHOUT ANY WARRANTY; without even the implied warranty of
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- # General Public License for more details.
- #
- # You should have received a copy of the GNU General Public License
- # along with this program; if not, see <http://www.gnu.org/licenses>.
-
# Configure logging for all modules in this package.
import logging
FORMAT = "%(name)s.%(funcName)s: %(message)s"
logging.basicConfig(level=logging.WARN, format=FORMAT)
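
Because basicConfig installs a root handler at the WARN level, a downstream script can change the package's verbosity without touching library code. A minimal sketch, assuming the modules obtain loggers via logging.getLogger(__name__), which the %(name)s format string above suggests:

    import logging
    import trackpy  # importing the package runs the basicConfig call above

    # Lower the threshold for trackpy's own loggers; child loggers such as
    # 'trackpy.feature' inherit this level, and the handler installed by
    # basicConfig emits every record that propagates to it.
    logging.getLogger('trackpy').setLevel(logging.DEBUG)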
47 changes: 15 additions & 32 deletions trackpy/feature.py
@@ -1,21 +1,3 @@
- # Copyright 2012 Daniel B. Allan
- # [email protected], [email protected]
- # http://pha.jhu.edu/~dallan
- # http://www.danallan.com
- #
- # This program is free software; you can redistribute it and/or modify
- # it under the terms of the GNU General Public License as published by
- # the Free Software Foundation; either version 3 of the License, or (at
- # your option) any later version.
- #
- # This program is distributed in the hope that it will be useful, but
- # WITHOUT ANY WARRANTY; without even the implied warranty of
- # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- # General Public License for more details.
- #
- # You should have received a copy of the GNU General Public License
- # along with this program; if not, see <http://www.gnu.org/licenses>.
-

from __future__ import division
import warnings
@@ -120,7 +102,7 @@ def refine(raw_image, image, radius, coords, max_iterations=10, engine='auto',
Characterize the neighborhood of a local maximum, and iteratively
hone in on its center-of-brightness. Return its coordinates, integrated
brightness, size (Rg), eccentricity (0=circular), and signal strength.
Parameters
----------
raw_image : array (any dimensions)
@@ -146,15 +128,15 @@ def refine(raw_image, image, radius, coords, max_iterations=10, engine='auto',
engine = 'python'
if engine == 'python':
coords = np.array(coords) # a copy, will not modify in place
- results = _refine(raw_image, image, radius, coords, max_iterations, 
+ results = _refine(raw_image, image, radius, coords, max_iterations,
characterize, walkthrough)
elif engine == 'numba':
if not NUMBA_AVAILABLE:
warnings.warn("numba could not be imported. Without it, the "
"'numba' engine runs very slow. Use the 'python' "
"engine or install numba.", UserWarning)
if image.ndim != 2:
- raise NotImplementedError("The numba engine only supports 2D " 
+ raise NotImplementedError("The numba engine only supports 2D "
"images. You can extend it if you feel "
"like a hero.")
if walkthrough:
@@ -225,7 +207,7 @@ def _refine(raw_image, image, radius, coords, max_iterations,
# If we're off by less than half a pixel, interpolate.
else:
# Here, coord is a float. We are off the grid.
- neighborhood = ndimage.shift(neighborhood, -off_center, 
+ neighborhood = ndimage.shift(neighborhood, -off_center,
order=2, mode='constant', cval=0)
new_coord = coord + off_center
# Disallow any whole-pixels moves on future iterations.
@@ -357,7 +339,7 @@ def _numba_refine(raw_image, image, radius, coords, max_iterations,
# TODO Implement this for numba.
# Remember to zero cm_n somewhere in here.
# Here, coord is a float. We are off the grid.
- # neighborhood = ndimage.shift(neighborhood, -off_center, 
+ # neighborhood = ndimage.shift(neighborhood, -off_center,
# order=2, mode='constant', cval=0)
# new_coord = np.float_(coord) + off_center
# Disallow any whole-pixels moves on future iterations.
@@ -420,7 +402,7 @@ def _numba_refine(raw_image, image, radius, coords, max_iterations,
def locate(raw_image, diameter, minmass=100., maxsize=None, separation=None,
noise_size=1, smoothing_size=None, threshold=1, invert=False,
percentile=64, topn=None, preprocess=True, max_iterations=10,
- filter_before=True, filter_after=True, 
+ filter_before=True, filter_after=True,
characterize=True, engine='auto'):
"""Locate Gaussian-like blobs of a given approximate size.
@@ -449,7 +431,7 @@ def locate(raw_image, diameter, minmass=100., maxsize=None, separation=None,
default.
percentile : Features must have a peak brighter than pixels in this
percentile. This helps eliminate spurrious peaks.
- topn : Return only the N brightest features above minmass. 
+ topn : Return only the N brightest features above minmass.
If None (default), return all features above minmass.
Returns
@@ -554,7 +536,7 @@ def locate(raw_image, diameter, minmass=100., maxsize=None, separation=None,
if maxsize is not None:
approx_size = np.empty(count_maxima)
for i in range(count_maxima):
- approx_size[i] = estimate_size(image, radius, coords[i], 
+ approx_size[i] = estimate_size(image, radius, coords[i],
approx_mass[i])
condition &= approx_size < maxsize
coords = coords[condition]
@@ -644,7 +626,7 @@ def batch(frames, diameter, minmass=100, maxsize=None, separation=None,
default.
percentile : Features must have a peak brighter than pixels in this
percentile. This helps eliminate spurrious peaks.
- topn : Return only the N brightest features above minmass. 
+ topn : Return only the N brightest features above minmass.
If None (default), return all features above minmass.
Returns
@@ -688,7 +670,7 @@ def batch(frames, diameter, minmass=100, maxsize=None, separation=None,
References
----------
.. [1] Crocker, J.C., Grier, D.G. http://dx.doi.org/10.1006/jcis.1996.0217
"""
# Gather meta information and save as YAML in current directory.
timestamp = pd.datetime.utcnow().strftime('%Y-%m-%d-%H%M%S')
Expand All @@ -698,12 +680,13 @@ def batch(frames, diameter, minmass=100, maxsize=None, separation=None,
source = None
meta_info = dict(timestamp=timestamp,
trackpy_version=trackpy.__version__,
- source=source, diameter=diameter, minmass=minmass, 
- maxsize=maxsize, separation=separation, 
- noise_size=noise_size, smoothing_size=smoothing_size, 
- invert=invert, percentile=percentile, topn=topn, 
+ source=source, diameter=diameter, minmass=minmass,
+ maxsize=maxsize, separation=separation,
+ noise_size=noise_size, smoothing_size=smoothing_size,
+ invert=invert, percentile=percentile, topn=topn,
preprocess=preprocess, max_iterations=max_iterations,
filter_before=filter_before, filter_after=filter_after)
+
if meta:
if isinstance(meta, str):
filename = meta
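The locate and batch signatures above are enough to sketch typical use of this module. A hypothetical example; the array and parameter values are invented for illustration:

    import numpy as np
    from trackpy.feature import locate

    # Any 2D grayscale array will do; here, synthetic noise.
    frame = np.random.rand(256, 256)

    # Find Gaussian-like blobs about 11 pixels across, keeping only
    # candidates whose integrated brightness exceeds minmass.
    features = locate(frame, diameter=11, minmass=100.)

The resulting features table is what link_df in trackpy/linking.py, the next file, consumes.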
35 changes: 10 additions & 25 deletions trackpy/linking.py
@@ -1,19 +1,3 @@
- #Copyright 2012 Thomas A Caswell
- #[email protected]
- #http://jfi.uchicago.edu/~tcaswell
- #
- #This program is free software; you can redistribute it and/or modify
- #it under the terms of the GNU General Public License as published by
- #the Free Software Foundation; either version 3 of the License, or (at
- #your option) any later version.
- #
- #This program is distributed in the hope that it will be useful, but
- #WITHOUT ANY WARRANTY; without even the implied warranty of
- #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- #General Public License for more details.
- #
- #You should have received a copy of the GNU General Public License
- #along with this program; if not, see <http://www.gnu.org/licenses>.
from __future__ import (absolute_import, division, print_function,
unicode_literals)

@@ -333,7 +317,7 @@ def __str__(self):
def __repr__(self):
coords = '(' + (', '.join(["{:.3f}"]*len(self.pos))).format(*self.pos) + ')'
track = " in Track %d" % self.track.indx if self.track else ""
return "<%s at %d, " % (self.__class__.__name__, self.t) + coords + track + ">"
return "<%s at %d, " % (self.__class__.__name__, self.t) + coords + track + ">"

class IndexedPointND(PointND):

@@ -366,7 +350,7 @@ def link(levels, search_range, hash_generator, memory=0, track_cls=None,
algorithm used to resolve subnetworks of nearby particles
'auto' uses numba if available
- Returns 
+ Returns
-------
tracks : list of Track (or track_cls) objects
Expand All @@ -384,7 +368,7 @@ def link(levels, search_range, hash_generator, memory=0, track_cls=None,
label_generator = link_iter(iter(levels), search_range, memory=memory,
neighbor_strategy=neighbor_strategy,
link_strategy=link_strategy,
- track_cls=track_cls, 
+ track_cls=track_cls,
hash_generator=hash_generator)
labels = list(label_generator)
points = sum(map(list, levels), []) # flatten levels: a list of poits
@@ -421,7 +405,7 @@ def link_df(features, search_range, memory=0,
algorithm used to resolve subnetworks of nearby particles
'auto' uses numba if available
- Returns 
+ Returns
-------
trajectories : DataFrame
This is the input features DataFrame, now with a new column labeling
Expand All @@ -437,7 +421,7 @@ def link_df(features, search_range, memory=0,
For 'BTree' mode only. Define the shape of the search region.
If None (default), infer shape from range of data.
box_size : sequence
- For 'BTree' mode only. Define the parition size to optimize 
+ For 'BTree' mode only. Define the parition size to optimize
performance. If None (default), the search_range is used, which is
a reasonable guess for best performance.
verify_integrity : boolean
@@ -640,7 +624,7 @@ def link_iter(levels, search_range, memory=0,
For 'BTree' mode only. Define the shape of the search region.
(Higher-level wrappers of link infer this from the data.)
box_size : sequence
- For 'BTree' mode only. Define the parition size to optimize 
+ For 'BTree' mode only. Define the parition size to optimize
performance. If None (default), the search_range is used, which is
a reasonable guess for best performance.
track_cls : class (optional)
@@ -692,7 +676,7 @@ def link_iter(levels, search_range, memory=0,

try:
# Start ID numbers from zero, incompatible with multithreading.
- track_cls.reset_counter() 
+ track_cls.reset_counter()
except AttributeError:
# must be using a custom Track class without this method
pass
@@ -728,7 +712,7 @@ def link_iter(levels, search_range, memory=0,
p.forward_cands = []

# Sort out what can go to what.
- assign_candidates(cur_level, prev_hash, search_range, 
+ assign_candidates(cur_level, prev_hash, search_range,
neighbor_strategy)

# sort the candidate lists by distance
@@ -790,7 +774,7 @@ def link_iter(levels, search_range, memory=0,

spl, dpl = subnet_linker(s_sn, len(d_sn), search_range)

- # Identify the particles in the destination set that 
+ # Identify the particles in the destination set that
# were not linked to.
d_remain = set(d for d in d_sn if d is not None) # TODO DAN
d_remain -= set(d for d in dpl if d is not None)
@@ -872,6 +856,7 @@ def assign_candidates(cur_level, prev_hash, search_range, neighbor_strategy):
p.back_cands.append((wp, dists[i,j]))
wp.forward_cands.append((p, dists[i,j]))

+
class SubnetOversizeException(Exception):
'''An :py:exc:`Exception` to be raised when the sub-nets are too
big to be efficiently linked. If you get this then either reduce your search range
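Together with feature.py, the linking API above sketches out end to end. A hypothetical example; frames stands in for an iterable of 2D images, and the parameter values are invented:

    from trackpy.feature import batch
    from trackpy.linking import link_df

    # Locate features in every frame of a hypothetical image sequence.
    features = batch(frames, diameter=11, minmass=100)

    # Link features across frames: a particle may travel at most
    # search_range pixels between frames and may vanish for up to
    # `memory` consecutive frames before losing its identity.
    trajectories = link_df(features, search_range=5, memory=3)

Per the docstring just above, a SubnetOversizeException during linking is a cue to reduce the search range.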
(Diffs for the three remaining changed files were not loaded.)
