From e7cd4012bc5ea4e4f2c03d5f7beeb58bddd98588 Mon Sep 17 00:00:00 2001 From: fabioz Date: Tue, 4 Mar 2014 10:03:20 -0300 Subject: [PATCH 1/5] Changed FiniteCache which had a bug where calling items() would make it recurse forever with a new LRUCache implementation. --- pyqtgraph/graphicsItems/GraphicsItem.py | 26 +----- pyqtgraph/lru_cache.py | 116 ++++++++++++++++++++++++ tests/test.py | 52 ++++++++++- 3 files changed, 170 insertions(+), 24 deletions(-) create mode 100644 pyqtgraph/lru_cache.py diff --git a/pyqtgraph/graphicsItems/GraphicsItem.py b/pyqtgraph/graphicsItems/GraphicsItem.py index e34086bd..5c941dae 100644 --- a/pyqtgraph/graphicsItems/GraphicsItem.py +++ b/pyqtgraph/graphicsItems/GraphicsItem.py @@ -3,29 +3,9 @@ from ..GraphicsScene import GraphicsScene from ..Point import Point from .. import functions as fn import weakref -from ..pgcollections import OrderedDict -import operator, sys +import operator +from pyqtgraph.lru_cache import LRUCache -class FiniteCache(OrderedDict): - """Caches a finite number of objects, removing - least-frequently used items.""" - def __init__(self, length): - self._length = length - OrderedDict.__init__(self) - - def __setitem__(self, item, val): - self.pop(item, None) # make sure item is added to end - OrderedDict.__setitem__(self, item, val) - while len(self) > self._length: - del self[list(self.keys())[0]] - - def __getitem__(self, item): - val = OrderedDict.__getitem__(self, item) - del self[item] - self[item] = val ## promote this key - return val - - class GraphicsItem(object): """ @@ -38,7 +18,7 @@ class GraphicsItem(object): The GraphicsView system places a lot of emphasis on the notion that the graphics within the scene should be device independent--you should be able to take the same graphics and display them on screens of different resolutions, printers, export to SVG, etc. This is nice in principle, but causes me a lot of headache in practice. It means that I have to circumvent all the device-independent expectations any time I want to operate in pixel coordinates rather than arbitrary scene coordinates. A lot of the code in GraphicsItem is devoted to this task--keeping track of view widgets and device transforms, computing the size and shape of a pixel in local item coordinates, etc. Note that in item coordinates, a pixel does not have to be square or even rectangular, so just asking how to increase a bounding rect by 2px can be a rather complex task. """ - _pixelVectorGlobalCache = FiniteCache(100) + _pixelVectorGlobalCache = LRUCache(100, 70) def __init__(self, register=True): if not hasattr(self, '_qtBaseClass'): diff --git a/pyqtgraph/lru_cache.py b/pyqtgraph/lru_cache.py new file mode 100644 index 00000000..862e956a --- /dev/null +++ b/pyqtgraph/lru_cache.py @@ -0,0 +1,116 @@ +import operator +import sys +import itertools + + +_IS_PY3 = sys.version_info[0] == 3 + +class LRUCache(object): + ''' + This LRU cache should be reasonable for short collections (until around 100 items), as it does a + sort on the items if the collection would become too big (so, it is very fast for getting and + setting but when its size would become higher than the max size it does one sort based on the + internal time to decide which items should be removed -- which should be Ok if the resize_to + isn't too close to the max_size so that it becomes an operation that doesn't happen all the + time). + ''' + + def __init__(self, max_size=100, resize_to=70): + ''' + :param int max_size: + This is the maximum size of the cache. 
When some item is added and the cache would become + bigger than this, it's resized to the value passed on resize_to. + + :param int resize_to: + When a resize operation happens, this is the size of the final cache. + ''' + assert resize_to < max_size + self.max_size = max_size + self.resize_to = resize_to + self._counter = 0 + self._dict = {} + if _IS_PY3: + self._next_time = itertools.count(0).__next__ + else: + self._next_time = itertools.count(0).next + + def __getitem__(self, key): + item = self._dict[key] + item[2] = self._next_time() + return item[1] + + def __len__(self): + return len(self._dict) + + def __setitem__(self, key, value): + item = self._dict.get(key) + if item is None: + if len(self._dict) + 1 > self.max_size: + self._resize_to() + + item = [key, value, self._next_time()] + self._dict[key] = item + else: + item[1] = value + item[2] = self._next_time() + + def __delitem__(self, key): + del self._dict[key] + + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default + + def clear(self): + self._dict.clear() + + if _IS_PY3: + def values(self): + return [i[1] for i in self._dict.values()] + + def keys(self): + return [x[0] for x in self._dict.values()] + + def _resize_to(self): + ordered = sorted(self._dict.values(), key=operator.itemgetter(2))[:self.resize_to] + for i in ordered: + del self._dict[i[0]] + + def iteritems(self, access_time=False): + ''' + :param bool access_time: + If True sorts the returned items by the internal access time. + ''' + if access_time: + for x in sorted(self._dict.values(), key=operator.itemgetter(2)): + yield x[0], x[1] + else: + for x in self._dict.items(): + yield x[0], x[1] + + else: + def values(self): + return [i[1] for i in self._dict.itervalues()] + + def keys(self): + return [x[0] for x in self._dict.itervalues()] + + + def _resize_to(self): + ordered = sorted(self._dict.itervalues(), key=operator.itemgetter(2))[:self.resize_to] + for i in ordered: + del self._dict[i[0]] + + def iteritems(self, access_time=False): + ''' + :param bool access_time: + If True sorts the returned items by the internal access time. + ''' + if access_time: + for x in sorted(self._dict.itervalues(), key=operator.itemgetter(2)): + yield x[0], x[1] + else: + for x in self._dict.iteritems(): + yield x[0], x[1] diff --git a/tests/test.py b/tests/test.py index f24a7d42..9821f821 100644 --- a/tests/test.py +++ b/tests/test.py @@ -5,4 +5,54 @@ sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..') ## all tests should be defined with this class so we have the option to tweak it later. class TestCase(unittest.TestCase): - pass \ No newline at end of file + + def testLRU(self): + from pyqtgraph.lru_cache import LRUCache + lru = LRUCache(2, 1) + + def CheckLru(): + lru[1] = 1 + lru[2] = 2 + lru[3] = 3 + + self.assertEqual(2, len(lru)) + self.assertSetEqual(set([2, 3]), set(lru.keys())) + self.assertSetEqual(set([2, 3]), set(lru.values())) + + lru[2] = 2 + self.assertSetEqual(set([2, 3]), set(lru.values())) + + lru[1] = 1 + self.assertSetEqual(set([2, 1]), set(lru.values())) + + #Iterates from the used in the last access to others based on access time. 
+ self.assertEqual([(2, 2), (1, 1)], list(lru.iteritems(access_time=True))) + lru[2] = 2 + self.assertEqual([(1, 1), (2, 2)], list(lru.iteritems(access_time=True))) + + del lru[2] + self.assertEqual([(1, 1), ], list(lru.iteritems(access_time=True))) + + lru[2] = 2 + self.assertEqual([(1, 1), (2, 2)], list(lru.iteritems(access_time=True))) + + _a = lru[1] + self.assertEqual([(2, 2), (1, 1)], list(lru.iteritems(access_time=True))) + + _a = lru[2] + self.assertEqual([(1, 1), (2, 2)], list(lru.iteritems(access_time=True))) + + self.assertEqual(lru.get(2), 2) + self.assertEqual(lru.get(3), None) + self.assertEqual([(1, 1), (2, 2)], list(lru.iteritems(access_time=True))) + + lru.clear() + self.assertEqual([], list(lru.iteritems())) + + CheckLru() + + # Check it twice... + CheckLru() + +if __name__ == '__main__': + unittest.main() \ No newline at end of file From eb33970274b6b2558b8911c6889e79571f21e555 Mon Sep 17 00:00:00 2001 From: Luke Campagnola Date: Wed, 5 Mar 2014 09:11:53 -0500 Subject: [PATCH 2/5] Correct to unix line endings --- pyqtgraph/lru_cache.py | 232 ++++++++++++++++++++--------------------- 1 file changed, 116 insertions(+), 116 deletions(-) diff --git a/pyqtgraph/lru_cache.py b/pyqtgraph/lru_cache.py index 862e956a..2ce2e372 100644 --- a/pyqtgraph/lru_cache.py +++ b/pyqtgraph/lru_cache.py @@ -1,116 +1,116 @@ -import operator -import sys -import itertools - - -_IS_PY3 = sys.version_info[0] == 3 - -class LRUCache(object): - ''' - This LRU cache should be reasonable for short collections (until around 100 items), as it does a - sort on the items if the collection would become too big (so, it is very fast for getting and - setting but when its size would become higher than the max size it does one sort based on the - internal time to decide which items should be removed -- which should be Ok if the resize_to - isn't too close to the max_size so that it becomes an operation that doesn't happen all the - time). - ''' - - def __init__(self, max_size=100, resize_to=70): - ''' - :param int max_size: - This is the maximum size of the cache. When some item is added and the cache would become - bigger than this, it's resized to the value passed on resize_to. - - :param int resize_to: - When a resize operation happens, this is the size of the final cache. 
- ''' - assert resize_to < max_size - self.max_size = max_size - self.resize_to = resize_to - self._counter = 0 - self._dict = {} - if _IS_PY3: - self._next_time = itertools.count(0).__next__ - else: - self._next_time = itertools.count(0).next - - def __getitem__(self, key): - item = self._dict[key] - item[2] = self._next_time() - return item[1] - - def __len__(self): - return len(self._dict) - - def __setitem__(self, key, value): - item = self._dict.get(key) - if item is None: - if len(self._dict) + 1 > self.max_size: - self._resize_to() - - item = [key, value, self._next_time()] - self._dict[key] = item - else: - item[1] = value - item[2] = self._next_time() - - def __delitem__(self, key): - del self._dict[key] - - def get(self, key, default=None): - try: - return self[key] - except KeyError: - return default - - def clear(self): - self._dict.clear() - - if _IS_PY3: - def values(self): - return [i[1] for i in self._dict.values()] - - def keys(self): - return [x[0] for x in self._dict.values()] - - def _resize_to(self): - ordered = sorted(self._dict.values(), key=operator.itemgetter(2))[:self.resize_to] - for i in ordered: - del self._dict[i[0]] - - def iteritems(self, access_time=False): - ''' - :param bool access_time: - If True sorts the returned items by the internal access time. - ''' - if access_time: - for x in sorted(self._dict.values(), key=operator.itemgetter(2)): - yield x[0], x[1] - else: - for x in self._dict.items(): - yield x[0], x[1] - - else: - def values(self): - return [i[1] for i in self._dict.itervalues()] - - def keys(self): - return [x[0] for x in self._dict.itervalues()] - - - def _resize_to(self): - ordered = sorted(self._dict.itervalues(), key=operator.itemgetter(2))[:self.resize_to] - for i in ordered: - del self._dict[i[0]] - - def iteritems(self, access_time=False): - ''' - :param bool access_time: - If True sorts the returned items by the internal access time. - ''' - if access_time: - for x in sorted(self._dict.itervalues(), key=operator.itemgetter(2)): - yield x[0], x[1] - else: - for x in self._dict.iteritems(): - yield x[0], x[1] +import operator +import sys +import itertools + + +_IS_PY3 = sys.version_info[0] == 3 + +class LRUCache(object): + ''' + This LRU cache should be reasonable for short collections (until around 100 items), as it does a + sort on the items if the collection would become too big (so, it is very fast for getting and + setting but when its size would become higher than the max size it does one sort based on the + internal time to decide which items should be removed -- which should be Ok if the resize_to + isn't too close to the max_size so that it becomes an operation that doesn't happen all the + time). + ''' + + def __init__(self, max_size=100, resize_to=70): + ''' + :param int max_size: + This is the maximum size of the cache. When some item is added and the cache would become + bigger than this, it's resized to the value passed on resize_to. + + :param int resize_to: + When a resize operation happens, this is the size of the final cache. 
+ ''' + assert resize_to < max_size + self.max_size = max_size + self.resize_to = resize_to + self._counter = 0 + self._dict = {} + if _IS_PY3: + self._next_time = itertools.count(0).__next__ + else: + self._next_time = itertools.count(0).next + + def __getitem__(self, key): + item = self._dict[key] + item[2] = self._next_time() + return item[1] + + def __len__(self): + return len(self._dict) + + def __setitem__(self, key, value): + item = self._dict.get(key) + if item is None: + if len(self._dict) + 1 > self.max_size: + self._resize_to() + + item = [key, value, self._next_time()] + self._dict[key] = item + else: + item[1] = value + item[2] = self._next_time() + + def __delitem__(self, key): + del self._dict[key] + + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default + + def clear(self): + self._dict.clear() + + if _IS_PY3: + def values(self): + return [i[1] for i in self._dict.values()] + + def keys(self): + return [x[0] for x in self._dict.values()] + + def _resize_to(self): + ordered = sorted(self._dict.values(), key=operator.itemgetter(2))[:self.resize_to] + for i in ordered: + del self._dict[i[0]] + + def iteritems(self, access_time=False): + ''' + :param bool access_time: + If True sorts the returned items by the internal access time. + ''' + if access_time: + for x in sorted(self._dict.values(), key=operator.itemgetter(2)): + yield x[0], x[1] + else: + for x in self._dict.items(): + yield x[0], x[1] + + else: + def values(self): + return [i[1] for i in self._dict.itervalues()] + + def keys(self): + return [x[0] for x in self._dict.itervalues()] + + + def _resize_to(self): + ordered = sorted(self._dict.itervalues(), key=operator.itemgetter(2))[:self.resize_to] + for i in ordered: + del self._dict[i[0]] + + def iteritems(self, access_time=False): + ''' + :param bool access_time: + If True sorts the returned items by the internal access time. + ''' + if access_time: + for x in sorted(self._dict.itervalues(), key=operator.itemgetter(2)): + yield x[0], x[1] + else: + for x in self._dict.iteritems(): + yield x[0], x[1] From dad001b9d42501aa0a64a558c5bad49fa0c019c3 Mon Sep 17 00:00:00 2001 From: Luke Campagnola Date: Wed, 5 Mar 2014 09:12:23 -0500 Subject: [PATCH 3/5] Style corrections --- pyqtgraph/lru_cache.py | 65 +++++++++++++++++++++++------------------- 1 file changed, 35 insertions(+), 30 deletions(-) diff --git a/pyqtgraph/lru_cache.py b/pyqtgraph/lru_cache.py index 2ce2e372..9c04abf3 100644 --- a/pyqtgraph/lru_cache.py +++ b/pyqtgraph/lru_cache.py @@ -10,33 +10,35 @@ class LRUCache(object): This LRU cache should be reasonable for short collections (until around 100 items), as it does a sort on the items if the collection would become too big (so, it is very fast for getting and setting but when its size would become higher than the max size it does one sort based on the - internal time to decide which items should be removed -- which should be Ok if the resize_to - isn't too close to the max_size so that it becomes an operation that doesn't happen all the + internal time to decide which items should be removed -- which should be Ok if the resizeTo + isn't too close to the maxSize so that it becomes an operation that doesn't happen all the time). ''' - def __init__(self, max_size=100, resize_to=70): + def __init__(self, maxSize=100, resizeTo=70): ''' - :param int max_size: - This is the maximum size of the cache. When some item is added and the cache would become - bigger than this, it's resized to the value passed on resize_to. 
- - :param int resize_to: - When a resize operation happens, this is the size of the final cache. + ============== ========================================================= + **Arguments:** + maxSize (int) This is the maximum size of the cache. When some + item is added and the cache would become bigger than + this, it's resized to the value passed on resizeTo. + resizeTo (int) When a resize operation happens, this is the size + of the final cache. + ============== ========================================================= ''' - assert resize_to < max_size - self.max_size = max_size - self.resize_to = resize_to + assert resizeTo < maxSize + self.maxSize = maxSize + self.resizeTo = resizeTo self._counter = 0 self._dict = {} if _IS_PY3: - self._next_time = itertools.count(0).__next__ + self._nextTime = itertools.count(0).__next__ else: - self._next_time = itertools.count(0).next + self._nextTime = itertools.count(0).next def __getitem__(self, key): item = self._dict[key] - item[2] = self._next_time() + item[2] = self._nextTime() return item[1] def __len__(self): @@ -45,14 +47,14 @@ class LRUCache(object): def __setitem__(self, key, value): item = self._dict.get(key) if item is None: - if len(self._dict) + 1 > self.max_size: - self._resize_to() + if len(self._dict) + 1 > self.maxSize: + self._resizeTo() - item = [key, value, self._next_time()] + item = [key, value, self._nextTime()] self._dict[key] = item else: item[1] = value - item[2] = self._next_time() + item[2] = self._nextTime() def __delitem__(self, key): del self._dict[key] @@ -73,17 +75,17 @@ class LRUCache(object): def keys(self): return [x[0] for x in self._dict.values()] - def _resize_to(self): - ordered = sorted(self._dict.values(), key=operator.itemgetter(2))[:self.resize_to] + def _resizeTo(self): + ordered = sorted(self._dict.values(), key=operator.itemgetter(2))[:self.resizeTo] for i in ordered: del self._dict[i[0]] - def iteritems(self, access_time=False): + def iteritems(self, accessTime=False): ''' - :param bool access_time: + :param bool accessTime: If True sorts the returned items by the internal access time. ''' - if access_time: + if accessTime: for x in sorted(self._dict.values(), key=operator.itemgetter(2)): yield x[0], x[1] else: @@ -98,17 +100,20 @@ class LRUCache(object): return [x[0] for x in self._dict.itervalues()] - def _resize_to(self): - ordered = sorted(self._dict.itervalues(), key=operator.itemgetter(2))[:self.resize_to] + def _resizeTo(self): + ordered = sorted(self._dict.itervalues(), key=operator.itemgetter(2))[:self.resizeTo] for i in ordered: del self._dict[i[0]] - def iteritems(self, access_time=False): + def iteritems(self, accessTime=False): ''' - :param bool access_time: - If True sorts the returned items by the internal access time. + ============= ====================================================== + **Arguments** + accessTime (bool) If True sorts the returned items by the + internal access time. 
+ ============= ====================================================== ''' - if access_time: + if accessTime: for x in sorted(self._dict.itervalues(), key=operator.itemgetter(2)): yield x[0], x[1] else: From dcb2c421796ffc86f340f3965ef2c7772e4f0907 Mon Sep 17 00:00:00 2001 From: Luke Campagnola Date: Wed, 5 Mar 2014 09:16:53 -0500 Subject: [PATCH 4/5] Moved lru_cache to util, test to util/tests --- pyqtgraph/util/__init__.py | 0 pyqtgraph/{ => util}/lru_cache.py | 0 pyqtgraph/util/tests/test_lru_cache.py | 50 ++++++++++++++++++++++ tests/test.py | 58 -------------------------- 4 files changed, 50 insertions(+), 58 deletions(-) create mode 100644 pyqtgraph/util/__init__.py rename pyqtgraph/{ => util}/lru_cache.py (100%) create mode 100644 pyqtgraph/util/tests/test_lru_cache.py delete mode 100644 tests/test.py diff --git a/pyqtgraph/util/__init__.py b/pyqtgraph/util/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pyqtgraph/lru_cache.py b/pyqtgraph/util/lru_cache.py similarity index 100% rename from pyqtgraph/lru_cache.py rename to pyqtgraph/util/lru_cache.py diff --git a/pyqtgraph/util/tests/test_lru_cache.py b/pyqtgraph/util/tests/test_lru_cache.py new file mode 100644 index 00000000..c0cf9f8a --- /dev/null +++ b/pyqtgraph/util/tests/test_lru_cache.py @@ -0,0 +1,50 @@ +from pyqtgraph.util.lru_cache import LRUCache + +def testLRU(): + lru = LRUCache(2, 1) + # check twice + checkLru(lru) + checkLru(lru) + +def checkLru(lru): + lru[1] = 1 + lru[2] = 2 + lru[3] = 3 + + assert len(lru) == 2 + assert set([2, 3]) == set(lru.keys()) + assert set([2, 3]) == set(lru.values()) + + lru[2] = 2 + assert set([2, 3]) == set(lru.values()) + + lru[1] = 1 + set([2, 1]) == set(lru.values()) + + #Iterates from the used in the last access to others based on access time. + assert [(2, 2), (1, 1)] == list(lru.iteritems(accessTime=True)) + lru[2] = 2 + assert [(1, 1), (2, 2)] == list(lru.iteritems(accessTime=True)) + + del lru[2] + assert [(1, 1), ] == list(lru.iteritems(accessTime=True)) + + lru[2] = 2 + assert [(1, 1), (2, 2)] == list(lru.iteritems(accessTime=True)) + + _a = lru[1] + assert [(2, 2), (1, 1)] == list(lru.iteritems(accessTime=True)) + + _a = lru[2] + assert [(1, 1), (2, 2)] == list(lru.iteritems(accessTime=True)) + + assert lru.get(2) == 2 + assert lru.get(3) == None + assert [(1, 1), (2, 2)] == list(lru.iteritems(accessTime=True)) + + lru.clear() + assert [] == list(lru.iteritems()) + + +if __name__ == '__main__': + testLRU() diff --git a/tests/test.py b/tests/test.py deleted file mode 100644 index 9821f821..00000000 --- a/tests/test.py +++ /dev/null @@ -1,58 +0,0 @@ -import unittest -import os, sys -## make sure this instance of pyqtgraph gets imported first -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) - -## all tests should be defined with this class so we have the option to tweak it later. -class TestCase(unittest.TestCase): - - def testLRU(self): - from pyqtgraph.lru_cache import LRUCache - lru = LRUCache(2, 1) - - def CheckLru(): - lru[1] = 1 - lru[2] = 2 - lru[3] = 3 - - self.assertEqual(2, len(lru)) - self.assertSetEqual(set([2, 3]), set(lru.keys())) - self.assertSetEqual(set([2, 3]), set(lru.values())) - - lru[2] = 2 - self.assertSetEqual(set([2, 3]), set(lru.values())) - - lru[1] = 1 - self.assertSetEqual(set([2, 1]), set(lru.values())) - - #Iterates from the used in the last access to others based on access time. 
- self.assertEqual([(2, 2), (1, 1)], list(lru.iteritems(access_time=True))) - lru[2] = 2 - self.assertEqual([(1, 1), (2, 2)], list(lru.iteritems(access_time=True))) - - del lru[2] - self.assertEqual([(1, 1), ], list(lru.iteritems(access_time=True))) - - lru[2] = 2 - self.assertEqual([(1, 1), (2, 2)], list(lru.iteritems(access_time=True))) - - _a = lru[1] - self.assertEqual([(2, 2), (1, 1)], list(lru.iteritems(access_time=True))) - - _a = lru[2] - self.assertEqual([(1, 1), (2, 2)], list(lru.iteritems(access_time=True))) - - self.assertEqual(lru.get(2), 2) - self.assertEqual(lru.get(3), None) - self.assertEqual([(1, 1), (2, 2)], list(lru.iteritems(access_time=True))) - - lru.clear() - self.assertEqual([], list(lru.iteritems())) - - CheckLru() - - # Check it twice... - CheckLru() - -if __name__ == '__main__': - unittest.main() \ No newline at end of file From 41c3d47d4334f71b7bd2a6cf543755f06b26de22 Mon Sep 17 00:00:00 2001 From: Luke Campagnola Date: Wed, 5 Mar 2014 10:25:55 -0500 Subject: [PATCH 5/5] Correct GraphicsItem to use relative import of lru_cache Update MultiPlotSpeedTest to test lru_cache performance --- examples/MultiPlotSpeedTest.py | 21 +++++++++++++++------ examples/__main__.py | 1 + pyqtgraph/graphicsItems/GraphicsItem.py | 2 +- 3 files changed, 17 insertions(+), 7 deletions(-) diff --git a/examples/MultiPlotSpeedTest.py b/examples/MultiPlotSpeedTest.py index e38c90e2..0d0d701b 100644 --- a/examples/MultiPlotSpeedTest.py +++ b/examples/MultiPlotSpeedTest.py @@ -22,17 +22,25 @@ p.setWindowTitle('pyqtgraph example: MultiPlotSpeedTest') #p.setRange(QtCore.QRectF(0, -10, 5000, 20)) p.setLabel('bottom', 'Index', units='B') -nPlots = 10 +nPlots = 100 +nSamples = 500 #curves = [p.plot(pen=(i,nPlots*1.3)) for i in range(nPlots)] -curves = [pg.PlotCurveItem(pen=(i,nPlots*1.3)) for i in range(nPlots)] -for c in curves: +curves = [] +for i in range(nPlots): + c = pg.PlotCurveItem(pen=(i,nPlots*1.3)) p.addItem(c) + c.setPos(0,i*6) + curves.append(c) -rgn = pg.LinearRegionItem([1,100]) +p.setYRange(0, nPlots*6) +p.setXRange(0, nSamples) +p.resize(600,900) + +rgn = pg.LinearRegionItem([nSamples/5.,nSamples/3.]) p.addItem(rgn) -data = np.random.normal(size=(53,5000/nPlots)) +data = np.random.normal(size=(nPlots*23,nSamples)) ptr = 0 lastTime = time() fps = None @@ -42,7 +50,8 @@ def update(): count += 1 #print "---------", count for i in range(nPlots): - curves[i].setData(i+data[(ptr+i)%data.shape[0]]) + curves[i].setData(data[(ptr+i)%data.shape[0]]) + #print " setData done." ptr += nPlots now = time() diff --git a/examples/__main__.py b/examples/__main__.py index e7dbe5eb..efd6ea06 100644 --- a/examples/__main__.py +++ b/examples/__main__.py @@ -53,6 +53,7 @@ examples = OrderedDict([ ('Video speed test', 'VideoSpeedTest.py'), ('Line Plot update', 'PlotSpeedTest.py'), ('Scatter Plot update', 'ScatterPlotSpeedTest.py'), + ('Multiple plots', 'MultiPlotSpeedTest.py'), ])), ('3D Graphics', OrderedDict([ ('Volumetric', 'GLVolumeItem.py'), diff --git a/pyqtgraph/graphicsItems/GraphicsItem.py b/pyqtgraph/graphicsItems/GraphicsItem.py index 5c941dae..2cae5d20 100644 --- a/pyqtgraph/graphicsItems/GraphicsItem.py +++ b/pyqtgraph/graphicsItems/GraphicsItem.py @@ -4,7 +4,7 @@ from ..Point import Point from .. import functions as fn import weakref import operator -from pyqtgraph.lru_cache import LRUCache +from ..util.lru_cache import LRUCache class GraphicsItem(object):
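
Usage note (not part of the patches): below is a minimal sketch of the API this series adds, assuming the whole series is applied, so the module lives at pyqtgraph/util/lru_cache.py (patch 4) and uses the camelCase maxSize/resizeTo/accessTime names from patch 3. The keys, values, and sizes are illustrative only; only calls that appear in the patches are used.

    from pyqtgraph.util.lru_cache import LRUCache

    # Hold up to 100 entries; once that limit would be exceeded, a single
    # sort by internal access time trims out the oldest entries.
    cache = LRUCache(maxSize=100, resizeTo=70)

    cache['a'] = 1              # insert; __setitem__ stamps the access time
    cache['a'] = 2              # updating an existing key refreshes its access time
    value = cache['a']          # __getitem__ also refreshes the access time
    missing = cache.get('b')    # returns None (default) instead of raising KeyError

    # With accessTime=True, items are yielded oldest-accessed first.
    for key, val in cache.iteritems(accessTime=True):
        print(key, val)

    cache.clear()

This mirrors how GraphicsItem uses the class: _pixelVectorGlobalCache is an LRUCache(100, 70) shared across items, so pixel-vector lookups stay dict-fast and the occasional trim replaces the per-access reordering that the old FiniteCache did.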