Merge pull request #690 from campagnola/py3-cleanup

Py3 cleanup
Luke Campagnola 2018-05-17 08:44:56 -07:00 committed by GitHub
commit 43d965e554
10 changed files with 27 additions and 27 deletions
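
Note: this is a mechanical rename of Python 2's dict.iteritems()/itervalues() to the items()/values() methods that exist on both Python 2 and 3. A minimal sketch of the pattern being replaced (the config dict here is purely illustrative, not part of this changeset):

# Illustrative only; 'config' is not from the PR.
config = {'antialias': True, 'background': 'w'}

# Python 2 only -- dict.iteritems() was removed in Python 3:
#   for k, v in config.iteritems(): ...
# Portable -- items() returns a list on Python 2 and a view on Python 3,
# and both can be iterated directly:
for k, v in config.items():
    print(k, v)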

View File

@@ -1097,7 +1097,7 @@ def pretty(data, indent=''):
     ind2 = indent + " "
     if isinstance(data, dict):
         ret = indent+"{\n"
-        for k, v in data.iteritems():
+        for k, v in data.items():
             ret += ind2 + repr(k) + ": " + pretty(v, ind2).strip() + "\n"
         ret += indent+"}\n"
     elif isinstance(data, list) or isinstance(data, tuple):

View File

@@ -43,7 +43,7 @@ class MatplotlibExporter(Exporter):
         for ax in axl:
             if ax is None:
                 continue
-            for loc, spine in ax.spines.iteritems():
+            for loc, spine in ax.spines.items():
                 if loc in ['left', 'bottom']:
                     pass
                 elif loc in ['right', 'top']:
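
The exporter walks ax.spines, a dict-like mapping of location name to Spine, so items() is a drop-in replacement here. A self-contained sketch of the same spine-hiding pattern (assumes matplotlib is installed; this is not the exporter's exact code):

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
for loc, spine in ax.spines.items():    # location name ('left', 'top', ...) -> Spine
    if loc in ['left', 'bottom']:
        pass                            # keep the left/bottom frame
    elif loc in ['right', 'top']:
        spine.set_visible(False)        # hide the right/top frame
plt.show()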

View File

@@ -222,7 +222,7 @@ class Node(QtCore.QObject):
         for t in self.inputs().values():
             nodes |= set([i.node() for i in t.inputTerminals()])
         return nodes
-        #return set([t.inputTerminals().node() for t in self.listInputs().itervalues()])
+        #return set([t.inputTerminals().node() for t in self.listInputs().values()])
 
     def __repr__(self):
         return "<Node %s @%x>" % (self.name(), id(self))
@@ -477,7 +477,7 @@ class NodeGraphicsItem(GraphicsObject):
         #self.node.sigTerminalRenamed.connect(self.updateActionMenu)
 
     #def setZValue(self, z):
-        #for t, item in self.terminals.itervalues():
+        #for t, item in self.terminals.values():
             #item.setZValue(z+1)
         #GraphicsObject.setZValue(self, z)

View File

@@ -771,7 +771,7 @@ def isSequence(obj):
         #if isinstance(arg, basestring):
             #return self.data[arg]
         #elif isinstance(arg, int):
-            #return dict([(k, v[arg]) for k, v in self.data.iteritems()])
+            #return dict([(k, v[arg]) for k, v in self.data.items()])
         #elif isinstance(arg, tuple):
             #arg = self._orderArgs(arg)
             #return self.data[arg[1]][arg[0]]

View File

@@ -245,7 +245,7 @@ class Tasker(object):
         self.proc = process
         self.par = parallelizer
         self.tasks = tasks
-        for k, v in kwds.iteritems():
+        for k, v in kwds.items():
             setattr(self, k, v)
 
     def __iter__(self):
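
Tasker simply copies its keyword arguments onto the instance, and items() keeps that working on both interpreter lines. A small standalone sketch of the same kwargs-to-attributes pattern (the Options class is illustrative, not pyqtgraph code):

class Options(object):
    """Illustrative: copy arbitrary keyword arguments onto the instance, as Tasker does."""
    def __init__(self, **kwds):
        for k, v in kwds.items():   # items() exists on Python 2 and 3
            setattr(self, k, v)

opts = Options(workers=4, verbose=True)
assert opts.workers == 4 and opts.verbose is True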

View File

@@ -251,7 +251,7 @@ class ForkedProcess(RemoteEventHandler):
         proxyIDs = {}
         if preProxy is not None:
-            for k, v in preProxy.iteritems():
+            for k, v in preProxy.items():
                 proxyId = LocalObjectProxy.registerObject(v)
                 proxyIDs[k] = proxyId
@@ -300,7 +300,7 @@ class ForkedProcess(RemoteEventHandler):
             RemoteEventHandler.__init__(self, remoteConn, name+'_child', pid=ppid)
 
             self.forkedProxies = {}
-            for name, proxyId in proxyIDs.iteritems():
+            for name, proxyId in proxyIDs.items():
                 self.forkedProxies[name] = ObjectProxy(ppid, proxyId=proxyId, typeStr=repr(preProxy[name]))
 
             if target is not None:

View File

@@ -545,7 +545,7 @@ class RemoteEventHandler(object):
         if autoProxy is True:
             args = [self.autoProxy(v, noProxyTypes) for v in args]
-            for k, v in kwds.iteritems():
+            for k, v in kwds.items():
                 opts[k] = self.autoProxy(v, noProxyTypes)
 
         byteMsgs = []

View File

@@ -239,7 +239,7 @@ class CaselessDict(OrderedDict):
         return key.lower() in self.keyMap
 
     def update(self, d):
-        for k, v in d.iteritems():
+        for k, v in d.items():
             self[k] = v
 
     def copy(self):
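
CaselessDict.update() routes every pair through its own __setitem__ so keys are normalized, and iterating d.items() preserves that on Python 3. A simplified standalone sketch of the override pattern (lowercasing keys on storage; this is not the pyqtgraph implementation):

class CaselessDictSketch(dict):
    """Illustrative: keys compare case-insensitively by storing them lowercased."""
    def __setitem__(self, key, value):
        dict.__setitem__(self, key.lower(), value)

    def __getitem__(self, key):
        return dict.__getitem__(self, key.lower())

    def update(self, d):
        for k, v in d.items():      # route each pair through __setitem__
            self[k] = v

d = CaselessDictSketch()
d.update({'Color': 'red', 'WIDTH': 2})
assert d['color'] == 'red' and d['width'] == 2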
@@ -311,11 +311,11 @@ class ProtectedDict(dict):
         raise Exception("It is not safe to copy protected dicts! (instead try deepcopy, but be careful.)")
 
     def itervalues(self):
-        for v in self._data_.itervalues():
+        for v in self._data_.values():
             yield protect(v)
 
     def iteritems(self):
-        for k, v in self._data_.iteritems():
+        for k, v in self._data_.items():
             yield (k, protect(v))
 
     def deepcopy(self):
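
ProtectedDict keeps its itervalues()/iteritems() generator methods but now drives them from the wrapped dict's Python 3 values()/items(), protecting each value on the way out. A rough sketch of that wrapper idea, with a stand-in protect() since the real helper lives alongside ProtectedDict:

def protect(obj):
    # Stand-in only: the real pyqtgraph helper returns a read-only proxy.
    return obj

class ProtectedViewSketch(object):
    """Illustrative wrapper yielding protected values from an underlying dict."""
    def __init__(self, data):
        self._data_ = data

    def itervalues(self):
        for v in self._data_.values():      # Python 3-safe iteration
            yield protect(v)

    def iteritems(self):
        for k, v in self._data_.items():    # Python 3-safe iteration
            yield (k, protect(v))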

View File

@@ -80,7 +80,7 @@ class LRUCache(object):
             for i in ordered:
                 del self._dict[i[0]]
 
-        def iteritems(self, accessTime=False):
+        def items(self, accessTime=False):
             '''
             :param bool accessTime:
                 If True sorts the returned items by the internal access time.
@@ -94,18 +94,18 @@ class LRUCache(object):
     else:
         def values(self):
-            return [i[1] for i in self._dict.itervalues()]
+            return [i[1] for i in self._dict.values()]
 
         def keys(self):
-            return [x[0] for x in self._dict.itervalues()]
+            return [x[0] for x in self._dict.values()]
 
         def _resizeTo(self):
-            ordered = sorted(self._dict.itervalues(), key=operator.itemgetter(2))[:self.resizeTo]
+            ordered = sorted(self._dict.values(), key=operator.itemgetter(2))[:self.resizeTo]
             for i in ordered:
                 del self._dict[i[0]]
 
-        def iteritems(self, accessTime=False):
+        def items(self, accessTime=False):
             '''
             ============= ======================================================
             **Arguments**
@@ -114,8 +114,8 @@ class LRUCache(object):
             ============= ======================================================
             '''
             if accessTime:
-                for x in sorted(self._dict.itervalues(), key=operator.itemgetter(2)):
+                for x in sorted(self._dict.values(), key=operator.itemgetter(2)):
                     yield x[0], x[1]
             else:
-                for x in self._dict.iteritems():
+                for x in self._dict.items():
                     yield x[0], x[1]
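
In the cache each stored entry is a [key, value, accessTime] record, so sorting on operator.itemgetter(2) walks entries from least to most recently used; renaming iteritems to items also lines the class up with the Python 3 dict API. A simplified sketch of that idea (TinyLRU is illustrative and omits the size/eviction logic of the real class):

import operator

class TinyLRU(object):
    """Illustrative: entries are [key, value, access_counter] records."""
    def __init__(self):
        self._dict = {}
        self._counter = 0

    def __setitem__(self, key, value):
        self._counter += 1
        self._dict[key] = [key, value, self._counter]

    def __getitem__(self, key):
        entry = self._dict[key]
        self._counter += 1
        entry[2] = self._counter            # a read refreshes the access time
        return entry[1]

    def items(self, accessTime=False):
        if accessTime:
            # least recently used first, mirroring the itemgetter(2) sort above
            for x in sorted(self._dict.values(), key=operator.itemgetter(2)):
                yield x[0], x[1]
        else:
            for key, entry in self._dict.items():
                yield key, entry[1]

cache = TinyLRU()
cache['a'] = 1
cache['b'] = 2
_ = cache['a']                              # 'a' becomes the most recently used
assert list(cache.items(accessTime=True)) == [('b', 2), ('a', 1)]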

View File

@@ -22,28 +22,28 @@ def checkLru(lru):
     set([2, 1]) == set(lru.values())
 
     #Iterates from the used in the last access to others based on access time.
-    assert [(2, 2), (1, 1)] == list(lru.iteritems(accessTime=True))
+    assert [(2, 2), (1, 1)] == list(lru.items(accessTime=True))
 
     lru[2] = 2
-    assert [(1, 1), (2, 2)] == list(lru.iteritems(accessTime=True))
+    assert [(1, 1), (2, 2)] == list(lru.items(accessTime=True))
 
     del lru[2]
-    assert [(1, 1), ] == list(lru.iteritems(accessTime=True))
+    assert [(1, 1), ] == list(lru.items(accessTime=True))
 
     lru[2] = 2
-    assert [(1, 1), (2, 2)] == list(lru.iteritems(accessTime=True))
+    assert [(1, 1), (2, 2)] == list(lru.items(accessTime=True))
 
     _a = lru[1]
-    assert [(2, 2), (1, 1)] == list(lru.iteritems(accessTime=True))
+    assert [(2, 2), (1, 1)] == list(lru.items(accessTime=True))
 
     _a = lru[2]
-    assert [(1, 1), (2, 2)] == list(lru.iteritems(accessTime=True))
+    assert [(1, 1), (2, 2)] == list(lru.items(accessTime=True))
 
     assert lru.get(2) == 2
     assert lru.get(3) == None
-    assert [(1, 1), (2, 2)] == list(lru.iteritems(accessTime=True))
+    assert [(1, 1), (2, 2)] == list(lru.items(accessTime=True))
 
     lru.clear()
-    assert [] == list(lru.iteritems())
+    assert [] == list(lru.items())
 
 
 if __name__ == '__main__':