Skip to content

Commit

Permalink
Updated tests, added a priority queue
Browse files Browse the repository at this point in the history
Updated the tests to reflect the functionality added in version 0.4.0, and then
updated the library to actually pass all those tests. Notably, there was a
problem with loading and dumping to and from files, but it's been resolved.

This version also sees the introduction of a priority queue, with unique elements.
While it is potentially problematic to be tied to the uniqueness constraint,
one can always inject arbitrary ids for repeated values. Perhaps not the most
efficient method in the world, but it exposes one more list type in redis.
  • Loading branch information
Dan Lecocq committed Nov 11, 2011
1 parent 89d19a3 commit 0a24208
Show file tree
Hide file tree
Showing 2 changed files with 254 additions and 34 deletions.
36 changes: 21 additions & 15 deletions qr.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ def __len__(self):
def __getitem__(self, val):
"""Get a slice or a particular index."""
try:
return [self._unpack(i) for i in self.redis.lrange(self.key, val.start, val.stop)]
return [self._unpack(i) for i in self.redis.lrange(self.key, val.start, val.stop - 1)]
except AttributeError:
return self._unpack(self.redis.lindex(self.key, val))
except Exception as e:
Expand All @@ -96,10 +96,11 @@ def dump(self, fobj):

def load(self, fobj):
    """Load the contents of the provided fobj into the queue.

    Repeatedly deserializes items from *fobj* and left-pushes each onto
    the Redis list until the stream is exhausted. The file object is
    consumed; items are packed with self._pack before being stored.
    """
    try:
        while True:
            # serializer.load raises at end of stream (EOFError for pickle)
            self.redis.lpush(self.key, self._pack(self.serializer.load(fobj)))
    except EOFError:
        # Clean end of stream -- every item was loaded.
        return
    except Exception:
        # Best-effort semantics preserved from the original, but no
        # longer a bare `except:` that would swallow SystemExit /
        # KeyboardInterrupt.
        return

def dumpfname(self, fname, truncate=False):
"""Destructively dump the contents of the queue into fname"""
Expand Down Expand Up @@ -128,11 +129,15 @@ def peek(self):

def elements(self):
    """Return all elements as a Python list"""
    raw_items = self.redis.lrange(self.key, 0, -1)
    return list(map(self._unpack, raw_items))

def elements_as_json(self):
    """Return all elements as a JSON-encoded string.

    Fix: the original passed the bound method ``self.elements`` to
    ``json.dumps`` without calling it, which raises a TypeError
    (methods are not JSON serializable). It must be invoked.
    """
    return json.dumps(self.elements())

def clear(self):
    """Remove all elements in the queue by deleting its Redis key."""
    # Deleting the key empties the list in one round-trip; Redis treats
    # a missing key as an empty list, so subsequent operations still work.
    self.redis.delete(self.key)

class Deque(BaseQueue):
"""Implements a double-ended queue"""
Expand Down Expand Up @@ -191,7 +196,7 @@ def __len__(self):
def __getitem__(self, val):
"""Get a slice or a particular index."""
try:
return [self._unpack(i) for i in self.redis.zrange(self.key, val.start, val.stop)]
return [self._unpack(i) for i in self.redis.zrange(self.key, val.start, val.stop - 1)]
except AttributeError:
val = self.redis.zrange(self.key, val, val)
if val:
Expand All @@ -206,16 +211,18 @@ def dump(self, fobj):
next = self.redis.zrange(self.key, 0, 0, withscores=True)
removed = self.redis.zremrangebyrank(self.key, 0, 0)
while next:
fobj.write(next[0])
next = self.redis.zrange(self.key, 0, 0)
self.serializer.dump(next[0], fobj)
next = self.redis.zrange(self.key, 0, 0, withscores=True)
removed = self.redis.zremrangebyrank(self.key, 0, 0)

def load(self, fobj):
    """Load the contents of the provided fobj into the priority queue.

    Repeatedly deserializes (value, score) pairs from *fobj* and adds
    each to the Redis sorted set until the stream is exhausted. The
    file object is consumed.
    """
    try:
        while True:
            # serializer.load raises at end of stream (EOFError for pickle)
            value, score = self.serializer.load(fobj)
            self.redis.zadd(self.key, value, score)
    except EOFError:
        # Clean end of stream -- every pair was loaded.
        return
    except Exception:
        # Best-effort semantics preserved from the original, but the
        # unused `as e` binding is dropped.
        return

def dumpfname(self, fname, truncate=False):
"""Destructively dump the contents of the queue into fname"""
Expand Down Expand Up @@ -244,7 +251,7 @@ def peek(self, withscores=False):

def elements(self):
    """Return all elements as a Python list"""
    members = self.redis.zrange(self.key, 0, -1)
    return list(map(self._unpack, members))

def pop(self, withscores=False):
'''Get the element with the lowest score, and pop it off'''
Expand Down Expand Up @@ -313,4 +320,3 @@ def pop(self):
popped = self.redis.lpop(self.key)
log.debug('Popped ** %s ** from key ** %s **' % (popped, self.key))
return self._unpack(popped)

Loading

0 comments on commit 0a24208

Please sign in to comment.