Skip to content

Commit

Permalink
Improve RedisCache/cache docs
Browse files Browse the repository at this point in the history
  • Loading branch information
kylepw committed Aug 1, 2019
1 parent 9e40a9c commit fc743e4
Show file tree
Hide file tree
Showing 4 changed files with 45 additions and 59 deletions.
4 changes: 2 additions & 2 deletions docs/api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ This page contains some basic documentation for the Tweepy module.
:param auth_handler: authentication handler to be used
:param host: general API host
:param search_host: search API host
:param cache: cache backend to use
:param cache: cache backend to use (:class:`tweepy.FileCache('cache_dir')`, :class:`tweepy.MemoryCache()`, :class:`tweepy.RedisCache(client)`)
:param api_root: general API path root
:param search_root: search API path root
:param retry_count: default number of retries to attempt when error occurs
Expand Down Expand Up @@ -590,7 +590,7 @@ Help Methods
* popular : return only the most popular results in the response
:param count: The number of tweets to return per page, up to a maximum of 100. Defaults to 15.
:param until: Returns tweets created before the given date. Date should be formatted as YYYY-MM-DD. Keep in mind that the search index has a 7-day limit. In other words, no tweets will be found for a date older than one week.
:param since_id: |since_id| There are limits to the number of Tweets which can be accessed through the API. If the limit of Tweets has occured since the since_id, the since_id will be forced to the oldest ID available.
:param since_id: |since_id| There are limits to the number of Tweets which can be accessed through the API. If the limit of Tweets has occurred since the since_id, the since_id will be forced to the oldest ID available.
:param max_id: |max_id|
:param include_entities: |include_entities|
:rtype: :class:`SearchResults` object
Expand Down
16 changes: 15 additions & 1 deletion tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from nose import SkipTest

from .config import tape, TweepyTestCase, use_replay, username
from tweepy import API, FileCache, Friendship, MemoryCache
from tweepy import API, FileCache, Friendship, MemoryCache, RedisCache
from tweepy.parsers import Parser

test_tweet_id = '266367358078169089'
Expand Down Expand Up @@ -460,5 +460,19 @@ def testfilecache(self):
if os.path.exists('cache_test_dir'):
shutil.rmtree('cache_test_dir')

def testrediscache(self):
try:
import redis
except ImportError:
raise SkipTest()
host, port = self.memcache_servers[0].split(':')
try:
client = redis.Redis(host=host, port=int(port))
client.ping()
except redis.exceptions.ConnectionError:
raise SkipTest()
self.cache = RedisCache(client, timeout=self.timeout)
self._run_tests()

# Allow running this test module directly (``python test_api.py``).
if __name__ == '__main__':
    unittest.main()
2 changes: 1 addition & 1 deletion tweepy/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@

from tweepy.api import API
from tweepy.auth import AppAuthHandler, OAuthHandler
from tweepy.cache import Cache, FileCache, MemoryCache
from tweepy.cache import Cache, FileCache, MemoryCache, RedisCache
from tweepy.cursor import Cursor
from tweepy.error import RateLimitError, TweepError
from tweepy.models import DirectMessage, Friendship, ModelFactory, SavedSearch, SearchResults, Status, User
Expand Down
82 changes: 27 additions & 55 deletions tweepy/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -307,88 +307,60 @@ def flush(self):


class RedisCache(Cache):
    """Redis server cache.

    Entries are pickled and stored under ``pre_identifier``-prefixed keys.
    Expiration is delegated to Redis itself (``PX`` on ``SET``), and every
    cached key is additionally tracked in the ``keys_container`` set so
    that :meth:`count`, :meth:`cleanup`, and :meth:`flush` can enumerate
    the cache's keys.
    """

    def __init__(
        self, client, timeout=60, keys_container='tweepy:keys', pre_identifier='tweepy:'
    ):
        """
        :param client: ``redis.Redis`` client instance
        :param timeout: entry lifetime in seconds; 0 means entries never
            expire (matching the base :class:`Cache` convention)
        :param keys_container: name of the Redis set tracking cached keys
        :param pre_identifier: prefix prepended to every cache key
        """
        Cache.__init__(self, timeout)
        self.client = client
        self.keys_container = keys_container
        self.pre_identifier = pre_identifier
        # Use milliseconds so fractional-second timeouts (e.g. 0.5)
        # survive conversion to Redis' integer PX argument.
        self.timeout = int(timeout * 1000)

    def store(self, key, value):
        """Store the key, value pair in our redis server"""
        # Prepend 'tweepy:' to our key, which makes it easier to
        # identify tweepy keys in the Redis server.
        key = self.pre_identifier + key
        # Pipeline both commands into a single round trip.
        pipe = self.client.pipeline()
        if self.timeout > 0:
            pipe.set(key, pickle.dumps(value), px=self.timeout)
        else:
            # Redis rejects px=0 ("invalid expire time"); a zero timeout
            # means the entry should simply never expire.
            pipe.set(key, pickle.dumps(value))
        # Add key to a set (container) to track cached keys.
        pipe.sadd(self.keys_container, key)
        pipe.execute()

    def get(self, key, timeout=None):
        """Given a key, returns an element from the redis table.

        :param timeout: if truthy (seconds), refresh the key's expiration
            on access
        """
        key = self.pre_identifier + key
        value = self.client.get(key)
        if not value:
            # Redis already expired (or never had) the key; make sure no
            # stale reference lingers in the container set.
            self.client.srem(self.keys_container, key)
        else:
            if timeout:
                # Reset the TTL with the caller-provided timeout.
                self.client.pexpire(key, int(timeout * 1000))
            value = pickle.loads(value)
        return value

    def count(self):
        """Return the number of live (non-expired) cache entries."""
        # Remove references to already-expired keys before counting.
        self.cleanup()
        return self.client.scard(self.keys_container)

    def delete_entry(self, key):
        """Delete an object from the redis table"""
        pipe = self.client.pipeline()
        pipe.delete(key)
        pipe.srem(self.keys_container, key)
        pipe.execute()

    def cleanup(self):
        """Cleanup all the expired keys"""
        keys = self.client.smembers(self.keys_container)
        for key in keys:
            if not self.client.get(key):
                # Key expired, so remove the dangling ref from container.
                self.client.srem(self.keys_container, key)

    def flush(self):
        """Delete all entries from the cache"""
        keys = tuple(self.client.smembers(self.keys_container))
        # Delete the container set and all tracked keys in one call.
        self.client.delete(self.keys_container, *keys)


class MongodbCache(Cache):
Expand Down

0 comments on commit fc743e4

Please sign in to comment.