Patch
@@ -35,3 +35,51 @@
             data = fl.read(node)
             yield 'ok', path, node, data
+
+def redisfiledatakey(path, node):
+    return b'filedata:%s:%s' % (path, node)
+
+class redisacceleratedrevlogfilesstore(repository.basefilesstore):
+    """A filesstore that can use a Redis server to speed up operations."""
+    def __init__(self, redis, basestore):
+        self._redis = redis
+        self._basestore = basestore
+
+    def resolvefilesdata(self, entries):
+        # Our strategy is to batch requests to Redis because a single
+        # round trip per batch is much faster than a command per entry.
+
+        batch = []
+        for entry in entries:
+            batch.append(entry)
+
+            if len(batch) == 1000:
+                for res in self._processfiledatabatch(batch):
+                    yield res
+
+                batch = []
+
+        if batch:
+            for res in self._processfiledatabatch(batch):
+                yield res
+
+    def _processfiledatabatch(self, batch):
+        keys = [redisfiledatakey(path, node) for path, node in batch]
+
+        missing = []
+
+        for i, redisdata in enumerate(self._redis.mget(keys)):
+            path, node = batch[i]
+
+            if redisdata is None:
+                missing.append((path, node))
+            else:
+                yield 'ok', path, node, redisdata
+
+        # Now resolve all the missing data from the base store.
+        for res, path, node, data in self._basestore.resolvefilesdata(missing):
+            yield res, path, node, data
+
+            # Don't forget to cache it!
+            if res == 'ok':
+                self._redis.set(redisfiledatakey(path, node), data)
@@ -480,7 +480,11 @@
         else: # standard vfs
             self.svfs.audit = self._getsvfsward(self.svfs.audit)
         self._applyopenerreqs()
-        self.filesstore = revlogstore.revlogfilesstore(self.svfs)
+        import redis
+        basefilesstore = revlogstore.revlogfilesstore(self.svfs)
+        redisconn = redis.StrictRedis(host='localhost', port=6379, db=0)
+        self.filesstore = revlogstore.redisacceleratedrevlogfilesstore(
+            redisconn, basefilesstore)
         if create:
             self._writerequirements()
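
To make the intended behavior concrete, here is a minimal usage sketch of the class above, outside of Mercurial. The fakebasestore stand-in and the sample key are invented for this example; it assumes a Redis server running on localhost and that redisacceleratedrevlogfilesstore and redisfiledatakey are importable as written in the patch:

import redis

class fakebasestore(object):
    """Hypothetical stand-in for revlogfilesstore (illustration only)."""
    def __init__(self, data):
        # dict mapping (path, node) -> file data
        self._data = data

    def resolvefilesdata(self, entries):
        for path, node in entries:
            yield 'ok', path, node, self._data[(path, node)]

base = fakebasestore({(b'foo.txt', b'a' * 40): b'file contents'})
conn = redis.StrictRedis(host='localhost', port=6379, db=0)
store = redisacceleratedrevlogfilesstore(conn, base)

# First resolution: MGET misses, so the data comes from the base store
# and is written back to Redis as a side effect.
for res, path, node, data in store.resolvefilesdata([(b'foo.txt', b'a' * 40)]):
    print(res, path, data)

# Second resolution: served entirely from the single MGET round trip.
for res, path, node, data in store.resolvefilesdata([(b'foo.txt', b'a' * 40)]):
    print(res, path, data)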
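
One design note on the write-back path: the patch issues one SET per missing entry as results stream out of the base store. Since the read side already amortizes round trips with MGET, the writes could be amortized the same way with a redis-py pipeline. A sketch under the same key scheme (the helper name is invented here, not part of the patch):

def cachefiledatabatch(redisconn, items):
    # Buffer one SET per (path, node, data) entry client-side, then
    # flush them all to the server in a single round trip.
    pipe = redisconn.pipeline(transaction=False)
    for path, node, data in items:
        pipe.set(redisfiledatakey(path, node), data)
    pipe.execute()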