Remove caps which have not been seen for three months from the db.

Thanks to Asterix for the initial version of this patch!
Stephan Erb 2009-11-11 23:14:51 +01:00
parent 8203211e74
commit af3af5bec8
6 changed files with 79 additions and 33 deletions
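In short, the change has three parts: the caps_cache table gains a last_seen column, the timestamp is refreshed (at most once per session and hash) whenever a client advertising that hash shows up, and stale rows are dropped when the cache is initialized from the db at startup. "Three months" is approximated as 3 * 30 days; a minimal sketch of the cutoff arithmetic, not part of the patch itself:

import time

THREE_MONTHS = 3 * 30 * 24 * 3600          # = 7776000 seconds, roughly 90 days
cutoff = int(time.time()) - THREE_MONTHS   # rows with last_seen < cutoff are removed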


@@ -40,6 +40,10 @@ from common.xmpp import NS_XHTML_IM, NS_RECEIPTS, NS_ESESSION, NS_CHATSTATES
# Features where we cannot safely assume that the other side supports them
FEATURE_BLACKLIST = [NS_CHATSTATES, NS_XHTML_IM, NS_RECEIPTS, NS_ESESSION]
# Query entry status codes
NEW = 0
QUERIED = 1
CACHED = 2 # got the answer
################################################################################
### Public API of this module
@@ -58,7 +62,7 @@ def client_supports(client_caps, requested_feature):
supported_features = cache_item.features
if requested_feature in supported_features:
return True
elif supported_features == [] and cache_item.queried in (0, 1):
elif supported_features == [] and cache_item.status in (NEW, QUERIED):
# assume the feature is supported if we don't yet know what the client
# is capable of
return requested_feature not in FEATURE_BLACKLIST
@@ -220,7 +224,7 @@ class NullClientCaps(AbstractClientCaps):
def _lookup_in_cache(self, caps_cache):
# lookup something which does not exist to get a new CacheItem created
cache_item = caps_cache[('dummy', '')]
assert cache_item.queried == 0
assert cache_item.status != CACHED
return cache_item
def _discover(self, connection, jid):
@@ -257,12 +261,8 @@ class CapsCache(object):
self._identities = []
self._logger = logger
# not cached into db:
# have we sent the query?
# 0 == not queried
# 1 == queried
# 2 == got the answer
self.queried = 0
self.status = NEW
self._recently_seen = False
def _get_features(self):
return self._features
@@ -304,19 +304,28 @@ class CapsCache(object):
self.features = features
self._logger.add_caps_entry(self.hash_method, self.hash,
identities, features)
self.status = CACHED
def update_last_seen(self):
if not self._recently_seen:
self._recently_seen = True
self._logger.update_caps_time(self.hash_method, self.hash)
self.__CacheItem = CacheItem
self.logger = logger
def initialize_from_db(self):
# get data from logger...
if self.logger is not None:
self._remove_outdated_caps()
for hash_method, hash_, identities, features in \
self.logger.iter_caps_data():
x = self[(hash_method, hash_)]
x.identities = identities
x.features = features
x.queried = 2
x.status = CACHED
def _remove_outdated_caps(self):
'''Removes outdated values from the db'''
self.logger.clean_caps_table()
def __getitem__(self, caps):
if caps in self.__cache:
@@ -336,13 +345,14 @@ class CapsCache(object):
lookup_cache_item = client_caps.get_cache_lookup_strategy()
q = lookup_cache_item(self)
if q.queried == 0:
if q.status == NEW:
# do query for bare node+hash pair
# this will create proper object
q.queried = 1
q.status = QUERIED
discover = client_caps.get_discover_strategy()
discover(connection, jid)
else:
q.update_last_seen()
################################################################################
### Caps network coding
@@ -409,7 +419,7 @@ class ConnectionCaps(object):
lookup = contact.client_caps.get_cache_lookup_strategy()
cache_item = lookup(capscache)
if cache_item.queried == 2:
if cache_item.status == CACHED:
return
else:
validate = contact.client_caps.get_hash_validation_strategy()
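The magic values 0/1/2 that used to live in CacheItem.queried become the module-level constants NEW, QUERIED and CACHED, and the new per-session _recently_seen flag keeps update_last_seen() from writing to the db more than once per hash and session. Roughly, handling a presence that carries a caps hash now works like this (a sketch; the two callbacks are placeholder names, not the real API):

NEW, QUERIED, CACHED = 0, 1, 2   # same integers the old 'queried' field used

def on_caps_presence(cache_item, send_disco, write_last_seen):
    # cache_item: a CacheItem-like object with .status and ._recently_seen
    if cache_item.status == NEW:
        cache_item.status = QUERIED   # a disco#info query is now in flight
        send_disco()                  # the answer eventually sets CACHED
    elif not cache_item._recently_seen:
        cache_item._recently_seen = True
        write_last_seen()             # UPDATE caps_cache SET last_seen = ...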


@@ -91,7 +91,8 @@ def create_log_db():
CREATE TABLE caps_cache (
hash_method TEXT,
hash TEXT,
data BLOB);
data BLOB,
last_seen INTEGER);
CREATE TABLE rooms_last_message_time(
jid_id INTEGER PRIMARY KEY UNIQUE,
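last_seen simply stores Unix time in seconds, so the column needs no special handling when reading it back. A standalone illustration with an in-memory database (not Gajim code; the hash value is made up):

import sqlite3, time

con = sqlite3.connect(':memory:')
con.execute('''CREATE TABLE caps_cache (
        hash_method TEXT, hash TEXT, data BLOB, last_seen INTEGER)''')
con.execute('INSERT INTO caps_cache VALUES (?, ?, ?, ?)',
        ('sha-1', 'examplehash=', sqlite3.Binary(b'gzipped xml'), int(time.time())))
print(con.execute('SELECT hash, last_seen FROM caps_cache').fetchone())
# -> ('examplehash=', <current Unix time as an integer>)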


@@ -27,7 +27,7 @@ docdir = '../'
datadir = '../'
localedir = '../po'
version = '0.12.5.8-dev'
version = '0.13.0.1-dev'
import sys, os.path
for base in ('.', 'common'):


@@ -838,14 +838,27 @@ class Logger:
gzip.close()
data = string.getvalue()
self.cur.execute('''
INSERT INTO caps_cache ( hash_method, hash, data )
VALUES (?, ?, ?);
''', (hash_method, hash_, buffer(data))) # (1) -- note above
INSERT INTO caps_cache ( hash_method, hash, data, last_seen )
VALUES (?, ?, ?, ?);
''', (hash_method, hash_, buffer(data), int(time.time())))
# (1) -- note above
try:
self.con.commit()
except sqlite.OperationalError, e:
print >> sys.stderr, str(e)
def update_caps_time(self, method, hash_):
sql = '''UPDATE caps_cache SET last_seen = %d
WHERE hash_method = "%s" and hash = "%s"''' % \
(int(time.time()), method, hash_)
self.simple_commit(sql)
def clean_caps_table(self):
'''Remove caps which have not been seen for 3 months'''
sql = '''DELETE FROM caps_cache WHERE last_seen < %d''' % \
int(time.time() - 3*30*24*3600)
self.simple_commit(sql)
def replace_roster(self, account_name, roster_version, roster):
''' Replace current roster in DB by a new one.
account_name is the name of the account to change
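Since hash_method and hash_ ultimately come from remote presence stanzas, the two new helpers could also bind their values instead of interpolating them with %. A sketch of an equivalent parameterized version, assuming the same self.cur / self.con attributes Logger already uses above:

def update_caps_time(self, method, hash_):
    self.cur.execute('UPDATE caps_cache SET last_seen = ? '
            'WHERE hash_method = ? AND hash = ?',
            (int(time.time()), method, hash_))
    self.con.commit()

def clean_caps_table(self):
    '''Remove caps which have not been seen for 3 months'''
    self.cur.execute('DELETE FROM caps_cache WHERE last_seen < ?',
            (int(time.time() - 3*30*24*3600),))
    self.con.commit()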


@@ -29,6 +29,7 @@
import os
import locale
import re
from time import time
from common import gajim
from common import helpers
from common import caps
@@ -218,6 +219,8 @@ class OptionsParser:
self.update_config_to_01257()
if old < [0, 12, 5, 8] and new >= [0, 12, 5, 8]:
self.update_config_to_01258()
if old < [0, 13, 0, 1] and new >= [0, 13, 0, 1]:
self.update_config_to_01301()
gajim.logger.init_vars()
gajim.config.set('version', new_version)
@@ -817,4 +820,23 @@ class OptionsParser:
'proxy.jabber.ru', 'proxy.jabbim.cz'])
gajim.config.set('version', '0.12.5.8')
def update_config_to_01301(self):
back = os.getcwd()
os.chdir(logger.LOG_DB_FOLDER)
con = sqlite.connect(logger.LOG_DB_FILE)
os.chdir(back)
cur = con.cursor()
try:
cur.executescript(
'''
ALTER TABLE caps_cache
ADD last_seen INTEGER default %d;
''' % int(time())
)
con.commit()
except sqlite.OperationalError:
pass
con.close()
gajim.config.set('version', '0.13.0.1')
# vim: se ts=3:
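One detail worth noting about the migration: SQLite's ALTER TABLE ... ADD COLUMN does not rewrite existing rows, it simply reports the DEFAULT for them on read. Since the default here is baked in as int(time()), every caps entry that existed before the upgrade behaves as if it had just been seen, so it gets a full three months before clean_caps_table() may remove it. A standalone check of that behaviour (not Gajim code):

import sqlite3, time

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE caps_cache (hash_method TEXT, hash TEXT, data BLOB)')
con.execute('INSERT INTO caps_cache VALUES (?, ?, ?)',
        ('sha-1', 'oldhash=', sqlite3.Binary(b'')))

# same statement the upgrade runs; the default is frozen at upgrade time
con.execute('ALTER TABLE caps_cache ADD last_seen INTEGER default %d' % int(time.time()))

print(con.execute('SELECT last_seen FROM caps_cache').fetchone()[0])
# -> the upgrade timestamp for the pre-existing row, not NULL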


@@ -66,9 +66,9 @@ class TestCapsCache(CommonCapsTest):
def test_initialize_from_db(self):
''' Read cached dummy data from db '''
self.assertEqual(self.cc[self.client_caps].queried, 0)
self.assertEqual(self.cc[self.client_caps].status, caps.NEW)
self.cc.initialize_from_db()
self.assertEqual(self.cc[self.client_caps].queried, 2)
self.assertEqual(self.cc[self.client_caps].status, caps.CACHED)
def test_preload_triggering_query(self):
''' Make sure that preload issues a disco '''