Remove caps which have not been seen for three months from the db.

Thanks Asterix for the initial version of this patch!
This commit is contained in:
Stephan Erb 2009-11-11 23:14:51 +01:00
parent 8203211e74
commit af3af5bec8
6 changed files with 79 additions and 33 deletions

View file

@@ -40,6 +40,10 @@ from common.xmpp import NS_XHTML_IM, NS_RECEIPTS, NS_ESESSION, NS_CHATSTATES
 # Features where we cannot safely assume that the other side supports them
 FEATURE_BLACKLIST = [NS_CHATSTATES, NS_XHTML_IM, NS_RECEIPTS, NS_ESESSION]
+# Query entry status codes
+NEW = 0
+QUERIED = 1
+CACHED = 2 # got the answer
 ################################################################################
 ### Public API of this module
@@ -58,7 +62,7 @@ def client_supports(client_caps, requested_feature):
 supported_features = cache_item.features
 if requested_feature in supported_features:
 return True
-elif supported_features == [] and cache_item.queried in (0, 1):
+elif supported_features == [] and cache_item.status in (NEW, QUERIED):
 # assume feature is supported, if we don't know yet, what the client
 # is capable of
 return requested_feature not in FEATURE_BLACKLIST
@@ -168,8 +172,8 @@ class AbstractClientCaps(object):
 def _is_hash_valid(self, identities, features, dataforms):
 ''' To be implemented by subclassess '''
 raise NotImplementedError()
 class ClientCaps(AbstractClientCaps):
 ''' The current XEP-115 implementation '''
@@ -188,7 +192,7 @@ class ClientCaps(AbstractClientCaps):
 computed_hash = compute_caps_hash(identities, features,
 dataforms=dataforms, hash_method=self._hash_method)
 return computed_hash == self._hash
 class OldClientCaps(AbstractClientCaps):
 ''' Old XEP-115 implemtation. Kept around for background competability. '''
@@ -204,7 +208,7 @@ class OldClientCaps(AbstractClientCaps):
 def _is_hash_valid(self, identities, features, dataforms):
 return True
 class NullClientCaps(AbstractClientCaps):
 '''
@@ -220,7 +224,7 @@ class NullClientCaps(AbstractClientCaps):
 def _lookup_in_cache(self, caps_cache):
 # lookup something which does not exist to get a new CacheItem created
 cache_item = caps_cache[('dummy', '')]
-assert cache_item.queried == 0
+assert cache_item.status != CACHED
 return cache_item
 def _discover(self, connection, jid):
@@ -248,7 +252,7 @@ class CapsCache(object):
 # another object, and we will have plenty of identical long
 # strings. therefore we can cache them
 __names = {}
 def __init__(self, hash_method, hash_, logger):
 # cached into db
 self.hash_method = hash_method
@@ -257,12 +261,8 @@ class CapsCache(object):
 self._identities = []
 self._logger = logger
-# not cached into db:
-# have we sent the query?
-# 0 == not queried
-# 1 == queried
-# 2 == got the answer
-self.queried = 0
+self.status = NEW
+self._recently_seen = False
 def _get_features(self):
 return self._features
@@ -304,19 +304,28 @@ class CapsCache(object):
 self.features = features
 self._logger.add_caps_entry(self.hash_method, self.hash,
 identities, features)
+self.status = CACHED
+def update_last_seen(self):
+if not self._recently_seen:
+self._recently_seen = True
+self._logger.update_caps_time(self.hash_method, self.hash)
 self.__CacheItem = CacheItem
 self.logger = logger
 def initialize_from_db(self):
-# get data from logger...
-if self.logger is not None:
-for hash_method, hash_, identities, features in \
-self.logger.iter_caps_data():
-x = self[(hash_method, hash_)]
-x.identities = identities
-x.features = features
-x.queried = 2
+self._remove_outdated_caps()
+for hash_method, hash_, identities, features in \
+self.logger.iter_caps_data():
+x = self[(hash_method, hash_)]
+x.identities = identities
+x.features = features
+x.status = CACHED
+def _remove_outdated_caps(self):
+'''Removes outdated values from the db'''
+self.logger.clean_caps_table()
 def __getitem__(self, caps):
 if caps in self.__cache:
@@ -336,13 +345,14 @@ class CapsCache(object):
 lookup_cache_item = client_caps.get_cache_lookup_strategy()
 q = lookup_cache_item(self)
-if q.queried == 0:
+if q.status == NEW:
 # do query for bare node+hash pair
 # this will create proper object
-q.queried = 1
+q.status = QUERIED
 discover = client_caps.get_discover_strategy()
 discover(connection, jid)
+else:
+q.update_last_seen()
 ################################################################################
 ### Caps network coding
@@ -391,7 +401,7 @@ class ConnectionCaps(object):
 client_caps = OldClientCaps(caps_hash, node)
 else:
 client_caps = ClientCaps(caps_hash, node, hash_method)
 capscache.query_client_of_jid_if_unknown(self, jid, client_caps)
 contact.client_caps = client_caps
@@ -409,7 +419,7 @@ class ConnectionCaps(object):
 lookup = contact.client_caps.get_cache_lookup_strategy()
 cache_item = lookup(capscache)
-if cache_item.queried == 2:
+if cache_item.status == CACHED:
 return
 else:
 validate = contact.client_caps.get_hash_validation_strategy()

View file

@@ -91,7 +91,8 @@ def create_log_db():
 CREATE TABLE caps_cache (
 hash_method TEXT,
 hash TEXT,
-data BLOB);
+data BLOB,
+last_seen INTEGER);
 CREATE TABLE rooms_last_message_time(
 jid_id INTEGER PRIMARY KEY UNIQUE,

View file

@@ -27,7 +27,7 @@ docdir = '../'
 datadir = '../'
 localedir = '../po'
-version = '0.12.5.8-dev'
+version = '0.13.0.1-dev'
 import sys, os.path
 for base in ('.', 'common'):

View file

@@ -838,14 +838,27 @@ class Logger:
 gzip.close()
 data = string.getvalue()
 self.cur.execute('''
-INSERT INTO caps_cache ( hash_method, hash, data )
-VALUES (?, ?, ?);
-''', (hash_method, hash_, buffer(data))) # (1) -- note above
+INSERT INTO caps_cache ( hash_method, hash, data, last_seen )
+VALUES (?, ?, ?, ?);
+''', (hash_method, hash_, buffer(data), int(time.time())))
+# (1) -- note above
 try:
 self.con.commit()
 except sqlite.OperationalError, e:
 print >> sys.stderr, str(e)
+def update_caps_time(self, method, hash_):
+sql = '''UPDATE caps_cache SET last_seen = %d
+WHERE hash_method = "%s" and hash = "%s"''' % \
+(int(time.time()), method, hash_)
+self.simple_commit(sql)
+def clean_caps_table(self):
+'''Remove caps which was not seen for 3 months'''
+sql = '''DELETE FROM caps_cache WHERE last_seen < %d''' % \
+int(time.time() - 3*30*24*3600)
+self.simple_commit(sql)
 def replace_roster(self, account_name, roster_version, roster):
 ''' Replace current roster in DB by a new one.
 accout_name is the name of the account to change

View file

@@ -29,6 +29,7 @@
 import os
 import locale
 import re
+from time import time
 from common import gajim
 from common import helpers
 from common import caps
@@ -218,6 +219,8 @@ class OptionsParser:
 self.update_config_to_01257()
 if old < [0, 12, 5, 8] and new >= [0, 12, 5, 8]:
 self.update_config_to_01258()
+if old < [0, 13, 0, 1] and new >= [0, 13, 0, 1]:
+self.update_config_to_01301()
 gajim.logger.init_vars()
 gajim.config.set('version', new_version)
@@ -817,4 +820,23 @@ class OptionsParser:
 'proxy.jabber.ru', 'proxy.jabbim.cz'])
 gajim.config.set('version', '0.12.5.8')
+def update_config_to_01301(self):
+back = os.getcwd()
+os.chdir(logger.LOG_DB_FOLDER)
+con = sqlite.connect(logger.LOG_DB_FILE)
+os.chdir(back)
+cur = con.cursor()
+try:
+cur.executescript(
+'''
+ALTER TABLE caps_cache
+ADD last_seen INTEGER default %d;
+''' % int(time())
+)
+con.commit()
+except sqlite.OperationalError:
+pass
+con.close()
+gajim.config.set('version', '0.13.0.1')
 # vim: se ts=3:

View file

@@ -66,9 +66,9 @@ class TestCapsCache(CommonCapsTest):
 def test_initialize_from_db(self):
 ''' Read cashed dummy data from db '''
-self.assertEqual(self.cc[self.client_caps].queried, 0)
+self.assertEqual(self.cc[self.client_caps].status, caps.NEW)
 self.cc.initialize_from_db()
-self.assertEqual(self.cc[self.client_caps].queried, 2)
+self.assertEqual(self.cc[self.client_caps].status, caps.CACHED)
 def test_preload_triggering_query(self):
 ''' Make sure that preload issues a disco '''