caps: data stored and retrieved
parent 952e6d96e3
commit 6c1858224e
6 changed files with 143 additions and 35 deletions
@@ -1,5 +1,5 @@
AC_INIT([Gajim - A Jabber Instant Messager],
[0.11.1.2],[http://trac.gajim.org/],[gajim])
[0.11.1.3],[http://trac.gajim.org/],[gajim])
AC_PREREQ([2.59])
AM_INIT_AUTOMAKE([1.8])
AC_CONFIG_HEADER(config.h)
@@ -11,14 +11,10 @@
## GNU General Public License for more details.
##

#import logger
#import gajim
from itertools import *
import gajim
import xmpp
import xmpp.features_nb

#from meta import VerboseClassType
import gajim

class CapsCache(object):
''' This object keeps the mapping between caps data and real disco
@@ -97,6 +93,7 @@ class CapsCache(object):
ciself.node = node
ciself.version = version
ciself.features = set()
ciself.ext = ext
ciself.exts = {}

# set of tuples: (category, type, name)
@@ -110,10 +107,6 @@ class CapsCache(object):
# 2 == got the answer
ciself.queried = 0

def __iadd__(ciself, newfeature):
newfeature=self.__names.setdefault(newfeature, newfeature)
ciself.features.add(newfeature)

class CacheQuery(object):
def __init__(cqself, proxied):
cqself.proxied=proxied
@@ -125,6 +118,8 @@ class CapsCache(object):
def __getitem__(ciself, exts):
if not exts: # (), [], None, False, whatever
return ciself
if isinstance(exts, basestring):
exts=(exts,)
if len(exts)==1:
ext=exts[0]
if ext in ciself.exts:
@@ -135,12 +130,21 @@ class CapsCache(object):
proxied = [ciself]
proxied.extend(ciself[(e,)] for e in exts)
return ciself.CacheQuery(proxied)

def update(ciself, identities, features):
# NOTE: self refers to CapsCache object, not to CacheItem
self.identities=identities
self.features=features
self.logger.add_caps_entry(
ciself.node, ciself.version, ciself.ext,
identities, features)

self.__CacheItem = CacheItem

# prepopulate data which we are sure of; note: we do not log these info
gajim = 'http://gajim.org/caps'
gajimnode = 'http://gajim.org/caps'

gajimcaps=self[(gajim, '0.11.1')]
gajimcaps=self[(gajimnode, '0.11.1')]
gajimcaps.category='client'
gajimcaps.type='pc'
gajimcaps.features=set((xmpp.NS_BYTESTREAM, xmpp.NS_SI,
@@ -152,17 +156,16 @@ class CapsCache(object):
# TODO: older gajim versions

# start logging data from the net
self.__logger = logger
self.logger = logger

def load_from_db(self):
# get data from logger...
if self.__logger is not None:
for node, version, category, type_, name in self.__logger.get_caps_cache():
x=self.__clients[(node, version)]
x.category=category
x.type=type_
x.name=name
for node, version, ext, feature in self.__logger.get_caps_features_cache():
self.__clients[(node, version)][ext]+=feature
if self.logger is not None:
for node, ver, ext, identities, features in self.logger.iter_caps_data():
x=self[(node, ver, ext)]
x.identities=identities
x.features=features
x.queried=2

def __getitem__(self, caps):
node_version = caps[:2]
@@ -185,6 +188,7 @@ class CapsCache(object):
# this will create proper object
q.queried=1
account.discoverInfo(jid, '%s#%s' % (node, ver))
else:

for ext in exts:
qq=q[ext]
@@ -192,8 +196,9 @@ class CapsCache(object):
# do query for node+version+ext triple
qq.queried=1
account.discoverInfo(jid, '%s#%s' % (node, ext))
else:

capscache = CapsCache()
gajim.capscache = CapsCache(gajim.logger)

class ConnectionCaps(object):
''' This class highly depends on that it is a part of Connection class. '''
@@ -223,7 +228,7 @@ class ConnectionCaps(object):
jid=str(presence.getFrom())

# start disco query...
capscache.preload(self, jid, node, ver, exts)
gajim.capscache.preload(self, jid, node, ver, exts)

contact=gajim.contacts.get_contact_from_full_jid(self.name, jid)
if contact in [None, []]:
@@ -237,10 +242,7 @@ class ConnectionCaps(object):
contact.caps_exts=exts

def _capsDiscoCB(self, jid, node, identities, features, data):
gajim.log.debug('capsDiscoCB(jid=%r, node=%r, identities=%r, features=%r, data=%r)' %\
(jid, node, identities, features, data))
contact=gajim.contacts.get_contact_from_full_jid(self.name, jid)
gajim.log.debug(' contact=%r' % contact)
if not contact: return
if not contact.caps_node: return # we didn't asked for that?
if not node.startswith(contact.caps_node+'#'): return
@@ -251,11 +253,9 @@ class ConnectionCaps(object):
exts=(ext,)

# if we don't have this info already...
caps=capscache[(node, contact.caps_ver, exts)]
caps=gajim.capscache[(node, contact.caps_ver, exts)]
if caps.queried==2: return

caps.identities=set((i['category'], i['type'], i.get('name')) for i in identities)
caps.features=set(features)
identities=set((i['category'], i['type'], i.get('name')) for i in identities)
caps.update(identities, features)

gajim.log.debug('capsDiscoCB: added caps for %r:\n identities=%r\n features=%r'\
% ((node, contact.caps_ver, exts), caps.identities, caps.features))
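For orientation on the caps handling above: the cache is keyed by the node and ver (plus optional ext tokens) advertised in an entity-capabilities presence, each entry moves through queried states 0 (unknown), 1 (disco#info sent) and 2 (answer stored through the logger), and the disco#info request targets node#ver or node#ext. The short standalone sketch below only illustrates how those keys and disco node strings are derived; the sample node, ver and ext values are made up, and no Gajim classes are involved.

# Illustration only (Python 2) -- hypothetical advert values, not Gajim API calls.
node = 'http://gajim.org/caps'
ver = '0.11.1'
exts = ['ftrans', 'xhtml']  # optional extensions, may be empty

# one cache key per (node, ver) and per (node, ext); each starts at queried == 0
keys = [(node, ver)] + [(node, ext) for ext in exts]

# disco#info goes to node#ver and node#ext for each extension;
# queried becomes 1 when the query is sent and 2 once update() stores the answer
disco_nodes = ['%s#%s' % (node, ver)] + ['%s#%s' % (node, ext) for ext in exts]

print keys
print disco_nodes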
@@ -42,6 +42,7 @@ def create_log_db():
# logs.jid_id --> jids.jid_id but Sqlite doesn't do FK etc so it's done in python code
# jids.jid text column will be JID if TC-related, room_jid if GC-related,
# ROOM_JID/nick if pm-related.
# also check optparser.py, which updates databases on gajim updates
cur.executescript(
'''
CREATE TABLE jids(
@@ -74,6 +75,12 @@ def create_log_db():
);

CREATE INDEX idx_logs_jid_id_kind ON logs (jid_id, kind);

CREATE TABLE caps_cache (
node TEXT,
ver TEXT,
ext TEXT,
data BLOB);
'''
)
@@ -2,7 +2,7 @@ docdir = '../'

datadir = '../'

version = '0.11.1.2'
version = '0.11.1.3'

import sys, os.path
for base in ('.', 'common'):
@@ -19,6 +19,8 @@ import os
import sys
import time
import datetime
from gzip import GzipFile
from cStringIO import StringIO

import exceptions
import gajim
@@ -633,6 +635,76 @@ class Logger:
answer[result[0]] = self.convert_api_values_to_human_transport_type(result[1])
return answer

# initial interface for accessing logs of stored caps
def get_stored_caps(self): pass
def add_caps_entry(self): pass
# A longer note here:
# The database contains a blob field. Pysqlite seems to need special care for such fields.
# When storing, we need to convert string into buffer object (1).
# When retrieving, we need to convert it back to a string to decompress it. (2)
# GzipFile needs a file-like object, StringIO emulates file for plain strings.
def iter_caps_data(self):
''' Iterate over caps cache data stored in the database.
The iterator values are pairs of (node, ver, ext, identities, features):
identities == {'category':'foo', 'type':'bar', 'name':'boo'},
features being a list of feature namespaces. '''

# get data from table
# the data field contains binary object (gzipped data), this is a hack
# to get that data without trying to convert it to unicode
#tmp, self.con.text_factory = self.con.text_factory, str
try:
self.cur.execute('''SELECT node, ver, ext, data FROM caps_cache;''');
except sqlite.OperationalError:
# might happen when there's no caps_cache table yet
# -- there's no data to read anyway then
#self.con.text_factory = tmp
return
#self.con.text_factory = tmp

for node, ver, ext, data in self.cur:
# for each row: unpack the data field
# (format: (category, type, name, category, type, name, ...
# ..., 'FEAT', feature1, feature2, ...).join(' '))
# NOTE: if there's a need to do more gzip, put that to a function
data=GzipFile(fileobj=StringIO(str(data))).read().split(' ') # (2) -- note above
i=0
identities=set()
features=set()
while i<len(data) and data[i]!='FEAT':
category=data[i]
type=data[i+1]
name=data[i+2]
identities.add((category,type,name))
i+=3
i+=1
while i<len(data):
features.add(data[i])
i+=1
if not ext: ext=None # to make '' a None

# yield the row
yield node, ver, ext, identities, features

def add_caps_entry(self, node, ver, ext, identities, features):
data=[]
for identity in identities:
# there is no FEAT category
if identity[0]=='FEAT': return
if len(identity)<2 or not identity[2]:
data.extend((identity[0], identity[1], ''))
else:
data.extend(identity)
data.append('FEAT')
data.extend(features)
data = ' '.join(data)
string = StringIO() # if there's a need to do more gzip, put that to a function
gzip=GzipFile(fileobj=string, mode='w')
gzip.write(data)
gzip.close()
data = string.getvalue()
self.cur.execute('''
INSERT INTO caps_cache ( node, ver, ext, data )
VALUES (?, ?, ?, ?);
''', (node, ver, ext, buffer(data))) # (1) -- note above
try:
self.con.commit()
except sqlite.OperationalError, e:
print >> sys.stderr, str(e)
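The pysqlite note above is the heart of this change: identities are flattened into space-separated (category, type, name) triples, a 'FEAT' marker separates them from the feature namespaces, the joined string is gzipped, and the gzip output is wrapped in a buffer object so pysqlite stores it as a BLOB. The following standalone Python 2 sketch exercises that round-trip against an in-memory database; it only mirrors the convention used by add_caps_entry()/iter_caps_data() above (the table layout is copied from the diff, the sample values and variable names are illustrative), it is not Gajim's Logger class.

# Standalone sketch (Python 2): pack/unpack of the caps_cache data blob.
import sqlite3
from gzip import GzipFile
from cStringIO import StringIO

con = sqlite3.connect(':memory:')
cur = con.cursor()
cur.execute('CREATE TABLE caps_cache (node TEXT, ver TEXT, ext TEXT, data BLOB)')

# pack: identity triples, then the 'FEAT' marker, then feature namespaces
identities = [('client', 'pc', 'Gajim')]
features = ['http://jabber.org/protocol/chatstates', 'http://jabber.org/protocol/muc']
fields = []
for category, type_, name in identities:
	fields.extend((category, type_, name or ''))
fields.append('FEAT')
fields.extend(features)
sio = StringIO()
gz = GzipFile(fileobj=sio, mode='w')
gz.write(' '.join(fields))
gz.close()

# (1) store: wrap the gzipped string in buffer() so it is kept as a BLOB
cur.execute('INSERT INTO caps_cache VALUES (?, ?, ?, ?)',
	('http://gajim.org/caps', '0.11.1', '', buffer(sio.getvalue())))

# (2) retrieve: convert the blob back to str before decompressing
cur.execute('SELECT node, ver, ext, data FROM caps_cache')
for node, ver, ext, data in cur:
	items = GzipFile(fileobj=StringIO(str(data))).read().split(' ')
	feat_at = items.index('FEAT')
	got_identities = [tuple(items[i:i+3]) for i in range(0, feat_at, 3)]
	got_features = items[feat_at+1:]
	print node, ver, got_identities, got_features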
@@ -157,9 +157,13 @@ class OptionsParser:
self.update_config_to_01111()
if old < [0, 11, 1, 2] and new >= [0, 11, 1, 2]:
self.update_config_to_01112()
if old < [0, 11, 1, 3] and new >= [0, 11, 1, 3]:
self.update_config_to_01113()

gajim.logger.init_vars()
gajim.config.set('version', new_version)

gajim.capscache.load_from_db()

def update_config_x_to_09(self):
# Var name that changed:
@@ -402,3 +406,28 @@ class OptionsParser:
self.old_values['roster_theme'] == 'gtk+':
gajim.config.set('roster_theme', _('default'))
gajim.config.set('version', '0.11.1.2')

def update_config_to_01113(self):
# copy&pasted from update_config_to_01013, possibly 'FIXME see #2812' applies too
back = os.getcwd()
os.chdir(logger.LOG_DB_FOLDER)
con = sqlite.connect(logger.LOG_DB_FILE)
os.chdir(back)
cur = con.cursor()
try:
cur.executescript(
'''
CREATE TABLE caps_cache (
node TEXT,
ver TEXT,
ext TEXT,
data BLOB
);
'''
)
con.commit()
except sqlite.OperationalError, e:
pass
con.close()
gajim.config.set('version', '0.11.1.3')