##
## Copyright (C) 2006 Gajim Team
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
##

from itertools import *

import xmpp
import xmpp.features_nb
import gajim
import helpers

class CapsCache(object):
	''' This object keeps the mapping between caps data and the real disco
	features they represent, and provides a simple way to query that info.
	It is application-wide, that is, there's one object for all
	connections.
	Goals:
	 * handle storing/retrieving info from database
	 * cache info in memory
	 * expose simple interface
	Properties:
	 * one object for all connections (move to logger.py?)
	 * store info efficiently (a set() of urls -- we can assume there won't
	   be too many of these; ensure that (X,Y,Z1) and (X,Y,Z2) have
	   different features)

	Connections with other objects: (TODO)

	Interface:

	# object creation
	>>> cc = CapsCache(logger_object)

	>>> caps = ('sha-1', '66/0NaeaBKkwk85efJTGmU47vXI=')
	>>> muc = 'http://jabber.org/protocol/muc'
	>>> chatstates = 'http://jabber.org/protocol/chatstates'

	# setting data
	>>> cc[caps].identities = [{'category': 'client', 'type': 'pc'}]
	>>> cc[caps].features = [muc]

	# retrieving data
	>>> muc in cc[caps].features
	True
	>>> chatstates in cc[caps].features
	False
	>>> cc[caps].identities
	[{'category': 'client', 'type': 'pc'}]
	>>> x = cc[caps] # an opaque CacheItem; more efficient if making several queries for one set of caps
	>>> muc in x.features
	True
	'''
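	# Cache keys are (hash_method, hash) pairs as advertised in XEP-0115
	# <c/> elements, e.g. ('sha-1', '66/0NaeaBKkwk85efJTGmU47vXI=').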
	def __init__(self, logger=None):
		''' Create a cache for entity capabilities. '''
		# our containers:
		# __cache is a dictionary mapping a (hash method, hash) pair
		#   to a CacheItem object
		# __CacheItem is a class that stores data about a particular
		#   client (hash method/hash pair)
		self.__cache = {}

		class CacheItem(object):
			''' TODO: logging data into db '''
			# __names is a string cache; we will see plenty of identical
			#   long strings (namespace URIs and the like), so we keep one
			#   shared object per value instead of many copies
			# TODO: maybe put all known xmpp namespace strings here
			#   (strings given in xmpppy)?
			__names = {}
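			# dict.setdefault(x, x) returns the already-stored object when an
			# equal key exists, otherwise it stores x and returns it -- this
			# is how the setters below intern feature strings and identity
			# tuples.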
			def __init__(ciself, hash_method, hash):
				# cached into db
				ciself.hash_method = hash_method
				ciself.hash = hash
				ciself._features = []
				ciself._identities = []

				# not cached into db:
				# have we sent the query?
				# 0 == not queried
				# 1 == queried
				# 2 == got the answer
				ciself.queried = 0

			def _get_features(ciself):
				return ciself._features

			def _set_features(ciself, value):
				ciself._features = []
				for feature in value:
					ciself._features.append(ciself.__names.setdefault(feature,
						feature))
			features = property(_get_features, _set_features)

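			# identities are stored internally as hashable tuples of the form
			#   (category, type, xml:lang, name)
			# and converted back to dicts by the getter below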
			def _get_identities(ciself):
				list_ = []
				for i in ciself._identities:
					# transform the tuple back into a dict
					d = dict()
					d['category'] = i[0]
					if i[1]:
						d['type'] = i[1]
					if i[2]:
						d['xml:lang'] = i[2]
					if i[3]:
						d['name'] = i[3]
					list_.append(d)
				return list_

			def _set_identities(ciself, value):
				ciself._identities = []
				for identity in value:
					# dicts are not hashable, so transform them into tuples
					t = (identity['category'], identity.get('type'),
						identity.get('xml:lang'), identity.get('name'))
					ciself._identities.append(ciself.__names.setdefault(t, t))
			identities = property(_get_identities, _set_identities)

			def update(ciself, identities, features):
				# NOTE: self refers to CapsCache object, not to CacheItem
				ciself.identities = identities
				ciself.features = features
				self.logger.add_caps_entry(ciself.hash_method, ciself.hash,
					identities, features)

		self.__CacheItem = CacheItem

		# prepopulate data which we are sure of; note: we do not log this info
		for account in gajim.connections:
			gajimcaps = self[('sha-1', gajim.caps_hash[account])]
			gajimcaps.identities = [gajim.gajim_identity]
			gajimcaps.features = gajim.gajim_common_features + \
				gajim.gajim_optional_features[account]

		# start logging data from the net
		self.logger = logger

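	# Entries restored from the database are marked queried == 2, so
	# preload() will not issue another disco#info query for them.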
	def load_from_db(self):
		# get data from logger...
		if self.logger is not None:
			for hash_method, hash, identities, features in \
					self.logger.iter_caps_data():
				x = self[(hash_method, hash)]
				x.identities = identities
				x.features = features
				x.queried = 2

	def __getitem__(self, caps):
		if caps in self.__cache:
			return self.__cache[caps]

		hash_method, hash = caps

		x = self.__CacheItem(hash_method, hash)
		self.__cache[(hash_method, hash)] = x
		return x

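	# XEP-0115 note: the disco#info query sent by preload() is addressed to
	# node='node#ver', roughly like this (values illustrative):
	#   <iq type='get' to='user@server/resource'>
	#     <query xmlns='http://jabber.org/protocol/disco#info'
	#       node='http://gajim.org#66/0NaeaBKkwk85efJTGmU47vXI='/>
	#   </iq>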
	def preload(self, con, jid, node, hash_method, hash):
		''' Preload data about the given caps using a disco
		query to jid over the proper connection. Don't query if
		the data is already in cache. '''
		q = self[(hash_method, hash)]

		if q.queried == 0:
			# do query for bare node+hash pair
			# this will create proper object
			q.queried = 1
			con.discoverInfo(jid, '%s#%s' % (node, hash))

gajim.capscache = CapsCache(gajim.logger)

class ConnectionCaps(object):
	''' This class heavily depends on being part of the Connection class. '''
	def _capsPresenceCB(self, con, presence):
		''' Handle incoming presence stanzas... This is an xmpp callback
		registered in connection_handlers.py '''

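		# Example caps advertisement in presence (XEP-0115), values
		# illustrative:
		#   <c xmlns='http://jabber.org/protocol/caps' hash='sha-1'
		#      node='http://gajim.org' ver='66/0NaeaBKkwk85efJTGmU47vXI='/>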
		# get the caps element
		caps = presence.getTag('c')
		if not caps:
			return

		hash_method, node, hash = caps['hash'], caps['node'], caps['ver']
		if hash_method is None or node is None or hash is None:
			# improper caps in stanza, ignoring
			return

		# we will put these into proper Contact object and ask
		# for disco... so that disco will learn how to interpret
		# these caps

		jid = str(presence.getFrom())

		# start disco query...
		gajim.capscache.preload(self, jid, node, hash_method, hash)

		contact = gajim.contacts.get_contact_from_full_jid(self.name, jid)
		if contact in [None, []]:
			return # TODO: a way to put contact not-in-roster into Contacts
		elif isinstance(contact, list):
			contact = contact[0]

		# overwriting old data
		contact.caps_node = node
		contact.caps_hash_method = hash_method
		contact.caps_hash = hash

	def _capsDiscoCB(self, jid, node, identities, features, dataforms):
		contact = gajim.contacts.get_contact_from_full_jid(self.name, jid)
		if not contact:
			return
		if not contact.caps_node:
			return # we didn't ask for this, did we?
		if not node.startswith(contact.caps_node + '#'):
			return
		node, hash = node.split('#', 1)
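		# XEP-0115 verification: recompute the ver hash from the disco#info
		# result and compare it with the hash the contact advertised; a
		# mismatch means the advertised caps cannot be trusted.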
		computed_hash = helpers.compute_caps_hash(identities, features,
			dataforms=dataforms, hash_method=contact.caps_hash_method)
		if computed_hash != hash:
			# wrong hash, forget it
			contact.caps_node = ''
			contact.caps_hash_method = ''
			contact.caps_hash = ''
			return

		# if we don't have this info already...
		caps = gajim.capscache[(contact.caps_hash_method, hash)]
		if caps.queried == 2:
			return

		caps.update(identities, features)