2008-08-15 19:31:51 +02:00
|
|
|
# -*- coding:utf-8 -*-
|
2008-08-15 05:20:23 +02:00
|
|
|
## src/common/caps.py
|
2007-06-27 02:51:12 +02:00
|
|
|
##
|
2008-08-15 05:20:23 +02:00
|
|
|
## Copyright (C) 2007 Tomasz Melcer <liori AT exroot.org>
|
|
|
|
## Travis Shirk <travis AT pobox.com>
|
|
|
|
## Copyright (C) 2007-2008 Yann Leboulanger <asterix AT lagaule.org>
|
|
|
|
## Copyright (C) 2008 Brendan Taylor <whateley AT gmail.com>
|
|
|
|
## Jonathan Schleifer <js-gajim AT webkeks.org>
|
2009-10-25 21:57:20 +01:00
|
|
|
## Copyright (C) 2008-2009 Stephan Erb <steve-e AT h3c.de>
|
2007-06-27 02:51:12 +02:00
|
|
|
##
|
2007-10-22 13:13:13 +02:00
|
|
|
## This file is part of Gajim.
|
|
|
|
##
|
|
|
|
## Gajim is free software; you can redistribute it and/or modify
|
2007-06-27 02:51:12 +02:00
|
|
|
## it under the terms of the GNU General Public License as published
|
2007-10-22 13:13:13 +02:00
|
|
|
## by the Free Software Foundation; version 3 only.
|
2007-06-27 02:51:12 +02:00
|
|
|
##
|
2007-10-22 13:13:13 +02:00
|
|
|
## Gajim is distributed in the hope that it will be useful,
|
2007-06-27 02:51:12 +02:00
|
|
|
## but WITHOUT ANY WARRANTY; without even the implied warranty of
|
2008-08-15 05:20:23 +02:00
|
|
|
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
2007-06-27 02:51:12 +02:00
|
|
|
## GNU General Public License for more details.
|
|
|
|
##
|
2007-10-22 13:13:13 +02:00
|
|
|
## You should have received a copy of the GNU General Public License
|
2008-08-15 05:20:23 +02:00
|
|
|
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
|
2007-10-22 13:13:13 +02:00
|
|
|
##
|
2007-06-27 02:51:12 +02:00
|
|
|
|
2009-10-25 21:57:20 +01:00
|
|
|
'''
Module containing all XEP-115 (Entity Capabilities) related classes.

Basic Idea:
CapsCache caches features to hash relationships. The cache is queried
through ClientCaps objects which are held by contact instances.
'''
|
|
|
|
|
2007-07-09 23:24:47 +02:00
|
|
|
import gajim
|
2008-04-21 00:58:47 +02:00
|
|
|
import helpers
|
2007-06-27 02:51:12 +02:00
|
|
|
|
2009-10-26 19:20:16 +01:00
|
|
|
from common.xmpp import NS_XHTML_IM, NS_RECEIPTS, NS_ESESSION, NS_CHATSTATES
|
|
|
|
# Features where we cannot safely assume that the other side supports them
# unless they are explicitly advertised in the client's caps.
FEATURE_BLACKLIST = [NS_CHATSTATES, NS_XHTML_IM, NS_RECEIPTS, NS_ESESSION]
|
|
|
|
|
2009-10-27 22:48:47 +01:00
|
|
|
|
2009-10-26 19:20:16 +01:00
|
|
|
class AbstractClientCaps(object):
    '''
    Base class modelling a remote client together with the capabilities it
    announced via the caps element of a presence stanza.
    '''

    def __init__(self, caps_hash, node):
        # verification string ('ver') and client node URI from the caps tag
        self._hash = caps_hash
        self._node = node

    def get_discover_strategy(self):
        '''Return the callable used to start a disco query for this client'''
        return self._discover

    def get_cache_lookup_strategy(self):
        '''Return the callable used to locate this client's cache entry'''
        return self._lookup_in_cache

    def get_hash_validation_strategy(self):
        '''Return the callable used to verify a received caps hash'''
        return self._is_hash_valid

    def _discover(self, connection, jid):
        '''To be implemented by subclasses'''
        raise NotImplementedError()

    def _lookup_in_cache(self, caps_cache):
        '''To be implemented by subclasses'''
        raise NotImplementedError()

    def _is_hash_valid(self, identities, features, dataforms):
        '''To be implemented by subclasses'''
        raise NotImplementedError()
|
|
|
|
|
|
|
|
|
2009-10-26 19:20:16 +01:00
|
|
|
class ClientCaps(AbstractClientCaps):
    '''Capabilities of a client following the current XEP-115 scheme'''

    def __init__(self, caps_hash, node, hash_method):
        AbstractClientCaps.__init__(self, caps_hash, node)
        # clients using the legacy scheme are handled by OldClientCaps
        assert hash_method != 'old'
        self._hash_method = hash_method

    def _lookup_in_cache(self, caps_cache):
        # cache entries are keyed by (hash method, hash) pairs
        key = (self._hash_method, self._hash)
        return caps_cache[key]

    def _discover(self, connection, jid):
        # query the node#ver pair so the answer can be tied to the hash
        disco_node = '%s#%s' % (self._node, self._hash)
        connection.discoverInfo(jid, disco_node)

    def _is_hash_valid(self, identities, features, dataforms):
        # recompute the hash from the disco result and compare with
        # what the client advertised
        expected = helpers.compute_caps_hash(identities, features,
            dataforms=dataforms, hash_method=self._hash_method)
        return self._hash == expected
|
|
|
|
|
2009-10-25 21:57:20 +01:00
|
|
|
|
2009-10-26 19:20:16 +01:00
|
|
|
class OldClientCaps(AbstractClientCaps):
    '''
    Capabilities of a client using the old XEP-115 scheme.
    Kept around for backward compatibility.
    '''

    def __init__(self, caps_hash, node):
        AbstractClientCaps.__init__(self, caps_hash, node)

    def _lookup_in_cache(self, caps_cache):
        # old-style entries are keyed by the pseudo method 'old' and the
        # combined node#ver string
        key = ('old', '%s#%s' % (self._node, self._hash))
        return caps_cache[key]

    def _discover(self, connection, jid):
        # old-style caps carry no verifiable hash; query the bare JID
        connection.discoverInfo(jid)

    def _is_hash_valid(self, identities, features, dataforms):
        # old-style 'hashes' are plain version strings; nothing to verify
        return True
|
2009-10-25 21:57:20 +01:00
|
|
|
|
2009-10-27 22:48:47 +01:00
|
|
|
|
2009-10-26 19:20:16 +01:00
|
|
|
class NullClientCaps(AbstractClientCaps):
    '''
    Null object used to streamline caps handling whenever a client sent no
    caps at all or advertised them in an improper way.

    Assumes (almost) everything is supported.
    '''

    def __init__(self):
        # neither a hash nor a node is known for such a client
        AbstractClientCaps.__init__(self, None, None)

    def _lookup_in_cache(self, caps_cache):
        # all capless clients share a single pseudo cache entry
        return caps_cache[('old', '')]

    def _discover(self, connection, jid):
        # nothing useful can be queried without caps information
        pass

    def _is_hash_valid(self, identities, features, dataforms):
        # there is no hash to validate
        return False
|
2009-10-25 21:57:20 +01:00
|
|
|
|
2009-10-27 22:48:47 +01:00
|
|
|
|
2007-06-27 02:51:12 +02:00
|
|
|
class CapsCache(object):
    '''
    This object keeps the mapping between caps data and real disco
    features they represent, and provides simple way to query that info.
    '''
    def __init__(self, logger=None):
        # our containers:
        # __cache is a dictionary mapping: pair of hash method and hash maps
        # to CapsCacheItem object
        # __CacheItem is a class that stores data about particular
        # client (hash method/hash pair)
        self.__cache = {}

        class CacheItem(object):
            '''Disco data (identities/features) cached for one client.'''
            # __names is a string cache; every string long enough is given
            # another object, and we will have plenty of identical long
            # strings. therefore we can cache them
            __names = {}

            def __init__(self, hash_method, hash_, logger):
                # cached into db
                self.hash_method = hash_method
                self.hash = hash_
                self._features = []
                self._identities = []
                self._logger = logger

                # not cached into db:
                # have we sent the query?
                # 0 == not queried
                # 1 == queried
                # 2 == got the answer
                self.queried = 0

            def _get_features(self):
                return self._features

            def _set_features(self, value):
                # intern each feature string through __names so identical
                # strings from different clients share one object
                self._features = []
                for feature in value:
                    self._features.append(self.__names.setdefault(feature, feature))

            features = property(_get_features, _set_features)

            def _get_identities(self):
                list_ = []
                for i in self._identities:
                    # transforms it back in a dict
                    d = dict()
                    d['category'] = i[0]
                    if i[1]:
                        d['type'] = i[1]
                    if i[2]:
                        d['xml:lang'] = i[2]
                    if i[3]:
                        d['name'] = i[3]
                    list_.append(d)
                return list_

            def _set_identities(self, value):
                self._identities = []
                for identity in value:
                    # dict are not hashable, so transform it into a tuple
                    t = (identity['category'], identity.get('type'),
                        identity.get('xml:lang'), identity.get('name'))
                    # interned through __names, same as features
                    self._identities.append(self.__names.setdefault(t, t))

            identities = property(_get_identities, _set_identities)

            def update(self, identities, features):
                '''Store a disco answer and persist it via the logger.'''
                self.identities = identities
                self.features = features
                self._logger.add_caps_entry(self.hash_method, self.hash,
                    identities, features)

        self.__CacheItem = CacheItem

        # prepopulate data which we are sure of; note: we do not log these info
        for account in gajim.connections:
            gajimcaps = self[('sha-1', gajim.caps_hash[account])]
            gajimcaps.identities = [gajim.gajim_identity]
            gajimcaps.features = gajim.gajim_common_features + \
                gajim.gajim_optional_features[account]

        # start logging data from the net
        self.logger = logger

    def initialize_from_db(self):
        '''Load all previously seen caps entries from the database.'''
        # get data from logger...
        if self.logger is not None:
            for hash_method, hash_, identities, features in \
            self.logger.iter_caps_data():
                x = self[(hash_method, hash_)]
                x.identities = identities
                x.features = features
                # mark as answered so it will not be queried again
                x.queried = 2

    def __getitem__(self, caps):
        '''Return the cache item for a (hash_method, hash) pair,
        creating an empty one on first access.'''
        if caps in self.__cache:
            return self.__cache[caps]

        hash_method, hash_ = caps

        x = self.__CacheItem(hash_method, hash_, self.logger)
        self.__cache[(hash_method, hash_)] = x
        return x

    def query_client_of_jid_if_unknown(self, connection, jid, client_caps):
        '''
        Start a disco query to determine caps (node, ver, exts).
        Won't query if the data is already in cache.
        '''
        lookup_cache_item = client_caps.get_cache_lookup_strategy()
        q = lookup_cache_item(self)

        if q.queried == 0:
            # do query for bare node+hash pair
            # this will create proper object
            q.queried = 1
            discover = client_caps.get_discover_strategy()
            discover(connection, jid)
|
2007-06-28 00:32:35 +02:00
|
|
|
|
2007-07-09 23:24:47 +02:00
|
|
|
# module-wide caps cache singleton, backed by the persistent logger
gajim.capscache = CapsCache(gajim.logger)
|
2007-06-27 02:51:12 +02:00
|
|
|
|
2009-10-27 22:41:39 +01:00
|
|
|
|
2007-06-27 02:51:12 +02:00
|
|
|
class ConnectionCaps(object):
    '''
    Presence/disco handlers for XEP-115.

    This class highly depends on that it is a part of Connection class:
    it is used as a mixin and reads self.name from the connection.
    '''
    def _capsPresenceCB(self, con, presence):
        '''
        Handle incoming presence stanzas... This is a callback for xmpp
        registered in connection_handlers.py

        Attaches a suitable ClientCaps/OldClientCaps/NullClientCaps object
        to the contact the presence belongs to, and triggers a disco query
        if the announced caps are not cached yet.
        '''
        # we will put these into proper Contact object and ask
        # for disco... so that disco will learn how to interpret
        # these caps
        pm_ctrl = None
        try:
            jid = helpers.get_full_jid_from_iq(presence)
        except Exception:
            # Bad jid -- was a bare `except:`, narrowed so SystemExit /
            # KeyboardInterrupt are not swallowed
            return
        contact = gajim.contacts.get_contact_from_full_jid(self.name, jid)
        if contact is None:
            # not a roster contact; try group chat occupants instead
            room_jid, nick = gajim.get_room_and_nick_from_fjid(jid)
            contact = gajim.contacts.get_gc_contact(
                self.name, room_jid, nick)
            pm_ctrl = gajim.interface.msg_win_mgr.get_control(jid, self.name)
            if contact is None:
                # TODO: a way to put contact not-in-roster
                # into Contacts
                return

        caps_tag = presence.getTag('c')
        if not caps_tag:
            # presence did not contain caps_tag
            client_caps = NullClientCaps()
        else:
            hash_method = caps_tag['hash']
            node = caps_tag['node']
            caps_hash = caps_tag['ver']

            if node is None or caps_hash is None:
                # improper caps in stanza, ignore client capabilities.
                client_caps = NullClientCaps()
            elif hash_method is None:
                # no hash attribute: old-style caps
                client_caps = OldClientCaps(caps_hash, node)
            else:
                client_caps = ClientCaps(caps_hash, node, hash_method)

        gajim.capscache.query_client_of_jid_if_unknown(self, jid, client_caps)
        contact.client_caps = client_caps

        if pm_ctrl:
            pm_ctrl.update_contact()

    def _capsDiscoCB(self, jid, node, identities, features, dataforms):
        '''
        Callback for the disco#info answer triggered by _capsPresenceCB.

        Validates the received identities/features against the announced
        hash and, on success, stores them in the caps cache.
        '''
        contact = gajim.contacts.get_contact_from_full_jid(self.name, jid)
        if not contact:
            room_jid, nick = gajim.get_room_and_nick_from_fjid(jid)
            contact = gajim.contacts.get_gc_contact(self.name, room_jid, nick)
            if contact is None:
                return

        lookup = contact.client_caps.get_cache_lookup_strategy()
        cache_item = lookup(gajim.capscache)

        if cache_item.queried == 2:
            # we already have a verified answer for this hash
            return

        validate = contact.client_caps.get_hash_validation_strategy()
        hash_is_valid = validate(identities, features, dataforms)

        if hash_is_valid:
            cache_item.update(identities, features)
        else:
            # hash mismatch: the advertised caps cannot be trusted
            contact.client_caps = NullClientCaps()
|
2008-07-29 21:49:31 +02:00
|
|
|
|
2008-08-05 00:06:54 +02:00
|
|
|
# vim: se ts=3:
|