# Copyright (C) 2003-2014 Yann Leboulanger <asterix AT lagaule.org>
# Copyright (C) 2005-2006 Dimitur Kirov <dkirov AT gmail.com>
#                         Nikos Kouremenos <kourem AT gmail.com>
# Copyright (C) 2006 Alex Mauer <hawke AT hawkesnest.net>
# Copyright (C) 2006-2007 Travis Shirk <travis AT pobox.com>
# Copyright (C) 2006-2008 Jean-Marie Traissard <jim AT lapin.org>
# Copyright (C) 2007 Lukas Petrovicky <lukas AT petrovicky.net>
#                    James Newton <redshodan AT gmail.com>
#                    Julien Pivotto <roidelapluie AT gmail.com>
# Copyright (C) 2007-2008 Stephan Erb <steve-e AT h3c.de>
# Copyright (C) 2008 Brendan Taylor <whateley AT gmail.com>
#                    Jonathan Schleifer <js-gajim AT webkeks.org>
#
# This file is part of Gajim.
#
# Gajim is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; version 3 only.
#
# Gajim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gajim. If not, see <http://www.gnu.org/licenses/>.

import sys
import re
import os
import subprocess
import urllib
import webbrowser
import errno
import select
import base64
import hashlib
import shlex
import socket
import time
import logging
import json

from datetime import datetime, timedelta
from distutils.version import LooseVersion as V
from encodings.punycode import punycode_encode
from string import Template

import nbxmpp

from gajim.common import caps_cache
from gajim.common import configpaths
from gajim.common.i18n import Q_
from gajim.common.i18n import _
from gajim.common.i18n import ngettext
from gajim.common.caps_cache import muc_caps_cache

try:
    import precis_i18n.codec  # pylint: disable=unused-import
    HAS_PRECIS_I18N = True
except ImportError:
    HAS_PRECIS_I18N = False

HAS_SOUND = True
if sys.platform == 'win32':
    try:
        import winsound  # windows-only built-in module for playing wav
    except ImportError:
        HAS_SOUND = False
        print('Gajim is not able to playback sound because '
              'pywin32 is missing', file=sys.stderr)

elif sys.platform == 'darwin':
    try:
        from AppKit import NSSound
    except ImportError:
        HAS_SOUND = False
        print('Gajim is not able to playback sound because '
              'pyobjc is missing', file=sys.stderr)

try:
    import wave  # posix-only fallback wav playback
    import ossaudiodev as oss
except Exception:
    pass

log = logging.getLogger('gajim.c.helpers')

special_groups = (_('Transports'), _('Not in Roster'), _('Observers'), _('Groupchats'))


class InvalidFormat(Exception):
    pass


def decompose_jid(jidstring):
    user = None
    server = None
    resource = None

    # Search for delimiters
    user_sep = jidstring.find('@')
    res_sep = jidstring.find('/')

    if user_sep == -1:
        if res_sep == -1:
            # host
            server = jidstring
        else:
            # host/resource
            server = jidstring[0:res_sep]
            resource = jidstring[res_sep + 1:]
    else:
        if res_sep == -1:
            # user@host
            user = jidstring[0:user_sep]
            server = jidstring[user_sep + 1:]
        else:
            if user_sep < res_sep:
                # user@host/resource
                user = jidstring[0:user_sep]
                server = jidstring[user_sep + 1:user_sep + (res_sep - user_sep)]
                resource = jidstring[res_sep + 1:]
            else:
                # server/resource (with an @ in resource)
                server = jidstring[0:res_sep]
                resource = jidstring[res_sep + 1:]
    return user, server, resource
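
# Illustrative behaviour of the split above (sample JIDs are assumptions,
# shown only as a sketch):
#   decompose_jid('room@muc.example/orchard') -> ('room', 'muc.example', 'orchard')
#   decompose_jid('example.org/home')         -> (None, 'example.org', 'home')
#   decompose_jid('example.org')              -> (None, 'example.org', None)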


def parse_jid(jidstring):
    """
    Perform stringprep on all JID fragments from a string and return the full
    jid
    """
    # This function comes from
    # http://svn.twistedmatrix.com/cvs/trunk/twisted/words/protocols/jabber/jid.py

    return prep(*decompose_jid(jidstring))


def idn_to_ascii(host):
    """
    Convert IDN (Internationalized Domain Names) to ACE (ASCII-compatible
    encoding)
    """
    from encodings import idna
    labels = idna.dots.split(host)
    converted_labels = []
    for label in labels:
        if label:
            converted_labels.append(idna.ToASCII(label).decode('utf-8'))
        else:
            converted_labels.append('')
    return ".".join(converted_labels)


def ascii_to_idn(host):
    """
    Convert ACE (ASCII-compatible encoding) to IDN (Internationalized Domain
    Names)
    """
    from encodings import idna
    labels = idna.dots.split(host)
    converted_labels = []
    for label in labels:
        converted_labels.append(idna.ToUnicode(label))
    return ".".join(converted_labels)


def puny_encode_url(url):
    _url = url
    if '//' not in _url:
        _url = '//' + _url
    try:
        o = urllib.parse.urlparse(_url)
        p_loc = idn_to_ascii(o.netloc)
    except Exception:
        log.debug('urlparse failed: %s', url)
        return False
    return url.replace(o.netloc, p_loc)


def parse_resource(resource):
    """
    Perform stringprep on resource and return it
    """
    if resource:
        try:
            if HAS_PRECIS_I18N:
                return resource.encode('Nickname').decode('utf-8')
            from nbxmpp.stringprepare import resourceprep
            return resourceprep.prepare(resource)
        except UnicodeError:
            raise InvalidFormat('Invalid character in resource.')


def prep(user, server, resource):
    """
    Perform stringprep on all JID fragments and return the full jid
    """
    # This function comes from
    # http://svn.twistedmatrix.com/cvs/trunk/twisted/words/protocols/jabber/jid.py

    ip_address = False

    try:
        socket.inet_aton(server)
        ip_address = True
    except socket.error:
        pass

    if not ip_address and hasattr(socket, 'inet_pton'):
        try:
            socket.inet_pton(socket.AF_INET6, server.strip('[]'))
            server = '[%s]' % server.strip('[]')
            ip_address = True
        except (socket.error, ValueError):
            pass

    if not ip_address:
        if server is not None:
            if server.endswith('.'):  # RFC7622, 3.2
                server = server[:-1]
            if not server or len(server.encode('utf-8')) > 1023:
                raise InvalidFormat(_('Server must be between 1 and 1023 bytes'))
            try:
                from nbxmpp.stringprepare import nameprep
                server = nameprep.prepare(server)
            except UnicodeError:
                raise InvalidFormat(_('Invalid character in hostname.'))
        else:
            raise InvalidFormat(_('Server address required.'))

    if user is not None:
        if not user or len(user.encode('utf-8')) > 1023:
            raise InvalidFormat(_('Username must be between 1 and 1023 bytes'))
        try:
            if HAS_PRECIS_I18N:
                user = user.encode('UsernameCaseMapped').decode('utf-8')
            else:
                from nbxmpp.stringprepare import nodeprep
                user = nodeprep.prepare(user)
        except UnicodeError:
            raise InvalidFormat(_('Invalid character in username.'))
    else:
        user = None

    if resource is not None:
        if not resource or len(resource.encode('utf-8')) > 1023:
            raise InvalidFormat(_('Resource must be between 1 and 1023 bytes'))
        try:
            if HAS_PRECIS_I18N:
                resource = resource.encode('OpaqueString').decode('utf-8')
            else:
                from nbxmpp.stringprepare import resourceprep
                resource = resourceprep.prepare(resource)
        except UnicodeError:
            raise InvalidFormat(_('Invalid character in resource.'))
    else:
        resource = None

    if user:
        if resource:
            return '%s@%s/%s' % (user, server, resource)
        return '%s@%s' % (user, server)

    if resource:
        return '%s/%s' % (server, resource)
    return server
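
# Hedged sketch of prep() (exact output depends on the precis_i18n/stringprep
# backend in use; the sample JID parts are assumptions):
#   prep('Romeo', 'Montague.example', 'Balcony') -> 'romeo@montague.example/Balcony'
# i.e. localpart and domain are case-folded, the resource is left intact.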


def windowsify(s):
    if os.name == 'nt':
        return s.capitalize()
    return s


def temp_failure_retry(func, *args, **kwargs):
    while True:
        try:
            return func(*args, **kwargs)
        except (os.error, IOError, select.error) as ex:
            if ex.errno == errno.EINTR:
                continue
            else:
                raise
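
# Minimal usage sketch (fd is an assumed, already-open file descriptor):
#   data = temp_failure_retry(os.read, fd, 4096)  # re-called while EINTR is raised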


def get_uf_show(show, use_mnemonic=False):
    """
    Return a user-friendly, translatable string for a show value
    (e.g. dnd/xa/chat)

    If use_mnemonic is True, a mnemonic underscore is added, so the GUI
    should call with True for accessibility
    """
    if show == 'dnd':
        if use_mnemonic:
            uf_show = _('_Busy')
        else:
            uf_show = _('Busy')
    elif show == 'xa':
        if use_mnemonic:
            uf_show = _('_Not Available')
        else:
            uf_show = _('Not Available')
    elif show == 'chat':
        if use_mnemonic:
            uf_show = _('_Free for Chat')
        else:
            uf_show = _('Free for Chat')
    elif show == 'online':
        if use_mnemonic:
            uf_show = Q_('?user status:_Available')
        else:
            uf_show = Q_('?user status:Available')
    elif show == 'connecting':
        uf_show = _('Connecting')
    elif show == 'away':
        if use_mnemonic:
            uf_show = _('A_way')
        else:
            uf_show = _('Away')
    elif show == 'offline':
        if use_mnemonic:
            uf_show = _('_Offline')
        else:
            uf_show = _('Offline')
    elif show == 'invisible':
        if use_mnemonic:
            uf_show = _('_Invisible')
        else:
            uf_show = _('Invisible')
    elif show == 'not in roster':
        uf_show = _('Not in Roster')
    elif show == 'requested':
        uf_show = Q_('?contact has status:Unknown')
    else:
        uf_show = Q_('?contact has status:Has errors')
    return uf_show
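
# Example (values are the untranslated English strings; at runtime they are
# localized):
#   get_uf_show('dnd')                    -> 'Busy'
#   get_uf_show('dnd', use_mnemonic=True) -> '_Busy'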


def get_css_show_color(show):
    if show in ('online', 'chat', 'invisible'):
        return 'status-online'
    if show in ('offline', 'not in roster', 'requested'):
        return None
    if show in ('xa', 'dnd'):
        return 'status-dnd'
    if show == 'away':
        return 'status-away'


def get_uf_sub(sub):
    if sub == 'none':
        uf_sub = Q_('?Subscription we already have:None')
    elif sub == 'to':
        uf_sub = _('To')
    elif sub == 'from':
        uf_sub = _('From')
    elif sub == 'both':
        uf_sub = _('Both')
    else:
        uf_sub = _('Unknown')

    return uf_sub


def get_uf_ask(ask):
    if ask is None:
        uf_ask = Q_('?Ask (for Subscription):None')
    elif ask == 'subscribe':
        uf_ask = _('Subscribe')
    else:
        uf_ask = ask

    return uf_ask


def get_uf_role(role, plural=False):
    '''plural determines if you get Moderators or Moderator'''
    if role == 'none':
        role_name = Q_('?Group Chat Contact Role:None')
    elif role == 'moderator':
        if plural:
            role_name = _('Moderators')
        else:
            role_name = _('Moderator')
    elif role == 'participant':
        if plural:
            role_name = _('Participants')
        else:
            role_name = _('Participant')
    elif role == 'visitor':
        if plural:
            role_name = _('Visitors')
        else:
            role_name = _('Visitor')
    return role_name


def get_uf_affiliation(affiliation):
    '''Get a nice and translated affiliation for muc'''
    if affiliation == 'none':
        affiliation_name = Q_('?Group Chat Contact Affiliation:None')
    elif affiliation == 'owner':
        affiliation_name = _('Owner')
    elif affiliation == 'admin':
        affiliation_name = _('Administrator')
    elif affiliation == 'member':
        affiliation_name = _('Member')
    else:  # Argl ! An unknown affiliation !
        affiliation_name = affiliation.capitalize()
    return affiliation_name


def get_sorted_keys(adict):
    keys = sorted(adict.keys())
    return keys


def to_one_line(msg):
    msg = msg.replace('\\', '\\\\')
    msg = msg.replace('\n', '\\n')
    # s1 = 'test\ntest\\ntest'
    # s11 = s1.replace('\\', '\\\\')
    # s12 = s11.replace('\n', '\\n')
    # s12
    # 'test\\ntest\\\\ntest'
    return msg


def from_one_line(msg):
    # (?<!\\) is a lookbehind assertion which asks anything but '\'
    # to match the regexp that follows it

    # So here match '\\n' but not if you have a '\' before that
    expr = re.compile(r'(?<!\\)\\n')
    msg = expr.sub('\n', msg)
    msg = msg.replace('\\\\', '\\')
    # s12 = 'test\\ntest\\\\ntest'
    # s13 = expr.sub('\n', s12)
    # s14 = s13.replace('\\\\', '\\')
    # s14
    # 'test\ntest\\ntest'
    return msg
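
# Sketch of the round trip implemented by to_one_line()/from_one_line():
#   to_one_line('one\ntwo')    -> 'one\\ntwo'   (real newline -> literal "\n")
#   from_one_line('one\\ntwo') -> 'one\ntwo'    (literal "\n" -> real newline)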


def get_uf_chatstate(chatstate):
    """
    Remove chatstate jargon and return a user-friendly message
    """
    if chatstate == 'active':
        return _('is paying attention to the conversation')
    if chatstate == 'inactive':
        return _('is doing something else')
    if chatstate == 'composing':
        return _('is composing a message…')
    if chatstate == 'paused':
        # paused means he or she was composing but has stopped for a while
        return _('paused composing a message')
    if chatstate == 'gone':
        return _('has closed the chat window or tab')
    return ''


def is_in_path(command, return_abs_path=False):
    """
    Return True if 'command' is found in one of the directories in the user's
    path. If 'return_abs_path' is True, return the absolute path of the first
    found command instead. Return False otherwise and on errors
    """
    for directory in os.getenv('PATH').split(os.pathsep):
        try:
            if command in os.listdir(directory):
                if return_abs_path:
                    return os.path.join(directory, command)
                return True
        except OSError:
            # If the user has non-directories in the PATH
            pass
    return False


def exec_command(command, use_shell=False, posix=True):
    """
    Execute a command. If use_shell is True, the command is run as if it was
    typed in a console, so it may be dangerous if you are not sure about what
    is executed.
    """
    if use_shell:
        subprocess.Popen('%s &' % command, shell=True).wait()
    else:
        args = shlex.split(command, posix=posix)
        p = subprocess.Popen(args)
        app.thread_interface(p.wait)


def build_command(executable, parameter):
    # we wrap the parameter in "" (it can hold a path with spaces)
    # so the shell parses it as a single argument
    parameter = parameter.replace('"', '\\"')  # but first escape "
    command = '%s "%s"' % (executable, parameter)
    return command
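
# Sketch (the executable and path are assumptions):
#   build_command('xdg-open', '/tmp/my file.ogg') -> 'xdg-open "/tmp/my file.ogg"'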


def get_file_path_from_dnd_dropped_uri(uri):
    path = urllib.parse.unquote(uri)  # unescape special chars
    path = path.strip('\r\n\x00')  # remove \r\n and NULL
    # get the path to file
    if re.match('^file:///[a-zA-Z]:/', path):  # windows
        path = path[8:]  # 8 is len('file:///')
    elif path.startswith('file://'):  # nautilus, rox
        path = path[7:]  # 7 is len('file://')
    elif path.startswith('file:'):  # xffm
        path = path[5:]  # 5 is len('file:')
    return path
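
# Sketch (the dropped URI is an assumption):
#   get_file_path_from_dnd_dropped_uri('file:///home/user/pic%20ture.png')
#       -> '/home/user/pic ture.png'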


def get_xmpp_show(show):
    if show in ('online', 'offline'):
        return None
    return show


def sanitize_filename(filename):
    """
    Make sure the filename we will write contains only acceptable and latin
    characters, and is not too long (in that case hash it)
    """
    # 48 is the limit
    if len(filename) > 48:
        hash_ = hashlib.md5(filename.encode('utf-8'))
        filename = base64.b64encode(hash_.digest()).decode('utf-8')

    # make it latin chars only
    filename = punycode_encode(filename).decode('utf-8')
    filename = filename.replace('/', '_')
    if os.name == 'nt':
        filename = filename.replace('?', '_').replace(':', '_')\
            .replace('\\', '_').replace('"', "'").replace('|', '_')\
            .replace('*', '_').replace('<', '_').replace('>', '_')

    return filename


def reduce_chars_newlines(text, max_chars=0, max_lines=0):
    """
    Cut the chars after 'max_chars' on each line and show only the first
    'max_lines'

    If any of the params is not present (None or 0) the action on it is not
    performed
    """
    def _cut_if_long(string):
        if len(string) > max_chars:
            string = string[:max_chars - 3] + '…'
        return string

    if max_lines == 0:
        lines = text.split('\n')
    else:
        lines = text.split('\n', max_lines)[:max_lines]
    if max_chars > 0:
        if lines:
            lines = [_cut_if_long(e) for e in lines]
    if lines:
        reduced_text = '\n'.join(lines)
        if reduced_text != text:
            reduced_text += '…'
    else:
        reduced_text = ''
    return reduced_text
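
# Sketch of the truncation (the input string is an assumption):
#   reduce_chars_newlines('one\ntwo\nthree', max_lines=2) -> 'one\ntwo…'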


def get_account_status(account):
    status = reduce_chars_newlines(account['status_line'], 100, 1)
    return status


def datetime_tuple(timestamp):
    """
    Convert timestamp using strptime and the format: %Y%m%dT%H:%M:%S

    Because various datetime formats are used, the following exceptions
    are handled:
    - Optional milliseconds appended to the string are removed
    - Optional Z (that means UTC) appended to the string is removed
    - XEP-0082 datetime strings have all '-' chars removed to meet
      the above format.
    """
    date, tim = timestamp.split('T', 1)
    date = date.replace('-', '')
    tim = tim.replace('z', '')
    tim = tim.replace('Z', '')
    zone = None
    if '+' in tim:
        sign = -1
        tim, zone = tim.split('+', 1)
    if '-' in tim:
        sign = 1
        tim, zone = tim.split('-', 1)
    tim = tim.split('.')[0]
    tim = time.strptime(date + 'T' + tim, '%Y%m%dT%H:%M:%S')
    if zone:
        zone = zone.replace(':', '')
        tim = datetime.fromtimestamp(time.mktime(tim))
        if len(zone) > 2:
            zone = time.strptime(zone, '%H%M')
        else:
            zone = time.strptime(zone, '%H')
        zone = timedelta(hours=zone.tm_hour, minutes=zone.tm_min)
        tim += zone * sign
        tim = tim.timetuple()
    return tim
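
# Sketch (the timestamp is an assumption):
#   datetime_tuple('2006-07-08T04:03:02Z')
#       -> time.struct_time for 2006-07-08 04:03:02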


from gajim.common import app
if app.is_installed('PYCURL'):
    import pycurl
    from io import StringIO


def convert_bytes(string):
    suffix = ''
    # IEC standard says KiB = 1024 bytes KB = 1000 bytes
    # but do we use the standard?
    use_kib_mib = app.config.get('use_kib_mib')
    align = 1024.
    bytes_ = float(string)
    if bytes_ >= align:
        bytes_ = round(bytes_/align, 1)
        if bytes_ >= align:
            bytes_ = round(bytes_/align, 1)
            if bytes_ >= align:
                bytes_ = round(bytes_/align, 1)
                if use_kib_mib:
                    #GiB means gibibyte
                    suffix = _('%s GiB')
                else:
                    #GB means gigabyte
                    suffix = _('%s GB')
            else:
                if use_kib_mib:
                    #MiB means mebibyte
                    suffix = _('%s MiB')
                else:
                    #MB means megabyte
                    suffix = _('%s MB')
        else:
            if use_kib_mib:
                #KiB means kibibyte
                suffix = _('%s KiB')
            else:
                #KB means kilobytes
                suffix = _('%s KB')
    else:
        #B means bytes
        suffix = _('%s B')
    return suffix % str(bytes_)
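
# Sketch, assuming the 'use_kib_mib' option is enabled in the config:
#   convert_bytes('2048') -> '2.0 KiB'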


def get_contact_dict_for_account(account):
    """
    Create a dict of jid, nick -> contact with all contacts of account.

    Can be used for completion lists
    """
    contacts_dict = {}
    for jid in app.contacts.get_jid_list(account):
        contact = app.contacts.get_contact_with_highest_priority(account,
            jid)
        contacts_dict[jid] = contact
        name = contact.name
        if name in contacts_dict:
            contact1 = contacts_dict[name]
            del contacts_dict[name]
            contacts_dict['%s (%s)' % (name, contact1.jid)] = contact1
            contacts_dict['%s (%s)' % (name, jid)] = contact
        elif contact.name:
            if contact.name == app.get_nick_from_jid(jid):
                del contacts_dict[jid]
            contacts_dict[name] = contact
    return contacts_dict


def launch_browser_mailer(kind, uri):
    # kind = 'url' or 'mail'
    if kind == 'url' and uri.startswith('file://'):
        launch_file_manager(uri)
        return
    if kind in ('mail', 'sth_at_sth') and not uri.startswith('mailto:'):
        uri = 'mailto:' + uri

    if kind == 'url' and uri.startswith('www.'):
        uri = 'http://' + uri

    if not app.config.get('autodetect_browser_mailer'):
        if kind == 'url':
            command = app.config.get('custombrowser')
        elif kind in ('mail', 'sth_at_sth'):
            command = app.config.get('custommailapp')
        if command == '':  # if no app is configured
            return

        command = build_command(command, uri)
        try:
            exec_command(command)
        except Exception:
            pass

    else:
        webbrowser.open(uri)


def launch_file_manager(path_to_open):
    if os.name == 'nt':
        try:
            os.startfile(path_to_open)  # if pywin32 is installed we open
        except Exception:
            pass
    else:
        if not app.config.get('autodetect_browser_mailer'):
            command = app.config.get('custom_file_manager')
            if command == '':  # if no app is configured
                return
        else:
            command = 'xdg-open'
        command = build_command(command, path_to_open)
        try:
            exec_command(command)
        except Exception:
            pass


def play_sound(event):
    if not app.config.get('sounds_on'):
        return
    path_to_soundfile = app.config.get_per('soundevents', event, 'path')
    play_sound_file(path_to_soundfile)


def check_soundfile_path(file_, dirs=None):
    """
    Check if the sound file exists

    :param file_: the file to check, absolute or relative to 'dirs' path
    :param dirs: list of known paths to fall back to if the file doesn't
        exist (eg: ~/.gajim/sounds/, DATADIR/sounds...).
    :return the path to file or None if it doesn't exist.
    """
    if dirs is None:
        dirs = [configpaths.get('MY_DATA'),
                configpaths.get('DATA')]

    if not file_:
        return None
    if os.path.exists(file_):
        return file_

    for d in dirs:
        d = os.path.join(d, 'sounds', file_)
        if os.path.exists(d):
            return d
    return None


def strip_soundfile_path(file_, dirs=None, abs_=True):
    """
    Remove known paths from a sound file

    Filechooser returns an absolute path. If the path is a known fallback
    path, we remove it. So the config has no hardcoded path to DATA_DIR and
    the text in the text field is shorter.
    param: file_: the filename to strip.
    param: dirs: list of known paths from which the filename should be stripped.
    param: abs_: force absolute path on dirs
    """
    if not file_:
        return None

    if dirs is None:
        dirs = [configpaths.get('MY_DATA'),
                configpaths.get('DATA')]

    name = os.path.basename(file_)
    for d in dirs:
        d = os.path.join(d, 'sounds', name)
        if abs_:
            d = os.path.abspath(d)
        if file_ == d:
            return name
    return file_


def play_sound_file(path_to_soundfile):
    if path_to_soundfile == 'beep':
        exec_command('beep')
        return
    path_to_soundfile = check_soundfile_path(path_to_soundfile)
    if path_to_soundfile is None:
        return
    if sys.platform == 'win32' and HAS_SOUND:
        try:
            winsound.PlaySound(path_to_soundfile,
                               winsound.SND_FILENAME|winsound.SND_ASYNC)
        except Exception:
            log.exception('Sound Playback Error')
    elif sys.platform == 'linux':
        if app.config.get('soundplayer') == '':
            def _oss_play():
                sndfile = wave.open(path_to_soundfile, 'rb')
                nc, sw, fr, nf, _comptype, _compname = sndfile.getparams()
                dev = oss.open('/dev/dsp', 'w')
                dev.setparameters(sw * 8, nc, fr)
                dev.write(sndfile.readframes(nf))
                sndfile.close()
                dev.close()
            app.thread_interface(_oss_play)
            return
        player = app.config.get('soundplayer')
        command = build_command(player, path_to_soundfile)
        exec_command(command)
    elif sys.platform == 'darwin' and HAS_SOUND:
        sound = NSSound.alloc()
        sound.initWithContentsOfFile_byReference_(path_to_soundfile, True)
        sound.play()


def get_global_show():
    maxi = 0
    for account in app.connections:
        if not app.config.get_per('accounts', account,
                                  'sync_with_global_status'):
            continue
        connected = app.connections[account].connected
        if connected > maxi:
            maxi = connected
    return app.SHOW_LIST[maxi]


def get_global_status():
    maxi = 0
    for account in app.connections:
        if not app.config.get_per('accounts', account,
                                  'sync_with_global_status'):
            continue
        connected = app.connections[account].connected
        if connected > maxi:
            maxi = connected
            status = app.connections[account].status
    return status


def statuses_unified():
    """
    Test if all statuses are the same
    """
    reference = None
    for account in app.connections:
        if not app.config.get_per('accounts', account,
                                  'sync_with_global_status'):
            continue
        if reference is None:
            reference = app.connections[account].connected
        elif reference != app.connections[account].connected:
            return False
    return True


def get_icon_name_to_show(contact, account=None):
    """
    Get the icon name to show in online, away, requested, etc
    """
    if account and app.events.get_nb_roster_events(account, contact.jid):
        return 'event'
    if account and app.events.get_nb_roster_events(
            account, contact.get_full_jid()):
        return 'event'
    if account and account in app.interface.minimized_controls and \
    contact.jid in app.interface.minimized_controls[account] and app.interface.\
    minimized_controls[account][contact.jid].get_nb_unread_pm() > 0:
        return 'event'
    if account and contact.jid in app.gc_connected[account]:
        if app.gc_connected[account][contact.jid]:
            return 'muc_active'
        return 'muc_inactive'
    if contact.jid.find('@') <= 0:  # if not '@' or '@' starts the jid ==> agent
        return contact.show
    if contact.sub in ('both', 'to'):
        return contact.show
    if contact.ask == 'subscribe':
        return 'requested'
    transport = app.get_transport_name_from_jid(contact.jid)
    if transport:
        return contact.show
    if contact.show in app.SHOW_LIST:
        return contact.show
    return 'not in roster'


def get_full_jid_from_iq(iq_obj):
    """
    Return the full jid (with resource) from an iq
    """
    jid = iq_obj.getFrom()
    if jid is None:
        return None
    return parse_jid(str(iq_obj.getFrom()))


def get_jid_from_iq(iq_obj):
    """
    Return the jid (without resource) from an iq
    """
    jid = get_full_jid_from_iq(iq_obj)
    return app.get_jid_without_resource(jid)


def get_auth_sha(sid, initiator, target):
    """
    Return sha of sid + initiator + target used for proxy auth
    """
    return hashlib.sha1(("%s%s%s" % (sid, initiator, target)).encode('utf-8')).\
        hexdigest()


def remove_invalid_xml_chars(string):
    if string:
        string = re.sub(app.interface.invalid_XML_chars_re, '', string)
    return string


def get_random_string_16():
    """
    Create random string of length 16
    """
    rng = list(range(65, 90))
    rng.extend(range(48, 57))
    char_sequence = [chr(e) for e in rng]
    from random import sample
    return ''.join(sample(char_sequence, 16))


def get_os_info():
    if app.os_info:
        return app.os_info
    app.os_info = 'N/A'
    if os.name == 'nt' or sys.platform == 'darwin':
        import platform
        app.os_info = platform.system() + " " + platform.release()
    elif os.name == 'posix':
        try:
            import distro
            app.os_info = distro.name(pretty=True)
        except ImportError:
            import platform
            app.os_info = platform.system()
    return app.os_info


def allow_showing_notification(account, type_='notify_on_new_message',
        is_first_message=True):
    """
    Is it allowed to show a notification?

    Check OUR status and whether we allow notifications for that status.
    type_ is the option that needs to be True, e.g. notify_on_signing.
    is_first_message: set it to False when it's not the first message
    """
    if type_ and (not app.config.get(type_) or not is_first_message):
        return False
    if app.config.get('autopopupaway'):  # always show notification
        return True
    if app.connections[account].connected in (2, 3):  # we're online or chat
        return True
    return False


def allow_popup_window(account):
    """
    Is it allowed to popup windows?
    """
    autopopup = app.config.get('autopopup')
    autopopupaway = app.config.get('autopopupaway')
    if autopopup and (autopopupaway or \
    app.connections[account].connected in (2, 3)):  # we're online or chat
        return True
    return False


def allow_sound_notification(account, sound_event):
    if app.config.get('sounddnd') or app.connections[account].connected != \
    app.SHOW_LIST.index('dnd') and app.config.get_per('soundevents',
    sound_event, 'enabled'):
        return True
    return False


def get_chat_control(account, contact):
    full_jid_with_resource = contact.jid
    if contact.resource:
        full_jid_with_resource += '/' + contact.resource
    highest_contact = app.contacts.get_contact_with_highest_priority(
        account, contact.jid)

    # Look for a chat control that has the given resource, or default to
    # one without resource
    ctrl = app.interface.msg_win_mgr.get_control(full_jid_with_resource,
        account)

    if ctrl:
        return ctrl

    if (highest_contact and
            highest_contact.resource and
            contact.resource != highest_contact.resource):
        return None

    # unknown contact or offline message
    return app.interface.msg_win_mgr.get_control(contact.jid, account)


def get_notification_icon_tooltip_dict():
    """
    Return a dict of the form {acct: {'show': show, 'message': message,
    'event_lines': [list of text lines to show in tooltip]}
    """
    # How many events must there be before they're shown summarized, not per-user
    max_ungrouped_events = 10

    accounts = get_accounts_info()

    # Gather events. (With accounts, when there are more.)
    for account in accounts:
        account_name = account['name']
        account['event_lines'] = []
        # Gather events per-account
        pending_events = app.events.get_events(account=account_name)
        messages, non_messages, total_messages, total_non_messages = {}, {}, 0, 0
        for jid in pending_events:
            for event in pending_events[jid]:
                if event.type_.count('file') > 0:
                    # This is a non-message event.
                    non_messages[jid] = non_messages.get(jid, 0) + 1
                    total_non_messages = total_non_messages + 1
                else:
                    # This is a message.
                    messages[jid] = messages.get(jid, 0) + 1
                    total_messages = total_messages + 1
        # Display unread messages numbers, if any
        if total_messages > 0:
            if total_messages > max_ungrouped_events:
                text = ngettext(
                    '%d message pending',
                    '%d messages pending',
                    total_messages, total_messages, total_messages)
                account['event_lines'].append(text)
            else:
                for jid in messages:
                    text = ngettext(
                        '%d message pending',
                        '%d messages pending',
                        messages[jid], messages[jid], messages[jid])
                    contact = app.contacts.get_first_contact_from_jid(
                        account['name'], jid)
                    text += ' '
                    if jid in app.gc_connected[account['name']]:
                        text += _('from room %s') % (jid)
                    elif contact:
                        name = contact.get_shown_name()
                        text += _('from user %s') % (name)
                    else:
                        text += _('from %s') % (jid)
                    account['event_lines'].append(text)

        # Display unseen events numbers, if any
        if total_non_messages > 0:
            if total_non_messages > max_ungrouped_events:
                text = ngettext(
                    '%d event pending',
                    '%d events pending',
                    total_non_messages, total_non_messages, total_non_messages)
                account['event_lines'].append(text)
            else:
                for jid in non_messages:
                    text = ngettext('%d event pending', '%d events pending',
                        non_messages[jid], non_messages[jid], non_messages[jid])
                    text += ' ' + _('from user %s') % (jid)
                    account['event_lines'].append(text)

    return accounts


def get_notification_icon_tooltip_text():
    text = None
    # How many events must there be before they're shown summarized, not per-user
    # max_ungrouped_events = 10
    # Character which should be used to indent in the tooltip.
    indent_with = ' '

    accounts = get_notification_icon_tooltip_dict()

    if not accounts:
        # No configured account
        return _('Gajim')

    # at least one account present

    # Is there more than one account?
    if len(accounts) == 1:
        show_more_accounts = False
    else:
        show_more_accounts = True

    # If there is only one account, its status is shown on the first line.
    if show_more_accounts:
        text = _('Gajim')
    else:
        text = _('Gajim - %s') % (get_account_status(accounts[0]))

    # Gather and display events. (With accounts, when there are more.)
    for account in accounts:
        account_name = account['name']
        # Set account status, if not set above
        if show_more_accounts:
            message = '\n' + indent_with + ' %s - %s'
            text += message % (account_name, get_account_status(account))
            # Account list shown, messages need to be indented more
            indent_how = 2
        else:
            # If no account list is shown, messages could have default indenting.
            indent_how = 1
        for line in account['event_lines']:
            text += '\n' + indent_with * indent_how + ' '
            text += line
    return text


def get_accounts_info():
    """
    Helper for notification icon tooltip
    """
    accounts = []
    accounts_list = sorted(app.contacts.get_accounts())
    for account in accounts_list:
        status_idx = app.connections[account].connected
        # uncomment the following to hide offline accounts
        # if status_idx == 0: continue
        status = app.SHOW_LIST[status_idx]
        message = app.connections[account].status
        single_line = get_uf_show(status)
        if message is None:
            message = ''
        else:
            message = message.strip()
        if message != '':
            single_line += ': ' + message
        account_label = app.get_account_label(account)
        accounts.append({'name': account,
                         'account_label': account_label,
                         'status_line': single_line,
                         'show': status,
                         'message': message})
    return accounts


def get_current_show(account):
    if account not in app.connections:
        return 'offline'
    status = app.connections[account].connected
    return app.SHOW_LIST[status]


def get_iconset_path(iconset):
    if os.path.isdir(os.path.join(configpaths.get('DATA'), 'iconsets', iconset)):
        return os.path.join(configpaths.get('DATA'), 'iconsets', iconset)
    if os.path.isdir(os.path.join(configpaths.get('MY_ICONSETS'), iconset)):
        return os.path.join(configpaths.get('MY_ICONSETS'), iconset)


def get_mood_iconset_path(iconset):
    if os.path.isdir(os.path.join(configpaths.get('DATA'), 'moods', iconset)):
        return os.path.join(configpaths.get('DATA'), 'moods', iconset)
    if os.path.isdir(
            os.path.join(configpaths.get('MY_MOOD_ICONSETS'), iconset)):
        return os.path.join(configpaths.get('MY_MOOD_ICONSETS'), iconset)


def get_activity_iconset_path(iconset):
    if os.path.isdir(os.path.join(configpaths.get('DATA'), 'activities', iconset)):
        return os.path.join(configpaths.get('DATA'), 'activities', iconset)
    if os.path.isdir(os.path.join(configpaths.get('MY_ACTIVITY_ICONSETS'),
            iconset)):
        return os.path.join(configpaths.get('MY_ACTIVITY_ICONSETS'), iconset)


def get_transport_path(transport):
    if os.path.isdir(os.path.join(configpaths.get('DATA'), 'iconsets', 'transports',
            transport)):
        return os.path.join(configpaths.get('DATA'), 'iconsets', 'transports', transport)
    if os.path.isdir(os.path.join(configpaths.get('MY_ICONSETS'), 'transports',
            transport)):
        return os.path.join(configpaths.get('MY_ICONSETS'), 'transports', transport)
    # No transport folder found, use default jabber one
    return get_iconset_path(app.config.get('iconset'))


def prepare_and_validate_gpg_keyID(account, jid, keyID):
    """
    Return an eight char long keyID that can be used for GPG encryption
    with this contact

    If the given keyID is None, return UNKNOWN; if the key does not match the
    assigned key XXXXXXXXMISMATCH is returned. If the key is trusted and not yet
    assigned, assign it.
    """
    if app.connections[account].USE_GPG:
        if keyID and len(keyID) == 16:
            keyID = keyID[8:]

        attached_keys = app.config.get_per('accounts', account,
            'attached_gpg_keys').split()

        if jid in attached_keys and keyID:
            attachedkeyID = attached_keys[attached_keys.index(jid) + 1]
            if attachedkeyID != keyID:
                # Get signing subkeys for the attached key
                subkeys = []
                for key in app.connections[account].gpg.list_keys():
                    if key['keyid'][8:] == attachedkeyID:
                        subkeys = [subkey[0][8:] for subkey in key['subkeys'] \
                            if subkey[1] == 's']
                        break

                if keyID not in subkeys:
                    # Mismatch! Another gpg key was expected
                    keyID += 'MISMATCH'
        elif jid in attached_keys:
            # An unsigned presence, just use the assigned key
            keyID = attached_keys[attached_keys.index(jid) + 1]
        elif keyID:
            full_key = app.connections[account].ask_gpg_keys(keyID=keyID)
            # Assign the corresponding key, if we have it in our keyring
            if full_key:
                for u in app.contacts.get_contacts(account, jid):
                    u.keyID = keyID
                keys_str = app.config.get_per('accounts', account,
                    'attached_gpg_keys')
                keys_str += jid + ' ' + keyID + ' '
                app.config.set_per('accounts', account, 'attached_gpg_keys',
                    keys_str)
    elif keyID is None:
        keyID = 'UNKNOWN'
    return keyID


def update_optional_features(account=None):
    """
    Rebuild the list of optional features advertised by the given account
    (or by all accounts), recompute the caps hash and re-send presence.
    """
    if account:
        accounts = [account]
    else:
        accounts = [a for a in app.connections]
    for a in accounts:
        app.gajim_optional_features[a] = []
        if app.config.get_per('accounts', a, 'subscribe_mood'):
            app.gajim_optional_features[a].append(nbxmpp.NS_MOOD + '+notify')
        if app.config.get_per('accounts', a, 'subscribe_activity'):
            app.gajim_optional_features[a].append(
                nbxmpp.NS_ACTIVITY + '+notify')
        if app.config.get_per('accounts', a, 'publish_tune'):
            app.gajim_optional_features[a].append(nbxmpp.NS_TUNE)
        if app.config.get_per('accounts', a, 'publish_location'):
            app.gajim_optional_features[a].append(nbxmpp.NS_LOCATION)
        if app.config.get_per('accounts', a, 'subscribe_tune'):
            app.gajim_optional_features[a].append(nbxmpp.NS_TUNE + '+notify')
        if app.config.get_per('accounts', a, 'subscribe_nick'):
            app.gajim_optional_features[a].append(nbxmpp.NS_NICK + '+notify')
        if app.config.get_per('accounts', a, 'subscribe_location'):
            app.gajim_optional_features[a].append(
                nbxmpp.NS_LOCATION + '+notify')
        if app.config.get('outgoing_chat_state_notifications') != 'disabled':
            app.gajim_optional_features[a].append(nbxmpp.NS_CHATSTATES)
        if not app.config.get('ignore_incoming_xhtml'):
            app.gajim_optional_features[a].append(nbxmpp.NS_XHTML_IM)
        if app.config.get_per('accounts', a, 'answer_receipts'):
            app.gajim_optional_features[a].append(nbxmpp.NS_RECEIPTS)
        app.gajim_optional_features[a].append(nbxmpp.NS_JINGLE)
        if app.is_installed('FARSTREAM'):
            app.gajim_optional_features[a].append(nbxmpp.NS_JINGLE_RTP)
            app.gajim_optional_features[a].append(nbxmpp.NS_JINGLE_RTP_AUDIO)
            app.gajim_optional_features[a].append(nbxmpp.NS_JINGLE_RTP_VIDEO)
            app.gajim_optional_features[a].append(nbxmpp.NS_JINGLE_ICE_UDP)
        app.gajim_optional_features[a].append(
            nbxmpp.NS_JINGLE_FILE_TRANSFER_5)
        app.gajim_optional_features[a].append(nbxmpp.NS_JINGLE_XTLS)
        app.gajim_optional_features[a].append(nbxmpp.NS_JINGLE_BYTESTREAM)
        app.gajim_optional_features[a].append(nbxmpp.NS_JINGLE_IBB)

        # Give plugins the possibility to add their features
        app.plugin_manager.extension_point('update_caps', a)

        app.caps_hash[a] = caps_cache.compute_caps_hash([app.gajim_identity],
            app.gajim_common_features + app.gajim_optional_features[a])
        # re-send presence with new hash
        connected = app.connections[a].connected
        if connected > 1 and app.SHOW_LIST[connected] != 'invisible':
            app.connections[a].change_status(app.SHOW_LIST[connected],
                app.connections[a].status)
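
# Illustrative usage of update_optional_features() (the account name 'work' is
# hypothetical):
#
#     update_optional_features()        # refresh caps for every account
#     update_optional_features('work')  # refresh caps for one account only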


def jid_is_blocked(account, jid):
    con = app.connections[account]
    return (jid in con.get_module('Blocking').blocked or
            jid in con.get_module('PrivacyLists').blocked_contacts or
            con.get_module('PrivacyLists').blocked_all)
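
# Minimal sketch of how these blocking helpers are typically consulted
# (account name and JID are invented):
#
#     if jid_is_blocked('work', 'spammer@example.org'):
#         pass  # silently ignore the incoming event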


def group_is_blocked(account, group):
    con = app.connections[account]
    return (group in con.get_module('PrivacyLists').blocked_groups or
            con.get_module('PrivacyLists').blocked_all)


def get_subscription_request_msg(account=None):
    s = app.config.get_per('accounts', account, 'subscription_request_msg')
    if s:
        return s
    s = _('I would like to add you to my contact list.')
    if account:
        s = _('Hello, I am $name.') + ' ' + s
        name = app.connections[account].get_module('VCardTemp').get_vard_name()
        nick = app.nicks[account]
        if name and nick:
            name += ' (%s)' % nick
        elif nick:
            name = nick
        s = Template(s).safe_substitute({'name': name})
    return s
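
# Template behaviour used above (plain stdlib example):
#
#     Template('Hello, I am $name.').safe_substitute({'name': 'Alice'})
#     # -> 'Hello, I am Alice.'; unknown placeholders are left untouched
#     # instead of raising KeyError, which is why safe_substitute() is used.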


def replace_dataform_media(form, stanza):
    """
    Replace 'cid:' URIs in the media elements of a data form with the matching
    Bits of Binary payloads found in the stanza. Return True if at least one
    URI was replaced.
    """
    found = False
    for field in form.getTags('field'):
        for media in field.getTags('media'):
            for uri in media.getTags('uri'):
                uri_data = uri.getData()
                if uri_data.startswith('cid:'):
                    uri_data = uri_data[4:]
                    for data in stanza.getTags('data', namespace=nbxmpp.NS_BOB):
                        if data.getAttr('cid') == uri_data:
                            uri.setData(data.getData())
                            found = True
    return found
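
# For reference, the kind of markup this rewrites (illustrative XML, not
# generated by this module): a data-form field may carry
#
#     <media xmlns='urn:xmpp:media-element'>
#       <uri type='image/png'>cid:sha1+0123456789abcdef@bob.xmpp.org</uri>
#     </media>
#
# and the matching <data xmlns='urn:xmpp:bob'/> element elsewhere in the same
# stanza provides the payload that replaces the 'cid:' reference.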


def get_proxy_info(account):
    p = app.config.get_per('accounts', account, 'proxy')
    if not p:
        if app.config.get_per('accounts', account, 'use_env_http_proxy'):
            try:
                try:
                    env_http_proxy = os.environ['HTTP_PROXY']
                except Exception:
                    env_http_proxy = os.environ['http_proxy']
                env_http_proxy = env_http_proxy.strip('"')
                # Dispose of the http:// prefix
                env_http_proxy = env_http_proxy.split('://')[-1]
                env_http_proxy = env_http_proxy.split('@')

                if len(env_http_proxy) == 2:
                    login = env_http_proxy[0].split(':')
                    addr = env_http_proxy[1].split(':')
                else:
                    login = ['', '']
                    addr = env_http_proxy[0].split(':')

                proxy = {'host': addr[0], 'type': 'http', 'user': login[0]}

                if len(addr) == 2:
                    proxy['port'] = addr[1]
                else:
                    proxy['port'] = 3128

                if len(login) == 2:
                    proxy['pass'] = login[1]
                    proxy['useauth'] = True
                else:
                    proxy['pass'] = ''
                return proxy

            except Exception:
                proxy = None
        p = app.config.get('global_proxy')
    if p and p in app.config.get_per('proxies'):
        proxy = {}
        proxyptr = app.config.get_per('proxies', p)
        if not proxyptr:
            return proxy
        for key in proxyptr.keys():
            proxy[key] = proxyptr[key]
        return proxy
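
# Shape of the mapping returned for an environment proxy, assuming
# HTTP_PROXY=http://user:secret@proxy.example.net:8080 (values invented):
#
#     {'host': 'proxy.example.net', 'type': 'http', 'user': 'user',
#      'port': '8080', 'pass': 'secret', 'useauth': True}
#
# Note that 'port' stays a string in this branch because it is taken verbatim
# from the environment variable.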


def _get_img_direct(attrs):
    """
    Download an image. This function should be run in a separate thread.
    """
    mem = b''
    alt = ''
    max_size = 2*1024*1024
    if 'max_size' in attrs:
        max_size = attrs['max_size']
    # Wait maximum 10s for connection
    socket.setdefaulttimeout(10)
    try:
        req = urllib.request.Request(attrs['src'])
        req.add_header('User-Agent', 'Gajim ' + app.version)
        f = urllib.request.urlopen(req)
    except Exception as ex:
        log.debug('Error loading image %s: %s', attrs['src'], ex)
        alt = attrs.get('alt', 'Broken image')
    else:
        # Wait 2s between each byte
        try:
            f.fp._sock.fp._sock.settimeout(2)
        except Exception:
            pass
        # On a slow connection of ~1000 kbps you need ~10 seconds for 1 MB
        deadline = time.time() + (10 * (max_size / 1048576))
        while True:
            if time.time() > deadline:
                log.debug('Timeout loading image %s', attrs['src'])
                mem = b''
                alt = attrs.get('alt', '')
                if alt:
                    alt += '\n'
                alt += _('Timeout loading image')
                break
            try:
                temp = f.read(100)
            except socket.timeout as ex:
                log.debug('Timeout loading image %s: %s', attrs['src'], ex)
                alt = attrs.get('alt', '')
                if alt:
                    alt += '\n'
                alt += _('Timeout loading image')
                break
            if temp:
                mem += temp
            else:
                break
            if len(mem) > max_size:
                alt = attrs.get('alt', '')
                if alt:
                    alt += '\n'
                alt += _('Image is too big')
                break
        f.close()
    return (mem, alt)


def _get_img_proxy(attrs, proxy):
    """
    Download an image through a proxy. This function should be run in a
    separate thread.
    """
    if not app.is_installed('PYCURL'):
        return '', _('PyCURL is not installed')
    alt, max_size = '', 2*1024*1024
    if 'max_size' in attrs:
        max_size = attrs['max_size']
    try:
        # pycurl hands the write callback raw bytes, so buffer them as bytes
        from io import BytesIO
        b = BytesIO()
        c = pycurl.Curl()
        c.setopt(pycurl.URL, attrs['src'].encode('utf-8'))
        c.setopt(pycurl.FOLLOWLOCATION, 1)
        # Wait maximum 10s for connection
        c.setopt(pycurl.CONNECTTIMEOUT, 10)
        # On a slow connection of ~1000 kbps you need ~10 seconds for 1 MB
        c.setopt(pycurl.TIMEOUT, int(10 * (max_size / 1048576)))
        c.setopt(pycurl.MAXFILESIZE, max_size)
        c.setopt(pycurl.WRITEFUNCTION, b.write)
        c.setopt(pycurl.USERAGENT, 'Gajim ' + app.version)
        # set proxy
        c.setopt(pycurl.PROXY, proxy['host'].encode('utf-8'))
        c.setopt(pycurl.PROXYPORT, proxy['port'])
        if proxy['useauth']:
            c.setopt(pycurl.PROXYUSERPWD, proxy['user'].encode('utf-8')
                     + b':' + proxy['pass'].encode('utf-8'))
            c.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_ANY)
        if proxy['type'] == 'http':
            c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_HTTP)
        elif proxy['type'] == 'socks5':
            c.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5)
        c.perform()  # actually run the transfer
        c.close()
        t = b.getvalue()
        return (t, attrs.get('alt', ''))
    except pycurl.error as ex:
        alt = attrs.get('alt', '')
        if alt:
            alt += '\n'
        if ex.errno == pycurl.E_FILESIZE_EXCEEDED:
            alt += _('Image is too big')
        elif ex.errno == pycurl.E_OPERATION_TIMEOUTED:
            alt += _('Timeout loading image')
        else:
            alt += _('Error loading image')
    except Exception as ex:
        log.debug('Error loading image %s: %s', attrs['src'], ex)
        alt = attrs.get('alt', 'Broken image')
    return ('', alt)


def download_image(account, attrs):
    proxy = get_proxy_info(account)
    if proxy and proxy['type'] in ('http', 'socks5'):
        return _get_img_proxy(attrs, proxy)
    return _get_img_direct(attrs)
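
# Illustrative call (account name and URL invented); run it from a worker
# thread rather than the GTK main loop, because it blocks on network I/O:
#
#     data, alt = download_image('work', {'src': 'https://example.org/logo.png'})
#     if data:
#         pass  # hand the raw bytes to an image loader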


def version_condition(current_version, required_version):
    if V(current_version) < V(required_version):
        return False
    return True
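
# For example, version_condition('1.1.0', '1.0.99') is True and
# version_condition('0.16.9', '1.0.0') is False, following LooseVersion
# ordering.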


def get_available_emoticon_themes():
    emoticons_themes = ['font']
    files = []
    dir_iterator = os.scandir(configpaths.get('EMOTICONS'))
    for folder in dir_iterator:
        if not folder.is_dir():
            continue
        file_iterator = os.scandir(folder.path)
        for theme in file_iterator:
            if theme.is_file():
                files.append(theme.name)

    if os.path.isdir(configpaths.get('MY_EMOTS')):
        files += os.listdir(configpaths.get('MY_EMOTS'))

    for file in files:
        if file.endswith('.png'):
            emoticons_themes.append(file[:-4])
    emoticons_themes.sort()
    return emoticons_themes


def call_counter(func):
    def helper(self, restart=False):
        if restart:
            self._connect_machine_calls = 0
        self._connect_machine_calls += 1
        return func(self, restart=False)
    return helper
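
# Sketch of how such a counting decorator is applied (class and method names
# are invented for illustration):
#
#     class Connection:
#         _connect_machine_calls = 0
#
#         @call_counter
#         def connect_machine(self, restart=False):
#             ...
#
# Each call bumps self._connect_machine_calls; passing restart=True resets the
# counter before the call is counted.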


def get_sync_threshold(jid, archive_info):
    if archive_info is None or archive_info.sync_threshold is None:
        if muc_caps_cache.supports(jid, 'muc#roomconfig_membersonly'):
            threshold = app.config.get('private_room_sync_threshold')
        else:
            threshold = app.config.get('public_room_sync_threshold')
        app.logger.set_archive_infos(jid, sync_threshold=threshold)
        return threshold
    return archive_info.sync_threshold


def load_json(path, key=None, default=None):
    try:
        with open(path, 'r') as file:
            json_dict = json.loads(file.read())
    except Exception:
        log.exception('Parsing error')
        return default

    if key is None:
        return json_dict
    return json_dict.get(key, default)
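
# Example (file name and keys invented): with settings.json containing
# {"theme": "dark", "size": 12}
#
#     load_json('settings.json')                               # -> whole dict
#     load_json('settings.json', key='theme')                  # -> 'dark'
#     load_json('settings.json', key='font', default='Sans')   # -> 'Sans'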