# Copyright (C) 2003-2014 Yann Leboulanger <asterix AT lagaule.org>
# Copyright (C) 2004-2005 Vincent Hanquez <tab AT snarc.org>
# Copyright (C) 2005-2006 Nikos Kouremenos <kourem AT gmail.com>
# Copyright (C) 2006 Dimitur Kirov <dkirov AT gmail.com>
# Copyright (C) 2006-2008 Jean-Marie Traissard <jim AT lapin.org>
# Copyright (C) 2007 Tomasz Melcer <liori AT exroot.org>
#                    Julien Pivotto <roidelapluie AT gmail.com>
# Copyright (C) 2018 Philipp Hörist <philipp AT hoerist.com>
#
# This file is part of Gajim.
#
# Gajim is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; version 3 only.
#
# Gajim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gajim. If not, see <http://www.gnu.org/licenses/>.

"""
This module allows access to the on-disk database of logs
"""

import os
import sys
import time
import datetime
import calendar
import json
import logging
import sqlite3 as sqlite
from collections import namedtuple
from gzip import GzipFile
from io import BytesIO

from gi.repository import GLib

from gajim.common import exceptions
from gajim.common import app
from gajim.common import configpaths
from gajim.common.i18n import _
from gajim.common.const import (
    JIDConstant, KindConstant, ShowConstant, TypeConstant,
    SubscriptionConstant)


LOGS_SQL_STATEMENT = '''
    CREATE TABLE jids(
            jid_id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE,
            jid TEXT UNIQUE,
            type INTEGER
    );
    CREATE TABLE unread_messages(
            message_id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE,
            jid_id INTEGER,
            shown BOOLEAN default 0
    );
    CREATE INDEX idx_unread_messages_jid_id ON unread_messages (jid_id);
    CREATE TABLE logs(
            log_line_id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE,
            account_id INTEGER,
            jid_id INTEGER,
            contact_name TEXT,
            time INTEGER,
            kind INTEGER,
            show INTEGER,
            message TEXT,
            subject TEXT,
            additional_data TEXT,
            stanza_id TEXT,
            encryption TEXT,
            encryption_state TEXT,
            marker INTEGER
    );
    CREATE TABLE last_archive_message(
            jid_id INTEGER PRIMARY KEY UNIQUE,
            last_mam_id TEXT,
            oldest_mam_timestamp TEXT,
            last_muc_timestamp TEXT,
            sync_threshold INTEGER
    );
    CREATE INDEX idx_logs_jid_id_time ON logs (jid_id, time DESC);
    CREATE INDEX idx_logs_stanza_id ON logs (stanza_id);
    PRAGMA user_version=2;
    '''
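
# Both schemas carry their version in "PRAGMA user_version";
# _migrate_logs() and _migrate_cache() below read that value and apply
# ALTER TABLE steps until it matches. A minimal sketch of the pattern,
# with a hypothetical in-memory database:
#
#     con = sqlite.connect(':memory:')
#     if con.execute('PRAGMA user_version').fetchone()[0] < 1:
#         ...  # apply the version-1 ALTER TABLE statements
#         con.execute('PRAGMA user_version=1')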

CACHE_SQL_STATEMENT = '''
    CREATE TABLE transports_cache (
            transport TEXT UNIQUE,
            type INTEGER
    );
    CREATE TABLE caps_cache (
            hash_method TEXT,
            hash TEXT,
            data BLOB,
            last_seen INTEGER);
    CREATE TABLE rooms_last_message_time(
            jid_id INTEGER PRIMARY KEY UNIQUE,
            time INTEGER
    );
    CREATE TABLE roster_entry(
            account_jid_id INTEGER,
            jid_id INTEGER,
            name TEXT,
            subscription INTEGER,
            ask BOOLEAN,
            avatar_sha TEXT,
            PRIMARY KEY (account_jid_id, jid_id)
    );
    CREATE TABLE roster_group(
            account_jid_id INTEGER,
            jid_id INTEGER,
            group_name TEXT,
            PRIMARY KEY (account_jid_id, jid_id, group_name)
    );
    PRAGMA user_version=1;
    '''

log = logging.getLogger('gajim.c.logger')


class Logger:
    def __init__(self):
        self._jid_ids = {}
        self._jid_ids_reversed = {}
        self._con = None
        self._commit_timout_id = None
        self._log_db_path = configpaths.get('LOG_DB')
        self._cache_db_path = configpaths.get('CACHE_DB')

        self._create_databases()
        self._migrate_databases()
        self._connect_databases()
        self._get_jid_ids_from_db()

    def _create_databases(self):
        if os.path.isdir(self._log_db_path):
            log.error(_('%s is a directory but should be a file'),
                      self._log_db_path)
            sys.exit()

        if os.path.isdir(self._cache_db_path):
            log.error(_('%s is a directory but should be a file'),
                      self._cache_db_path)
            sys.exit()

        if not os.path.exists(self._log_db_path):
            if os.path.exists(self._cache_db_path):
                os.remove(self._cache_db_path)
            self._create(LOGS_SQL_STATEMENT, self._log_db_path)

        if not os.path.exists(self._cache_db_path):
            self._create(CACHE_SQL_STATEMENT, self._cache_db_path)

    @staticmethod
    def _connect(*args, **kwargs):
        con = sqlite.connect(*args, **kwargs)
        con.execute("PRAGMA secure_delete=1")
        return con

    @classmethod
    def _create(cls, statement, path):
        log.info(_('Creating %s'), path)
        con = cls._connect(path)
        os.chmod(path, 0o600)

        try:
            con.executescript(statement)
        except Exception:
            log.exception('Error')
            con.close()
            os.remove(path)
            sys.exit()

        con.commit()
        con.close()

    @staticmethod
    def _get_user_version(con: sqlite.Connection) -> int:
        """ Return the value of PRAGMA user_version. """
        return con.execute('PRAGMA user_version').fetchone()[0]

    def _migrate_databases(self):
        try:
            con = self._connect(self._log_db_path)
            self._migrate_logs(con)
            con.close()

            con = self._connect(self._cache_db_path)
            self._migrate_cache(con)
            con.close()
        except Exception:
            log.exception('Error')
            sys.exit()

    def _migrate_logs(self, con):
        if self._get_user_version(con) == 0:
            # All migrations from 0.16.9 until 1.0.0
            statements = [
                'ALTER TABLE logs ADD COLUMN "account_id" INTEGER',
                'ALTER TABLE logs ADD COLUMN "stanza_id" TEXT',
                'ALTER TABLE logs ADD COLUMN "encryption" TEXT',
                'ALTER TABLE logs ADD COLUMN "encryption_state" TEXT',
                'ALTER TABLE logs ADD COLUMN "marker" INTEGER',
                'ALTER TABLE logs ADD COLUMN "additional_data" TEXT',
                '''CREATE TABLE IF NOT EXISTS last_archive_message(
                    jid_id INTEGER PRIMARY KEY UNIQUE,
                    last_mam_id TEXT,
                    oldest_mam_timestamp TEXT,
                    last_muc_timestamp TEXT
                    )''',

                '''CREATE INDEX IF NOT EXISTS idx_logs_stanza_id
                    ON logs(stanza_id)''',
                'PRAGMA user_version=1'
            ]

            self._execute_multiple(con, statements)

        if self._get_user_version(con) < 2:
            statements = [
                'ALTER TABLE last_archive_message ADD COLUMN "sync_threshold" INTEGER',
                'PRAGMA user_version=2'
            ]
            self._execute_multiple(con, statements)

    def _migrate_cache(self, con):
        if self._get_user_version(con) == 0:
            # All migrations from 0.16.9 until 1.0.0
            statements = [
                'ALTER TABLE roster_entry ADD COLUMN "avatar_sha" TEXT',
                'PRAGMA user_version=1'
            ]
            self._execute_multiple(con, statements)

        if self._get_user_version(con) < 2:
            pass

    @staticmethod
    def _execute_multiple(con, statements):
        """
        Execute multiple statements, logging and skipping duplicate-column
        errors so already-applied migrations do not abort the run
        """
        for sql in statements:
            try:
                con.execute(sql)
                con.commit()
            except sqlite.OperationalError as error:
                if str(error).startswith('duplicate column name:'):
                    log.info(error)
                else:
                    log.exception('Error')
                    sys.exit()

    def namedtuple_factory(self, cursor, row):
        """
        Usage:
        con.row_factory = namedtuple_factory
        """
        fields = [col[0] for col in cursor.description]
        Row = namedtuple("Row", fields)
        named_row = Row(*row)
        if 'additional_data' in fields:
            named_row = named_row._replace(
                additional_data=json.loads(named_row.additional_data or '{}'))

        # if the query aliases the `account_id` field as `account`,
        # the account_id is converted to the account jid
        if 'account' in fields:
            if named_row.account:
                jid = self._jid_ids_reversed[named_row.account].jid
                named_row = named_row._replace(account=jid)
        return named_row

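    # A sketch of what the factory yields, assuming a hypothetical row in
    # the jids table:
    #
    #     row = self._con.execute('SELECT jid_id, jid, type FROM jids').fetchone()
    #     row.jid_id  -> 1
    #     row.jid     -> 'romeo@montague.lit'
    #     row.type    -> the stored JIDConstant integer
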
    def dispatch(self, event, error):
        app.ged.raise_event(event, None, str(error))

    def _connect_databases(self):
        self._con = self._connect(
            self._log_db_path, timeout=20.0, isolation_level='IMMEDIATE')
        self._con.row_factory = self.namedtuple_factory

        # DB functions
        self._con.create_function("like", 1, self._like)
        self._con.create_function("get_timeout", 0, self._get_timeout)

        self._set_synchronous(False)
        try:
            self._con.execute("ATTACH DATABASE '%s' AS cache" %
                              self._cache_db_path.replace("'", "''"))
        except Exception:
            log.exception('Error')
            self._con.close()
            sys.exit()

    def _set_synchronous(self, sync):
        try:
            if sync:
                self._con.execute("PRAGMA synchronous = NORMAL")
            else:
                self._con.execute("PRAGMA synchronous = OFF")
        except sqlite.Error:
            log.exception('Error')

    @staticmethod
    def _get_timeout():
        """
        Return the timeout cutoff as an epoch timestamp
        """
        timeout = app.config.get('restore_timeout')

        now = int(time.time())
        if timeout > 0:
            timeout = now - (timeout * 60)
        return timeout

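    # Worked example: with restore_timeout = 60 (minutes) the cutoff is
    # now - 3600 seconds; with restore_timeout <= 0 the raw value is
    # returned, so the SQL comparison `time > get_timeout()` matches
    # every row.
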
    @staticmethod
    def _like(search_str):
        return '%{}%'.format(search_str)

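    # _like('foo') returns '%foo%'; _connect_databases() registers it as
    # the SQL function like(), so search_log() can write
    # `message LIKE like(?)` with a plain query string as parameter.
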
    def commit(self):
        try:
            self._con.commit()
        except sqlite.OperationalError as e:
            print(str(e), file=sys.stderr)
        self._commit_timout_id = None
        return False

    def _timeout_commit(self):
        if self._commit_timout_id:
            return
        self._commit_timout_id = GLib.timeout_add(500, self.commit)

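    # Returning False from commit() removes the GLib timeout source after
    # one run, so _timeout_commit() batches writes into at most one
    # commit per 500 ms.
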
    def simple_commit(self, sql_to_commit):
        """
        Helper to execute a statement and commit on a timeout
        """
        self._con.execute(sql_to_commit)
        self._timeout_commit()

    def _get_jid_ids_from_db(self):
        """
        Load all jid/jid_id tuples into a dict for faster access
        """
        rows = self._con.execute(
            'SELECT jid_id, jid, type FROM jids').fetchall()
        for row in rows:
            self._jid_ids[row.jid] = row
            self._jid_ids_reversed[row.jid_id] = row

    def get_jids_in_db(self):
        return self._jid_ids.keys()

    def jid_is_from_pm(self, jid):
        """
        Return True if the jid most likely belongs to a private message.

        A jid like gajim@conf/nkour could be a PM jid, but gajim@conf might
        also be a normal contact with resource nkour. We decide by checking
        whether gajim@conf is already stored in `jids` with the room type.
        This heuristic fails if the user disabled logging for the room while
        keeping it enabled for PMs (highly unlikely); even then nothing
        breaks, the first PM is merely shown as a message in the room's
        public chat, and everything is fine afterwards.
        """
        if jid.find('/') > -1:
            possible_room_jid = jid.split('/', 1)[0]
            return self.jid_is_room_jid(possible_room_jid)
        # it's not a full jid, so it's not a pm one
        return False

    def jid_is_room_jid(self, jid):
        """
        Return True if it's a room jid, False if it's not, None if we don't know
        """
        jid_ = self._jid_ids.get(jid)
        if jid_ is None:
            return
        return jid_.type == JIDConstant.ROOM_TYPE

    @staticmethod
    def _get_family_jids(account, jid):
        """
        Get all jids of the metacontacts family

        :param account: The account

        :param jid: The JID

        returns a list of JIDs
        """
        family = app.contacts.get_metacontacts_family(account, jid)
        if family:
            return [user['jid'] for user in family]
        return [jid]

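    # Example: for a metacontacts family [{'jid': 'a@x.org'}, {'jid': 'b@y.org'}]
    # (hypothetical), _get_family_jids(account, 'a@x.org') returns
    # ['a@x.org', 'b@y.org']; the query methods below expand this list
    # into `jid IN (?, ?)` placeholders.
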
    def get_account_id(self, account):
        jid = app.get_jid_from_account(account)
        return self.get_jid_id(jid, type_=JIDConstant.NORMAL_TYPE)

    def get_jid_id(self, jid, kind=None, type_=None):
        """
        Get the jid id from a jid.
        In case the jid id is not found, create a new one.

        :param jid: The JID

        :param kind: The KindConstant

        :param type_: The JIDConstant

        return the jid id
        """

        if kind in (KindConstant.GC_MSG, KindConstant.GCSTATUS):
            type_ = JIDConstant.ROOM_TYPE
        elif kind is not None:
            type_ = JIDConstant.NORMAL_TYPE

        result = self._jid_ids.get(jid, None)
        if result is not None:
            return result.jid_id

        sql = 'SELECT jid_id, jid, type FROM jids WHERE jid = ?'
        row = self._con.execute(sql, [jid]).fetchone()
        if row is not None:
            self._jid_ids[jid] = row
            return row.jid_id

        if type_ is None:
            raise ValueError(
                'Unable to insert new JID because type is missing')

        sql = 'INSERT INTO jids (jid, type) VALUES (?, ?)'
        lastrowid = self._con.execute(sql, (jid, type_)).lastrowid
        Row = namedtuple('Row', 'jid_id jid type')
        self._jid_ids[jid] = Row(lastrowid, jid, type_)
        self._timeout_commit()
        return lastrowid

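    # Usage sketch (hypothetical JID; type_ is only required when the JID
    # is not yet in the database):
    #
    #     jid_id = self.get_jid_id('romeo@montague.lit',
    #                              type_=JIDConstant.NORMAL_TYPE)
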
    def convert_kind_values_to_db_api_values(self, kind):
        """
        Convert from string style to constant ints for db
        """
        if kind == 'status':
            kind_col = KindConstant.STATUS
        elif kind == 'gcstatus':
            kind_col = KindConstant.GCSTATUS
        elif kind == 'gc_msg':
            kind_col = KindConstant.GC_MSG
        elif kind == 'single_msg_recv':
            kind_col = KindConstant.SINGLE_MSG_RECV
        elif kind == 'single_msg_sent':
            kind_col = KindConstant.SINGLE_MSG_SENT
        elif kind == 'chat_msg_recv':
            kind_col = KindConstant.CHAT_MSG_RECV
        elif kind == 'chat_msg_sent':
            kind_col = KindConstant.CHAT_MSG_SENT
        elif kind == 'error':
            kind_col = KindConstant.ERROR

        return kind_col

    def convert_show_values_to_db_api_values(self, show):
        """
        Convert from string style to constant ints for db
        """

        if show == 'online':
            return ShowConstant.ONLINE
        if show == 'chat':
            return ShowConstant.CHAT
        if show == 'away':
            return ShowConstant.AWAY
        if show == 'xa':
            return ShowConstant.XA
        if show == 'dnd':
            return ShowConstant.DND
        if show == 'offline':
            return ShowConstant.OFFLINE
        if show is None:
            return ShowConstant.ONLINE
        # 'invisible' in a GC when someone goes invisible:
        # it's an RFC violation, but we should not crash
        return None

    def convert_human_transport_type_to_db_api_values(self, type_):
        """
        Convert from string style to constant ints for db
        """
        if type_ == 'aim':
            return TypeConstant.AIM
        if type_ == 'gadu-gadu':
            return TypeConstant.GG
        if type_ == 'http-ws':
            return TypeConstant.HTTP_WS
        if type_ == 'icq':
            return TypeConstant.ICQ
        if type_ == 'msn':
            return TypeConstant.MSN
        if type_ == 'qq':
            return TypeConstant.QQ
        if type_ == 'sms':
            return TypeConstant.SMS
        if type_ == 'smtp':
            return TypeConstant.SMTP
        if type_ in ('tlen', 'x-tlen'):
            return TypeConstant.TLEN
        if type_ == 'newmail':
            return TypeConstant.NEWMAIL
        if type_ == 'rss':
            return TypeConstant.RSS
        if type_ == 'weather':
            return TypeConstant.WEATHER
        if type_ == 'mrim':
            return TypeConstant.MRIM
        if type_ == 'jabber':
            return TypeConstant.NO_TRANSPORT
        return None

    def convert_api_values_to_human_transport_type(self, type_id):
        """
        Convert from constant ints for db to string style
        """
        if type_id == TypeConstant.AIM:
            return 'aim'
        if type_id == TypeConstant.GG:
            return 'gadu-gadu'
        if type_id == TypeConstant.HTTP_WS:
            return 'http-ws'
        if type_id == TypeConstant.ICQ:
            return 'icq'
        if type_id == TypeConstant.MSN:
            return 'msn'
        if type_id == TypeConstant.QQ:
            return 'qq'
        if type_id == TypeConstant.SMS:
            return 'sms'
        if type_id == TypeConstant.SMTP:
            return 'smtp'
        if type_id == TypeConstant.TLEN:
            return 'tlen'
        if type_id == TypeConstant.NEWMAIL:
            return 'newmail'
        if type_id == TypeConstant.RSS:
            return 'rss'
        if type_id == TypeConstant.WEATHER:
            return 'weather'
        if type_id == TypeConstant.MRIM:
            return 'mrim'
        if type_id == TypeConstant.NO_TRANSPORT:
            return 'jabber'

    def convert_xmpp_sub(self, sub):
        """
        Convert from string style to constant ints for db
        """
        if sub == 'none':
            return SubscriptionConstant.NONE
        if sub == 'to':
            return SubscriptionConstant.TO
        if sub == 'from':
            return SubscriptionConstant.FROM
        if sub == 'both':
            return SubscriptionConstant.BOTH

    def convert_db_sub(self, sub):
        """
        Convert from constant ints for db to string style
        """
        if sub == SubscriptionConstant.NONE:
            return 'none'
        if sub == SubscriptionConstant.TO:
            return 'to'
        if sub == SubscriptionConstant.FROM:
            return 'from'
        if sub == SubscriptionConstant.BOTH:
            return 'both'

    def insert_unread_events(self, message_id, jid_id):
        """
        Add unread message with id: message_id
        """
        sql = '''INSERT INTO unread_messages (message_id, jid_id, shown)
                 VALUES (?, ?, 0)'''
        self._con.execute(sql, (message_id, jid_id))
        self._timeout_commit()

    def set_read_messages(self, message_ids):
        """
        Mark all messages with ids in message_ids as read
        """
        ids = ','.join([str(i) for i in message_ids])
        sql = 'DELETE FROM unread_messages WHERE message_id IN (%s)' % ids
        self.simple_commit(sql)

    def set_shown_unread_msgs(self, msg_log_id):
        """
        Mark unread message as shown in GUI
        """
        sql = 'UPDATE unread_messages SET shown = 1 where message_id = %s' % \
                msg_log_id
        self.simple_commit(sql)

    def reset_shown_unread_messages(self):
        """
        Set shown field to False in unread_messages table
        """
        sql = 'UPDATE unread_messages SET shown = 0'
        self.simple_commit(sql)

    def get_unread_msgs(self):
        """
        Get all unread messages
        """
        all_messages = []
        try:
            unread_results = self._con.execute(
                'SELECT message_id, shown from unread_messages').fetchall()
        except Exception:
            unread_results = []
        for message in unread_results:
            msg_log_id = message.message_id
            shown = message.shown
            # here we get info for that message, and the related jid from the
            # jids table. Do NOT change the order of the SELECTed columns,
            # unless you also change the function(s) that call this one.
            result = self._con.execute('''
                    SELECT logs.log_line_id, logs.message, logs.time, logs.subject,
                    jids.jid, logs.additional_data
                    FROM logs, jids
                    WHERE logs.log_line_id = %d AND logs.jid_id = jids.jid_id
                    ''' % msg_log_id
                    ).fetchone()
            if result is None:
                # Log line is no longer in the logs table; remove it from
                # unread_messages
                self.set_read_messages([msg_log_id])
                continue

            all_messages.append((result, shown))
        return all_messages

    def get_last_conversation_lines(self, account, jid, pending):
        """
        Get recent messages

        Pending messages are already in queue to be printed when the
        ChatControl is opened, so we don't want to request those messages.
        How many messages are requested depends on the 'restore_lines'
        config value. How far back in time messages are requested depends on
        _get_timeout().

        :param account: The account

        :param jid: The jid from which we request the conversation lines

        :param pending: How many messages are currently pending so we don't
                        request those messages

        returns a list of namedtuples
        """

        restore = app.config.get('restore_lines')
        if restore <= 0:
            return []

        kinds = map(str, [KindConstant.SINGLE_MSG_RECV,
                          KindConstant.SINGLE_MSG_SENT,
                          KindConstant.CHAT_MSG_RECV,
                          KindConstant.CHAT_MSG_SENT,
                          KindConstant.ERROR])

        jids = self._get_family_jids(account, jid)

        sql = '''
            SELECT time, kind, message, subject, additional_data
            FROM logs NATURAL JOIN jids WHERE jid IN ({jids}) AND
            kind IN ({kinds}) AND time > get_timeout()
            ORDER BY time DESC, log_line_id DESC LIMIT ? OFFSET ?
            '''.format(jids=', '.join('?' * len(jids)),
                       kinds=', '.join(kinds))

        try:
            messages = self._con.execute(
                sql, tuple(jids) + (restore, pending)).fetchall()
        except sqlite.DatabaseError:
            self.dispatch('DB_ERROR',
                          exceptions.DatabaseMalformed(self._log_db_path))
            return []

        messages.reverse()
        return messages

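    # Worked example: with restore_lines = 10 and pending = 3 the query
    # runs `LIMIT 10 OFFSET 3`, i.e. it skips the 3 newest rows already
    # queued for display and returns the 10 messages before them
    # (reversed back into chronological order).
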
    def get_unix_time_from_date(self, year, month, day):
        # year (e.g. 2005), month (e.g. 11), day (e.g. 25)
        # returns time in seconds for the second that starts that date since epoch
        # gimme unixtime from year month day:
        d = datetime.date(year, month, day)
        local_time = d.timetuple()  # time tuple (compat with time.localtime())
        # we have time since epoch
        start_of_day = int(time.mktime(local_time))
        return start_of_day

    def get_conversation_for_date(self, account, jid, date):
        """
        Load the complete conversation with a given jid on a specific date

        :param account: The account

        :param jid: The jid for which we request the conversation

        :param date: datetime.datetime instance
                     example: datetime.datetime(year, month, day)

        returns a list of namedtuples
        """

        jids = self._get_family_jids(account, jid)

        delta = datetime.timedelta(
            hours=23, minutes=59, seconds=59, microseconds=999999)

        sql = '''
            SELECT contact_name, time, kind, show, message, subject,
                   additional_data, log_line_id
            FROM logs NATURAL JOIN jids WHERE jid IN ({jids})
            AND time BETWEEN ? AND ?
            ORDER BY time, log_line_id
            '''.format(jids=', '.join('?' * len(jids)))

        return self._con.execute(sql, tuple(jids) +
                                 (date.timestamp(),
                                 (date + delta).timestamp())).fetchall()

    def search_log(self, account, jid, query, date=None):
        """
        Search the conversation log for messages containing the `query` string.

        The search can either span the complete log for the given
        `account` and `jid` or be restricted to a single day by
        specifying `date`.

        :param account: The account

        :param jid: The jid for which we request the conversation

        :param query: A search string

        :param date: datetime.datetime instance
                     example: datetime.datetime(year, month, day)

        returns a list of namedtuples
        """
        jids = self._get_family_jids(account, jid)

        if date:
            delta = datetime.timedelta(
                hours=23, minutes=59, seconds=59, microseconds=999999)

            between = '''
                AND time BETWEEN {start} AND {end}
                '''.format(start=date.timestamp(),
                           end=(date + delta).timestamp())

        sql = '''
            SELECT contact_name, time, kind, show, message, subject,
                   additional_data, log_line_id
            FROM logs NATURAL JOIN jids WHERE jid IN ({jids})
            AND message LIKE like(?) {date_search}
            ORDER BY time, log_line_id
            '''.format(jids=', '.join('?' * len(jids)),
                       date_search=between if date else '')

        return self._con.execute(sql, tuple(jids) + (query,)).fetchall()

    def get_days_with_logs(self, account, jid, year, month):
        """
        Request the days in a month where we received messages
        for a given `jid`.

        :param account: The account

        :param jid: The jid for which we request the days

        :param year: The year

        :param month: The month

        returns a list of namedtuples
        """
        jids = self._get_family_jids(account, jid)

        kinds = map(str, [KindConstant.STATUS,
                          KindConstant.GCSTATUS])

        # Calculate the start and end datetime of the month
        date = datetime.datetime(year, month, 1)
        days = calendar.monthrange(year, month)[1] - 1
        delta = datetime.timedelta(
            days=days, hours=23, minutes=59, seconds=59, microseconds=999999)

        sql = """
            SELECT DISTINCT
            CAST(strftime('%d', time, 'unixepoch', 'localtime') AS INTEGER)
            AS day FROM logs NATURAL JOIN jids WHERE jid IN ({jids})
            AND time BETWEEN ? AND ?
            AND kind NOT IN ({kinds})
            ORDER BY time
            """.format(jids=', '.join('?' * len(jids)),
                       kinds=', '.join(kinds))

        return self._con.execute(sql, tuple(jids) +
                                 (date.timestamp(),
                                 (date + delta).timestamp())).fetchall()

    def get_last_date_that_has_logs(self, account, jid):
        """
        Get the timestamp of the last message we received for the jid.

        :param account: The account

        :param jid: The jid for which we request the last timestamp

        returns a timestamp or None
        """
        jids = self._get_family_jids(account, jid)

        kinds = map(str, [KindConstant.STATUS,
                          KindConstant.GCSTATUS])

        sql = '''
            SELECT MAX(time) as time FROM logs
            NATURAL JOIN jids WHERE jid IN ({jids})
            AND kind NOT IN ({kinds})
            '''.format(jids=', '.join('?' * len(jids)),
                       kinds=', '.join(kinds))

        # fetchone() always returns at least one Row with all
        # attributes set to None because of the MAX() function
        return self._con.execute(sql, tuple(jids)).fetchone().time

    def get_first_date_that_has_logs(self, account, jid):
        """
        Get the timestamp of the first message we received for the jid.

        :param account: The account

        :param jid: The jid for which we request the first timestamp

        returns a timestamp or None
        """
        jids = self._get_family_jids(account, jid)

        kinds = map(str, [KindConstant.STATUS,
                          KindConstant.GCSTATUS])

        sql = '''
            SELECT MIN(time) as time FROM logs
            NATURAL JOIN jids WHERE jid IN ({jids})
            AND kind NOT IN ({kinds})
            '''.format(jids=', '.join('?' * len(jids)),
                       kinds=', '.join(kinds))

        # fetchone() always returns at least one Row with all
        # attributes set to None because of the MIN() function
        return self._con.execute(sql, tuple(jids)).fetchone().time

    def get_date_has_logs(self, account, jid, date):
        """
        Get single timestamp of a message we received for the jid
        in the time range of one day.

        :param account: The account

        :param jid: The jid for which we request the first timestamp

        :param date: datetime.datetime instance
                     example: datetime.datetime(year, month, day)

        returns a timestamp or None
        """
        jids = self._get_family_jids(account, jid)

        delta = datetime.timedelta(
            hours=23, minutes=59, seconds=59, microseconds=999999)

        start = date.timestamp()
        end = (date + delta).timestamp()

        sql = '''
            SELECT time
            FROM logs NATURAL JOIN jids WHERE jid IN ({jids})
            AND time BETWEEN ? AND ?
            '''.format(jids=', '.join('?' * len(jids)))

        return self._con.execute(
            sql, tuple(jids) + (start, end)).fetchone()

    def get_room_last_message_time(self, account, jid):
        """
        Get the timestamp of the last message we received in a room.

        :param account: The account

        :param jid: The jid for which we request the last timestamp

        returns a timestamp or None
        """
        sql = '''
            SELECT time FROM rooms_last_message_time
            NATURAL JOIN jids WHERE jid = ?
            '''

        row = self._con.execute(sql, (jid,)).fetchone()
        if not row:
            return self.get_last_date_that_has_logs(account, jid)
        return row.time

    def set_room_last_message_time(self, jid, timestamp):
        """
        Set the timestamp of the last message we received in a room.

        :param jid: The jid

        :param timestamp: The timestamp in epoch
        """

        jid_id = self.get_jid_id(jid, type_=JIDConstant.ROOM_TYPE)
        sql = '''REPLACE INTO rooms_last_message_time
                 VALUES (:jid_id, COALESCE(
                 (SELECT time FROM rooms_last_message_time
                  WHERE jid_id = :jid_id AND time >= :time), :time))'''

        self._con.execute(sql, {"jid_id": jid_id, "time": timestamp})
        self._timeout_commit()

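    # The COALESCE above makes the stored timestamp monotonic: if a row
    # with time >= :time already exists, its value is kept; otherwise
    # :time is written. E.g. an existing time of 1000 survives a call
    # with timestamp 900.
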
    def save_transport_type(self, jid, type_):
        """
        Save the type of the transport in DB
        """
        type_id = self.convert_human_transport_type_to_db_api_values(type_)
        if not type_id:
            # unknown type
            return
        result = self._con.execute(
            'SELECT type from transports_cache WHERE transport = "%s"' % jid).fetchone()
        if result:
            if result.type == type_id:
                return
            sql = 'UPDATE transports_cache SET type = %d WHERE transport = "%s"' %\
                (type_id, jid)
            self.simple_commit(sql)
            return
        sql = 'INSERT INTO transports_cache (transport, type) VALUES (?, ?)'
        self._con.execute(sql, (jid, type_id))
        self._timeout_commit()

    def get_transports_type(self):
        """
        Return the types of all transports in the DB
        """
        results = self._con.execute('SELECT * from transports_cache').fetchall()
        if not results:
            return {}
        answer = {}
        for result in results:
            answer[result.transport] = self.convert_api_values_to_human_transport_type(
                result.type)
        return answer

    # A longer note here:
    # The database contains a blob field. Pysqlite seems to need special care
    # for such fields.
    # When storing, we need to convert a string into a buffer object (1).
    # When retrieving, we need to convert it back to a string to decompress it.
    # (2)
    # GzipFile needs a file-like object; BytesIO emulates a file for byte strings
    def iter_caps_data(self):
        """
        Iterate over caps cache data stored in the database

        The iterator values are pairs of (node, ver, ext, identities, features):
        identities == {'category':'foo', 'type':'bar', 'name':'boo'},
        features being a list of feature namespaces.
        """
        # get data from table
        # the data field contains a binary object (gzipped data); this is a hack
        # to get that data without trying to convert it to unicode
        try:
            rows = self._con.execute('SELECT hash_method, hash, data FROM caps_cache;')
        except sqlite.OperationalError:
            # might happen when there's no caps_cache table yet
            # -- there's no data to read anyway then
            return

        # list of corrupted entries that will be removed
        to_be_removed = []
        for row in rows:
            # for each row: unpack the data field
            # (format: (category, type, name, category, type, name, ...
            #   ..., 'FEAT', feature1, feature2, ...).join(' '))
            # NOTE: if there's a need to do more gzip, put that to a function
            try:
                data = GzipFile(fileobj=BytesIO(row.data)).read().decode('utf-8').split('\0')
            except IOError:
                # This data is corrupted. It probably contains non-ascii chars
                to_be_removed.append((row.hash_method, row.hash))
                continue
            i = 0
            identities = list()
            features = list()
            while i < (len(data) - 3) and data[i] != 'FEAT':
                category = data[i]
                type_ = data[i + 1]
                lang = data[i + 2]
                name = data[i + 3]
                identities.append({'category': category, 'type': type_,
                                   'xml:lang': lang, 'name': name})
                i += 4
            i += 1
            while i < len(data):
                features.append(data[i])
                i += 1

            # yield the row
            yield row.hash_method, row.hash, identities, features
        for hash_method, hash_ in to_be_removed:
            sql = '''DELETE FROM caps_cache WHERE hash_method = "%s" AND
                     hash = "%s"''' % (hash_method, hash_)
            self.simple_commit(sql)

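    # Sketch of a packed caps record before gzip, with hypothetical values
    # (fields joined by '\0'):
    #
    #     client\0pc\0en\0Gajim\0FEAT\0jabber:iq:roster\0jabber:iq:version
    #
    # i.e. (category, type, xml:lang, name) groups, then 'FEAT', then the
    # feature namespaces, matching the parser above and add_caps_entry() below.
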
    def add_caps_entry(self, hash_method, hash_, identities, features):
        data = []
        for identity in identities:
            # there is no FEAT category
            if identity['category'] == 'FEAT':
                return
            data.extend((identity.get('category'), identity.get('type', ''),
                         identity.get('xml:lang', ''), identity.get('name', '')))
        data.append('FEAT')
        data.extend(features)
        data = '\0'.join(data)
        # if there's a need to do more gzip, put that to a function
        string = BytesIO()
        gzip = GzipFile(fileobj=string, mode='w')
        gzip.write(data.encode('utf-8'))
        gzip.close()
        data = string.getvalue()
        self._con.execute('''
                INSERT INTO caps_cache ( hash_method, hash, data, last_seen )
                VALUES (?, ?, ?, ?);
                ''', (hash_method, hash_, memoryview(data), int(time.time())))
        # (1) -- note above
        self._timeout_commit()

    def update_caps_time(self, method, hash_):
        sql = '''UPDATE caps_cache SET last_seen = %d
                 WHERE hash_method = "%s" and hash = "%s"''' % \
                 (int(time.time()), method, hash_)
        self.simple_commit(sql)

    def clean_caps_table(self):
        """
        Remove caps entries that have not been seen for 3 months
        """
        sql = '''DELETE FROM caps_cache WHERE last_seen < %d''' % \
            int(time.time() - 3*30*24*3600)
        self.simple_commit(sql)

    def replace_roster(self, account_name, roster_version, roster):
        """
        Replace the current roster in the DB with a new one

        account_name is the name of the account to change.
        roster_version is the version of the new roster.
        roster is the new version.
        """
        # First we must reset roster_version value to ensure that the server
        # sends back all the roster at the next connection if the replacement
        # didn't work properly.
        app.config.set_per('accounts', account_name, 'roster_version', '')

        account_jid = app.get_jid_from_account(account_name)
        # Execute get_jid_id() because this ensures on new accounts that the
        # jid_id will be created
        self.get_jid_id(account_jid, type_=JIDConstant.NORMAL_TYPE)

        # Delete old roster
        self.remove_roster(account_jid)

        # Fill roster tables with the new roster
        for jid in roster:
            self.add_or_update_contact(account_jid, jid, roster[jid]['name'],
                roster[jid]['subscription'], roster[jid]['ask'],
                roster[jid]['groups'], commit=False)
        self._timeout_commit()

        # At this point, we are sure the replacement works properly so we can
        # set the new roster_version value.
        app.config.set_per('accounts', account_name, 'roster_version',
            roster_version)

    def del_contact(self, account_jid, jid):
        """
        Remove jid from account_jid roster
        """
        try:
            account_jid_id = self.get_jid_id(account_jid)
            jid_id = self.get_jid_id(jid)
        except exceptions.PysqliteOperationalError as e:
            raise exceptions.PysqliteOperationalError(str(e))
        self._con.execute(
            'DELETE FROM roster_group WHERE account_jid_id=? AND jid_id=?',
            (account_jid_id, jid_id))
        self._con.execute(
            'DELETE FROM roster_entry WHERE account_jid_id=? AND jid_id=?',
            (account_jid_id, jid_id))
        self._timeout_commit()

    def add_or_update_contact(self, account_jid, jid, name, sub, ask, groups,
                              commit=True):
        """
        Add or update a contact from account_jid roster
        """
        if sub == 'remove':
            self.del_contact(account_jid, jid)
            return

        try:
            account_jid_id = self.get_jid_id(account_jid)
            jid_id = self.get_jid_id(jid, type_=JIDConstant.NORMAL_TYPE)
        except exceptions.PysqliteOperationalError as error:
            raise exceptions.PysqliteOperationalError(str(error))

        # Update groups information
        # First we delete all previous groups information
        sql = 'DELETE FROM roster_group WHERE account_jid_id=? AND jid_id=?'
        self._con.execute(sql, (account_jid_id, jid_id))
        # Then we add all new groups information
        sql = '''INSERT INTO roster_group (account_jid_id, jid_id, group_name)
                 VALUES (?, ?, ?)'''
        for group in groups:
            self._con.execute(sql, (account_jid_id, jid_id, group))

        if name is None:
            name = ''

        sql = '''REPLACE INTO roster_entry
                 (account_jid_id, jid_id, name, subscription, ask)
                 VALUES(?, ?, ?, ?, ?)'''
        self._con.execute(sql, (account_jid_id,
                                jid_id,
                                name,
                                self.convert_xmpp_sub(sub),
                                bool(ask)))
        if commit:
            self._timeout_commit()

    def get_roster(self, account_jid):
        """
        Return the account_jid roster in NonBlockingRoster format
        """
        data = {}
        account_jid_id = self.get_jid_id(account_jid, type_=JIDConstant.NORMAL_TYPE)

        # First we fill data with roster_entry information
        rows = self._con.execute('''
            SELECT j.jid, re.jid_id, re.name, re.subscription, re.ask, re.avatar_sha
            FROM roster_entry re, jids j
            WHERE re.account_jid_id=? AND j.jid_id=re.jid_id''', (account_jid_id,))
        for row in rows:
            # jid, jid_id, name, subscription, ask
            jid = row.jid
            name = row.name
            data[jid] = {}
            data[jid]['avatar_sha'] = row.avatar_sha
            if name:
                data[jid]['name'] = name
            else:
                data[jid]['name'] = None
            data[jid]['subscription'] = self.convert_db_sub(row.subscription)
            data[jid]['groups'] = []
            data[jid]['resources'] = {}
            if row.ask:
                data[jid]['ask'] = 'subscribe'
            else:
                data[jid]['ask'] = None
            data[jid]['id'] = row.jid_id

        # Then we add group for roster entries
        for jid in data:
            rows = self._con.execute('''
                SELECT group_name FROM roster_group
                WHERE account_jid_id=? AND jid_id=?''',
                (account_jid_id, data[jid]['id']))
            for row in rows:
                group_name = row.group_name
                data[jid]['groups'].append(group_name)
            del data[jid]['id']

        return data

    def remove_roster(self, account_jid):
        """
        Remove the roster of an account

        :param account_jid: The jid of the account
        """
        try:
            jid_id = self.get_jid_id(account_jid)
        except ValueError:
            # This happens if the JID never made it to the Database
            # because the account was never connected
            return

        sql = '''
            DELETE FROM roster_entry WHERE account_jid_id = {jid_id};
            DELETE FROM roster_group WHERE account_jid_id = {jid_id};
            '''.format(jid_id=jid_id)

        self._con.executescript(sql)
        self._timeout_commit()

    def search_for_duplicate(self, account, jid, timestamp, msg):
        """
        Check if a message is already in the `logs` table

        :param account: The account

        :param jid: The jid as string

        :param timestamp: The timestamp in UTC epoch

        :param msg: The message text
        """

        # Search within a window of 30 seconds around the timestamp
        start_time = timestamp - 30
        end_time = timestamp + 30

        account_id = self.get_account_id(account)
        log.debug('start: %s, end: %s, jid: %s, message: %s',
                  start_time, end_time, jid, msg)

        sql = '''
            SELECT * FROM logs
            NATURAL JOIN jids WHERE jid = ? AND message = ? AND account_id = ?
            AND time BETWEEN ? AND ?
            '''

        result = self._con.execute(
            sql, (jid, msg, account_id, start_time, end_time)).fetchone()

        if result is not None:
            log.debug('Message already in DB')
            return True
        return False

    def find_stanza_id(self, account, archive_jid, stanza_id, origin_id=None,
                       groupchat=False):
        """
        Checks if a stanza-id is already in the `logs` table

        :param account: The account

        :param archive_jid: The jid of the archive the stanza-id belongs to,
                            only used if groupchat=True

        :param stanza_id: The stanza-id

        :param origin_id: The origin-id

        :param groupchat: True if the stanza-id is from a groupchat

        :return: True if the stanza-id was found
        """
        ids = []
        if stanza_id is not None:
            ids.append(stanza_id)
        if origin_id is not None:
            ids.append(origin_id)

        if not ids:
            return False

        type_ = JIDConstant.NORMAL_TYPE
        if groupchat:
            type_ = JIDConstant.ROOM_TYPE

        archive_id = self.get_jid_id(archive_jid, type_=type_)
        account_id = self.get_account_id(account)

        if groupchat:
            # A stanza-id is only unique within a specific archive, so the
            # same stanza-id could appear in different MUCs. Therefore we
            # also filter on the archive JID, which is the bare MUC jid.
            sql = '''
                SELECT stanza_id FROM logs
                WHERE stanza_id IN ({values})
                AND jid_id = ? AND account_id = ? LIMIT 1
                '''.format(values=', '.join('?' * len(ids)))
            result = self._con.execute(
                sql, tuple(ids) + (archive_id, account_id)).fetchone()
        else:
            sql = '''
                SELECT stanza_id FROM logs
                WHERE stanza_id IN ({values}) AND account_id = ?
                AND kind != ? LIMIT 1
                '''.format(values=', '.join('?' * len(ids)))
            result = self._con.execute(
                sql, tuple(ids) + (account_id, KindConstant.GC_MSG)).fetchone()

        if result is not None:
            log.info('Found duplicated message, stanza-id: %s, origin-id: %s, '
                     'archive-jid: %s, account: %s',
                     stanza_id, origin_id, archive_jid, account_id)
            return True
        return False

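    # Illustrative usage sketch (not part of the module), assuming `logger`
    # is this Logger instance; account, room JID and stanza-id are
    # hypothetical:
    #
    #     if logger.find_stanza_id('myaccount',
    #                              'room@conference.example.org',
    #                              'xyz-123',
    #                              groupchat=True):
    #         pass  # already in the archive, drop the duplicate
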
    def insert_jid(self, jid, kind=None, type_=JIDConstant.NORMAL_TYPE):
        """
        Insert a new jid into the `jids` table.
        This is an alias of get_jid_id() for better readability.

        :param jid: The jid as string

        :param kind: A KindConstant

        :param type_: A JIDConstant
        """
        return self.get_jid_id(jid, kind, type_)

    def insert_into_logs(self, account, jid, time_, kind,
                         unread=True, **kwargs):
        """
        Insert a new message into the `logs` table

        :param account: The account

        :param jid: The jid as string

        :param time_: The timestamp in UTC epoch

        :param kind: A KindConstant

        :param unread: If True the message is added to the `unread_messages`
                       table. Only if kind == CHAT_MSG_RECV

        :param kwargs: Every additional named argument must correspond to
                       a field in the `logs` table
        """
        jid_id = self.get_jid_id(jid, kind=kind)
        account_id = self.get_account_id(account)

        if 'additional_data' in kwargs:
            if not kwargs['additional_data']:
                del kwargs['additional_data']
            else:
                kwargs['additional_data'] = json.dumps(kwargs['additional_data'])

        sql = '''
              INSERT INTO logs (account_id, jid_id, time, kind, {columns})
              VALUES (?, ?, ?, ?, {values})
              '''.format(columns=', '.join(kwargs.keys()),
                         values=', '.join('?' * len(kwargs)))

        lastrowid = self._con.execute(
            sql,
            (account_id, jid_id, time_, kind) + tuple(kwargs.values())).lastrowid

        log.info('Insert into DB: jid: %s, time: %s, kind: %s, stanza_id: %s',
                 jid, time_, kind, kwargs.get('stanza_id', None))

        if unread and kind == KindConstant.CHAT_MSG_RECV:
            sql = '''INSERT INTO unread_messages (message_id, jid_id)
                     VALUES (?, (SELECT jid_id FROM jids WHERE jid = ?))'''
            self._con.execute(sql, (lastrowid, jid))

        self._timeout_commit()

        return lastrowid

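    # Illustrative usage sketch (not part of the module), assuming `logger`
    # is this Logger instance; all values are hypothetical. The keyword
    # arguments map directly to columns of the `logs` table:
    #
    #     import time
    #     logger.insert_into_logs('myaccount',
    #                             'contact@example.org',
    #                             time.time(),
    #                             KindConstant.CHAT_MSG_RECV,
    #                             message='hello',
    #                             stanza_id='xyz-123')
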
    def set_avatar_sha(self, account_jid, jid, sha=None):
        """
        Set the avatar sha of a jid on an account

        :param account_jid: The jid of the account

        :param jid: The jid that belongs to the avatar

        :param sha: The sha of the avatar
        """

        account_jid_id = self.get_jid_id(account_jid)
        jid_id = self.get_jid_id(jid, type_=JIDConstant.NORMAL_TYPE)

        sql = '''
            UPDATE roster_entry SET avatar_sha = ?
            WHERE account_jid_id = ? AND jid_id = ?
            '''
        self._con.execute(sql, (sha, account_jid_id, jid_id))
        self._timeout_commit()

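    # Illustrative usage sketch (not part of the module), assuming `logger`
    # is this Logger instance; the JIDs and digest are hypothetical:
    #
    #     logger.set_avatar_sha('myaccount@example.org',
    #                           'contact@example.org',
    #                           sha='0f3b2a...')
    #     # Passing no sha stores NULL, which effectively clears the avatar
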
    def get_archive_infos(self, jid):
        """
        Get the archive infos

        :param jid: The jid that belongs to the archive
        """
        jid_id = self.get_jid_id(jid, type_=JIDConstant.ROOM_TYPE)
        sql = '''SELECT * FROM last_archive_message WHERE jid_id = ?'''
        return self._con.execute(sql, (jid_id,)).fetchone()

    def set_archive_infos(self, jid, **kwargs):
        """
        Set archive infos

        :param jid: The jid that belongs to the archive

        :param last_mam_id: The last MAM result id

        :param oldest_mam_timestamp: The oldest date we requested MAM
                                     history for

        :param last_muc_timestamp: The timestamp of the last message we
                                   received in a MUC

        :param sync_threshold: The max days that we request from a
                               MUC archive
        """
        jid_id = self.get_jid_id(jid)
        exists = self.get_archive_infos(jid)
        if not exists:
            sql = '''INSERT INTO last_archive_message
                     (jid_id, last_mam_id, oldest_mam_timestamp,
                     last_muc_timestamp, sync_threshold)
                     VALUES (?, ?, ?, ?, ?)'''
            self._con.execute(sql, (
                jid_id,
                kwargs.get('last_mam_id', None),
                kwargs.get('oldest_mam_timestamp', None),
                kwargs.get('last_muc_timestamp', None),
                kwargs.get('sync_threshold', None)))
        else:
            args = ' = ?, '.join(kwargs.keys()) + ' = ?'
            sql = '''UPDATE last_archive_message SET {}
                     WHERE jid_id = ?'''.format(args)
            self._con.execute(sql, tuple(kwargs.values()) + (jid_id,))
        log.info('Save archive infos: %s', kwargs)
        self._timeout_commit()
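
    # Illustrative usage sketch (not part of the module), assuming `logger`
    # is this Logger instance; the room JID, MAM id and timestamp are
    # hypothetical. On an existing row only the passed keywords are updated:
    #
    #     logger.set_archive_infos('room@conference.example.org',
    #                              last_mam_id='mam-id-123',
    #                              last_muc_timestamp=1530000000)
    #     infos = logger.get_archive_infos('room@conference.example.org')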