2008-08-15 19:31:51 +02:00
|
|
|
# -*- coding:utf-8 -*-
|
2008-08-15 05:20:23 +02:00
|
|
|
## src/common/logger.py
|
2003-11-30 17:02:00 +01:00
|
|
|
##
|
2014-01-02 09:33:54 +01:00
|
|
|
## Copyright (C) 2003-2014 Yann Leboulanger <asterix AT lagaule.org>
|
2008-08-15 05:20:23 +02:00
|
|
|
## Copyright (C) 2004-2005 Vincent Hanquez <tab AT snarc.org>
|
|
|
|
## Copyright (C) 2005-2006 Nikos Kouremenos <kourem AT gmail.com>
|
|
|
|
## Copyright (C) 2006 Dimitur Kirov <dkirov AT gmail.com>
|
|
|
|
## Copyright (C) 2006-2008 Jean-Marie Traissard <jim AT lapin.org>
|
|
|
|
## Copyright (C) 2007 Tomasz Melcer <liori AT exroot.org>
|
|
|
|
## Julien Pivotto <roidelapluie AT gmail.com>
|
2003-11-30 17:02:00 +01:00
|
|
|
##
|
2007-10-22 13:13:13 +02:00
|
|
|
## This file is part of Gajim.
|
|
|
|
##
|
|
|
|
## Gajim is free software; you can redistribute it and/or modify
|
2003-11-30 17:02:00 +01:00
|
|
|
## it under the terms of the GNU General Public License as published
|
2007-10-22 13:13:13 +02:00
|
|
|
## by the Free Software Foundation; version 3 only.
|
2003-11-30 17:02:00 +01:00
|
|
|
##
|
2007-10-22 13:13:13 +02:00
|
|
|
## Gajim is distributed in the hope that it will be useful,
|
2003-11-30 17:02:00 +01:00
|
|
|
## but WITHOUT ANY WARRANTY; without even the implied warranty of
|
2008-08-15 05:20:23 +02:00
|
|
|
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
2003-11-30 17:02:00 +01:00
|
|
|
## GNU General Public License for more details.
|
|
|
|
##
|
2007-10-22 13:13:13 +02:00
|
|
|
## You should have received a copy of the GNU General Public License
|
2008-08-15 05:20:23 +02:00
|
|
|
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
|
2007-10-22 13:13:13 +02:00
|
|
|
##
|
2003-11-30 17:02:00 +01:00
|
|
|
|
2009-11-26 12:58:12 +01:00
|
|
|
"""
|
|
|
|
This module allows to access the on-disk database of logs
|
|
|
|
"""
|
2007-06-27 02:51:38 +02:00
|
|
|
|
2003-11-30 17:02:00 +01:00
|
|
|
import os
|
2005-11-23 20:12:52 +01:00
|
|
|
import sys
|
2003-11-30 17:02:00 +01:00
|
|
|
import time
|
2005-11-23 20:12:52 +01:00
|
|
|
import datetime
|
2017-07-16 01:14:42 +02:00
|
|
|
import calendar
|
2016-09-05 00:01:29 +02:00
|
|
|
import json
|
2017-03-23 22:56:54 +01:00
|
|
|
from collections import namedtuple
|
2007-07-09 23:24:47 +02:00
|
|
|
from gzip import GzipFile
|
2013-01-08 10:17:09 +01:00
|
|
|
from io import BytesIO
|
2013-07-28 20:50:30 +02:00
|
|
|
from gi.repository import GLib
|
2017-03-04 21:22:46 +01:00
|
|
|
from enum import IntEnum, unique
|
2005-08-25 18:50:02 +02:00
|
|
|
|
2017-06-13 23:58:06 +02:00
|
|
|
from gajim.common import exceptions
|
2017-08-13 13:18:56 +02:00
|
|
|
from gajim.common import app
|
2006-06-04 11:54:11 +02:00
|
|
|
|
2009-11-30 16:36:47 +01:00
|
|
|
import sqlite3 as sqlite
|
2005-12-01 18:16:53 +01:00
|
|
|
|
2017-08-13 13:18:56 +02:00
|
|
|
# Paths are resolved by the application's path manager.
LOG_DB_PATH = app.gajimpaths['LOG_DB']
# Split so the connection can be opened with a relative filename
# after chdir() into the folder (see Logger.open_db()).
LOG_DB_FOLDER, LOG_DB_FILE = os.path.split(LOG_DB_PATH)
CACHE_DB_PATH = app.gajimpaths['CACHE_DB']

import logging
log = logging.getLogger('gajim.c.logger')
|
|
|
|
|
2017-03-04 21:22:46 +01:00
|
|
|
@unique
class JIDConstant(IntEnum):
    # Values of the `type` column in the `jids` table:
    # plain contact vs. MUC room.
    NORMAL_TYPE = 0
    ROOM_TYPE = 1
2017-03-04 21:22:46 +01:00
|
|
|
@unique
class KindConstant(IntEnum):
    # Values of the `kind` column in the `logs` table.
    STATUS = 0
    GCSTATUS = 1
    GC_MSG = 2
    SINGLE_MSG_RECV = 3
    CHAT_MSG_RECV = 4
    SINGLE_MSG_SENT = 5
    CHAT_MSG_SENT = 6
    ERROR = 7

    def __str__(self):
        # Render as the bare integer so members can be joined
        # directly into SQL text (see the query builders below).
        return str(self.value)
2017-03-04 21:22:46 +01:00
|
|
|
@unique
class ShowConstant(IntEnum):
    # Presence `show` values as stored in the database.
    ONLINE = 0
    CHAT = 1
    AWAY = 2
    XA = 3
    DND = 4
    OFFLINE = 5
2017-03-04 21:22:46 +01:00
|
|
|
@unique
class TypeConstant(IntEnum):
    # Transport/gateway types as stored in the database.
    AIM = 0
    GG = 1
    HTTP_WS = 2
    ICQ = 3
    MSN = 4
    QQ = 5
    SMS = 6
    SMTP = 7
    TLEN = 8
    YAHOO = 9
    NEWMAIL = 10
    RSS = 11
    WEATHER = 12
    MRIM = 13
    NO_TRANSPORT = 14
2017-03-04 21:22:46 +01:00
|
|
|
@unique
class SubscriptionConstant(IntEnum):
    # Roster subscription states as stored in the database.
    NONE = 0
    TO = 1
    FROM = 2
    BOTH = 3
2005-11-24 19:29:45 +01:00
|
|
|
|
2005-04-16 00:02:13 +02:00
|
|
|
class Logger:
|
2010-02-08 15:08:40 +01:00
|
|
|
def __init__(self):
    """
    Prepare the logger.

    The databases themselves are never created here: if either the log
    database or the cache database is missing, initialization is cut
    short and src/common/checks_paths.py is expected to create them.
    """
    # jid -> Row(jid_id, jid, type); filled by get_jid_ids_from_db()
    self._jid_ids = {}
    self.con = None
    # id of the pending GLib commit timeout, or None (see _timeout_commit)
    self.commit_timout_id = None

    if not os.path.exists(LOG_DB_PATH):
        # this can happen only the first time (the time we create the db)
        # db is not created here but in src/common/checks_paths.py
        return
    self.init_vars()
    if not os.path.exists(CACHE_DB_PATH):
        # this can happen when the cache database is not present at launch;
        # the db will be created in src/common/checks_paths.py
        return
    self.attach_cache_database()
|
|
|
|
2017-07-12 17:56:28 +02:00
|
|
|
@staticmethod
|
|
|
|
def namedtuple_factory(cursor, row):
|
|
|
|
"""
|
|
|
|
Usage:
|
|
|
|
con.row_factory = namedtuple_factory
|
|
|
|
"""
|
|
|
|
fields = [col[0] for col in cursor.description]
|
|
|
|
Row = namedtuple("Row", fields)
|
|
|
|
named_row = Row(*row)
|
|
|
|
if 'additional_data' in fields:
|
2017-11-01 23:14:47 +01:00
|
|
|
named_row = named_row._replace(
|
|
|
|
additional_data=json.loads(named_row.additional_data or '{}'))
|
2017-07-12 17:56:28 +02:00
|
|
|
return named_row
|
|
|
|
|
2017-04-08 22:53:50 +02:00
|
|
|
def dispatch(self, event, error):
    """
    Raise `event` on the global event dispatcher, passing the
    stringified `error` as payload (used e.g. for DB_ERROR).
    """
    app.ged.raise_event(event, None, str(error))
|
2017-04-08 22:53:50 +02:00
|
|
|
|
2010-02-08 15:08:40 +01:00
|
|
|
def close_db(self):
|
|
|
|
if self.con:
|
|
|
|
self.con.close()
|
|
|
|
self.con = None
|
|
|
|
self.cur = None
|
|
|
|
|
|
|
|
def open_db(self):
    """
    (Re)open the log database, install the namedtuple row factory and
    register the custom SQL functions used by the query builders.
    """
    self.close_db()

    # FIXME: sqlite3_open wants UTF8 strings. So a path with
    # non-ascii chars doesn't work. See #2812 and
    # http://lists.initd.org/pipermail/pysqlite/2005-August/000134.html
    # Workaround: chdir into the folder and open by relative filename.
    back = os.getcwd()
    os.chdir(LOG_DB_FOLDER)

    # if locked, wait up to 20 sec to unlock
    # before raise (hopefully should be enough)
    self.con = sqlite.connect(LOG_DB_FILE, timeout=20.0,
        isolation_level='IMMEDIATE')
    os.chdir(back)
    # every SELECT returns namedtuples (see namedtuple_factory)
    self.con.row_factory = self.namedtuple_factory

    # DB functions callable from SQL text: like(?) and get_timeout()
    self.con.create_function("like", 1, self._like)
    self.con.create_function("get_timeout", 0, self._get_timeout)

    self.cur = self.con.cursor()
    self.set_synchronous(False)
|
|
|
|
|
|
|
|
def attach_cache_database(self):
    """
    ATTACH the cache database under the schema name `cache`.
    Failure is logged but not fatal.
    """
    try:
        # double single quotes to escape the path inside the SQL literal
        self.cur.execute("ATTACH DATABASE '%s' AS cache" % \
            CACHE_DB_PATH.replace("'", "''"))
    except sqlite.Error as e:
        log.debug("Failed to attach cache database: %s" % str(e))
|
2010-02-08 15:08:40 +01:00
|
|
|
|
|
|
|
def set_synchronous(self, sync):
|
|
|
|
try:
|
|
|
|
if sync:
|
|
|
|
self.cur.execute("PRAGMA synchronous = NORMAL")
|
|
|
|
else:
|
|
|
|
self.cur.execute("PRAGMA synchronous = OFF")
|
2013-01-01 23:18:36 +01:00
|
|
|
except sqlite.Error as e:
|
2010-02-10 17:59:17 +01:00
|
|
|
log.debug("Failed to set_synchronous(%s): %s" % (sync, str(e)))
|
2010-02-08 15:08:40 +01:00
|
|
|
|
|
|
|
def init_vars(self):
    """Open the database and prime the in-memory jid-id cache."""
    self.open_db()
    self.get_jid_ids_from_db()
|
2010-02-08 15:08:40 +01:00
|
|
|
|
2017-07-15 03:54:13 +02:00
|
|
|
@staticmethod
def _get_timeout():
    """
    returns the timeout in epoch

    The 'restore_timeout' config value is in minutes; a positive value
    is translated to "now minus that many minutes" in epoch seconds,
    anything else is returned as-is (meaning: no time limit).
    """
    timeout = app.config.get('restore_timeout')
    if timeout <= 0:
        return timeout
    return int(time.time()) - timeout * 60
|
|
|
|
|
2017-07-15 23:06:49 +02:00
|
|
|
@staticmethod
|
|
|
|
def _like(search_str):
|
|
|
|
return '%{}%'.format(search_str)
|
|
|
|
|
2017-07-10 16:25:26 +02:00
|
|
|
def commit(self):
    """
    Commit the pending transaction.

    Used as a GLib timeout callback (see _timeout_commit); returns
    False so GLib removes the timeout source after one run.
    """
    try:
        self.con.commit()
    except sqlite.OperationalError as e:
        print(str(e), file=sys.stderr)
    # allow _timeout_commit() to schedule the next commit
    self.commit_timout_id = None
    return False
|
|
|
|
|
|
|
|
def _timeout_commit(self):
    """
    Schedule a commit() in 500 ms unless one is already pending,
    batching many small writes into one transaction.
    """
    if self.commit_timout_id:
        return
    self.commit_timout_id = GLib.timeout_add(500, self.commit)
|
2012-04-03 19:38:15 +02:00
|
|
|
|
2010-02-08 15:08:40 +01:00
|
|
|
def simple_commit(self, sql_to_commit):
    """
    Helper to commit

    Executes `sql_to_commit` and schedules a deferred commit.
    """
    self.cur.execute(sql_to_commit)
    self._timeout_commit()
|
2010-02-08 15:08:40 +01:00
|
|
|
|
2017-10-22 01:11:42 +02:00
|
|
|
def get_jid_ids_from_db(self):
    """
    Load all jid/jid_id tuples into a dict for faster access
    """
    rows = self.con.execute(
        'SELECT jid_id, jid, type FROM jids').fetchall()
    for row in rows:
        self._jid_ids[row.jid] = row
|
|
|
|
|
2010-02-08 15:08:40 +01:00
|
|
|
def get_jids_in_db(self):
    """Return a view of all JIDs known to the in-memory cache."""
    return self._jid_ids.keys()
|
2010-02-08 15:08:40 +01:00
|
|
|
|
|
|
|
def jid_is_from_pm(self, jid):
    """
    If jid is gajim@conf/nkour it's likely a pm one, how we know gajim@conf
    is not a normal guy and nkour is not his resource? we ask if gajim@conf
    is already in jids (with type room jid) this fails if user disables
    logging for room and only enables for pm (so higly unlikely) and if we
    fail we do not go chaos (user will see the first pm as if it was message
    in room's public chat) and after that all okay
    """
    room, separator, _nick = jid.partition('/')
    if not separator:
        # bare jid, no resource part -> cannot be a pm jid
        return False
    return self.jid_is_room_jid(room)
|
|
|
|
|
|
|
|
def jid_is_room_jid(self, jid):
|
2014-11-11 15:28:24 +01:00
|
|
|
"""
|
|
|
|
Return True if it's a room jid, False if it's not, None if we don't know
|
|
|
|
"""
|
|
|
|
self.cur.execute('SELECT type FROM jids WHERE jid=?', (jid,))
|
2010-02-08 15:08:40 +01:00
|
|
|
row = self.cur.fetchone()
|
|
|
|
if row is None:
|
2014-11-11 15:28:24 +01:00
|
|
|
return None
|
2010-02-08 15:08:40 +01:00
|
|
|
else:
|
2017-07-12 17:56:28 +02:00
|
|
|
if row.type == JIDConstant.ROOM_TYPE:
|
2014-11-11 15:28:24 +01:00
|
|
|
return True
|
|
|
|
return False
|
2010-02-08 15:08:40 +01:00
|
|
|
|
2017-07-15 03:54:13 +02:00
|
|
|
@staticmethod
def _get_family_jids(account, jid):
    """
    Get all jids of the metacontacts family

    :param account: The account

    :param jid: The JID

    returns a list of JIDs'
    """
    family = app.contacts.get_metacontacts_family(account, jid)
    if not family:
        # not part of a metacontact -> the family is just the jid itself
        return [jid]
    return [member['jid'] for member in family]
|
|
|
|
|
2017-11-17 21:42:44 +01:00
|
|
|
def get_account_id(self, account):
    """
    Return the jid_id of the account's own bare JID, creating the
    row (as a NORMAL_TYPE jid) if it does not exist yet.
    """
    jid = app.get_jid_from_account(account)
    return self.get_jid_id(jid, type_=JIDConstant.NORMAL_TYPE)
|
|
|
|
|
2017-10-22 19:11:27 +02:00
|
|
|
def get_jid_id(self, jid, kind=None, type_=None):
    """
    Get the jid id from a jid.
    In case the jid id is not found create a new one.

    :param jid: The JID

    :param kind: The KindConstant

    :param type_: The JIDConstant

    return the jid id
    """

    # derive the jid type from the message kind when one is given;
    # groupchat kinds imply a room jid, every other kind a normal jid
    if kind in (KindConstant.GC_MSG, KindConstant.GCSTATUS):
        type_ = JIDConstant.ROOM_TYPE
    elif kind is not None:
        type_ = JIDConstant.NORMAL_TYPE

    # 1) in-memory cache
    result = self._jid_ids.get(jid, None)
    if result is not None:
        return result.jid_id

    # 2) database lookup (and cache the row for next time)
    sql = 'SELECT jid_id, jid, type FROM jids WHERE jid = ?'
    row = self.con.execute(sql, [jid]).fetchone()
    if row is not None:
        self._jid_ids[jid] = row
        return row.jid_id

    # 3) not found anywhere: we must insert, which needs a type
    if type_ is None:
        raise ValueError(
            'Unable to insert new JID because type is missing')

    sql = 'INSERT INTO jids (jid, type) VALUES (?, ?)'
    lastrowid = self.con.execute(sql, (jid, type_)).lastrowid
    # mirror the row shape of the SELECT above so the cache is uniform
    Row = namedtuple('Row', 'jid_id jid type')
    self._jid_ids[jid] = Row(lastrowid, jid, type_)
    self._timeout_commit()
    return lastrowid
|
2010-02-08 15:08:40 +01:00
|
|
|
|
2017-08-08 14:41:55 +02:00
|
|
|
def convert_kind_values_to_db_api_values(self, kind):
    """
    Convert from string style to constant ints for db

    :param kind: one of 'status', 'gcstatus', 'gc_msg',
        'single_msg_recv', 'single_msg_sent', 'chat_msg_recv',
        'chat_msg_sent', 'error'

    :raises ValueError: for an unknown kind. (The previous if/elif
        chain left the result variable unbound in that case and
        crashed with UnboundLocalError.)
    """
    mapping = {
        'status': KindConstant.STATUS,
        'gcstatus': KindConstant.GCSTATUS,
        'gc_msg': KindConstant.GC_MSG,
        'single_msg_recv': KindConstant.SINGLE_MSG_RECV,
        'single_msg_sent': KindConstant.SINGLE_MSG_SENT,
        'chat_msg_recv': KindConstant.CHAT_MSG_RECV,
        'chat_msg_sent': KindConstant.CHAT_MSG_SENT,
        'error': KindConstant.ERROR,
    }
    try:
        return mapping[kind]
    except KeyError:
        raise ValueError('Unknown kind: %s' % kind) from None
|
|
|
|
|
|
|
|
def convert_show_values_to_db_api_values(self, show):
    """
    Convert from string style to constant ints for db

    A `show` of None maps to ONLINE. Any unknown value returns None:
    invisible in GC when someone goes invisible is a RFC violation,
    but we should not crash on it.
    """
    mapping = {
        'online': ShowConstant.ONLINE,
        'chat': ShowConstant.CHAT,
        'away': ShowConstant.AWAY,
        'xa': ShowConstant.XA,
        'dnd': ShowConstant.DND,
        'offline': ShowConstant.OFFLINE,
        None: ShowConstant.ONLINE,
    }
    return mapping.get(show)
|
2010-02-08 15:08:40 +01:00
|
|
|
|
|
|
|
def convert_human_transport_type_to_db_api_values(self, type_):
    """
    Convert from string style to constant ints for db

    Unknown transport names return None.
    """
    mapping = {
        'aim': TypeConstant.AIM,
        'gadu-gadu': TypeConstant.GG,
        'http-ws': TypeConstant.HTTP_WS,
        'icq': TypeConstant.ICQ,
        'msn': TypeConstant.MSN,
        'qq': TypeConstant.QQ,
        'sms': TypeConstant.SMS,
        'smtp': TypeConstant.SMTP,
        'tlen': TypeConstant.TLEN,
        'x-tlen': TypeConstant.TLEN,
        'yahoo': TypeConstant.YAHOO,
        'newmail': TypeConstant.NEWMAIL,
        'rss': TypeConstant.RSS,
        'weather': TypeConstant.WEATHER,
        'mrim': TypeConstant.MRIM,
        'jabber': TypeConstant.NO_TRANSPORT,
    }
    return mapping.get(type_)
|
|
|
|
|
|
|
|
def convert_api_values_to_human_transport_type(self, type_id):
    """
    Convert from constant ints for db to string style

    Unknown ids return None.
    """
    mapping = {
        TypeConstant.AIM: 'aim',
        TypeConstant.GG: 'gadu-gadu',
        TypeConstant.HTTP_WS: 'http-ws',
        TypeConstant.ICQ: 'icq',
        TypeConstant.MSN: 'msn',
        TypeConstant.QQ: 'qq',
        TypeConstant.SMS: 'sms',
        TypeConstant.SMTP: 'smtp',
        TypeConstant.TLEN: 'tlen',
        TypeConstant.YAHOO: 'yahoo',
        TypeConstant.NEWMAIL: 'newmail',
        TypeConstant.RSS: 'rss',
        TypeConstant.WEATHER: 'weather',
        TypeConstant.MRIM: 'mrim',
        TypeConstant.NO_TRANSPORT: 'jabber',
    }
    return mapping.get(type_id)
|
2010-02-08 15:08:40 +01:00
|
|
|
|
|
|
|
def convert_human_subscription_values_to_db_api_values(self, sub):
    """
    Convert from string style to constant ints for db

    Unknown values return None.
    """
    mapping = {
        'none': SubscriptionConstant.NONE,
        'to': SubscriptionConstant.TO,
        'from': SubscriptionConstant.FROM,
        'both': SubscriptionConstant.BOTH,
    }
    return mapping.get(sub)
|
2010-02-08 15:08:40 +01:00
|
|
|
|
|
|
|
def convert_db_api_values_to_human_subscription_values(self, sub):
    """
    Convert from constant ints for db to string style

    Unknown values return None.
    """
    mapping = {
        SubscriptionConstant.NONE: 'none',
        SubscriptionConstant.TO: 'to',
        SubscriptionConstant.FROM: 'from',
        SubscriptionConstant.BOTH: 'both',
    }
    return mapping.get(sub)
|
|
|
|
|
|
|
|
def insert_unread_events(self, message_id, jid_id):
|
|
|
|
"""
|
|
|
|
Add unread message with id: message_id
|
|
|
|
"""
|
|
|
|
sql = 'INSERT INTO unread_messages VALUES (%d, %d, 0)' % (message_id,
|
|
|
|
jid_id)
|
|
|
|
self.simple_commit(sql)
|
|
|
|
|
|
|
|
def set_read_messages(self, message_ids):
|
|
|
|
"""
|
|
|
|
Mark all messages with ids in message_ids as read
|
|
|
|
"""
|
|
|
|
ids = ','.join([str(i) for i in message_ids])
|
|
|
|
sql = 'DELETE FROM unread_messages WHERE message_id IN (%s)' % ids
|
|
|
|
self.simple_commit(sql)
|
|
|
|
|
2016-02-27 19:52:46 +01:00
|
|
|
def set_shown_unread_msgs(self, msg_log_id):
|
2010-02-08 15:08:40 +01:00
|
|
|
"""
|
|
|
|
Mark unread message as shown un GUI
|
|
|
|
"""
|
|
|
|
sql = 'UPDATE unread_messages SET shown = 1 where message_id = %s' % \
|
2016-02-27 19:52:46 +01:00
|
|
|
msg_log_id
|
2010-02-08 15:08:40 +01:00
|
|
|
self.simple_commit(sql)
|
|
|
|
|
|
|
|
def reset_shown_unread_messages(self):
    """
    Set shown field to False in unread_messages table
    """
    sql = 'UPDATE unread_messages SET shown = 0'
    self.simple_commit(sql)
|
|
|
|
|
|
|
|
def get_unread_msgs(self):
    """
    Get all unread messages

    returns a list of (row, shown) tuples, where row carries
    (log_line_id, message, time, subject, jid, additional_data).
    Stale unread entries whose log line no longer exists are purged
    on the fly.
    """
    all_messages = []
    try:
        self.cur.execute(
            'SELECT message_id, shown from unread_messages')
        unread_results = self.cur.fetchall()
    except Exception:
        # table missing or unreadable: behave as if there is nothing unread
        unread_results = []
    for message in unread_results:
        msg_log_id = message.message_id
        shown = message.shown
        # here we get infos for that message, and related jid from jids table
        # do NOT change order of SELECTed things, unless you change function(s)
        # that called this function
        self.cur.execute('''
                SELECT logs.log_line_id, logs.message, logs.time, logs.subject,
                jids.jid, logs.additional_data
                FROM logs, jids
                WHERE logs.log_line_id = %d AND logs.jid_id = jids.jid_id
                ''' % msg_log_id
            )
        results = self.cur.fetchone()
        if results is None:
            # Log line is no more in logs table. remove it from unread_messages
            self.set_read_messages([msg_log_id])
            continue

        all_messages.append((results, shown))
    return all_messages
|
|
|
|
|
2017-07-15 03:54:13 +02:00
|
|
|
def get_last_conversation_lines(self, account, jid, pending):
    """
    Get recent messages

    Pending messages are already in queue to be printed when the
    ChatControl is opened, so we dont want to request those messages.
    How many messages are requested depends on the 'restore_lines'
    config value. How far back in time messages are requested depends on
    _get_timeout().

    :param account: The account

    :param jid: The jid from which we request the conversation lines

    :param pending: How many messages are currently pending so we dont
                    request those messages

    returns a list of namedtuples
    """

    restore = app.config.get('restore_lines')
    if restore <= 0:
        return []

    # KindConstant.__str__ yields the bare int, suitable for SQL text
    kinds = map(str, [KindConstant.SINGLE_MSG_RECV,
                      KindConstant.SINGLE_MSG_SENT,
                      KindConstant.CHAT_MSG_RECV,
                      KindConstant.CHAT_MSG_SENT,
                      KindConstant.ERROR])

    jids = self._get_family_jids(account, jid)

    # get_timeout() is the SQL function registered in open_db();
    # OFFSET skips the messages that are already pending in the queue
    sql = '''
        SELECT time, kind, message, subject, additional_data
        FROM logs NATURAL JOIN jids WHERE jid IN ({jids}) AND
        kind IN ({kinds}) AND time > get_timeout()
        ORDER BY time DESC, log_line_id DESC LIMIT ? OFFSET ?
        '''.format(jids=', '.join('?' * len(jids)),
                   kinds=', '.join(kinds))

    try:
        messages = self.con.execute(
            sql, tuple(jids) + (restore, pending)).fetchall()
    except sqlite.DatabaseError:
        self.dispatch('DB_ERROR',
                      exceptions.DatabaseMalformed(LOG_DB_PATH))
        return []

    # query returned newest-first; callers want chronological order
    messages.reverse()
    return messages
|
2010-02-08 15:08:40 +01:00
|
|
|
|
|
|
|
def get_unix_time_from_date(self, year, month, day):
    """
    Return the epoch timestamp of local midnight for the given date.

    year (fe 2005), month (fe 11), day (fe 25)
    """
    # time tuple (compat with time.localtime()), then to seconds since epoch
    local_midnight = datetime.date(year, month, day).timetuple()
    return int(time.mktime(local_midnight))
|
|
|
|
|
2017-07-15 13:41:17 +02:00
|
|
|
def get_conversation_for_date(self, account, jid, date):
    """
    Load the complete conversation with a given jid on a specific date

    :param account: The account

    :param jid: The jid for which we request the conversation

    :param date: datetime.datetime instance
        example: datetime.datetime(year, month, day)

    returns a list of namedtuples
    """

    jids = self._get_family_jids(account, jid)

    # one microsecond short of 24h: the end of the same local day
    delta = datetime.timedelta(
        hours=23, minutes=59, seconds=59, microseconds=999999)

    sql = '''
        SELECT contact_name, time, kind, show, message, subject,
               additional_data, log_line_id
        FROM logs NATURAL JOIN jids WHERE jid IN ({jids})
        AND time BETWEEN ? AND ?
        ORDER BY time, log_line_id
        '''.format(jids=', '.join('?' * len(jids)))

    return self.con.execute(sql, tuple(jids) +
                            (date.timestamp(),
                            (date + delta).timestamp())).fetchall()
|
2010-02-08 15:08:40 +01:00
|
|
|
|
2017-07-15 23:06:49 +02:00
|
|
|
def search_log(self, account, jid, query, date=None):
    """
    Search the conversation log for messages containing the `query` string.

    The search can either span the complete log for the given
    `account` and `jid` or be restriced to a single day by
    specifying `date`.

    :param account: The account

    :param jid: The jid for which we request the conversation

    :param query: A search string

    :param date: datetime.datetime instance
        example: datetime.datetime(year, month, day)

    returns a list of namedtuples
    """
    jids = self._get_family_jids(account, jid)

    if date:
        delta = datetime.timedelta(
            hours=23, minutes=59, seconds=59, microseconds=999999)

        # timestamps are interpolated as numbers produced by
        # datetime.timestamp(), not user input, so this is safe
        between = '''
            AND time BETWEEN {start} AND {end}
            '''.format(start=date.timestamp(),
                       end=(date + delta).timestamp())

    # like() is the SQL function registered in open_db(); the query
    # string itself is bound as a parameter
    sql = '''
        SELECT contact_name, time, kind, show, message, subject,
               additional_data, log_line_id
        FROM logs NATURAL JOIN jids WHERE jid IN ({jids})
        AND message LIKE like(?) {date_search}
        ORDER BY time, log_line_id
        '''.format(jids=', '.join('?' * len(jids)),
                   date_search=between if date else '')

    return self.con.execute(sql, tuple(jids) + (query,)).fetchall()
|
2010-02-08 15:08:40 +01:00
|
|
|
|
2017-07-16 01:14:42 +02:00
|
|
|
def get_days_with_logs(self, account, jid, year, month):
    """
    Request the days in a month where we received messages
    for a given `jid`.

    :param account: The account

    :param jid: The jid for which we request the days

    :param year: The year

    :param month: The month

    returns a list of namedtuples
    """
    jids = self._get_family_jids(account, jid)

    # status changes are not real messages, exclude them
    kinds = map(str, [KindConstant.STATUS,
                      KindConstant.GCSTATUS])

    # Calculate the start and end datetime of the month
    date = datetime.datetime(year, month, 1)
    # monthrange()[1] is the number of days; -1 because `date`
    # already starts on day 1
    days = calendar.monthrange(year, month)[1] - 1
    delta = datetime.timedelta(
        days=days, hours=23, minutes=59, seconds=59, microseconds=999999)

    sql = """
        SELECT DISTINCT
        CAST(strftime('%d', time, 'unixepoch', 'localtime') AS INTEGER)
        AS day FROM logs NATURAL JOIN jids WHERE jid IN ({jids})
        AND time BETWEEN ? AND ?
        AND kind NOT IN ({kinds})
        ORDER BY time
        """.format(jids=', '.join('?' * len(jids)),
                   kinds=', '.join(kinds))

    return self.con.execute(sql, tuple(jids) +
                            (date.timestamp(),
                            (date + delta).timestamp())).fetchall()
|
2010-02-08 15:08:40 +01:00
|
|
|
|
2017-07-16 17:18:27 +02:00
|
|
|
def get_last_date_that_has_logs(self, account, jid):
    """
    Get the timestamp of the last message we received for the jid.

    :param account: The account
    :param jid: The jid for which we request the last timestamp

    returns a timestamp or None
    """
    jids = self._get_family_jids(account, jid)
    excluded_kinds = [str(kind) for kind in (KindConstant.STATUS,
                                             KindConstant.GCSTATUS)]

    sql = '''
        SELECT MAX(time) as time FROM logs
        NATURAL JOIN jids WHERE jid IN ({jids})
        AND kind NOT IN ({kinds})
        '''.format(jids=', '.join('?' * len(jids)),
                   kinds=', '.join(excluded_kinds))

    # Because of the MAX() aggregate fetchone() always yields exactly
    # one Row; all its attributes are None when nothing matched.
    return self.con.execute(sql, tuple(jids)).fetchone().time
|
2017-07-16 17:18:27 +02:00
|
|
|
|
2018-01-29 14:00:15 +01:00
|
|
|
def get_first_date_that_has_logs(self, account, jid):
    """
    Get the timestamp of the first message we received for the jid.

    :param account: The account
    :param jid: The jid for which we request the first timestamp

    returns a timestamp or None
    """
    jids = self._get_family_jids(account, jid)
    excluded_kinds = [str(kind) for kind in (KindConstant.STATUS,
                                             KindConstant.GCSTATUS)]

    sql = '''
        SELECT MIN(time) as time FROM logs
        NATURAL JOIN jids WHERE jid IN ({jids})
        AND kind NOT IN ({kinds})
        '''.format(jids=', '.join('?' * len(jids)),
                   kinds=', '.join(excluded_kinds))

    # Because of the MIN() aggregate fetchone() always yields exactly
    # one Row; all its attributes are None when nothing matched.
    return self.con.execute(sql, tuple(jids)).fetchone().time
|
|
|
|
|
|
|
|
def get_date_has_logs(self, account, jid, date):
    """
    Get a single timestamp of a message we received for the jid
    in the time range of one day.

    :param account: The account
    :param jid: The jid for which we request the timestamp
    :param date: datetime.datetime instance
                 example: datetime.datetime(year, month, day)

    returns a Row with a timestamp or None
    """
    jids = self._get_family_jids(account, jid)

    # End of the day: 23:59:59.999999 after `date`
    delta = datetime.timedelta(
        hours=23, minutes=59, seconds=59, microseconds=999999)

    # NOTE(review): unlike the other date queries this one does not
    # exclude STATUS/GCSTATUS rows.  The original computed that kind
    # filter but never applied it to the SQL; the dead code was
    # removed here without changing behavior.
    sql = '''
        SELECT time
        FROM logs NATURAL JOIN jids WHERE jid IN ({jids})
        AND time BETWEEN ? AND ?
        '''.format(jids=', '.join('?' * len(jids)))

    return self.con.execute(sql, tuple(jids) +
                                 (date.timestamp(),
                                 (date + delta).timestamp())).fetchone()
|
|
|
|
|
2017-07-16 17:18:27 +02:00
|
|
|
def get_room_last_message_time(self, account, jid):
    """
    Get the timestamp of the last message we received in a room.

    :param account: The account
    :param jid: The jid for which we request the last timestamp

    returns a timestamp or None
    """
    sql = '''
        SELECT time FROM rooms_last_message_time
        NATURAL JOIN jids WHERE jid = ?
        '''

    row = self.con.execute(sql, (jid,)).fetchone()
    if row:
        return row.time
    # No cached value, fall back to scanning the logs table
    return self.get_last_date_that_has_logs(account, jid)
|
2010-02-08 15:08:40 +01:00
|
|
|
|
2017-08-08 17:41:38 +02:00
|
|
|
def set_room_last_message_time(self, jid, timestamp):
    """
    Set the timestamp of the last message we received in a room.

    :param jid: The jid
    :param timestamp: The timestamp in epoch
    """
    jid_id = self.get_jid_id(jid, type_=JIDConstant.ROOM_TYPE)

    # Only move the stored time forward: COALESCE keeps an existing
    # newer (or equal) value, otherwise the supplied timestamp wins.
    sql = '''REPLACE INTO rooms_last_message_time
             VALUES (:jid_id, COALESCE(
             (SELECT time FROM rooms_last_message_time
              WHERE jid_id = :jid_id AND time >= :time), :time))'''

    self.con.execute(sql, {"jid_id": jid_id, "time": timestamp})
    self._timeout_commit()
|
2010-02-08 15:08:40 +01:00
|
|
|
|
|
|
|
def save_transport_type(self, jid, type_):
    """
    Save the type of the transport in DB.

    :param jid: The transport jid
    :param type_: The human readable transport type
    """
    type_id = self.convert_human_transport_type_to_db_api_values(type_)
    if not type_id:
        # unknown type
        return
    # Parameterized queries throughout: the previous version
    # interpolated `jid` into quoted SQL strings, which allowed SQL
    # injection through a crafted transport jid.
    self.cur.execute(
        'SELECT type from transports_cache WHERE transport = ?', (jid,))
    results = self.cur.fetchone()
    if results:
        if results.type == type_id:
            # Already stored with the same type, nothing to do
            return
        self.cur.execute(
            'UPDATE transports_cache SET type = ? WHERE transport = ?',
            (type_id, jid))
        self._timeout_commit()
        return
    self.cur.execute(
        'INSERT INTO transports_cache VALUES (?, ?)', (jid, type_id))
    self._timeout_commit()
|
|
|
|
|
|
|
|
def get_transports_type(self):
    """
    Return all the types of the transports stored in DB.
    """
    self.cur.execute('SELECT * from transports_cache')
    rows = self.cur.fetchall()
    if not rows:
        return {}
    return {row.transport:
            self.convert_api_values_to_human_transport_type(row.type)
            for row in rows}
|
|
|
|
|
|
|
|
# A longer note here:
|
|
|
|
# The database contains a blob field. Pysqlite seems to need special care for
|
|
|
|
# such fields.
|
|
|
|
# When storing, we need to convert string into buffer object (1).
|
|
|
|
# When retrieving, we need to convert it back to a string to decompress it.
|
|
|
|
# (2)
|
|
|
|
# GzipFile needs a file-like object, StringIO emulates file for plain strings
|
|
|
|
def iter_caps_data(self):
    """
    Iterate over caps cache data stored in the database.

    Yields tuples of (hash_method, hash, identities, features), where
    identities is a list of dicts like
    {'category': ..., 'type': ..., 'xml:lang': ..., 'name': ...}
    and features is a list of feature namespaces.
    (The previous docstring described a 5-tuple that was never yielded.)
    """
    # get data from table
    # the data field contains a binary object (gzipped data)
    try:
        self.cur.execute('SELECT hash_method, hash, data FROM caps_cache;')
    except sqlite.OperationalError:
        # might happen when there's no caps_cache table yet
        # -- there's no data to read anyway then
        return

    # list of corrupted entries that will be removed
    to_be_removed = []
    for row in self.cur:
        # for each row: unpack the data field
        # (format: (category, type, name, category, type, name, ...
        # ..., 'FEAT', feature1, feature2, ...).join('\0'))
        try:
            data = GzipFile(fileobj=BytesIO(row.data)).read().decode('utf-8').split('\0')
        except IOError:
            # This data is corrupted. It probably contains non-ascii chars
            to_be_removed.append((row.hash_method, row.hash))
            continue
        i = 0
        identities = list()
        features = list()
        while i < (len(data) - 3) and data[i] != 'FEAT':
            category = data[i]
            type_ = data[i + 1]
            lang = data[i + 2]
            name = data[i + 3]
            identities.append({'category': category, 'type': type_,
                'xml:lang': lang, 'name': name})
            i += 4
        # skip over the 'FEAT' marker itself
        i += 1
        while i < len(data):
            features.append(data[i])
            i += 1

        yield row.hash_method, row.hash, identities, features

    for hash_method, hash_ in to_be_removed:
        # Parameterized: the previous string interpolation allowed SQL
        # injection via attacker-controlled hash values from the network.
        self.cur.execute(
            '''DELETE FROM caps_cache WHERE hash_method = ? AND
            hash = ?''', (hash_method, hash_))
        self._timeout_commit()
|
|
|
|
|
|
|
|
def add_caps_entry(self, hash_method, hash_, identities, features):
    """
    Store one caps cache entry, gzip-compressed, in the database.
    """
    fields = []
    for identity in identities:
        # there is no FEAT category
        if identity['category'] == 'FEAT':
            return
        fields.extend((identity.get('category'), identity.get('type', ''),
            identity.get('xml:lang', ''), identity.get('name', '')))
    fields.append('FEAT')
    fields.extend(features)
    payload = '\0'.join(fields)

    # if there's a need to do more gzip, put that to a function
    buf = BytesIO()
    compressor = GzipFile(fileobj=buf, mode='w')
    compressor.write(payload.encode('utf-8'))
    compressor.close()
    blob = buf.getvalue()

    self.cur.execute('''
        INSERT INTO caps_cache ( hash_method, hash, data, last_seen )
        VALUES (?, ?, ?, ?);
        ''', (hash_method, hash_, memoryview(blob), int(time.time())))
    # memoryview: the blob column needs a buffer object, not a str
    self._timeout_commit()
|
2010-02-08 15:08:40 +01:00
|
|
|
|
|
|
|
def update_caps_time(self, method, hash_):
    """
    Refresh the last_seen timestamp of a caps cache entry.

    :param method: The hash method
    :param hash_: The hash value
    """
    # Parameterized: the previous string interpolation allowed SQL
    # injection through crafted hash values coming from the network.
    self.cur.execute(
        '''UPDATE caps_cache SET last_seen = ?
        WHERE hash_method = ? and hash = ?''',
        (int(time.time()), method, hash_))
    self._timeout_commit()
|
|
|
|
|
|
|
|
def clean_caps_table(self):
    """
    Remove caps entries that have not been seen for 3 months.
    """
    cutoff = int(time.time() - 3*30*24*3600)
    self.simple_commit(
        '''DELETE FROM caps_cache WHERE last_seen < %d''' % cutoff)
|
|
|
|
|
|
|
|
def replace_roster(self, account_name, roster_version, roster):
    """
    Replace the current roster in DB by a new one.

    :param account_name: the name of the account to change
    :param roster_version: the version of the new roster
    :param roster: the new roster content
    """
    # First reset roster_version so the server sends back the whole
    # roster at the next connection if the replacement below fails
    # before completing.
    app.config.set_per('accounts', account_name, 'roster_version', '')

    account_jid = app.get_jid_from_account(account_name)
    # Execute get_jid_id() because this ensures on new accounts that
    # the jid_id will be created
    self.get_jid_id(account_jid, type_=JIDConstant.NORMAL_TYPE)

    # Delete old roster
    self.remove_roster(account_jid)

    # Fill roster tables with the new roster
    for jid, item in roster.items():
        self.add_or_update_contact(account_jid, jid, item['name'],
            item['subscription'], item['ask'], item['groups'],
            commit=False)
    self._timeout_commit()

    # The replacement worked, store the new roster_version value
    app.config.set_per('accounts', account_name, 'roster_version',
        roster_version)
|
2010-02-08 15:08:40 +01:00
|
|
|
|
|
|
|
def del_contact(self, account_jid, jid):
    """
    Remove jid from account_jid roster.

    :param account_jid: The jid of the account
    :param jid: The contact jid to remove
    """
    # The former try/except caught PysqliteOperationalError only to
    # re-raise a new instance of the same type built from str(e),
    # discarding the traceback.  Letting the original exception
    # propagate is equivalent for callers catching that type.
    account_jid_id = self.get_jid_id(account_jid)
    jid_id = self.get_jid_id(jid)
    self.cur.execute(
        'DELETE FROM roster_group WHERE account_jid_id=? AND jid_id=?',
        (account_jid_id, jid_id))
    self.cur.execute(
        'DELETE FROM roster_entry WHERE account_jid_id=? AND jid_id=?',
        (account_jid_id, jid_id))
    self._timeout_commit()
|
2010-02-08 15:08:40 +01:00
|
|
|
|
2010-07-22 20:56:50 +02:00
|
|
|
def add_or_update_contact(self, account_jid, jid, name, sub, ask, groups,
commit=True):
    """
    Add or update a contact from account_jid roster.

    :param account_jid: the jid of the account owning the roster
    :param jid: the contact jid
    :param name: the contact display name, may be None
    :param sub: human readable subscription value; 'remove' deletes
                the contact instead
    :param ask: truthy when a subscription request is pending
    :param groups: iterable of group names the contact belongs to
    :param commit: commit the transaction when True
    """
    if sub == 'remove':
        self.del_contact(account_jid, jid)
        return

    # The former try/except around these lookups re-raised the very
    # exception type it caught (losing the traceback); propagating the
    # original PysqliteOperationalError directly is equivalent.
    account_jid_id = self.get_jid_id(account_jid)
    jid_id = self.get_jid_id(jid, type_=JIDConstant.NORMAL_TYPE)

    # Update groups information
    # First we delete all previous groups information
    self.cur.execute(
        'DELETE FROM roster_group WHERE account_jid_id=? AND jid_id=?',
        (account_jid_id, jid_id))
    # Then we add all new groups information
    for group in groups:
        self.cur.execute('INSERT INTO roster_group VALUES(?, ?, ?)',
            (account_jid_id, jid_id, group))

    if name is None:
        name = ''

    self.cur.execute('''
        REPLACE INTO roster_entry
        (account_jid_id, jid_id, name, subscription, ask)
        VALUES(?, ?, ?, ?, ?)''', (
        account_jid_id, jid_id, name,
        self.convert_human_subscription_values_to_db_api_values(sub),
        bool(ask)))
    if commit:
        self._timeout_commit()
|
2010-02-08 15:08:40 +01:00
|
|
|
|
|
|
|
def get_roster(self, account_jid):
    """
    Return the account_jid roster in NonBlockingRoster format.
    """
    data = {}
    account_jid_id = self.get_jid_id(account_jid, type_=JIDConstant.NORMAL_TYPE)

    # First fill data with the roster_entry information
    self.cur.execute('''
        SELECT j.jid, re.jid_id, re.name, re.subscription, re.ask, re.avatar_sha
        FROM roster_entry re, jids j
        WHERE re.account_jid_id=? AND j.jid_id=re.jid_id''', (account_jid_id,))
    for row in self.cur:
        data[row.jid] = {
            'avatar_sha': row.avatar_sha,
            # empty string is normalized to None
            'name': row.name if row.name else None,
            'subscription':
                self.convert_db_api_values_to_human_subscription_values(
                    row.subscription),
            'groups': [],
            'resources': {},
            'ask': 'subscribe' if row.ask else None,
            # jid_id is kept temporarily for the group lookup below
            'id': row.jid_id,
        }

    # Then add the groups for each roster entry
    for jid in data:
        self.cur.execute('''
            SELECT group_name FROM roster_group
            WHERE account_jid_id=? AND jid_id=?''',
            (account_jid_id, data[jid]['id']))
        for row in self.cur:
            data[jid]['groups'].append(row.group_name)
        del data[jid]['id']

    return data
|
|
|
|
|
|
|
|
def remove_roster(self, account_jid):
    """
    Remove the roster of an account.

    :param account_jid: The jid of the account
    """
    try:
        jid_id = self.get_jid_id(account_jid)
    except ValueError:
        # This happens if the JID never made it to the Database
        # because the account was never connected
        return

    # jid_id is an integer coming from our own table, safe to format in
    self.con.executescript('''
        DELETE FROM roster_entry WHERE account_jid_id = {jid_id};
        DELETE FROM roster_group WHERE account_jid_id = {jid_id};
        '''.format(jid_id=jid_id))
    self._timeout_commit()
|
2010-02-10 17:59:17 +01:00
|
|
|
|
2018-01-25 00:46:32 +01:00
|
|
|
def search_for_duplicate(self, account, jid, timestamp, msg):
    """
    Check if a message is already in the `logs` table.

    :param account: The account
    :param jid: The jid as string
    :param timestamp: The timestamp in UTC epoch
    :param msg: The message text
    """
    # Allow a 10 second window around the timestamp
    start_time = timestamp - 10
    end_time = timestamp + 10

    account_id = self.get_account_id(account)
    log.debug('start: %s, end: %s, jid: %s, message: %s',
              start_time, end_time, jid, msg)

    sql = '''
        SELECT * FROM logs
        NATURAL JOIN jids WHERE jid = ? AND message = ? AND account_id = ?
        AND time BETWEEN ? AND ?
        '''
    match = self.con.execute(
        sql, (jid, msg, account_id, start_time, end_time)).fetchone()

    if match is None:
        return False
    log.debug('Message already in DB')
    return True
|
|
|
|
|
2018-01-25 00:46:32 +01:00
|
|
|
def find_stanza_id(self, account, archive_jid, stanza_id, origin_id=None,
groupchat=False):
    """
    Checks if a stanza-id is already in the `logs` table.

    :param account: The account
    :param archive_jid: The jid of the archive the stanza-id belongs to,
                        only used if groupchat=True
    :param stanza_id: The stanza-id
    :param origin_id: The origin-id
    :param groupchat: stanza-id is from a groupchat

    return True if the stanza-id was found
    """
    ids = [i for i in (stanza_id, origin_id) if i is not None]
    if not ids:
        return False

    archive_id = self.get_jid_id(archive_jid)
    account_id = self.get_account_id(account)

    placeholders = ', '.join('?' * len(ids))
    if groupchat:
        # A stanza-id is only unique within one archive, so the same id
        # could legitimately appear in different MUCs; filter on the
        # archive JID (the bare MUC jid) as well.
        sql = '''
            SELECT stanza_id FROM logs
            WHERE stanza_id IN ({values})
            AND jid_id = ? AND account_id = ? LIMIT 1
            '''.format(values=placeholders)
        result = self.con.execute(
            sql, tuple(ids) + (archive_id, account_id)).fetchone()
    else:
        sql = '''
            SELECT stanza_id FROM logs
            WHERE stanza_id IN ({values}) AND account_id = ? AND kind != ? LIMIT 1
            '''.format(values=placeholders)
        result = self.con.execute(
            sql, tuple(ids) + (account_id, KindConstant.GC_MSG)).fetchone()

    if result is None:
        return False
    log.info('Found duplicated message, stanza-id: %s, origin-id: %s, '
             'archive-jid: %s, account: %s', stanza_id, origin_id, archive_jid, account_id)
    return True
|
|
|
|
|
2017-07-31 15:43:30 +02:00
|
|
|
def insert_jid(self, jid, kind=None, type_=JIDConstant.NORMAL_TYPE):
    """
    Insert a new jid into the `jids` table.
    This is an alias of get_jid_id() for better readability.

    :param jid: The jid as string
    :param kind: A KindConstant
    :param type_: A JIDConstant
    """
    return self.get_jid_id(jid, kind, type_)
|
2017-07-31 15:43:30 +02:00
|
|
|
|
2017-11-17 21:42:44 +01:00
|
|
|
def insert_into_logs(self, account, jid, time_, kind,
unread=True, **kwargs):
    """
    Insert a new message into the `logs` table.

    :param account: The account
    :param jid: The jid as string
    :param time_: The timestamp in UTC epoch
    :param kind: A KindConstant
    :param unread: If True the message is added to the `unread_messages`
                   table. Only if kind == CHAT_MSG_RECV
    :param kwargs: Every additional named argument must correspond to
                   a field in the `logs` table
    """
    jid_id = self.get_jid_id(jid, kind=kind)
    account_id = self.get_account_id(account)

    # Serialize additional_data to JSON, or drop it entirely when empty
    additional = kwargs.get('additional_data')
    if 'additional_data' in kwargs:
        if additional:
            kwargs['additional_data'] = json.dumps(additional)
        else:
            del kwargs['additional_data']

    sql = '''
        INSERT INTO logs (account_id, jid_id, time, kind, {columns})
        VALUES (?, ?, ?, ?, {values})
        '''.format(columns=', '.join(kwargs.keys()),
                   values=', '.join('?' * len(kwargs)))

    params = (account_id, jid_id, time_, kind) + tuple(kwargs.values())
    lastrowid = self.con.execute(sql, params).lastrowid

    log.info('Insert into DB: jid: %s, time: %s, kind: %s, stanza_id: %s',
             jid, time_, kind, kwargs.get('stanza_id', None))

    if unread and kind == KindConstant.CHAT_MSG_RECV:
        sql = '''INSERT INTO unread_messages (message_id, jid_id)
                 VALUES (?, (SELECT jid_id FROM jids WHERE jid = ?))'''
        self.con.execute(sql, (lastrowid, jid))

    self._timeout_commit()

    return lastrowid
|
2017-09-16 11:49:31 +02:00
|
|
|
|
|
|
|
def set_avatar_sha(self, account_jid, jid, sha=None):
    """
    Set the avatar sha of a jid on an account.

    :param account_jid: The jid of the account
    :param jid: The jid that belongs to the avatar
    :param sha: The sha of the avatar
    """
    account_jid_id = self.get_jid_id(account_jid)
    jid_id = self.get_jid_id(jid, type_=JIDConstant.NORMAL_TYPE)

    self.con.execute('''
        UPDATE roster_entry SET avatar_sha = ?
        WHERE account_jid_id = ? AND jid_id = ?
        ''', (sha, account_jid_id, jid_id))
    self._timeout_commit()
|
2017-11-13 20:58:46 +01:00
|
|
|
|
|
|
|
def get_archive_timestamp(self, jid, type_=None):
    """
    Get the last archive id/timestamp row for a jid.

    :param jid: The jid the archive belongs to

    returns a Row or None
    """
    jid_id = self.get_jid_id(jid, type_=type_)
    query = '''SELECT * FROM last_archive_message WHERE jid_id = ?'''
    return self.con.execute(query, (jid_id,)).fetchone()
|
|
|
|
|
|
|
|
def set_archive_timestamp(self, jid, **kwargs):
    """
    Set the last archive id/timestamp.

    :param jid: The jid the archive belongs to
    :param last_mam_id: The last MAM result id
    :param oldest_mam_timestamp: The oldest date we requested MAM
                                 history for
    :param last_muc_timestamp: The timestamp of the last message we
                               received in a MUC
    """
    jid_id = self.get_jid_id(jid)
    existing = self.get_archive_timestamp(jid)
    if existing:
        # Update only the supplied columns
        assignments = ' = ?, '.join(kwargs.keys()) + ' = ?'
        sql = '''UPDATE last_archive_message SET {}
                 WHERE jid_id = ?'''.format(assignments)
        self.con.execute(sql, tuple(kwargs.values()) + (jid_id,))
    else:
        sql = '''INSERT INTO last_archive_message VALUES (?, ?, ?, ?)'''
        self.con.execute(sql, (jid_id,
                               kwargs.get('last_mam_id', None),
                               kwargs.get('oldest_mam_timestamp', None),
                               kwargs.get('last_muc_timestamp', None)))
    log.info('Save archive timestamps: %s', kwargs)
    self._timeout_commit()
|