[tmolitor] ability to download bigger files with higher timeouts. Fixes #8130

Yann Leboulanger 2015-08-20 21:53:59 +02:00
parent a44a71d6a5
commit 55402bdc50


@@ -1433,8 +1433,11 @@ def _get_img_direct(attrs):
     """
     mem = b''
     alt = ''
-    # Wait maximum 5s for connection
-    socket.setdefaulttimeout(5)
+    max_size = 2*1024*1024
+    if 'max_size' in attrs:
+        max_size = attrs['max_size']
+    # Wait maximum 10s for connection
+    socket.setdefaulttimeout(10)
     try:
         req = urllib.request.Request(attrs['src'])
         req.add_header('User-Agent', 'Gajim ' + gajim.version)
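
The new max_size knob rides along in the same attrs dict the HTML renderer already passes to these helpers, so a caller can raise the 2 MiB default per image. A minimal sketch of a call site (the URL and 10 MiB figure are hypothetical, and this assumes the helper returns a (data, alt) pair, as the PyCURL variant's early return suggests):

    attrs = {
        'src': 'https://example.org/big-image.png',  # hypothetical URL
        'alt': 'inline image',
        'max_size': 10 * 1024 * 1024,  # ask for 10 MiB instead of the 2 MiB default
    }
    data, alt = _get_img_direct(attrs)
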
@@ -1444,13 +1447,13 @@ def _get_img_direct(attrs):
         pixbuf = None
         alt = attrs.get('alt', 'Broken image')
     else:
-        # Wait 0.5s between each byte
+        # Wait 2s between each byte
         try:
-            f.fp._sock.fp._sock.settimeout(0.5)
+            f.fp._sock.fp._sock.settimeout(2)
         except Exception:
             pass
-        # Max image size = 2 MB (to try to prevent DoS)
-        deadline = time.time() + 3
+        # On a slow internet connection with ~1000kbps you need ~10 seconds for 1 MB
+        deadline = time.time() + (10 * (max_size / 1048576))
         while True:
             if time.time() > deadline:
                 log.debug('Timeout loading image %s ' % attrs['src'])
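
The flat 3-second deadline becomes a budget that scales with the size cap: 10 seconds per MiB, which matches the ~1000 kbps assumption in the comment (about 125 KB/s, so a little over 8 s per MiB, rounded up). Checking the arithmetic for the default cap:

    # Deadline budget from the diff: 10 seconds per MiB allowed.
    max_size = 2 * 1024 * 1024           # default cap, 2 MiB
    budget = 10 * (max_size / 1048576)   # 10 * 2.0 = 20.0 seconds
    # vs. the old hard-coded 3 s, which a 2 MiB download on a slow
    # link could never meet
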
@@ -1473,7 +1476,7 @@ def _get_img_direct(attrs):
                 mem += temp
             else:
                 break
-            if len(mem) > 2*1024*1024:
+            if len(mem) > max_size:
                 alt = attrs.get('alt', '')
                 if alt:
                     alt += '\n'
@@ -1489,15 +1492,19 @@ def _get_img_proxy(attrs, proxy):
     """
     if not gajim.HAVE_PYCURL:
         return '', _('PyCURL is not installed')
-    mem, alt = '', ''
+    mem, alt, max_size = '', '', 2*1024*1024
+    if 'max_size' in attrs:
+        max_size = attrs['max_size']
     try:
         b = StringIO()
         c = pycurl.Curl()
         c.setopt(pycurl.URL, attrs['src'].encode('utf-8'))
         c.setopt(pycurl.FOLLOWLOCATION, 1)
-        c.setopt(pycurl.CONNECTTIMEOUT, 5)
-        c.setopt(pycurl.TIMEOUT, 10)
-        c.setopt(pycurl.MAXFILESIZE, 2000000)
+        # Wait maximum 10s for connection
+        c.setopt(pycurl.CONNECTTIMEOUT, 10)
+        # On a slow internet connection with ~1000kbps you need ~10 seconds for 1 MB
+        c.setopt(pycurl.TIMEOUT, 10 * (max_size / 1048576))
+        c.setopt(pycurl.MAXFILESIZE, max_size)
         c.setopt(pycurl.WRITEFUNCTION, b.write)
         c.setopt(pycurl.USERAGENT, 'Gajim ' + gajim.version)
         # set proxy
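
The PyCURL path gets the same treatment: the connection timeout rises from 5 s to 10 s, the total transfer budget scales with max_size instead of staying a flat 10 s, and MAXFILESIZE is bound to the same cap (previously 2,000,000 bytes, slightly under the direct path's 2 MiB). One caveat: under Python 3's true division, 10 * (max_size / 1048576) is a float, while libcurl's TIMEOUT option takes whole seconds, so a cautious standalone version would truncate explicitly. A sketch of the equivalent setup under that assumption (the int(), BytesIO, and URL are my additions, not part of the commit):

    import pycurl
    from io import BytesIO

    max_size = 2 * 1024 * 1024
    buf = BytesIO()  # libcurl hands the write callback bytes
    c = pycurl.Curl()
    c.setopt(pycurl.URL, 'https://example.org/big-image.png')  # hypothetical URL
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.setopt(pycurl.CONNECTTIMEOUT, 10)  # wait max 10 s for the connection
    # Same budget rule as the diff, truncated to whole seconds for libcurl
    c.setopt(pycurl.TIMEOUT, int(10 * (max_size / 1048576)))
    c.setopt(pycurl.MAXFILESIZE, max_size)  # abort transfers larger than the cap
    c.setopt(pycurl.WRITEFUNCTION, buf.write)
    c.perform()
    c.close()
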