SAAS-602: Fix getaddrinfo deadlock (again)
Conflicts: python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py
parent 8aaf300491
commit 702de0f5f5
1 changed file with 6 additions and 6 deletions
--- a/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py
+++ b/python_apps/airtime_analyzer/airtime_analyzer/cloud_storage_uploader.py
@@ -5,6 +5,12 @@ import socket
 from boto.s3.connection import S3Connection
 from boto.s3.key import Key
+
+# Fix for getaddrinfo deadlock. See these issues for details:
+# https://github.com/gevent/gevent/issues/349
+# https://github.com/docker/docker-registry/issues/400
+u'fix getaddrinfo deadlock'.encode('idna')
+
 CLOUD_CONFIG_PATH = '/etc/airtime-saas/cloud_storage.conf'
 STORAGE_BACKEND_FILE = "file"
 SOCKET_TIMEOUT = 240
 
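Note on the added workaround: encoding any unicode string with the 'idna' codec forces Python 2 to import encodings.idna once, at module import time in the main thread, so a later socket.getaddrinfo() on a unicode hostname never has to take the import lock from inside a worker thread (the failure mode described in the linked gevent and docker-registry issues). A minimal sketch of that warm-up pattern, separate from the patch itself; the hostname and thread count are only illustrative:

import socket
import threading

# Pre-import encodings.idna in the main thread, before any threads exist.
u'warm up the idna codec'.encode('idna')

def resolve(host):
    # With the codec already loaded, concurrent lookups of unicode hostnames
    # cannot block on Python 2's import lock inside getaddrinfo().
    return socket.getaddrinfo(host, 80)

threads = [threading.Thread(target=resolve, args=(u'example.com',)) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()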
@@ -93,12 +99,6 @@ class CloudStorageUploader:
         # turning into deadlocks, we explicitly set the global default timeout period here:
         socket.setdefaulttimeout(SOCKET_TIMEOUT)
 
-        # Crazy workaround for a deadlock inside Python 2.7 where unicode hostname resolution can
-        # cause a deadlock because the import spins up a separate thread:
-        # http://emptysqua.re/blog/weird-green-bug/
-        # https://jira.mongodb.org/browse/PYTHON-607
-        unicode('foo').encode('idna')
-
         conn = S3Connection(self._api_key, self._api_key_secret, host=self._host)
         bucket = conn.get_bucket(self._bucket)
 
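The second hunk removes the older per-upload copy of the same workaround (redundant now that the module-level call above runs once at import) and keeps the global socket timeout. Roughly, socket.setdefaulttimeout(SOCKET_TIMEOUT) means any socket created without an explicit timeout, including the ones boto opens for the S3 transfer, raises socket.timeout instead of blocking forever on a hung syscall. A small sketch of that effect, not taken from the file; the host and the 240-second value only mirror the constants above:

import socket

# Every socket created afterwards without an explicit timeout inherits this default.
socket.setdefaulttimeout(240)

try:
    # Libraries that open plain sockets under the hood inherit the default too,
    # so a stalled connection fails with socket.timeout instead of hanging the worker.
    sock = socket.create_connection(('s3.amazonaws.com', 443))
    sock.close()
except socket.timeout:
    print('connection attempt timed out instead of hanging')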