[tor-commits] [ooni-probe/master] Share code from ooni-backend via the ooni.common module
commit 3aa17317012aa0c85cff77d6942249f0bd68f8e6
Author: Arturo Filastò <arturo@xxxxxxxxxxx>
Date: Sat May 28 15:45:05 2016 +0200
Share code from ooni-backend via the ooni.common module
---
ooni/backend_client.py | 2 +-
ooni/common/__init__.py | 10 +
ooni/common/http_utils.py | 55 +++++
ooni/common/tcp_utils.py | 10 +
ooni/common/txextra.py | 202 +++++++++++++++++
ooni/nettests/blocking/web_connectivity.py | 25 +--
.../manipulation/http_header_field_manipulation.py | 2 +-
ooni/templates/httpt.py | 42 +---
ooni/templates/scapyt.py | 8 +-
ooni/tests/test_trueheaders.py | 2 +-
ooni/utils/__init__.py | 8 -
ooni/utils/net.py | 8 -
ooni/utils/socks.py | 25 +++
ooni/utils/trueheaders.py | 241 ---------------------
14 files changed, 322 insertions(+), 318 deletions(-)
diff --git a/ooni/backend_client.py b/ooni/backend_client.py
index de01ceb..d4c463e 100644
--- a/ooni/backend_client.py
+++ b/ooni/backend_client.py
@@ -16,7 +16,7 @@ from ooni import errors as e
from ooni.settings import config
from ooni.utils import log, onion
from ooni.utils.net import BodyReceiver, StringProducer, Downloader
-from ooni.utils.trueheaders import TrueHeadersSOCKS5Agent
+from ooni.utils.socks import TrueHeadersSOCKS5Agent
class OONIBClient(object):
diff --git a/ooni/common/__init__.py b/ooni/common/__init__.py
new file mode 100644
index 0000000..7f6cf73
--- /dev/null
+++ b/ooni/common/__init__.py
@@ -0,0 +1,10 @@
+"""
+This module contains functionality that is shared between ooni-probe and
+ooni-backend. If the code in here starts growing too much, I think it would
+make sense to either:
+
+ * Make the code in here into its own package that is imported by
+ ooni-probe and ooni-backend.
+
+ * Merge ooniprobe with oonibackend.
+"""
diff --git a/ooni/common/http_utils.py b/ooni/common/http_utils.py
new file mode 100644
index 0000000..6d636d5
--- /dev/null
+++ b/ooni/common/http_utils.py
@@ -0,0 +1,55 @@
+import re
+import codecs
+from base64 import b64encode
+
+META_CHARSET_REGEXP = re.compile('<meta(?!\s*(?:name|value)\s*=)[^>]*?charset\s*=[\s"\']*([^\s"\'/>]*)', re.IGNORECASE)
+
+def representBody(body):
+ if not body:
+ return body
+ # XXX perhaps add support for decoding gzip in the future.
+ body = body.replace('\0', '')
+ decoded = False
+ charsets = ['ascii', 'utf-8']
+
+ # If we are able to detect the charset of body from the meta tag
+ # try to decode using that one first
+    charset = META_CHARSET_REGEXP.search(body)
+ if charset:
+ try:
+ encoding = charset.group(1).lower()
+ codecs.lookup(encoding)
+ charsets.insert(0, encoding)
+ except (LookupError, IndexError):
+ # Skip invalid codecs and partial regexp match
+ pass
+
+ for encoding in charsets:
+ try:
+ body = unicode(body, encoding)
+ decoded = True
+ break
+ except UnicodeDecodeError:
+ pass
+ if not decoded:
+ body = {
+ 'data': b64encode(body),
+ 'format': 'base64'
+ }
+ return body
+
+TITLE_REGEXP = re.compile("<title>(.*?)</title>", re.IGNORECASE | re.DOTALL)
+
+def extractTitle(body):
+    m = TITLE_REGEXP.search(body)
+ if m:
+ return unicode(m.group(1), errors='ignore')
+ return ''
+
+REQUEST_HEADERS = {
+ 'User-Agent': ['Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, '
+ 'like Gecko) Chrome/47.0.2526.106 Safari/537.36'],
+ 'Accept-Language': ['en-US;q=0.8,en;q=0.5'],
+ 'Accept': ['text/html,application/xhtml+xml,application/xml;q=0.9,'
+ '*/*;q=0.8']
+}
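
For reference, a minimal sketch of how the two helpers above are expected to behave, assuming Python 2 and that ooni.common.http_utils is on the path; the sample bodies are made up for illustration:

    from ooni.common.http_utils import representBody, extractTitle

    # A body that decodes cleanly is returned as a unicode string.
    html = '<html><head><meta charset="utf-8">' \
           '<title>Example</title></head></html>'
    assert representBody(html) == html.decode('utf-8')
    assert extractTitle(html) == u'Example'

    # A body that cannot be decoded falls back to the base64 dict format
    # (NUL bytes are stripped before encoding).
    binary = '\xff\xfe\x00binary'
    assert representBody(binary)['format'] == 'base64'
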
diff --git a/ooni/common/tcp_utils.py b/ooni/common/tcp_utils.py
new file mode 100644
index 0000000..7b7a8a4
--- /dev/null
+++ b/ooni/common/tcp_utils.py
@@ -0,0 +1,10 @@
+from twisted.internet.protocol import Factory, Protocol
+
+class TCPConnectProtocol(Protocol):
+ def connectionMade(self):
+ self.transport.loseConnection()
+
+class TCPConnectFactory(Factory):
+ noisy = False
+ def buildProtocol(self, addr):
+ return TCPConnectProtocol()
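
A minimal sketch of how TCPConnectFactory is meant to be driven (roughly the way web_connectivity uses it below); the host and port are placeholders and a running reactor is assumed:

    from twisted.internet import reactor
    from twisted.internet.endpoints import TCP4ClientEndpoint
    from ooni.common.tcp_utils import TCPConnectFactory

    def tcp_connect(host, port):
        # The returned deferred fires once the TCP handshake completes; the
        # protocol drops the connection immediately, so only reachability
        # is measured.
        endpoint = TCP4ClientEndpoint(reactor, host, port)
        return endpoint.connect(TCPConnectFactory())
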
diff --git a/ooni/common/txextra.py b/ooni/common/txextra.py
new file mode 100644
index 0000000..7a84592
--- /dev/null
+++ b/ooni/common/txextra.py
@@ -0,0 +1,202 @@
+import itertools
+from copy import copy
+
+from twisted.web.http_headers import Headers
+from twisted.web import error
+
+from twisted.web.client import BrowserLikeRedirectAgent
+from twisted.web._newclient import ResponseFailed
+from twisted.web._newclient import HTTPClientParser, ParseError
+from twisted.python.failure import Failure
+
+from twisted.web import client, _newclient
+
+from twisted.web._newclient import RequestNotSent, RequestGenerationFailed
+from twisted.web._newclient import TransportProxyProducer, STATUS
+
+from twisted.internet import reactor
+from twisted.internet.defer import Deferred, fail, maybeDeferred, failure
+
+from twisted.python import log
+
+class TrueHeaders(Headers):
+ def __init__(self, rawHeaders=None):
+ self._rawHeaders = dict()
+ if rawHeaders is not None:
+ for name, values in rawHeaders.iteritems():
+ if type(values) is list:
+ self.setRawHeaders(name, values[:])
+ elif type(values) is str:
+ self.setRawHeaders(name, values)
+
+ def setRawHeaders(self, name, values):
+ if name.lower() not in self._rawHeaders:
+ self._rawHeaders[name.lower()] = dict()
+ self._rawHeaders[name.lower()]['name'] = name
+ self._rawHeaders[name.lower()]['values'] = values
+
+ def getAllRawHeaders(self):
+ for _, v in self._rawHeaders.iteritems():
+ yield v['name'], v['values']
+
+ def getRawHeaders(self, name, default=None):
+ if name.lower() in self._rawHeaders:
+ return self._rawHeaders[name.lower()]['values']
+ return default
+
+
+ def getDiff(self, headers, ignore=[]):
+ """
+
+ Args:
+
+ headers: a TrueHeaders object
+
+ ignore: specify a list of header fields to ignore
+
+ Returns:
+
+        a list of the header names that are not present in
+        headers or not present in self.
+ """
+ diff = set()
+ field_names = []
+
+ headers_a = copy(self)
+ headers_b = copy(headers)
+ for name in ignore:
+ try:
+ del headers_a._rawHeaders[name.lower()]
+ except KeyError:
+ pass
+ try:
+ del headers_b._rawHeaders[name.lower()]
+ except KeyError:
+ pass
+
+ for k, v in itertools.chain(headers_a.getAllRawHeaders(),
+ headers_b.getAllRawHeaders()):
+ field_names.append(k)
+
+ for name in field_names:
+ if self.getRawHeaders(name) and headers.getRawHeaders(name):
+ pass
+ else:
+ diff.add(name)
+ return list(diff)
+
+class HTTPClientParser(_newclient.HTTPClientParser):
+ def logPrefix(self):
+ return 'HTTPClientParser'
+
+ def connectionMade(self):
+ self.headers = TrueHeaders()
+ self.connHeaders = TrueHeaders()
+ self.state = STATUS
+ self._partialHeader = None
+
+ def headerReceived(self, name, value):
+ if self.isConnectionControlHeader(name.lower()):
+ headers = self.connHeaders
+ else:
+ headers = self.headers
+ headers.addRawHeader(name, value)
+
+ def statusReceived(self, status):
+ # This is a fix for invalid number of parts
+ try:
+ return _newclient.HTTPClientParser.statusReceived(self, status)
+ except ParseError as exc:
+ if exc.args[0] == 'wrong number of parts':
+ return _newclient.HTTPClientParser.statusReceived(self,
+ status + " XXX")
+ raise
+
+class HTTP11ClientProtocol(_newclient.HTTP11ClientProtocol):
+ def request(self, request):
+ if self._state != 'QUIESCENT':
+ return fail(RequestNotSent())
+
+ self._state = 'TRANSMITTING'
+ _requestDeferred = maybeDeferred(request.writeTo, self.transport)
+ self._finishedRequest = Deferred()
+
+ self._currentRequest = request
+
+ self._transportProxy = TransportProxyProducer(self.transport)
+ self._parser = HTTPClientParser(request, self._finishResponse)
+ self._parser.makeConnection(self._transportProxy)
+ self._responseDeferred = self._parser._responseDeferred
+
+ def cbRequestWrotten(ignored):
+ if self._state == 'TRANSMITTING':
+ self._state = 'WAITING'
+ self._responseDeferred.chainDeferred(self._finishedRequest)
+
+ def ebRequestWriting(err):
+ if self._state == 'TRANSMITTING':
+ self._state = 'GENERATION_FAILED'
+ self.transport.loseConnection()
+ self._finishedRequest.errback(
+ failure.Failure(RequestGenerationFailed([err])))
+ else:
+ log.err(err, 'Error writing request, but not in valid state '
+ 'to finalize request: %s' % self._state)
+
+ _requestDeferred.addCallbacks(cbRequestWrotten, ebRequestWriting)
+
+ return self._finishedRequest
+
+
+class _HTTP11ClientFactory(client._HTTP11ClientFactory):
+ noisy = False
+
+ def buildProtocol(self, addr):
+ return HTTP11ClientProtocol(self._quiescentCallback)
+
+
+class HTTPConnectionPool(client.HTTPConnectionPool):
+ _factory = _HTTP11ClientFactory
+
+class TrueHeadersAgent(client.Agent):
+ def __init__(self, *args, **kw):
+ super(TrueHeadersAgent, self).__init__(*args, **kw)
+ self._pool = HTTPConnectionPool(reactor, False)
+
+class FixedRedirectAgent(BrowserLikeRedirectAgent):
+ """
+ This is a redirect agent with this patch manually applied:
+ https://twistedmatrix.com/trac/ticket/8265
+ """
+ def _handleRedirect(self, response, method, uri, headers, redirectCount):
+ """
+ Handle a redirect response, checking the number of redirects already
+ followed, and extracting the location header fields.
+
+        This is patched to fix a bug causing an infinite redirect loop.
+ """
+ if redirectCount >= self._redirectLimit:
+ err = error.InfiniteRedirection(
+ response.code,
+ b'Infinite redirection detected',
+ location=uri)
+ raise ResponseFailed([Failure(err)], response)
+ locationHeaders = response.headers.getRawHeaders(b'location', [])
+ if not locationHeaders:
+ err = error.RedirectWithNoLocation(
+ response.code, b'No location header field', uri)
+ raise ResponseFailed([Failure(err)], response)
+ location = self._resolveLocation(
+ # This is the fix to properly handle redirects
+ response.request.absoluteURI,
+ locationHeaders[0]
+ )
+ deferred = self._agent.request(method, location, headers)
+
+ def _chainResponse(newResponse):
+ newResponse.setPreviousResponse(response)
+ return newResponse
+
+ deferred.addCallback(_chainResponse)
+ return deferred.addCallback(
+ self._handleResponse, method, uri, headers, redirectCount + 1)
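
The point of TrueHeaders is that header names are stored exactly as they were set, rather than being normalized the way twisted's Headers does, and getDiff reports fields present in only one of two header sets. A minimal sketch, assuming Python 2; the FixedRedirectAgent/TrueHeadersAgent wiring at the end is an assumption and is not shown in this commit:

    from twisted.internet import reactor
    from ooni.common.txextra import (TrueHeaders, TrueHeadersAgent,
                                     FixedRedirectAgent)

    sent = TrueHeaders({'User-Agent': ['ooniprobe'], 'Accept': ['*/*']})
    received = TrueHeaders({'User-Agent': ['ooniprobe']})

    # Names keep their original capitalization (yielded in dict order),
    # e.g. ('User-Agent', ['ooniprobe']) and ('Accept', ['*/*']).
    print list(sent.getAllRawHeaders())
    # Header names present in one set but not the other:
    print sent.getDiff(received)   # ['Accept']

    # Presumably combined along these lines for redirect-following requests:
    agent = FixedRedirectAgent(TrueHeadersAgent(reactor))
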
diff --git a/ooni/nettests/blocking/web_connectivity.py b/ooni/nettests/blocking/web_connectivity.py
index 41f6c2a..655bf76 100644
--- a/ooni/nettests/blocking/web_connectivity.py
+++ b/ooni/nettests/blocking/web_connectivity.py
@@ -7,7 +7,6 @@ from ipaddr import IPv4Address, AddressValueError
from twisted.web.client import GzipDecoder
from twisted.internet import reactor
-from twisted.internet.protocol import Factory, Protocol
from twisted.internet.endpoints import TCP4ClientEndpoint
from twisted.names import client, dns
@@ -19,31 +18,17 @@ from ooni.utils import log
from ooni.backend_client import WebConnectivityClient
-from ooni.utils.net import COMMON_SERVER_HEADERS, extract_title
+from ooni.common.http_utils import extractTitle
+from ooni.utils.net import COMMON_SERVER_HEADERS
from ooni.templates import httpt, dnst
from ooni.errors import failureToString
-REQUEST_HEADERS = {
- 'User-Agent': ['Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, '
- 'like Gecko) Chrome/47.0.2526.106 Safari/537.36'],
- 'Accept-Language': ['en-US;q=0.8,en;q=0.5'],
- 'Accept': ['text/html,application/xhtml+xml,application/xml;q=0.9,'
- '*/*;q=0.8']
-}
+from ooni.common.tcp_utils import TCPConnectFactory
+from ooni.common.http_utils import REQUEST_HEADERS
class InvalidControlResponse(Exception):
pass
-class TCPConnectProtocol(Protocol):
- def connectionMade(self):
- self.transport.loseConnection()
-
-class TCPConnectFactory(Factory):
- noisy = False
- def buildProtocol(self, addr):
- return TCPConnectProtocol()
-
-
class UsageOptions(usage.Options):
optParameters = [
['url', 'u', None, 'Specify a single URL to test'],
@@ -306,7 +291,7 @@ class WebConnectivityTest(httpt.HTTPTest, dnst.DNSTest):
return False
def compare_titles(self, experiment_http_response):
- experiment_title = extract_title(experiment_http_response.body).strip()
+ experiment_title = extractTitle(experiment_http_response.body).strip()
control_title = self.control['http_request']['title'].strip()
first_exp_word = experiment_title.split(' ')[0]
first_ctrl_word = control_title.split(' ')[0]
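
For context, the control/experiment title comparison keys off the first word of each <title>, which extractTitle now provides. A rough sketch of the idea; the actual comparison in compare_titles is outside this hunk, and the case-insensitive match below is an assumption:

    from ooni.common.http_utils import extractTitle

    def titles_match(experiment_body, control_title):
        # Compare only the first word of each title; it is usually the
        # site name and is less sensitive to localized page content.
        experiment_title = extractTitle(experiment_body).strip()
        first_exp_word = experiment_title.split(' ')[0]
        first_ctrl_word = control_title.strip().split(' ')[0]
        return first_exp_word.lower() == first_ctrl_word.lower()
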
diff --git a/ooni/nettests/manipulation/http_header_field_manipulation.py b/ooni/nettests/manipulation/http_header_field_manipulation.py
index 6fbaf7d..fcd5e0e 100644
--- a/ooni/nettests/manipulation/http_header_field_manipulation.py
+++ b/ooni/nettests/manipulation/http_header_field_manipulation.py
@@ -11,7 +11,7 @@ from twisted.python import usage
from ooni.utils import log, net, randomStr
from ooni.templates import httpt
-from ooni.utils.trueheaders import TrueHeaders
+from ooni.common.txextra import TrueHeaders
def random_capitalization(string):
diff --git a/ooni/templates/httpt.py b/ooni/templates/httpt.py
index 6e0154c..f8ea941 100644
--- a/ooni/templates/httpt.py
+++ b/ooni/templates/httpt.py
@@ -10,15 +10,16 @@ from twisted.web.client import ContentDecoderAgent
from twisted.internet import reactor
from twisted.internet.endpoints import TCP4ClientEndpoint
-from ooni.utils.trueheaders import TrueHeadersAgent, TrueHeadersSOCKS5Agent
-from ooni.utils.trueheaders import FixedRedirectAgent
+from ooni.utils.socks import TrueHeadersSOCKS5Agent
from ooni.nettest import NetTestCase
-from ooni.utils import log, base64Dict
+from ooni.utils import log
from ooni.settings import config
from ooni.utils.net import StringProducer, userAgents
-from ooni.utils.trueheaders import TrueHeaders
+from ooni.common.txextra import TrueHeaders
+from ooni.common.txextra import FixedRedirectAgent, TrueHeadersAgent
+from ooni.common.http_utils import representBody
from ooni.errors import handleAllFailures
META_CHARSET_REGEXP = re.compile('<meta(?!\s*(?:name|value)\s*=)[^>]*?charset\s*=[\s"\']*([^\s"\'/>!;]+)')
@@ -48,37 +49,6 @@ def _representHeaders(headers):
represented_headers[name] = unicode(value[0], errors='ignore')
return represented_headers
-def _representBody(body):
- if not body:
- return body
- # XXX perhaps add support for decoding gzip in the future.
- body = body.replace('\0', '')
- decoded = False
- charsets = ['ascii', 'utf-8']
-
- # If we are able to detect the charset of body from the meta tag
- # try to decode using that one first
- charset = META_CHARSET_REGEXP.search(body, re.IGNORECASE)
- if charset:
- try:
- encoding = charset.group(1).lower()
- codecs.lookup(encoding)
- charsets.insert(0, encoding)
- except (LookupError, IndexError):
- # Skip invalid codecs and partial regexp match
- pass
-
- for encoding in charsets:
- try:
- body = unicode(body, encoding)
- decoded = True
- break
- except UnicodeDecodeError:
- pass
- if not decoded:
- body = base64Dict(body)
- return body
-
class HTTPTest(NetTestCase):
"""
A utility class for dealing with HTTP based testing. It provides methods to
@@ -189,7 +159,7 @@ class HTTPTest(NetTestCase):
}
if response:
if self.localOptions.get('withoutbody', 0) is 0:
- response_body = _representBody(response_body)
+ response_body = representBody(response_body)
else:
response_body = ''
# Attempt to redact the IP address of the probe from the responses
diff --git a/ooni/templates/scapyt.py b/ooni/templates/scapyt.py
index 2bb9139..1c726d8 100644
--- a/ooni/templates/scapyt.py
+++ b/ooni/templates/scapyt.py
@@ -1,5 +1,6 @@
+from base64 import b64encode
from ooni.nettest import NetTestCase
-from ooni.utils import log, base64Dict
+from ooni.utils import log
from ooni.settings import config
from ooni.utils.net import hasRawSocketPermission
@@ -8,7 +9,10 @@ from ooni.utils.txscapy import ScapySender, ScapyFactory
def _representPacket(packet):
return {
- "raw_packet": base64Dict(str(packet)),
+ "raw_packet": {
+ 'data': b64encode(str(packet)),
+ 'format': 'base64'
+ },
"summary": repr(packet)
}
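
The raw_packet field keeps the same base64 dict shape that base64Dict used to produce, now built inline. A minimal sketch of that representation using scapy directly; the destination address is a placeholder and scapy must be installed:

    from base64 import b64encode
    from scapy.all import IP, TCP

    packet = IP(dst='203.0.113.1') / TCP(dport=80)
    raw_packet = {
        'data': b64encode(str(packet)),  # raw packet bytes, base64-encoded
        'format': 'base64'
    }
    summary = repr(packet)
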
diff --git a/ooni/tests/test_trueheaders.py b/ooni/tests/test_trueheaders.py
index f6c7812..191dafd 100644
--- a/ooni/tests/test_trueheaders.py
+++ b/ooni/tests/test_trueheaders.py
@@ -1,6 +1,6 @@
from twisted.trial import unittest
-from ooni.utils.trueheaders import TrueHeaders
+from ooni.common.txextra import TrueHeaders
dummy_headers_dict = {
'Header1': ['Value1', 'Value2'],
diff --git a/ooni/utils/__init__.py b/ooni/utils/__init__.py
index 22af062..f85cb38 100644
--- a/ooni/utils/__init__.py
+++ b/ooni/utils/__init__.py
@@ -1,12 +1,10 @@
import shutil
import string
import random
-import glob
import os
from datetime import datetime
import gzip
-from base64 import b64encode
from zipfile import ZipFile
from ooni import errors
@@ -155,9 +153,3 @@ def gunzip(filename, dst):
def get_ooni_root():
script = os.path.join(__file__, '..')
return os.path.dirname(os.path.realpath(script))
-
-def base64Dict(data):
- return {
- 'format': 'base64',
- 'data': b64encode(data)
- }
diff --git a/ooni/utils/net.py b/ooni/utils/net.py
index 20f5a42..9918b60 100644
--- a/ooni/utils/net.py
+++ b/ooni/utils/net.py
@@ -81,14 +81,6 @@ COMMON_SERVER_HEADERS = (
# fetching URLs over some proxy.
GOOGLE_HUMANS = ('http://www.google.com/humans.txt', 'Google is built by a large')
-TITLE_REGEXP = re.compile("<title>(.*?)</title>", re.IGNORECASE | re.DOTALL)
-
-def extract_title(body):
- m = TITLE_REGEXP.search(body)
- if m:
- return m.group(1)
- return ''
-
class StringProducer(object):
implements(IBodyProducer)
diff --git a/ooni/utils/socks.py b/ooni/utils/socks.py
new file mode 100644
index 0000000..783db55
--- /dev/null
+++ b/ooni/utils/socks.py
@@ -0,0 +1,25 @@
+from twisted.internet import reactor
+from ooni.common.txextra import HTTPConnectionPool
+
+from twisted import version as twisted_version
+from twisted.python.versions import Version
+_twisted_15_0 = Version('twisted', 15, 0, 0)
+
+from txsocksx.http import SOCKS5Agent
+from txsocksx.client import SOCKS5ClientFactory
+
+SOCKS5ClientFactory.noisy = False
+
+class TrueHeadersSOCKS5Agent(SOCKS5Agent):
+ def __init__(self, *args, **kw):
+ super(TrueHeadersSOCKS5Agent, self).__init__(*args, **kw)
+ pool = HTTPConnectionPool(reactor, False)
+ #
+        # With Twisted >= 15.0 txsocksx wraps the twisted agent in a
+        # wrapper class, hence we must set the _pool attribute on the
+        # wrapped agent rather than on its external wrapper.
+ #
+ if twisted_version >= _twisted_15_0:
+ self._wrappedAgent._pool = pool
+ else:
+ self._pool = pool
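
A minimal sketch of how the relocated agent is typically constructed, assuming txsocksx's proxyEndpoint keyword argument and a local SOCKS5 proxy (e.g. a Tor client); the address and port are placeholders:

    from twisted.internet import reactor
    from twisted.internet.endpoints import TCP4ClientEndpoint
    from ooni.utils.socks import TrueHeadersSOCKS5Agent

    proxy = TCP4ClientEndpoint(reactor, '127.0.0.1', 9050)
    agent = TrueHeadersSOCKS5Agent(reactor, proxyEndpoint=proxy)
    # Behaves like a regular twisted Agent, but tunnels the request through
    # the SOCKS5 proxy and preserves header capitalization via TrueHeaders.
    d = agent.request('GET', 'http://example.com/')
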
diff --git a/ooni/utils/trueheaders.py b/ooni/utils/trueheaders.py
deleted file mode 100644
index 5f2b399..0000000
--- a/ooni/utils/trueheaders.py
+++ /dev/null
@@ -1,241 +0,0 @@
-# :authors: Giovanni Pellerano
-# :licence: see LICENSE
-#
-# Here we make sure that the HTTP Headers sent and received are True. By this
-# we mean that they are not normalized and that the ordering is maintained.
-
-import itertools
-from copy import copy
-
-from twisted.python.failure import Failure
-
-from twisted.web import client, _newclient, http_headers, error
-
-from twisted.web._newclient import RequestNotSent, RequestGenerationFailed
-from twisted.web._newclient import TransportProxyProducer, STATUS
-from twisted.web._newclient import ResponseFailed
-
-from twisted.internet import reactor
-from twisted.internet.defer import Deferred, fail, maybeDeferred, failure
-
-from txsocksx.http import SOCKS5Agent
-from txsocksx.client import SOCKS5ClientFactory
-
-SOCKS5ClientFactory.noisy = False
-
-from ooni.utils import log
-
-import twisted
-from twisted.python.versions import Version
-
-
-class TrueHeaders(http_headers.Headers):
- def __init__(self, rawHeaders=None):
- self._rawHeaders = dict()
- if rawHeaders is not None:
- for name, values in rawHeaders.iteritems():
- if type(values) is list:
- self.setRawHeaders(name, values[:])
- elif type(values) is dict:
- self._rawHeaders[name.lower()] = values
- elif type(values) is str:
- self.setRawHeaders(name, values)
-
- def setRawHeaders(self, name, values):
- if name.lower() not in self._rawHeaders:
- self._rawHeaders[name.lower()] = dict()
- self._rawHeaders[name.lower()]['name'] = name
- self._rawHeaders[name.lower()]['values'] = values
-
- def getDiff(self, headers, ignore=[]):
- """
-
- Args:
-
- headers: a TrueHeaders object
-
- ignore: specify a list of header fields to ignore
-
- Returns:
-
- a set containing the header names that are not present in
- header_dict or not present in self.
- """
- diff = set()
- field_names = []
-
- headers_a = copy(self)
- headers_b = copy(headers)
- for name in ignore:
- try:
- del headers_a._rawHeaders[name.lower()]
- except KeyError:
- pass
- try:
- del headers_b._rawHeaders[name.lower()]
- except KeyError:
- pass
-
- for k, v in itertools.chain(headers_a.getAllRawHeaders(),
- headers_b.getAllRawHeaders()):
- field_names.append(k)
-
- for name in field_names:
- if self.getRawHeaders(name) and headers.getRawHeaders(name):
- pass
- else:
- diff.add(name)
- return list(diff)
-
- def getAllRawHeaders(self):
- for k, v in self._rawHeaders.iteritems():
- yield v['name'], v['values']
-
- def getRawHeaders(self, name, default=None):
- if name.lower() in self._rawHeaders:
- return self._rawHeaders[name.lower()]['values']
- return default
-
-
-class HTTPClientParser(_newclient.HTTPClientParser):
- def logPrefix(self):
- return 'HTTPClientParser'
-
- def connectionMade(self):
- self.headers = TrueHeaders()
- self.connHeaders = TrueHeaders()
- self.state = STATUS
- self._partialHeader = None
-
- def headerReceived(self, name, value):
- if self.isConnectionControlHeader(name.lower()):
- headers = self.connHeaders
- else:
- headers = self.headers
- headers.addRawHeader(name, value)
-
-
- def statusReceived(self, status):
- parts = status.split(b' ', 2)
- if len(parts) != 3:
- # Here we add the extra missing part.
- parts.append("XXX")
-
- try:
- statusCode = int(parts[1])
- except ValueError:
- raise _newclient.ParseError(u"non-integer status code", status)
-
- self.response = _newclient.Response._construct(
- self.parseVersion(parts[0]),
- statusCode,
- parts[2],
- self.headers,
- self.transport,
- self.request)
-
-
-class HTTP11ClientProtocol(_newclient.HTTP11ClientProtocol):
- def request(self, request):
- if self._state != 'QUIESCENT':
- return fail(RequestNotSent())
-
- self._state = 'TRANSMITTING'
- _requestDeferred = maybeDeferred(request.writeTo, self.transport)
- self._finishedRequest = Deferred()
-
- self._currentRequest = request
-
- self._transportProxy = TransportProxyProducer(self.transport)
- self._parser = HTTPClientParser(request, self._finishResponse)
- self._parser.makeConnection(self._transportProxy)
- self._responseDeferred = self._parser._responseDeferred
-
- def cbRequestWrotten(ignored):
- if self._state == 'TRANSMITTING':
- self._state = 'WAITING'
- self._responseDeferred.chainDeferred(self._finishedRequest)
-
- def ebRequestWriting(err):
- if self._state == 'TRANSMITTING':
- self._state = 'GENERATION_FAILED'
- self.transport.loseConnection()
- self._finishedRequest.errback(
- failure.Failure(RequestGenerationFailed([err])))
- else:
- log.err(err, 'Error writing request, but not in valid state '
- 'to finalize request: %s' % self._state)
-
- _requestDeferred.addCallbacks(cbRequestWrotten, ebRequestWriting)
-
- return self._finishedRequest
-
-
-class _HTTP11ClientFactory(client._HTTP11ClientFactory):
- noisy = False
-
- def buildProtocol(self, addr):
- return HTTP11ClientProtocol(self._quiescentCallback)
-
-
-class HTTPConnectionPool(client.HTTPConnectionPool):
- _factory = _HTTP11ClientFactory
-
-
-class TrueHeadersAgent(client.Agent):
- def __init__(self, *args, **kw):
- super(TrueHeadersAgent, self).__init__(*args, **kw)
- self._pool = HTTPConnectionPool(reactor, False)
-
-
-_twisted_15_0 = Version('twisted', 15, 0, 0)
-
-
-class TrueHeadersSOCKS5Agent(SOCKS5Agent):
- def __init__(self, *args, **kw):
- super(TrueHeadersSOCKS5Agent, self).__init__(*args, **kw)
- pool = HTTPConnectionPool(reactor, False)
- #
- # With Twisted > 15.0 txsocksx wraps the twisted agent using a
- # wrapper class, hence we must set the _pool attribute in the
- # inner class rather than into its external wrapper.
- #
- if twisted.version >= _twisted_15_0:
- self._wrappedAgent._pool = pool
- else:
- self._pool = pool
-
-class FixedRedirectAgent(client.BrowserLikeRedirectAgent):
- """
- This is a redirect agent with this patch manually applied:
- https://twistedmatrix.com/trac/ticket/8265
- """
- def _handleRedirect(self, response, method, uri, headers, redirectCount):
- """
- Handle a redirect response, checking the number of redirects already
- followed, and extracting the location header fields.
-
- This is pathed to fix a bug in infinite redirect loop.
- """
- if redirectCount >= self._redirectLimit:
- err = error.InfiniteRedirection(
- response.code,
- b'Infinite redirection detected',
- location=uri)
- raise ResponseFailed([Failure(err)], response)
- locationHeaders = response.headers.getRawHeaders(b'location', [])
- if not locationHeaders:
- err = error.RedirectWithNoLocation(
- response.code, b'No location header field', uri)
- raise ResponseFailed([Failure(err)], response)
- location = self._resolveLocation(response.request.absoluteURI, locationHeaders[0])
- deferred = self._agent.request(method, location, headers)
-
- def _chainResponse(newResponse):
- newResponse.setPreviousResponse(response)
- return newResponse
-
- deferred.addCallback(_chainResponse)
- # This is the fix to properly handle redirects
- return deferred.addCallback(
- self._handleResponse, method, uri, headers, redirectCount + 1)