@@ -44,14 +44,16 @@ def _search(dork):
     if not dork:
         return None
 
+    page = None
     data = None
-    headers = {}
+    requestHeaders = {}
+    responseHeaders = {}
 
-    headers[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
-    headers[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE
+    requestHeaders[HTTP_HEADER.USER_AGENT] = dict(conf.httpHeaders).get(HTTP_HEADER.USER_AGENT, DUMMY_SEARCH_USER_AGENT)
+    requestHeaders[HTTP_HEADER.ACCEPT_ENCODING] = HTTP_ACCEPT_ENCODING_HEADER_VALUE
 
     try:
-        req = _urllib.request.Request("https://www.google.com/ncr", headers=headers)
+        req = _urllib.request.Request("https://www.google.com/ncr", headers=requestHeaders)
         conn = _urllib.request.urlopen(req)
     except Exception as ex:
         errMsg = "unable to connect to Google ('%s')" % getSafeExString(ex)
@@ -66,7 +68,7 @@ def _search(dork):
         url += "&start=%d" % ((gpage - 1) * 100)
 
     try:
-        req = _urllib.request.Request(url, headers=headers)
+        req = _urllib.request.Request(url, headers=requestHeaders)
         conn = _urllib.request.urlopen(req)
 
         requestMsg = "HTTP request:\nGET %s" % url
@@ -77,7 +79,6 @@ def _search(dork):
         code = conn.code
         status = conn.msg
         responseHeaders = conn.info()
-        page = decodePage(page, responseHeaders.get("Content-Encoding"), responseHeaders.get("Content-Type"))
 
         responseMsg = "HTTP response (%s - %d):\n" % (status, code)
 
@@ -90,6 +91,7 @@ def _search(dork):
     except _urllib.error.HTTPError as ex:
         try:
             page = ex.read()
+            responseHeaders = ex.info()
         except Exception as _:
             warnMsg = "problem occurred while trying to get "
             warnMsg += "an error page information (%s)" % getSafeExString(_)
@@ -99,6 +101,8 @@ def _search(dork):
         errMsg = "unable to connect to Google"
         raise SqlmapConnectionException(errMsg)
 
+    page = decodePage(page, responseHeaders.get(HTTP_HEADER.CONTENT_ENCODING), responseHeaders.get(HTTP_HEADER.CONTENT_TYPE))
+
     retVal = [_urllib.parse.unquote(match.group(1) or match.group(2)) for match in re.finditer(GOOGLE_REGEX, page, re.I)]
 
     if not retVal and "detected unusual traffic" in page:
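
Note on the change: the hunks above rename `headers` to `requestHeaders`, capture `responseHeaders` from the `HTTPError` as well as from a normal response, and move the `decodePage()` call below the whole try/except, so error pages from Google (which can also arrive compressed) are decoded before the `GOOGLE_REGEX` and "detected unusual traffic" checks run. The following is a minimal standalone sketch of the same pattern using only the standard library; `fetch_decoded` and the gzip/charset handling are illustrative stand-ins, not sqlmap's `decodePage`:

```python
import gzip
import urllib.error
import urllib.request

def fetch_decoded(url, headers=None):
    # Build the request with whatever headers the caller supplies
    req = urllib.request.Request(url, headers=headers or {"Accept-Encoding": "gzip"})

    try:
        conn = urllib.request.urlopen(req)
        page = conn.read()
        response_headers = conn.info()
    except urllib.error.HTTPError as ex:
        # Error responses (4xx/5xx) still carry a body and headers;
        # read both from the exception object instead of bailing out
        page = ex.read()
        response_headers = ex.info()

    # Decode once, after both paths, using the headers actually received
    if (response_headers.get("Content-Encoding") or "").lower() == "gzip":
        page = gzip.decompress(page)

    charset = response_headers.get_content_charset() or "utf-8"
    return page.decode(charset, errors="replace")
```

Decoding after the except block means the success path and the `HTTPError` path share one decoding step, which is what lets the later `"detected unusual traffic" in page` test work on an error page that would otherwise still be compressed bytes.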