2015-11-24 6 views
6

मुझे "Remote end closed connection without response" त्रुटि मिल रही है। मैं एनाकोंडा पायथन 3.5 पर python requests का उपयोग कर रहा हूँ — कोई भी यूआरएल लोड नहीं कर पा रहा:

import requests

# NCBI E-utilities "einfo" endpoint. The plain-HTTP endpoint is known to
# drop connections, which is what produces the
# "Remote end closed connection without response" error seen below.
url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi'

# Bug fix 1: pass a timeout — without one, requests.get() waits forever
# on a stalled connection instead of failing fast.
r = requests.get(url, timeout=10)

# Bug fix 2: in a script (unlike an interactive REPL) a bare `r.content`
# expression is evaluated and discarded; print it so the payload is visible.
print(r.content)

मैं इस यूआरएल से डेटा लाने की कोशिश कर रहा हूँ; यही यूआरएल ब्राउज़र में बिना किसी समस्या के खुल जाता है।

लेकिन मैं (इस यूआरएल और किसी भी अन्य यूआरएल मैं कोशिश के लिए) एक त्रुटि हो रही है:

-------------------------------------------------------------------------- TypeError Traceback (most recent call last) C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, **httplib_request_kw) 375 try: # Python 2.7, use buffering of HTTP responses --> 376 httplib_response = conn.getresponse(buffering=True) 377 except TypeError: # Python 2.6 and older

TypeError: getresponse() got an unexpected keyword argument 'buffering'

During handling of the above exception, another exception occurred:

RemoteDisconnected Traceback (most recent call last) C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw) 558 timeout=timeout_obj, --> 559 body=body, headers=headers) 560

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, **httplib_request_kw) 377 except TypeError: # Python 2.6 and older --> 378 httplib_response = conn.getresponse() 379 except (SocketTimeout, BaseSSLError, SocketError) as e:

C:\Anaconda3\lib\http\client.py in getresponse(self) 1173
try: -> 1174 response.begin() 1175 except ConnectionError:

C:\Anaconda3\lib\http\client.py in begin(self) 281 while True: --> 282 version, status, reason = self._read_status() 283 if status != CONTINUE:

C:\Anaconda3\lib\http\client.py in _read_status(self) 250 # sending a valid response. --> 251 raise RemoteDisconnected("Remote end closed connection without" 252 " response")

RemoteDisconnected: Remote end closed connection without response

During handling of the above exception, another exception occurred:

ProtocolError Traceback (most recent call last) C:\Anaconda3\lib\site-packages\requests\adapters.py in send(self, request, stream, timeout, verify, cert, proxies) 369 retries=self.max_retries, --> 370 timeout=timeout 371 )

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw) 608 retries = retries.increment(method, url, error=e, _pool=self, --> 609 _stacktrace=sys.exc_info()[2]) 610 retries.sleep()

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\util\retry.py in increment(self, method, url, response, error, _pool, _stacktrace) 244 if read is False: --> 245 raise six.reraise(type(error), error, _stacktrace) 246 elif read is not None:

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\packages\six.py in reraise(tp, value, tb) 308 if value.traceback is not tb: --> 309 raise value.with_traceback(tb) 310 raise value

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw) 558 timeout=timeout_obj, --> 559 body=body, headers=headers) 560

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, **httplib_request_kw) 377 except TypeError: # Python 2.6 and older --> 378 httplib_response = conn.getresponse() 379 except (SocketTimeout, BaseSSLError, SocketError) as e:

C:\Anaconda3\lib\http\client.py in getresponse(self) 1173
try: -> 1174 response.begin() 1175 except ConnectionError:

C:\Anaconda3\lib\http\client.py in begin(self) 281 while True: --> 282 version, status, reason = self._read_status() 283 if status != CONTINUE:

C:\Anaconda3\lib\http\client.py in _read_status(self) 250 # sending a valid response. --> 251 raise RemoteDisconnected("Remote end closed connection without" 252 " response")

ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response',))

During handling of the above exception, another exception occurred:

ConnectionError Traceback (most recent call last) in() 3 import requests 4 url =' http://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi ' ----> 5 r = requests.get(url) 6 r.content

C:\Anaconda3\lib\site-packages\requests\api.py in get(url, params, **kwargs) 67 68 kwargs.setdefault('allow_redirects', True) ---> 69 return request('get', url, params=params, **kwargs) 70 71

C:\Anaconda3\lib\site-packages\requests\api.py in request(method, url, **kwargs) 48 49 session = sessions.Session() ---> 50 response = session.request(method=method, url=url, **kwargs) 51 # By explicitly closing the session, we avoid leaving sockets open which 52 # can trigger a ResourceWarning in some cases, and look like a memory leak

C:\Anaconda3\lib\site-packages\requests\sessions.py in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json) 466 } 467 send_kwargs.update(settings) --> 468 resp = self.send(prep, **send_kwargs) 469 470 return resp

C:\Anaconda3\lib\site-packages\requests\sessions.py in send(self, request, **kwargs) 574 575 # Send the request --> 576 r = adapter.send(request, **kwargs) 577 578 # Total elapsed time of the request (approximately)

C:\Anaconda3\lib\site-packages\requests\adapters.py in send(self, request, stream, timeout, verify, cert, proxies) 410 411 except (ProtocolError, socket.error) as err: --> 412 raise ConnectionError(err, request=request) 413 414 except MaxRetryError as e:

ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response',))

उत्तर

8

requests का Session उपयोग करके देखें।

import requests

# Maximum number of connection-level retries before giving up.
MAX_RETRIES = 20
url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi'

# Use a Session with a retrying HTTPAdapter mounted for both schemes,
# so transient connection drops are retried instead of raising
# ConnectionError on the first failure.
session = requests.Session()
retry_adapter = requests.adapters.HTTPAdapter(max_retries=MAX_RETRIES)
for scheme in ('https://', 'http://'):
    session.mount(scheme, retry_adapter)

response = session.get(url)
print(response.content)
+11

क्या आप कोड को समझा सकते हैं और यह वास्तव में क्या करता है? –

+2

max_retries — प्रत्येक कनेक्शन के लिए पुनः प्रयासों की अधिकतम संख्या। ध्यान दें, यह केवल असफल DNS लुकअप, सॉकेट कनेक्शन और कनेक्शन टाइमआउट पर लागू होता है; उन अनुरोधों पर कभी लागू नहीं होता जिनका डेटा सर्वर तक पहुँच चुका हो। डिफ़ॉल्ट रूप से requests असफल कनेक्शन का पुनः प्रयास नहीं करता। यदि आपको यह नियंत्रित करना हो कि किन परिस्थितियों में अनुरोध पुनः किया जाए, तो urllib3 की Retry क्लास आयात करें और उसे पास करें। स्रोत: http://docs.python-requests.org/en/master/api/#requests.adapters.HTTPAdapter – Baks

संबंधित मुद्दे