I'm using Requests to scrape webpages and in a couple of instances I have run into issues with the website's SSL certificate. I would like to implement logic where the first request is made with verify=True, but if it fails with SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED], then it retries with verify=False.
Here is my initial code; what I'm struggling with is catching the error and passing it to the retry function.
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

# RETRY FUNCTION
def requests_retry_session(
    retries=5,
    backoff_factor=10,
    status_forcelist=(500, 502, 504),
    session=None,
):
    session = session or requests.Session()
    retry = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    adapter = HTTPAdapter(max_retries=retry)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    # IF SSLERROR set verify to False -- at the moment this turns
    # verification off for every request, not just after an SSLError
    session.verify = False
    return session

# MAKE FIRST REQUEST
r = requests_retry_session().get(url, headers=headers, timeout=10)
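Roughly, the fallback I have in mind looks like the sketch below (the get_with_ssl_fallback name is just for illustration, and it assumes the session.verify = False line above is removed so the first attempt actually verifies the certificate). I'm not sure whether catching requests.exceptions.SSLError like this is the right approach, or how best to tie it into the retry logic:

import requests

def get_with_ssl_fallback(url, headers=None, timeout=10):
    # Sketch only: try with certificate verification first,
    # then fall back to verify=False on a certificate error
    session = requests_retry_session()
    try:
        # First attempt with verification (the requests default)
        return session.get(url, headers=headers, timeout=timeout)
    except requests.exceptions.SSLError:
        # Certificate verification failed -- retry without verifying
        return session.get(url, headers=headers, timeout=timeout, verify=False)

r = get_with_ssl_fallback(url, headers=headers)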