This is my code and it is always priting 'something wrong' which I assigned if url does not open . How could I make my code more effective in terms of memory usage and time because my code is taking forever to execute and printing nothing more than 'something wrong'. I will end my data I am working on if anyone is interested
import requests
import re
import urllib2
import time
from bs4 import BeautifulSoup
iteration=0
a=0
b=0
links=list()
links2=list()
emails=dict()
while (iteration<1):
a=b
b=a+2
links2=links[a:b]
def extract_emails(links2):
for url in links2:
try:
response=requests.get(url)
if response.status_code!=200:
print 'connection refused'
else:
contents=requests.get(url).content.decode('utf-8')
emails[url]= re.findall(r'[\w\.-]+@[\w\.-]+',contents)
except Exception as e:
print(e)
return emails
def main():
extract_links(r)
extract_emails(extract_links(r))
main()
iteration=iteration+1