I wrote a script to scrape through multiple URLs, add the useful information with the help of BeautifulSoup to two arrays (ids and names), and then add the values of these arrays to a MySQL table, where ids[0] and names[0] form row 0 of the table and so on...
However, my code is very ugly and I am sure there are far better approaches than mine.
Can anybody give me a hint? I specifically need input on how to iterate through the two arrays...
Thanks in advance!
#!/usr/bin/env python
from bs4 import BeautifulSoup
from urllib import urlopen
import MySQLdb
# MySQL connection settings.
mysql_opts = {
    'host': "localhost",
    'user': "********",
    'pass': "********",
    'db': "somedb"
}
mysql = MySQLdb.connect(mysql_opts['host'], mysql_opts['user'], mysql_opts['pass'], mysql_opts['db'])
# BUG FIX: the original never created a cursor, so cursor.execute() below
# would raise NameError.
cursor = mysql.cursor()

# Parameterized INSERT -- the driver escapes the values, so this is safe
# against SQL injection even though the scraped data is untrusted.
data_query = ("INSERT INTO tablename "
              "(id, name) "
              "VALUES (%s, %s)")

# URLs to scrape.
urls = (
    'http://somepage.com',
    'http://someotherpage.com',
    'http://athirdpage.com',
)

for url in urls:
    soup = BeautifulSoup(urlopen(url))
    # The original built two identical lists (ids and names) from the same
    # find_all() call and walked them with a manual index. The id and the
    # name come from the SAME anchor tag, so fetch the anchors once and
    # iterate them directly -- no index bookkeeping needed.
    anchors = soup.find_all('a', style="display:block")
    # BUG FIX: `print ids.count` printed the bound method object, not a count.
    print(len(anchors))
    for anchor in anchors:
        try:
            # href looks like "...?...=<vid>"; keep the part after '='.
            vid = anchor['href'].split('=')[1]
        except (KeyError, IndexError):
            # BUG FIX: the original's except branch set `id = "leer"` but
            # then inserted the *previous* iteration's vid (or raised
            # NameError on the first row). Skip rows without a usable id.
            continue
        try:
            name = anchor.contents[0].encode('iso-8859-1')
        except IndexError:
            name = ""  # anchor has no child content
        cursor.execute(data_query, (vid, name))

# BUG FIX: without commit(), transactional engines (InnoDB) roll back every
# INSERT when the connection closes.
mysql.commit()
cursor.close()
mysql.close()