I'm not sure what the problem is, but I have a small script that uses Selenium and Beautiful Soup 4 to visit and parse the contents of www.oddsportal.com.
The code below does not loop over the league value. The league is taken from anchor index [1] with
game_data.league.append(count[1].text)
but the same value is repeated for the whole webpage instead of changing for every row.
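If I condense it, the part that seems relevant is this (simplified from the full script below): count is built once per page from the first header cell, so count[1] never changes inside the row loop.

# condensed from parse_data() below; 'main' is the first <th class="first2 tl"> on the page
count = main.findAll('a')                   # e.g. [<a>Finland</a>, <a>Veikkausliiga</a>]
for row in df.itertuples():
    game_data.league.append(count[1].text)  # the same league gets appended for every row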
My code:
import pandas as pd
from selenium import webdriver
from datetime import datetime
from bs4 import BeautifulSoup as bs
from math import nan
browser = webdriver.Chrome()
class GameData:
    def __init__(self):
        self.score = []
        self.date = []
        self.time = []
        self.country = []
        self.league = []
        self.game = []
        self.home_odds = []
        self.draw_odds = []
        self.away_odds = []

    def append(self, score):
        pass
def get_urls(browser, landing_page):
    browser.get(landing_page)
    urls = [i.get_attribute('href') for i in
            browser.find_elements_by_css_selector(
                '.next-games-date > a:nth-child(1), .next-games-date > a:nth-child(n+3)')]
    return urls
def parse_data(html):
    df = pd.read_html(html, header=0)[0]
    html = browser.page_source
    soup = bs(html, "lxml")
    cont = soup.find('div', {'id': 'wrap'})
    content = cont.find('div', {'id': 'col-content'})
    content = content.find('table', {'class': 'table-main'}, {'id': 'table-matches'})
    main = content.find('th', {'class': 'first2 tl'})
    if main is None:
        return None
    count = main.findAll('a')
    country = count[0].text
    game_data = GameData()
    game_date = datetime.strptime(soup.select_one('.bold')['href'].split('/')[-2], '%Y%m%d').date()
    for row in df.itertuples():
        if not isinstance(row[1], str):
            continue
        elif ':' not in row[1]:
            # header row without a kick-off time: update the country from it
            country = row[1].split('»')[0]
            continue
        game_time = row[1]
        score = row[3] if row[3] else nan
        game_data.date.append(game_date)
        game_data.time.append(game_time)
        game_data.country.append(country)
        game_data.league.append(count[1].text)  # <- repeats the first league of the page for every row
        game_data.game.append(row[2])
        game_data.score.append(score)
        game_data.home_odds.append(row[4])
        game_data.draw_odds.append(row[5])
        game_data.away_odds.append(row[6])
    return game_data
if __name__ == '__main__':
    start_url = "https://www.oddsportal.com/matches/soccer/"
    results = None
    urls = get_urls(browser, start_url)
    urls.insert(0, start_url)
    for number, url in enumerate(urls):
        if number > 0:
            browser.get(url)
        html = browser.page_source
        game_data = parse_data(html)
        if game_data is None:
            continue
        result = pd.DataFrame(game_data.__dict__)
        if results is None:
            results = result
        else:
            results = results.append(result, ignore_index=True)
results:
+-----+-------------------------+------------+--------+-----------+---------------+-------------------------+-------------+-------------+-------------+
| | score | date | time | country | league | game | home_odds | draw_odds | away_odds |
+=====+=========================+============+========+===========+===============+=========================+=============+=============+=============+
| 496 | Inter Turku - Mariehamn | 2021-06-10 | 15:00 | Finland | Veikkausliiga | Inter Turku - Mariehamn | 1.4 | 4.6 | 7.49 |
+-----+-------------------------+------------+--------+-----------+---------------+-------------------------+-------------+-------------+-------------+
| 497 | KTP - HIFK | 2021-06-10 | 15:30 | Finland | Veikkausliiga | KTP - HIFK | 3.42 | 3.17 | 2.18 |
+-----+-------------------------+------------+--------+-----------+---------------+-------------------------+-------------+-------------+-------------+
| 498 | Haka - HJK | 2021-06-10 | 15:30 | Finland | Veikkausliiga | Haka - HJK | 6.56 | 4.25 | 1.47 |
+-----+-------------------------+------------+--------+-----------+---------------+-------------------------+-------------+-------------+-------------+
| 499 | SJK - KuPS | 2021-06-10 | 15:30 | Finland | Veikkausliiga | SJK - KuPS | 3.34 | 3.25 | 2.18 |
+-----+-------------------------+------------+--------+-----------+---------------+-------------------------+-------------+-------------+-------------+
| 500 | Lahti - Ilves | 2021-06-10 | 15:30 | Finland | Veikkausliiga | Lahti - Ilves | 2.5 | 3.08 | 2.93 |
+-----+-------------------------+------------+--------+-----------+---------------+-------------------------+-------------+-------------+-------------+
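For what it's worth, the league name does appear in the same header cells (the rows without a kick-off time) that I already split the country out of, so the raw text in row[1] for those rows looks roughly like this (example values taken from the output above; the exact text may contain more parts):

# what I believe the header cell in row[1] holds for rows without a kick-off time
row[1]                  # 'Finland»Veikkausliiga'
row[1].split('»')[0]    # 'Finland'  <- this is how I currently pick up the country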
How do I loop so that the correct league value is picked up for every row instead of the same value being repeated for the entire page?