Our Python code worked completely fine six months ago. Now, half a year later, we get the following error:
SessionNotCreatedException: Message: session not created: This version of ChromeDriver only supports Chrome version 74 (Driver info: chromedriver=74.0.3729.6 (255758eccf3d244491b8a1317aa76e1ce10d57e9-refs/branch-heads/3729@{#29}),platform=Windows NT 10.0.17763 x86_64)
According to this article, Chrome updates every few months, so we may have a mismatched Chrome/ChromeDriver version. However, the solutions from several articles did not change much (more detail at the bottom).
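To confirm the versions really are out of sync, a minimal sanity check like the sketch below could be run first. This is only an assumption-laden sketch: it assumes chromedriver.exe sits at the path used in our code, and the installed Chrome version (visible under chrome://settings/help) would have to be compared against the driver's output by hand; the two major versions must match.

import subprocess

# Sketch: print the version reported by the chromedriver binary the script
# actually points at, so it can be compared with the installed Chrome version.
driver_path = r'C:\Users\Trading\Documents\Python\dcm_trading\chromedriver.exe'
print(subprocess.run([driver_path, '--version'], capture_output=True, text=True).stdout)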
Here is the part of my code where the issue most likely is:
import requests  # HTTP requests (not used for the scraping itself)
import json  # JSON encoding/decoding
import pandas as pd  # data frames
import csv  # CSV file I/O
import datetime as dt  # dates and times
import os  # operating-system interface (chdir, paths)
from time import sleep  # sleep function
from selenium import webdriver  # browser automation / web scraping
from selenium.webdriver.common.keys import Keys  # keyboard keys for Selenium
# set up the profile
profile = webdriver.ChromeOptions()  # options object used to configure the Chrome browser
prefs = {"download.default_directory": r'C:\Users\Trading\Documents\Python\dcm_trading\weather',
         "download.prompt_for_download": False,
         "download.directory_upgrade": True,
         "safebrowsing.enabled": True}
profile.add_experimental_option('prefs', prefs)  # apply the download preferences to Chrome
driver = webdriver.Chrome(executable_path=r'C:\Users\Trading\Documents\Python\dcm_trading\chromedriver.exe', options=profile)  # create the driver from the local chromedriver.exe
os.chdir(r'C:\Users\Trading\Documents\Python\dcm_trading\weather')  # work out of the download folder
What I tried: replacing the ChromeDriver executable at C:\Users\Trading\Documents\Python\dcm_trading\chromedriver.exe, in case we now have the wrong version.
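Something we are considering instead of swapping the binary by hand: letting the third-party webdriver-manager package download a driver that matches the installed Chrome. This is only a sketch, assuming webdriver-manager is installed (pip install webdriver-manager) and the Selenium 3-style arguments used in the code above:

from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager

# Sketch: ChromeDriverManager downloads a chromedriver matching the installed
# Chrome and returns its path, so the hard-coded executable_path is not needed.
profile = webdriver.ChromeOptions()
driver = webdriver.Chrome(executable_path=ChromeDriverManager().install(), options=profile)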
Full Code (this was working 6 months ago!):
import requests  # HTTP requests (for the OpenWeather API, not used)
import json  # JSON encoding/decoding
import pandas as pd  # data frames
import csv  # CSV file I/O
import datetime as dt  # dates and times
import os  # operating-system interface (chdir, paths)
from time import sleep  # sleep function
from selenium import webdriver  # browser automation / web scraping
from selenium.webdriver.common.keys import Keys  # keyboard keys for Selenium
api_key = '48d0585eb93c6d74105b8707996390e1'  # OpenWeather API key, not used
# The largest cities in North America
city_name = ['Atlanta','Baltimore','Boston','Charlotte','Chicago','Dallas','Denver','Detroit','Houston','Indianapolis','Kansas City','Los Angeles','Miami','Minneapolis','Montreal','Nashville','New York','Philadelphia','Phoenix','Pittsburgh','Portland','Riverside','Saint Louis','San Diego','San Francisco','Seattle','Tampa','Toronto','Vancouver','Washington DC']
#indices for each city on weather.com
city_id = [4180439,4347778,4930956,4460243,4887398,4684888,5419384,4990729,4699066,4259418,4393217,5368361,4164138,5037649,6077243,4644585,4560349,5128581,5308655,5206379,5746545,5387877,4407066,5391811,5391959,5809844,4174757,6167865,6173331,4366164]
#URLs for each city on weather.com
city_url = ['https://weather.com/weather/tenday/l/USGA0028:1:US',
'https://weather.com/weather/tenday/l/USMD0018:1:US',
'https://weather.com/weather/tenday/l/USMA0046:1:US',
'https://weather.com/weather/tenday/l/USNC0121:1:US',
'https://weather.com/weather/tenday/l/USIL0225:1:US',
'https://weather.com/weather/tenday/l/USTX0327:1:US',
'https://weather.com/weather/tenday/l/USCO0105:1:US',
'https://weather.com/weather/tenday/l/USMI0229:1:US',
'https://weather.com/weather/tenday/l/USTX0617:1:US',
'https://weather.com/weather/tenday/l/USIN0305:1:US',
'https://weather.com/weather/tenday/l/USMO0460:1:US',
'https://weather.com/weather/tenday/l/USCA0638:1:US',
'https://weather.com/weather/tenday/l/USFL0316:1:US',
'https://weather.com/weather/tenday/l/USMN0503:1:US',
'https://weather.com/weather/tenday/l/CAXX0301:1:CA',
'https://weather.com/weather/tenday/l/USTN0357:1:US',
'https://weather.com/weather/tenday/l/USNY0981:1:US',
'https://weather.com/weather/tenday/l/USPA1276:1:US',
'https://weather.com/weather/tenday/l/USAZ0166:1:US',
'https://weather.com/weather/tenday/l/USPA1290:1:US',
'https://weather.com/weather/tenday/l/USOR0275:1:US',
'https://weather.com/weather/tenday/l/USCA0949:1:US',
'https://weather.com/weather/tenday/l/USMO0787:1:US',
'https://weather.com/weather/tenday/l/USCA0982:1:US',
'https://weather.com/weather/tenday/l/USCA0987:1:US',
'https://weather.com/weather/tenday/l/USWA0395:1:US',
'https://weather.com/weather/tenday/l/USFL0481:1:US',
'https://weather.com/weather/tenday/l/CAXX0504:1:CA',
'https://weather.com/weather/tenday/l/CAXX0518:1:CA',
'https://weather.com/weather/tenday/l/USDC0001:1:US']
# set up the profile
profile = webdriver.ChromeOptions()  # options object used to configure the Chrome browser
prefs = {"download.default_directory": r'C:\Users\Trading\Documents\Python\dcm_trading\weather',
         "download.prompt_for_download": False,
         "download.directory_upgrade": True,
         "safebrowsing.enabled": True}
profile.add_experimental_option('prefs', prefs)  # apply the download preferences to Chrome
driver = webdriver.Chrome(executable_path=r'C:\Users\Trading\Documents\Python\dcm_trading\chromedriver.exe', options=profile)  # create the driver from the local chromedriver.exe
os.chdir(r'C:\Users\Trading\Documents\Python\dcm_trading\weather')  # work out of the download folder
with open('weather_' + str(pd.Timestamp.today())[0:10] + '.csv','w',newline='') as csvfile: #output file for weather data.
    spamwriter = csv.writer(csvfile, delimiter=',')  # CSV writer for the output file
    spamwriter.writerow(['city_id','city_name','datetime','offset','temp_high','temp_low'])  # header row of the output file
    for i in range(0, len(city_url)):  # grab the data from the site, cycling through each city
        print(pd.Timestamp.today())
        driver.get(city_url[i])  # go to the URL for this city
        print(city_url[i])
        print(pd.Timestamp.today())
        for j in range(1, 16):  # cycle through the 15 forecast rows (j = 1..15)
            # grab the high, low, and date for each row
            high = driver.find_element_by_css_selector('#twc-scrollabe > table > tbody > tr:nth-child(' + str(j) + ') > td.temp > div > span:nth-child(1)').text
            low = driver.find_element_by_css_selector('#twc-scrollabe > table > tbody > tr:nth-child(' + str(j) + ') > td.temp > div > span:nth-child(3)').text
            date = driver.find_element_by_css_selector('#twc-scrollabe > table > tbody > tr:nth-child(' + str(j) + ') > td:nth-child(2) > div > span').text
            # reference selectors:
            #auth > div > div > div > div > div > form > button
            #twc-scrollabe > table > tbody > tr:nth-child(1) > td:nth-child(2) > div > span
            #twc-scrollabe > table > tbody > tr:nth-child(2) > td:nth-child(2) > div > span
            # convert the temperature strings to integers: if the value starts with '-'
            # (e.g. the '--' placeholder) record 0, otherwise strip the trailing degree sign
            if high[0] == '-':
                temp_high = 0
            else:
                temp_high = int(high[0:-1])
            if low[0] == '-':
                temp_low = 0
            else:
                temp_low = int(low[0:-1])
            # write out the row for this day
            spamwriter.writerow([city_id[i], city_name[i], date, (j-1), temp_high, temp_low])
# leaving the with-block closes the file