I'm trying to append some scraped values to a dataframe. I have this code:
import time
import requests
import pandas as pd
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
import json
# Grab content from URL
url = "https://www.remax.pt/comprar?searchQueryState={%22regionName%22:%22%22,%22businessType%22:1,%22listingClass%22:1,%22page%22:1,%22sort%22:{%22fieldToSort%22:%22ContractDate%22,%22order%22:1},%22mapIsOpen%22:false,%22listingTypes%22:[],%22prn%22:%22%22}"
PATH = r'C:\DRIVERS\chromedriver.exe'
driver = webdriver.Chrome(PATH)
option = Options()
option.headless = False
#chromedriver =
#driver = webdriver.Chrome(chromedriver)
#driver = webdriver.Firefox() #(options=option)
#driver.get(url)
#driver.implicitly_wait(10) # in seconds
time.sleep(1)
wait = WebDriverWait(driver, 10)
driver.get(url)
rows = driver.find_elements_by_xpath("//div[@class='row results-list ']/div")
data=[]
for row in rows:
    # Pull each listed field out of the result card
    price = row.find_element_by_xpath(".//p[@class='listing-price']").text
    print(price)
    address = row.find_element_by_xpath(".//p[@class='listing-address']").text
    print(address)
    Tipo = row.find_element_by_xpath(".//p[@class='listing-type']").text
    print(Tipo)
    Area = row.find_element_by_xpath(".//p[@class='listing-area']").text
    print(Area)
    Quartos = row.find_element_by_xpath(".//p[@class='icon-bedroom-full']").text
    print(Quartos)
    data.append([price, address, Tipo, Area, Quartos])
#driver.quit()
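For context, what I eventually want to do with data is something along the lines of this sketch (the column names are just placeholders I picked, nothing final):

# Rough sketch of the end goal: turn the collected rows into a DataFrame.
# Column names are placeholders I made up for illustration.
df = pd.DataFrame(data, columns=['price', 'address', 'type', 'area', 'rooms'])
print(df.head())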
The problem is that it returns the following error:
NoSuchElementException Traceback (most recent call last)
<ipython-input-16-9e4d01985cda> in <module>
49 price=row.find_element_by_xpath(".//p[@class='listing-price']").text
50 print(price)
---> 51 address=row.find_element_by_xpath(".//p[@class='listing-address']").text
52 print(address)
53 Tipo=row.find_element_by_xpath(".//p[@class='listing-type']").text
~\anaconda3\lib\site-packages\selenium\webdriver\remote\webelement.py in find_element_by_xpath(self, xpath)
349 element = element.find_element_by_xpath('//div/td[1]')
350 """
--> 351 return self.find_element(by=By.XPATH, value=xpath)
352
353 def find_elements_by_xpath(self, xpath):
~\anaconda3\lib\site-packages\selenium\webdriver\remote\webelement.py in find_element(self, by, value)
656 value = '[name="%s"]' % value
657
--> 658 return self._execute(Command.FIND_CHILD_ELEMENT,
659 {"using": by, "value": value})['value']
660
~\anaconda3\lib\site-packages\selenium\webdriver\remote\webelement.py in _execute(self, command, params)
631 params = {}
632 params['id'] = self._id
--> 633 return self._parent.execute(command, params)
634
635 def find_element(self, by=By.ID, value=None):
~\anaconda3\lib\site-packages\selenium\webdriver\remote\webdriver.py in execute(self, driver_command, params)
319 response = self.command_executor.execute(driver_command, params)
320 if response:
--> 321 self.error_handler.check_response(response)
322 response['value'] = self._unwrap_value(
323 response.get('value', None))
~\anaconda3\lib\site-packages\selenium\webdriver\remote\errorhandler.py in check_response(self, response)
240 alert_text = value['alert'].get('text')
241 raise exception_class(message, screen, stacktrace, alert_text)
--> 242 raise exception_class(message, screen, stacktrace)
243
244 def _value_or_default(self, obj, key, default):
NoSuchElementException: Message: no such element: Unable to locate element: {"method":"xpath","selector":".//p[@class='listing-address']"}
(Session info: chrome=90.0.4430.72)
But when I try it with only the first element, it returns a list of prices. What is the difference when I point it at the different places I want in the dataframe while using the same type of path?
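For reference, the single-field attempt that does work for me is roughly this sketch (same class name as in the code above):

# Sketch of the variant that works: grabbing only the prices directly
# from the page, which gives back a list of elements.
prices = driver.find_elements_by_xpath("//p[@class='listing-price']")
print([p.text for p in prices])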