pythonseleniumweb-scrapingbeautifulsoupscrapy

Scrape Data from Map using selenium


Hi I am trying to scrape the data from this website appear on google map It will run fine for 2 times but they give an error for third time while using selenium. here is my code :

import selenium
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from shutil import which
import time
import pandas as pd
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException
Names = []
Emails = []
chrome_path = which('chromedriver')
driver = webdriver.Chrome(executable_path=chrome_path)
driver.maximize_window()
error = []
Address = []
Debug = []
for a in range(1,72):

    driver.get('https://www.boogsport.vlaanderen/zoek-een-club/')

    try:

        driver.find_element_by_xpath('//button[@id="oc_cb_btn"]').click()
    except:
        error.append("NOne")
    WebDriverWait(driver, 10).until(EC.frame_to_be_available_and_switch_to_it((By.XPATH,'//div[@class="entry-content"]/iframe')))

    xpath = "//div[@aria-label='Map']/div[3]/div/div/div["
    xpath += str(a)
    xpath += "]"
    

    driver.find_element_by_xpath(xpath).click()
    # WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.XPATH, xpath))).click()
    # time.sleep(2)

    name = driver.find_element_by_xpath('//div[@class="qqvbed-bN97Pc"]/div[1]/div[1]/div[2]')
    Names.append(name.text)
    try:

        mail = driver.find_element_by_xpath('//div[@class="qqvbed-bN97Pc"]/div[1]/div[2]/div[2]/a')
        Emails.append(mail.get_attribute('href'))
    except:
        Emails.append("None")
    try:

        adress = driver.find_element_by_xpath('//div[@class="qqvbed-bN97Pc"]/div[2]/div[2]')
        Address.append(adress.text)
    except:
        Address.append("None")

  


df = pd.DataFrame(Names , columns = ['Name'])
df['Email'] = Emails
df['Address'] = Address
print(df)

and I am getting this error:

raise exception_class(message, screen, stacktrace) selenium.common.exceptions.ElementClickInterceptedException: Message: element click intercepted: Element ... is not clickable at point (616, 132). Other element would receive the click: (Session info: chrome=87.0.4280.88)

Anybody have ideas what is the problem?


Solution

  • You can't handle an Element exception without NoSuchElementException

    If the first click doesn't work, try this:

    from selenium.common.exceptions import NoSuchElementException
    
    try:
        element = driver.find_element_by_xpath(".//button[@id="oc_cb_btn"]")
        elem.click()
    except NoSuchElementException:
        error.append("None")
    

    for the second click try this:

    from selenium.common.exceptions import NoSuchElementException
    
    try:
        driver.find_element_by_xpath(xpath).click()
    except NoSuchElementException:
        pass
    

    I hope it will work. Let us know.