from selenium import webdriver
from bs4 import BeautifulSoup as bs
import time

url = 'https://curecity.in/vendor-list.php?category=Doctor&filters_location=Jaipur&filters%5Bsubareas_global%5D=&filters_speciality='

driver = webdriver.Chrome(r'C:\chromedriver.exe')  # raw string so the backslash is not treated as an escape
driver.get(url)
driver.maximize_window()

next_page_number = 1
next_page = True

while next_page:
    # parse the current page and print every doctor link
    soup = bs(driver.page_source, 'html.parser')
    for link in soup.find_all('div', class_='col-md-9 feature-info'):
        link1 = link.find('a')
        print(link1['href'])
    try:
        # click the ">" pagination link to move to the next page
        driver.find_element_by_link_text(">").click()
        next_page_number += 1
        time.sleep(1)
    except:
        print('No more pages')
        next_page = False

driver.close()
I am trying to click through to the next page, but the code above ends up clicking alternate pages. I want to click from the first page through to the last page. Question source: StackOverflow, /questions/59386434/selenium-webdriver-i-want-to-click-on-the-next-page-till-last-page
I found two problems with this page. 1) It loads very slowly, so I have to sleep 10 seconds before grabbing the data and clicking the button. 2) The > button does not behave the way I expected: it jumps ahead 3 pages (even when I click it manually in the browser), so instead I look the button up by the next page's number and click that one:
driver.find_element_by_xpath('//a[@data-page="{}"]'.format(next_page_number)).click()
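A fixed time.sleep(10) works, but an explicit wait is usually more robust: it returns as soon as the element is ready and fails with a clear timeout otherwise. A minimal sketch of the same click wrapped in Selenium's WebDriverWait (the 15-second timeout is an arbitrary choice; driver and next_page_number come from the surrounding loop):

from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By

# wait up to 15 seconds (an assumed timeout) for the pagination link to
# become clickable, then click it; raises TimeoutException otherwise
button = WebDriverWait(driver, 15).until(
    EC.element_to_be_clickable((By.XPATH, '//a[@data-page="{}"]'.format(next_page_number)))
)
button.click()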
The full code. It works even without BeautifulSoup:
from selenium import webdriver
#from bs4 import BeautifulSoup as bs
import time

url = 'https://curecity.in/vendor-list.php?category=Doctor&filters_location=Jaipur&filters%5Bsubareas_global%5D=&filters_speciality='

driver = webdriver.Chrome(r'C:\chromedriver.exe')  # raw string so the backslash is not treated as an escape
#driver = webdriver.Firefox()
driver.maximize_window()
driver.get(url)

next_page_number = 1

while True:
    print('page:', next_page_number)
    time.sleep(10)  # the page loads very slowly

    #soup = bs(driver.page_source, 'html.parser')
    #for link in soup.find_all('div', class_='col-md-9 feature-info'):
    #    link1 = link.find('a')
    #    print(link1['href'])

    # read the links directly with Selenium instead of BeautifulSoup
    for link in driver.find_elements_by_xpath('//div[@class="col-md-2 feature-icon"]/a'):
        print(link.get_attribute('href'))

    try:
        # click the pagination button that carries the next page's number
        next_page_number += 1
        driver.find_element_by_xpath('//a[@data-page="{}"]'.format(next_page_number)).click()
    except:
        print('No more pages')
        break  # exit loop

#driver.close()
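Note that the find_element_by_* and find_elements_by_xpath helpers used above belong to the Selenium 3 API and were removed in Selenium 4, where locators are passed as (By, value) pairs and the driver path goes through a Service object. A short sketch of the equivalent calls under Selenium 4 (same chromedriver path as in the question; adjust it for your machine):

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.service import Service

# Selenium 4 style: the executable path is wrapped in a Service object
driver = webdriver.Chrome(service=Service(r'C:\chromedriver.exe'))
driver.get(url)

# Selenium 4 style: locators are (By, value) pairs instead of find_*_by_* helpers
for link in driver.find_elements(By.XPATH, '//div[@class="col-md-2 feature-icon"]/a'):
    print(link.get_attribute('href'))
driver.find_element(By.XPATH, '//a[@data-page="2"]').click()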