from selenium import webdriver, common
import time
from bs4 import BeautifulSoup
from selenium import webdriver
import time
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
import json
# Scrape the Steam "specials" page and dump {game name: price summary}
# into all_sales.json (UTF-8, human-readable).
url = 'https://store.steampowered.com/specials/'
# Raw string: a Windows path's backslashes must not be interpreted as
# string escapes (non-raw '\П', '\c' etc. raise invalid-escape warnings).
s = Service(executable_path=r'C:\Пользователи\Олег\PycharmProjects\TelegramBot\chromedriver_win32\chromedriver.exe')
options = webdriver.ChromeOptions()
driver = webdriver.Chrome(service=s, options=options)
try:
    driver.get(url)
    time.sleep(3)  # let the initial page render before interacting
    try:
        # Repeatedly scroll down and click the "show more" button to force
        # lazy-loaded sale items onto the page; 650 is an upper bound —
        # the loop normally ends when the button disappears.
        for _ in range(650):
            driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
            time.sleep(2)  # give the next batch of items time to load
            driver.find_element(
                By.XPATH,
                '//div[@class="saleitembrowser_ShowContentsContainer_3IRkb"]//button[text()="Показать больше"]',
            ).click()
    except common.exceptions.NoSuchElementException:
        # The button is gone — all items have been loaded.
        pass
    time.sleep(3)
    html = driver.page_source
finally:
    # Always release the browser process, even if scraping fails midway.
    driver.quit()

soup = BeautifulSoup(html, "lxml")
# One widget div per sale entry (class names are Steam's hashed CSS modules
# and may break when the site is redeployed — TODO confirm periodically).
games_names = soup.find_all('div', class_='salepreviewwidgets_StoreSaleWidgetRight_1lRFu')
all_sales_dict = {}
for game in games_names:
    try:
        name = game.find('div', 'salepreviewwidgets_TitleCtn_1F4bc').find('a').text
        price_sale = game.find('div', 'salepreviewwidgets_StoreSalePriceBox_Wh0L8').text
        price_orig = game.find('div', 'salepreviewwidgets_StoreOriginalPrice_1EKGZ').text
        sale = game.find('div', 'salepreviewwidgets_StoreSaleDiscountBox_2fpFv').text
        all_sales_dict[name] = (
            f"Цена без скидки: {price_orig}. Скидка: {sale}. Цена со скидкой: {price_sale}"
        )
    except AttributeError:
        # Widget without the expected sub-elements (e.g. a banner/ad tile
        # or a non-discounted entry) — skip it silently, as before.
        pass

with open("all_sales.json", "w", encoding="utf-8") as file:
    json.dump(all_sales_dict, file, indent=4, ensure_ascii=False)
看起来现在的问题是页面没来得及加载。我修改了代码并留下了注释:把数据收集逻辑移到一个函数中,并在循环中调用它。在该函数里,我们先等待按钮加载完成(最多 10 秒超时),然后点击它、收集数据并写入文件。不要忘记添加相应的导入。循环的重复次数需要凭经验确定,我认为 5–7 次就足够了。为方便起见,我还在每次循环时添加了输出,以便了解程序当前进行到哪一步;程序结束后还会输出收集到的结果数量。请检查它的运行情况,并反馈结果。