# NOTE(review): this header was unreadable mojibake ("ЛГ" repeated, "Size: a a a"),
# an encoding/paste artifact. Replaced with this comment so the module parses.
# TODO: recover the original header if it contained meaningful content.
class RapserSpider(scrapy.Spider):
    """Crawl the 'oborudovanie' category listing on de.bizin.eu, follow every
    product link, and emit one item per product parsed from the page's
    JSON-LD block.
    """

    name = "quotes"
    allowed_domains = ['de.bizin.eu']
    start_urls = ['https://de.bizin.eu/rus/cat/oborudovanie#.X474I9AzaUl']

    def parse(self, response):
        """Yield a Request per product link on the listing page, then follow
        the pagination link (last <li> in the pager) back into parse()."""
        # Iterate the extracted hrefs directly instead of range(len(...)).
        for url in response.xpath('//a[@itemprop="name"]/@href').extract():
            full_url = "https://de.bizin.eu" + url
            yield scrapy.Request(full_url, callback=self.price_parse,
                                 meta={'product_url': full_url})
        # BUG FIX: this pagination block previously sat outside any method
        # (after the class body) where `response` was undefined, making it
        # dead code; moved inside parse() so crawling continues past page 1.
        next_page = response.xpath('//div[@class="pagination span12"]//li[last()]/a/@href').extract_first()
        if next_page:
            yield scrapy.Request(
                response.urljoin(next_page),
                callback=self.parse
            )

    def price_parse(self, response):
        """Parse the product page's JSON-LD script tag and return the item."""
        data = json.loads(response.xpath('//script[@type="application/ld+json"]/text()').get())
        # NOTE(review): the JSON-LD 'email' field is stored under the 'price'
        # key of the output item — looks suspicious; confirm against the
        # site's JSON-LD schema before relying on this field.
        price = data['email']
        scrap_info = {
            'product_url': response.meta['product_url'],
            'price': price
        }
        # time.sleep(2) removed: it blocks Scrapy's (Twisted) event loop and
        # stalls every concurrent request. Use the DOWNLOAD_DELAY setting for
        # throttling instead. (The file's second copy of this spider also has
        # it disabled.)
        return scrap_info
import scrapy
import time
import json
class RapserSpider(scrapy.Spider):
    """Crawl the 'oborudovanie' category listing on de.bizin.eu, follow every
    product link, and emit one item per product parsed from the page's
    JSON-LD block.
    """

    name = "quotes"
    allowed_domains = ['de.bizin.eu']
    start_urls = ['https://de.bizin.eu/rus/cat/oborudovanie#.X474I9AzaUl']

    def parse(self, response):
        """Yield a Request per product link on the listing page, then follow
        the pagination link (last <li> in the pager) back into parse()."""
        # Iterate the extracted hrefs directly; the original built an
        # intermediate list with append and walked it via range(len(...)).
        for url in response.xpath('//a[@itemprop="name"]/@href').extract():
            full_url = "https://de.bizin.eu" + url
            yield scrapy.Request(full_url, callback=self.price_parse,
                                 meta={'product_url': full_url})
        next_page = response.xpath('//div[@class="pagination span12"]//li[last()]/a/@href').extract_first()
        if next_page:
            yield scrapy.Request(
                response.urljoin(next_page),
                callback=self.parse
            )

    def price_parse(self, response):
        """Parse the product page's JSON-LD script tag and return the item."""
        data = json.loads(response.xpath('//script[@type="application/ld+json"]/text()').get())
        # NOTE(review): the JSON-LD 'email' field is stored under the 'price'
        # key of the output item — looks suspicious; confirm against the
        # site's JSON-LD schema before relying on this field.
        price = data['email']
        scrap_info = {
            'product_url': response.meta['product_url'],
            'price': price
        }
        # Dead commented-out time.sleep(2) dropped: blocking sleeps stall
        # Scrapy's event loop — use the DOWNLOAD_DELAY setting to throttle.
        return scrap_info
# NOTE(review): trailing mojibake lines ("ЛГ" repeated) removed — encoding
# artifact with no recoverable content.