spider.py
import scrapy
from scrapy.crawler import CrawlerProcess
import json


class PythonListingSpider(scrapy.Spider):
    """Scrapes property listings from the Maricopa page of arizonarealestate.com."""

    name = 'pythonlistingssspider'
    start_urls = ['https://www.arizonarealestate.com/maricopa/', ]
    # Class-level list so the results remain accessible after the crawl finishes.
    found_listings = []

    def parse(self, response):
        # Each listing card sits inside a "si-listings-column" container.
        gallery = response.xpath('//div[@class="si-listings-column"]')
        for listing in gallery:
            listing_details = dict()
            listing_details['name'] = listing.xpath(
                './/div[@class="si-listing__title-main"]/text()').get()
            listing_details['description'] = listing.xpath(
                './/div[@class="si-listing__title-description"]/text()').get()
            listing_details['price'] = listing.xpath(
                './/div[@class="si-listing__photo-price"]/span/text()').get()
            listing_details['agency'] = listing.xpath(
                './/div[@class="si-listing__footer"]/div/text()').get()
            self.found_listings.append(listing_details)


if __name__ == "__main__":
    # Run the spider with minimal logging, then print the scraped listings as JSON.
    process = CrawlerProcess({'LOG_LEVEL': 'ERROR'})
    process.crawl(PythonListingSpider)
    process.start()  # blocks until the crawl is finished
    print(json.dumps(PythonListingSpider.found_listings, indent=4))