-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path02_Intermediate_Web_Scraper.py
More file actions
23 lines (18 loc) · 943 Bytes
/
02_Intermediate_Web_Scraper.py
File metadata and controls
23 lines (18 loc) · 943 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
from bs4 import BeautifulSoup
import requests
from csv import writer
# Scrape apartment listings for Amsterdam from Pararius and write them to housing.csv.
url = "https://www.pararius.com/apartments/amsterdam?ac=1"

# timeout: never hang forever on a dead connection; raise_for_status: fail
# loudly on an HTTP error instead of silently parsing an error page.
page = requests.get(url, timeout=30)
page.raise_for_status()

soup = BeautifulSoup(page.content, 'html.parser')
listings = soup.find_all('section', class_="listing-search-item")


def _clean_text(parent, tag, css_class):
    """Return the first matching child's text with newlines removed and
    surrounding whitespace stripped, or '' when the element is absent
    (scraped markup is not guaranteed to contain every field)."""
    element = parent.find(tag, class_=css_class)
    if element is None:
        return ''
    return element.text.replace('\n', '').strip()


with open('housing.csv', 'w', encoding='utf8', newline='') as f:
    thewriter = writer(f)
    thewriter.writerow(['Title', 'Location', 'Price', 'Area'])
    # 'listing', not 'list' — the original name shadowed the builtin.
    for listing in listings:
        thewriter.writerow([
            _clean_text(listing, 'a', "listing-search-item__link--title"),
            _clean_text(listing, 'div', "listing-search-item__location"),
            _clean_text(listing, 'span', "listing-search-item__price"),
            _clean_text(listing, 'span', "illustrated-features__description"),
        ])