-
Notifications
You must be signed in to change notification settings - Fork 4
/
Copy pathget_url.py
executable file
·141 lines (108 loc) · 3.9 KB
/
get_url.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
#!/usr/bin/python3
import argparse
import urllib.request
import urllib.error
from bs4 import BeautifulSoup
from collections import OrderedDict
import os
import json
import dateparser
def preprocess(soup):
    """Replace empty FontAwesome tick/cross icons with 'yes'/'no' text.

    Mutates *soup* in place so later table parsing sees textual values
    instead of empty <i> icon elements.
    """
    replacements = (('fa fa-check', 'yes'), ('fa fa-times', 'no'))
    for css_class, label in replacements:
        for icon in soup.find_all("i", attrs={'class': css_class}):
            if icon.text == "":
                icon.string = label
def property_filepath(property_id):
    """Return the absolute path of the cached JSON file for *property_id*.

    Cached records live in a 'properties' directory next to this script.
    """
    script_dir = os.path.dirname(os.path.realpath(__file__))
    return os.path.join(script_dir, "properties", property_id)
def parse_location_table(soup):
    """Extract the LocalTransport table as a list of rows of non-empty cells.

    The header row is skipped; returns [] when the page has no
    LocalTransport section.
    """
    container = soup.find('div', attrs={'id': 'LocalTransport'})
    if not container:
        return []
    parsed = []
    for row in container.find_all('tr')[1:]:
        cells = [cell.text.strip() for cell in row.find_all('td')]
        parsed.append([cell for cell in cells if cell])
    return parsed
def get_title(soup):
    """Return the stripped text of the page's property-title heading."""
    heading = soup.find("h1", attrs={'class': "property-title"})
    return heading.text.strip()
def parse_feature_table(soup):
    """Extract every table in the Features section as rows of non-empty cells.

    Returns a list of [label, value, ...] rows. Returns [] when the page has
    no Features div — previously this raised AttributeError on None.find_all,
    unlike parse_location_table which guards the same way.
    """
    container = soup.find('div', attrs={'id': 'Features'})
    if not container:
        return []
    data = []
    for table in container.find_all('table'):
        for row in table.find_all('tr'):
            cells = [cell.text.strip() for cell in row.find_all('td')]
            data.append([cell for cell in cells if cell])
    return data
def available_from(features):
    """Return the 'Available From' feature as an ISO date string.

    Falls back to the raw text when dateparser cannot interpret it, and
    returns None when the feature table has no 'Available From' row —
    previously [...][0] raised IndexError in that case.
    """
    matches = [row[1] for row in features if row[0] == "Available From"]
    if not matches:
        return None
    date_text = matches[0]
    parsed = dateparser.parse(date_text)
    # dateparser returns None for unparseable text; keep the raw string then.
    return str(parsed.date()) if parsed else date_text
def EPC_rating(features):
    """Return the 'EPC Rating' value, or None when the table has none."""
    for row in features:
        if row[0] == "EPC Rating":
            return row[1]
    return None
def has_garden(features):
    """Map the 'Garden' feature to True ('yes') / False ('no').

    Returns None when the row is absent or carries any other value.
    """
    for row in features:
        if row[0] != "Garden":
            continue
        value = row[1]
        if value == "yes":
            return True
        if value == "no":
            return False
        return None
    return None
def parse_property_page(property_id, debug=False):
    """Scrape one OpenRent listing and cache it as JSON under properties/.

    Skips properties already cached unless *debug* is set; in debug mode the
    parsed record is printed instead of written. Returns None in all cases.
    """
    print("Processing property:", property_id)
    if not debug and os.path.isfile(property_filepath(property_id)):
        print("Skipping as it already exists")
        return
    try:
        html_doc = urllib.request.urlopen("http://www.openrent.co.uk/" +
                                          property_id).read()
    except urllib.error.HTTPError:
        print("Problem parsing %s." % property_id)
        return

    soup = BeautifulSoup(html_doc, 'html.parser')
    preprocess(soup)  # turn icon ticks/crosses into "yes"/"no" text

    price = soup.find_all("h3", {"class": "perMonthPrice"})[0]
    price = float(price.text[1:].replace(',', ''))  # drop leading currency symbol

    desc = soup.find_all("div", {"class": "description"})[0]
    # BUG FIX: str.replace returns a new string; the original discarded the
    # result, so tabs were never actually removed from the description.
    desc = desc.get_text().strip().replace("\t", "")

    location = parse_location_table(soup)
    features = parse_feature_table(soup)

    prop = OrderedDict()
    prop['id'] = property_id
    prop['title'] = get_title(soup)
    prop['location'] = location
    prop['price'] = price
    prop['description'] = desc
    prop['available_from'] = available_from(features)
    prop['EPC'] = EPC_rating(features)
    prop['has_garden'] = has_garden(features)

    if not debug:
        with open(property_filepath(property_id), "w") as f:
            json.dump(prop, f, indent=4, ensure_ascii=False)
    else:
        print(json.dumps(prop, indent=4, ensure_ascii=False))
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Scrape an OpenRent property page into a JSON record.")
    parser.add_argument("property_id", help='url to get', type=str)
    # BUG FIX: --debug's help string was a copy-paste of property_id's
    # ("url to get"); describe what the flag actually does.
    parser.add_argument("--debug", action='store_true', default=False,
                        help='print the parsed record instead of writing it')
    args = parser.parse_args()
    parse_property_page(args.property_id, debug=args.debug)