Automatically exported from code.google.com/p/planningalerts

Lichfield.py

  1. """
  2. Lichfield District council has no nice search page, but it does have a page
  3. which has the applications received in the last 7 days, so we'll use this,
  4. ignoring the date passed in.
  5. """
  6. import urllib2
  7. import urlparse
  8. import datetime
  9. import BeautifulSoup
  10. from PlanningUtils import PlanningApplication, \
  11. PlanningAuthorityResults, \
  12. getPostcodeFromText
  13. date_format = "%d/%m/%Y"
  14. class LichfieldParser:
  15. def __init__(self, *args):
  16. self.authority_name = "Lichfield District Council"
  17. self.authority_short_name = "Lichfield"
  18. self.base_url = "http://www.lichfielddc.gov.uk/site/scripts/planning_list.php"
  19. self._results = PlanningAuthorityResults(self.authority_name, self.authority_short_name)
  20. def getResultsByDayMonthYear(self, day, month, year):
  21. response = urllib2.urlopen(self.base_url)
  22. soup = BeautifulSoup.BeautifulSoup(response.read())
  23. trs = soup.find("table", {"class": "planningtable"}).tbody.findAll("tr")
  24. for tr in trs:
  25. application = PlanningApplication()
  26. tds = tr.findAll("td")
  27. application.council_reference = tds[0].a.string.strip()
  28. application.info_url = urlparse.urljoin(self.base_url, tds[0].a['href'])
  29. application.address = ' '.join(tds[1].contents[1].strip().split()[1:])
  30. application.postcode = getPostcodeFromText(application.address)
  31. # We're going to need to download the info page in order to get
  32. # the comment link, the date received, and the description.
  33. info_response = urllib2.urlopen(application.info_url)
  34. info_soup = BeautifulSoup.BeautifulSoup(info_response.read())
  35. application.description = info_soup.find(text="Proposal:").findPrevious("div").contents[1].strip()
  36. application.date_received = datetime.datetime.strptime(info_soup.find(text="Date Application Valid:").findNext("span").string.strip(), date_format).date()
  37. application.comment_url = info_soup.find("a", title="Comment on this planning application.")['href']
  38. self._results.addApplication(application)
  39. return self._results
  40. def getResults(self, day, month, year):
  41. return self.getResultsByDayMonthYear(int(day), int(month), int(year)).displayXML()
  42. if __name__ == '__main__':
  43. parser = LichfieldParser()
  44. print parser.getResults(12,10,2008)
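
The file above targets Python 2 (urllib2, urlparse, BeautifulSoup 3). Purely as an illustrative sketch, the same fetch-and-parse step for the weekly list page could be written for Python 3 roughly as below, assuming the third-party requests and beautifulsoup4 packages; the URL and the "planningtable" structure are taken from the code above and may no longer match the live site, and the PlanningUtils helpers (PlanningApplication, getPostcodeFromText, etc.) are left out.

import urllib.parse

import requests
from bs4 import BeautifulSoup

BASE_URL = "http://www.lichfielddc.gov.uk/site/scripts/planning_list.php"

def fetch_recent_applications():
    # Fetch the "applications received in the last 7 days" page and pull out
    # the reference, info URL, and address from each table row, mirroring the
    # first half of getResultsByDayMonthYear above.
    response = requests.get(BASE_URL, timeout=30)
    soup = BeautifulSoup(response.text, "html.parser")

    applications = []
    table = soup.find("table", {"class": "planningtable"})
    for tr in table.tbody.find_all("tr"):
        tds = tr.find_all("td")
        applications.append({
            "council_reference": tds[0].a.string.strip(),
            "info_url": urllib.parse.urljoin(BASE_URL, tds[0].a["href"]),
            "address": " ".join(tds[1].contents[1].strip().split()[1:]),
        })
    return applications

As in the original, each application's info page would still need a second request to recover the description, valid date, and comment link.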