Automatically exported from code.google.com/p/planningalerts

  1. """
  2. The Weymouth and Portland site has a page with the last 28 days of apps on it,
  3. so we'll use that.
  4. The info and comment pages can only be accessed by POSTs, so we'll have
  5. to give the search page, which is the best we can do.
  6. """
  7. import urllib2
  8. import datetime
  9. from BeautifulSoup import BeautifulSoup
  10. from PlanningUtils import PlanningApplication, \
  11. PlanningAuthorityResults, \
  12. getPostcodeFromText
  13. date_format = "%d %B %Y"
  14. class WeymouthParser:
  15. def __init__(self, *args):
  16. self.authority_name = "Weymouth and Portland Borough Council"
  17. self.authority_short_name = "Weymouth and Portland"
  18. self.base_url = "http://www.weymouth.gov.uk/Planning/applications/newapps.asp"
  19. self.search_url = "http://www.weymouth.gov.uk/planning/applications/planregister.asp"
  20. self._results = PlanningAuthorityResults(self.authority_name, self.authority_short_name)
  21. def getResultsByDayMonthYear(self, day, month, year):
  22. response = urllib2.urlopen(self.base_url)
  23. soup = BeautifulSoup(response.read())
  24. for details_input in soup.find("table", summary="Planning Applications Received in the last 7 days").findAll("input", alt="Planning Details"):
  25. application = PlanningApplication()
  26. first_tr = details_input.findPrevious("tr")
  27. other_trs = first_tr.findNextSiblings("tr", limit=8)
  28. application.council_reference = first_tr.find("input", {"name": "refval"})['value']
  29. application.address = other_trs[0].findAll("td")[1].string.strip()
  30. application.description = other_trs[1].findAll("td")[1].string.strip()
  31. application.date_received = datetime.datetime.strptime(other_trs[3].findAll("td")[1].string.strip(), date_format).date()
  32. # Both the info page and the comment page can only be got to
  33. # by a POST. The best we can do is give the url of the search page
  34. application.info_url = application.comment_url = self.search_url
  35. self._results.addApplication(application)
  36. return self._results
  37. def getResults(self, day, month, year):
  38. return self.getResultsByDayMonthYear(int(day), int(month), int(year)).displayXML()
  39. if __name__ == '__main__':
  40. parser = WeymouthParser()
  41. print parser.getResults(1,8,2008)
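Because the register only serves the detail and comment pages in response to a POST, a caller who wanted the full details would have to submit the form itself rather than follow a link. Below is a minimal sketch of such a request, assuming the register accepts the "refval" field seen in the hidden input above; the live form may require additional fields, so treat this as illustrative rather than a confirmed request.

# Hypothetical sketch, not part of the scraper: POSTing to the planning
# register for one application's details. "refval" is the only form field
# visible from the hidden input on the listing page; the real form may
# expect more, so check against the live register before relying on this.
import urllib
import urllib2

def fetch_details_page(search_url, council_reference):
    post_data = urllib.urlencode({"refval": council_reference})
    # Supplying a data argument makes urllib2 issue a POST rather than a GET.
    response = urllib2.urlopen(search_url, post_data)
    return response.read()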