Automatically exported from code.google.com/p/planningalerts

import urllib2
import urllib
import urlparse

import datetime, time
import cgi

from BeautifulSoup import BeautifulSoup

from PlanningUtils import PlanningApplication, \
    PlanningAuthorityResults, \
    getPostcodeFromText

date_format = "%Y%m%d"


class ForestOfDeanParser:
    """Scrapes planning applications from the Forest of Dean District Council site."""

    def __init__(self, *args):
        self.authority_name = "Forest of Dean District Council"
        self.authority_short_name = "Forest of Dean"
        self.base_url = "http://www.fdean.gov.uk/content.asp"

        self._results = PlanningAuthorityResults(self.authority_name, self.authority_short_name)

    def getResultsByDayMonthYear(self, day, month, year):
        # Search for applications received on a single day by passing the same
        # date as both the start (SDate) and end (EDate) of the range.
        search_date = datetime.date(year, month, day)

        search_data = urllib.urlencode(
            [
                ("parent_directory_id", "200"),
                ("nav", "679"),
                ("id", "13266"),
                ("RecStart", "1"),
                ("RecCount", "100"),
                ("SDate", search_date.strftime(date_format)),
                ("EDate", search_date.strftime(date_format)),
            ]
        )

        search_url = self.base_url + "?" + search_data

        response = urllib2.urlopen(search_url)
        soup = BeautifulSoup(response.read())

        results_table = soup.find("table", summary="List of planning applications that match your query")

        # Skip the header row, then pull one application per table row.
        for tr in results_table.findAll("tr")[1:]:
            application = PlanningApplication()
            application.date_received = search_date

            tds = tr.findAll("td")

            application.council_reference = tds[0].a.string.strip()
            application.info_url = urlparse.urljoin(self.base_url, tds[0].a['href'])
            application.comment_url = application.info_url

            # Collapse runs of whitespace in the address before extracting the postcode.
            application.address = ' '.join(tds[1].string.strip().split())
            application.postcode = getPostcodeFromText(application.address)

            application.description = tds[2].string.strip()

            self._results.addApplication(application)

        return self._results

    def getResults(self, day, month, year):
        return self.getResultsByDayMonthYear(int(day), int(month), int(year)).displayXML()


if __name__ == '__main__':
    parser = ForestOfDeanParser()
    print parser.getResults(21, 5, 2008)

# TODO - looks like it paginates at 20
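The TODO above suggests the council site caps each response at around 20 rows even though RecCount is set to 100. Below is a minimal sketch of how the scraper could walk successive result pages by advancing the RecStart offset; the page size of 20, the paged_search_urls helper, and the max_pages cap are illustrative assumptions, not confirmed behaviour of the fdean.gov.uk endpoint.

    import urllib
    import datetime

    PAGE_SIZE = 20  # assumed server-side page size; not confirmed

    def paged_search_urls(base_url, search_date, date_format="%Y%m%d", max_pages=10):
        # Yield one search URL per page, advancing RecStart by PAGE_SIZE each time.
        for page in range(max_pages):
            rec_start = page * PAGE_SIZE + 1
            query = urllib.urlencode([
                ("parent_directory_id", "200"),
                ("nav", "679"),
                ("id", "13266"),
                ("RecStart", str(rec_start)),
                ("RecCount", str(PAGE_SIZE)),
                ("SDate", search_date.strftime(date_format)),
                ("EDate", search_date.strftime(date_format)),
            ])
            yield base_url + "?" + query

    # Usage sketch: fetch each URL in turn and stop once a page yields fewer
    # than PAGE_SIZE result rows.
    # for url in paged_search_urls("http://www.fdean.gov.uk/content.asp",
    #                              datetime.date(2008, 5, 21)):
    #     ...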