Automatically exported from code.google.com/p/planningalerts

Carmarthenshire.py

import urllib2
import urllib
import urlparse

import datetime, time
import cgi

from BeautifulSoup import BeautifulSoup

from PlanningUtils import PlanningApplication, \
    PlanningAuthorityResults, \
    getPostcodeFromText


class CarmarthenshireParser:
    def __init__(self, *args):
        self.comments_email_address = "planning@carmarthenshire.gov.uk"

        self.authority_name = "Carmarthenshire County Council"
        self.authority_short_name = "Carmarthenshire"
        self.base_url = "http://www.carmarthenshire.gov.uk/CCC_APPS/eng/plannaps/CCC_PlanningApplicationsResults.asp?datemode=range&in_lo_date=%(day)s%%2F%(month)s%%2F%(year)s&in_hi_date=%(day)s%%2F%(month)s%%2F%(year)s&SUBMIT=Search"

        self._results = PlanningAuthorityResults(self.authority_name, self.authority_short_name)

    def getResultsByDayMonthYear(self, day, month, year):
        search_day = datetime.date(year, month, day)

        # Now get the search page
        response = urllib2.urlopen(self.base_url % {"day": day,
                                                    "month": month,
                                                    "year": year,
                                                    })
        soup = BeautifulSoup(response.read())

        trs = soup.findAll("tr", valign="middle")

        count = 0
        for tr in trs:
            # The odd trs are just spacers
            if count % 2 == 0:
                application = PlanningApplication()

                tds = tr.findAll("td")

                application.date_received = search_day
                application.council_reference = tds[1].a.string
                application.address = tds[3].a.string
                application.postcode = getPostcodeFromText(application.address)

                # All the links in this <tr> go to the same place...
                application.info_url = urlparse.urljoin(self.base_url, tr.a['href'])

                # Still looking for description and comment url

                # For the description, we'll need the info page
                info_soup = BeautifulSoup(urllib2.urlopen(application.info_url).read())

                application.description = info_soup.find(text="Description").findNext("td").findNext("td").font.string

                # While we're here, let's get the OSGB grid ref
                application.osgb_x, application.osgb_y = info_soup.find(text="Grid Reference").findNext("td").font.string.split("-")

                # We'll have to use an email address for comments
                application.comment_url = self.comments_email_address

                self._results.addApplication(application)

            count += 1

        return self._results

    def getResults(self, day, month, year):
        return self.getResultsByDayMonthYear(int(day), int(month), int(year)).displayXML()


if __name__ == '__main__':
    parser = CarmarthenshireParser()
    print parser.getResults(8, 8, 2008)
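The scraper imports PlanningApplication, PlanningAuthorityResults and getPostcodeFromText from a PlanningUtils module that is not included on this page. Below is a minimal sketch of the interface the code above assumes, inferred from how those names are used in Carmarthenshire.py; the bodies are illustrative placeholders, not the real planningalerts implementation.

# Hypothetical sketch of the PlanningUtils interface assumed by Carmarthenshire.py.
# Attribute and method names come from their use in the scraper above;
# the implementations here are illustrative only.
import re


class PlanningApplication:
    def __init__(self):
        self.council_reference = None
        self.address = None
        self.postcode = None
        self.description = None
        self.info_url = None
        self.comment_url = None
        self.date_received = None
        self.osgb_x = None
        self.osgb_y = None


class PlanningAuthorityResults:
    def __init__(self, authority_name, authority_short_name):
        self.authority_name = authority_name
        self.authority_short_name = authority_short_name
        self.applications = []

    def addApplication(self, application):
        self.applications.append(application)

    def displayXML(self):
        # Serialise the collected applications as a simple XML document
        # (the real module's output format may differ).
        lines = ['<planning authority_name="%s">' % self.authority_name]
        for app in self.applications:
            lines.append('  <application reference="%s"/>' % app.council_reference)
        lines.append('</planning>')
        return "\n".join(lines)


def getPostcodeFromText(text):
    # Pull the first thing that looks like a UK postcode out of the text.
    match = re.search(r"[A-Z]{1,2}\d[A-Z\d]?\s*\d[A-Z]{2}", text or "")
    return match.group() if match else None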