| @@ -4,3 +4,5 @@ ruby '2.3.1' | |||||
| gem 'scraperwiki', :git => 'https://github.com/openaustralia/scraperwiki-ruby/', :branch => 'morph_defaults' | gem 'scraperwiki', :git => 'https://github.com/openaustralia/scraperwiki-ruby/', :branch => 'morph_defaults' | ||||
| gem 'petrify', :git => 'https://github.com/adrianshort/petrify/' | gem 'petrify', :git => 'https://github.com/adrianshort/petrify/' | ||||
| gem 'rest-client' | |||||
| gem 'json' | |||||
| @@ -17,21 +17,39 @@ GIT | |||||
| GEM | GEM | ||||
| remote: https://rubygems.org/ | remote: https://rubygems.org/ | ||||
| specs: | specs: | ||||
| domain_name (0.5.20180417) | |||||
| unf (>= 0.0.5, < 1.0.0) | |||||
| haml (5.0.4) | haml (5.0.4) | ||||
| temple (>= 0.8.0) | temple (>= 0.8.0) | ||||
| tilt | tilt | ||||
| http-cookie (1.0.3) | |||||
| domain_name (~> 0.5) | |||||
| httpclient (2.8.3) | httpclient (2.8.3) | ||||
| json (2.1.0) | |||||
| mime-types (3.2.2) | |||||
| mime-types-data (~> 3.2015) | |||||
| mime-types-data (3.2018.0812) | |||||
| netrc (0.11.0) | |||||
| rest-client (2.0.2) | |||||
| http-cookie (>= 1.0.2, < 2.0) | |||||
| mime-types (>= 1.16, < 4.0) | |||||
| netrc (~> 0.8) | |||||
| sqlite3 (1.3.13) | sqlite3 (1.3.13) | ||||
| sqlite_magic (0.0.6) | sqlite_magic (0.0.6) | ||||
| sqlite3 | sqlite3 | ||||
| temple (0.8.0) | temple (0.8.0) | ||||
| tilt (2.0.8) | tilt (2.0.8) | ||||
| unf (0.1.4) | |||||
| unf_ext | |||||
| unf_ext (0.0.7.5) | |||||
| PLATFORMS | PLATFORMS | ||||
| ruby | ruby | ||||
| DEPENDENCIES | DEPENDENCIES | ||||
| json | |||||
| petrify! | petrify! | ||||
| rest-client | |||||
| scraperwiki! | scraperwiki! | ||||
| RUBY VERSION | RUBY VERSION | ||||
| @@ -0,0 +1,16 @@ | |||||
#!/usr/bin/env ruby
# frozen_string_literal: true

# Imports previously scraped planning applications from the Morph.io API
# (adrianshort/inlink-scraper) and saves them into the local morph SQLite
# store under the `applications` table.
#
# Requires the MORPH_API_KEY environment variable to authenticate against
# the Morph API.
require 'scraperwiki'
require 'rest-client'
require 'json'
# NOTE(review): removed unused `require 'pp'` — nothing in this script
# pretty-prints.

MORPH_API_URL = 'https://api.morph.io/adrianshort/inlink-scraper/data.json'

# Fetch every row of the remote `applications` table as JSON.
result = RestClient.get(MORPH_API_URL, params: {
  key: ENV['MORPH_API_KEY'],
  query: 'select * from applications'
})

apps = JSON.parse(result)

# Upsert keyed on (authority_name, council_reference) so re-runs do not
# create duplicate rows.
ScraperWiki.save_sqlite(%w[authority_name council_reference], apps, 'applications')