Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
*.pyc
*.DS_Store
.DS_Store
agencies/
exemptions/
jurisdictions/
32 changes: 32 additions & 0 deletions export_agencies.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
#!/usr/bin/env python2
# -- coding: utf-8 --
"""Export every MuckRock agency record to its own JSON file under agencies/.

Walks the paginated /agency/ API endpoint, following each response's
'next' link until it comes back null, and writes one pretty-printed
JSON file per agency, named by the agency's numeric id.
"""

import utils
import urllib, os, json, datetime, requests, urlparse

api_url = utils.API_URL
token = utils.get_api_key()
headers = utils.get_headers(token)

page = 1
next_url = api_url + "agency/?page=" + str(page)
done_so_far = 0


# Create the output directory. Catch only OSError so a pre-existing
# directory is tolerated while unrelated failures (e.g. permission
# denied elsewhere) are not silently swallowed.
try:
    os.mkdir('agencies')
except OSError:
    print('dir exists')

# Follow the API's pagination links; 'next' is null on the last page.
while next_url:
    agencies = requests.get(next_url, headers=headers).json()
    agency_data = agencies['results']
    for agency in agency_data:
        # 'with' guarantees the file handle is closed even if the
        # json serialization raises.
        with open('agencies/' + str(agency["id"]) + ".json", "w+") as text_file:
            text_file.write(json.dumps(agency, sort_keys=True, indent=4, separators=(',', ': ')))

    done_so_far = done_so_far + len(agency_data)
    count = agencies['count']
    print('Getting agencies: %d of %d' % (done_so_far, count))
    next_url = agencies['next']
31 changes: 31 additions & 0 deletions export_exemptions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
#!/usr/bin/env python2
# -- coding: utf-8 --
"""Export every MuckRock exemption record to its own JSON file under exemptions/.

Walks the paginated /exemption/ API endpoint, following each response's
'next' link until it comes back null, and writes one pretty-printed
JSON file per exemption, named by the exemption's numeric id.
"""

import utils
import urllib, os, json, datetime, requests, urlparse

api_url = utils.API_URL
token = utils.get_api_key()
headers = utils.get_headers(token)

page = 1
next_url = api_url + "exemption/?page=" + str(page)
done_so_far = 0

# Create the output directory. Catch only OSError so a pre-existing
# directory is tolerated while unrelated failures are not silently
# swallowed.
try:
    os.mkdir('exemptions')
except OSError:
    print('dir exists')

# Follow the API's pagination links; 'next' is null on the last page.
while next_url:
    exemptions = requests.get(next_url, headers=headers).json()
    exemption_data = exemptions['results']
    for exemption in exemption_data:
        # 'with' guarantees the file handle is closed even if the
        # json serialization raises.
        with open('exemptions/' + str(exemption["id"]) + ".json", "w+") as text_file:
            text_file.write(json.dumps(exemption, sort_keys=True, indent=4, separators=(',', ': ')))

    done_so_far = done_so_far + len(exemption_data)
    count = exemptions['count']
    print('Getting exemptions: %d of %d' % (done_so_far, count))
    next_url = exemptions['next']
31 changes: 31 additions & 0 deletions export_jurisdictions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
#!/usr/bin/env python2
# -- coding: utf-8 --
"""Export every MuckRock jurisdiction record to its own JSON file under jurisdictions/.

Walks the paginated /jurisdiction/ API endpoint, following each response's
'next' link until it comes back null, and writes one pretty-printed
JSON file per jurisdiction, named by the jurisdiction's numeric id.
"""

import utils
import urllib, os, json, datetime, requests, urlparse

api_url = utils.API_URL
token = utils.get_api_key()
headers = utils.get_headers(token)

page = 1
next_url = api_url + "jurisdiction/?page=" + str(page)
done_so_far = 0

# Create the output directory. Catch only OSError so a pre-existing
# directory is tolerated while unrelated failures are not silently
# swallowed.
try:
    os.mkdir('jurisdictions')
except OSError:
    print('dir exists')

# Follow the API's pagination links; 'next' is null on the last page.
while next_url:
    jurisdictions = requests.get(next_url, headers=headers).json()
    jurisdiction_data = jurisdictions['results']
    for jurisdiction in jurisdiction_data:
        # 'with' guarantees the file handle is closed even if the
        # json serialization raises.
        with open('jurisdictions/' + str(jurisdiction["id"]) + ".json", "w+") as text_file:
            text_file.write(json.dumps(jurisdiction, sort_keys=True, indent=4, separators=(',', ': ')))

    done_so_far = done_so_far + len(jurisdiction_data)
    count = jurisdictions['count']
    print('Getting jurisdictions: %d of %d' % (done_so_far, count))
    next_url = jurisdictions['next']
4 changes: 4 additions & 0 deletions export_muckrock_identifiers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/usr/bin/env python2
# -- coding: utf-8 --
"""Run all three MuckRock export scripts.

Each export module is a flat script, so importing it executes the
full export immediately.
"""

# BUG FIX: the sibling module is export_exemptions (see
# export_exemptions.py); the original 'export_exceptions' name does
# not exist and raised ImportError.
import export_agencies, export_exemptions, export_jurisdictions