Commit: Merge branch 'ruff' into ruff_upgrade
Showing 2 changed files with 29 additions and 7 deletions.
```diff
@@ -1,34 +1,54 @@
 import os
+from shutil import rmtree

 try:
     import requests
     from platformdirs import user_cache_path
 except ImportError:
-    raise ImportError("requests and platformdirs are needed to download data")
+    raise ImportError("requests and platformdirs are needed to download data") from None


+if os.environ.get("GITHUB_TOKEN"):
+    HEADERS = {"Authorization": f"token {os.environ['GITHUB_TOKEN']}"}
+else:
+    HEADERS = None


 def download_map(dataset):
     if dataset not in ("naturalearth_lowres", "naturalearth_cities"):
-        raise ValueError(f"Unknown dataset: {dataset}, supported datasets are 'naturalearth_lowres' and 'naturalearth_cities'")
+        raise ValueError(
+            f"Unknown dataset: {dataset}, supported datasets are 'naturalearth_lowres' and 'naturalearth_cities'"
+        )
     url = f"https://api.github.com/repos/geopandas/geopandas/contents/geopandas/datasets/{dataset}?ref=v0.14.4"
     local_dir = user_cache_path() / "spatialpandas" / dataset

     if local_dir.exists():
         return local_dir

-    response = requests.get(url)
-    if response.status_code == 200:
+    response = requests.get(url, headers=HEADERS)
+    if response.ok:
         files = response.json()
     else:
-        print(f"Failed to retrieve contents: {response.status_code}")
-        return None
+        raise ValueError(
+            f"Failed to retrieve contents ({response.status_code}): \n {response.text}"
+        )

     if not local_dir.exists():
         local_dir.mkdir(parents=True)

     for file in files:
         file_url = file["download_url"]
         file_name = file["name"]
-        file_response = requests.get(file_url)
+        file_response = requests.get(file_url, headers=HEADERS)
+        if not file_response.ok:
+            rmtree(local_dir)
+            raise ValueError(f"Failed to download file: {file_name}, \n{file_response.text}")
         with open(local_dir / file_name, "wb") as f:
             f.write(file_response.content)

     return local_dir


 if __name__ == "__main__":
     download_map("naturalearth_lowres")
     download_map("naturalearth_cities")
```