Skip to content

Commit

Permalink
Merge branch 'staging_CW_2_SV' into flask-cache
Browse files Browse the repository at this point in the history
  • Loading branch information
sverhoeven committed Jan 21, 2025
2 parents 117705a + 33c0509 commit e3fdf7d
Show file tree
Hide file tree
Showing 4 changed files with 18 additions and 18 deletions.
7 changes: 4 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ Should have the following data files:
1. `data/xr_allow_<2030|2040|FC>.nc` - NetCDF file
1. `data/ne_110m_admin_0_countries.geojson` - can be downloaded with `npm run download:borders`

If your data is stored elsewhere, you can replace `data/` with a symlink. For example `rm data; ln -s /path/to/data data`.

## API service

The API web service reads the NetCDF file and returns the data as JSON which is used in the web application.
Expand Down Expand Up @@ -107,11 +109,10 @@ node build/index.js

> To deploy your app, you may need to install an [adapter](https://kit.svelte.dev/docs/adapters) for your target environment.
If the Python webservice is not running on `http://127.0.0.1:5000` then set `CABE_API_URL` environment variable to right URL.

The web application server expects the Python web service to be running on `http://127.0.0.1:5000`.
## Cache

The Python webservice caches all requests for 10 hours in the `./cache` directory. The cache can be cleared with
The Python webservice caches all requests forever in the `./cache` directory. The cache can be cleared with

```bash
# stop webservice
Expand Down
3 changes: 1 addition & 2 deletions src/lib/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -82,8 +82,7 @@ export function pathwayQueryFromSearchParams(
};
}

// Base URL of the Python web service.
// Override with the CABE_API_URL environment variable (see README);
// defaults to the local development server.
export const API_URL = process.env.CABE_API_URL ?? 'http://127.0.0.1:5000';

async function getJSON(path: string, myfetch = fetch) {
let url = `${API_URL}${path}`;
Expand Down
3 changes: 2 additions & 1 deletion src/lib/server/db/data.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { open_borders } from './borders';

// Directory holding the data files (server-side only).
// Override with the CABE_DATA_DIR environment variable; defaults to `data`.
export const dataDir = process.env.CABE_DATA_DIR || 'data';

const bordersPath = dataDir + '/ne_110m_admin_0_countries.geojson';

// Country borders GeoJSON, loaded once at module initialization.
export const borders = await open_borders(bordersPath);
23 changes: 11 additions & 12 deletions ws.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,10 @@
# TODO use class-based views for a reusable nc-file viewer?
# TODO write tests with dummy data

# Directory containing the NetCDF/GeoJSON data files.
# Overridable via the CABE_DATA_DIR environment variable; defaults to ./data,
# which keeps the previous hard-coded behavior.
CABE_DATA_DIR = Path(os.environ.get("CABE_DATA_DIR", "data"))

# Global data (xr_dataread.nc), opened once at import time.
dsGlobal = xr.open_dataset(CABE_DATA_DIR / "xr_dataread.nc")

# PCC convergence year is standard on 2050
DEFAULT_CONVERGENCE_YEAR = 2050
Expand Down Expand Up @@ -123,16 +122,16 @@ def pathwaySelection():

# ISO codes for which a per-region allocation file (xr_alloc_<ISO>.nc)
# exists in the data directory, derived from the file names on disk.
available_region_files = {
    os.path.basename(p).removeprefix("xr_alloc_").removesuffix(".nc")
    for p in glob(str(CABE_DATA_DIR / "xr_alloc_*.nc"))
}


def build_regions():
countries_geojson = {}
for g in loads(
Path(DATA_PATH, "ne_110m_admin_0_countries.geojson").read_text(encoding="utf8")
(CABE_DATA_DIR / "ne_110m_admin_0_countries.geojson").read_text(encoding="utf8")
)["features"]:
ps = g["properties"]
countries_geojson[ps["ISO_A3_EH"]] = {
Expand Down Expand Up @@ -334,10 +333,10 @@ def gdpOverTime(region):


# Map data (xr_alloc_<year>.nc): per-region allocations for fixed target
# years plus the full-century ("FC") dataset, opened once at import time.
ds_alloc_2030 = xr.open_dataset(CABE_DATA_DIR / "xr_alloc_2030.nc")
ds_alloc_2040 = xr.open_dataset(CABE_DATA_DIR / "xr_alloc_2040.nc")
ds_alloc_2050 = xr.open_dataset(CABE_DATA_DIR / "xr_alloc_2050.nc")
ds_alloc_FC = xr.open_dataset(CABE_DATA_DIR / "xr_alloc_FC.nc")


def population_map(year, scenario="SSP2"):
Expand Down Expand Up @@ -401,7 +400,7 @@ def fullCenturyBudgetSpatial(year):


# Reference pathway data (xr_policyscen.nc), opened once at import time.
ds_policyscen = xr.open_dataset(CABE_DATA_DIR / "xr_policyscen.nc")


@app.get("/policyPathway/<policy>/<region>")
Expand Down Expand Up @@ -514,7 +513,7 @@ def indicators(region):
def get_ds(ISO):
    """Open the per-country allocation dataset ``xr_alloc_<ISO>.nc``.

    Raises:
        ValueError: if no allocation file exists for *ISO*
            (membership is checked against ``available_region_files``).
    """
    if ISO not in available_region_files:
        raise ValueError(f"ISO {ISO} not found")
    fn = CABE_DATA_DIR / f"xr_alloc_{ISO}.nc"
    return xr.open_dataset(fn)


Expand Down

0 comments on commit e3fdf7d

Please sign in to comment.