Commit
Merge pull request #4 from blenback/develop-chenyu
Develop chenyu
Showing 34 changed files with 833 additions and 7 deletions.
@@ -0,0 +1,70 @@
from owslib.wcs import WebCoverageService
import rasterio
import matplotlib.pyplot as plt
from rasterio import plot
import os

# WCS URLs for different soil properties
wcs_urls = {
    'phh2o': 'http://maps.isric.org/mapserv?map=/map/phh2o.map',
    'bdod': 'http://maps.isric.org/mapserv?map=/map/bdod.map',
    'cec': 'http://maps.isric.org/mapserv?map=/map/cec.map',
    'cfvo': 'https://maps.isric.org/mapserv?map=/map/cfvo.map',
    'clay': 'https://maps.isric.org/mapserv?map=/map/clay.map',
    'nitrogen': 'https://maps.isric.org/mapserv?map=/map/nitrogen.map',
    'sand': 'https://maps.isric.org/mapserv?map=/map/sand.map',
    'silt': 'https://maps.isric.org/mapserv?map=/map/silt.map',
    'soc': 'https://maps.isric.org/mapserv?map=/map/soc.map',
    'ocs': 'https://maps.isric.org/mapserv?map=/map/ocs.map',
    'ocd': 'https://maps.isric.org/mapserv?map=/map/ocd.map'
    # Add other WCS URLs here
    # ...
}

# Soil properties
soil_properties = ['phh2o', 'bdod', 'cec', 'cfvo', 'clay', 'nitrogen', 'sand', 'silt', 'soc', 'ocd', 'ocs']

# Depth intervals
depth_intervals = ['0-5cm', '5-15cm', '15-30cm', '30-60cm', '60-100cm', '100-200cm']

# A bounding box broadly matching Peru
bbox = (-81.81, -18.80, -68.15, 0.32)

# Make sure the local output directory exists before downloading
os.makedirs('./data', exist_ok=True)

for property in soil_properties:
    wcs_url = wcs_urls.get(property, '')
    if wcs_url:
        wcs = WebCoverageService(wcs_url, version='1.0.0')

        for depth in depth_intervals:
            identifier = f'{property}_{depth}_mean'
            file_name = f'./data/{property}_{depth}_mean.tif'
            title = f'Mean {property} at {depth} depth in Peru'

            # Fetch the map segment within the bounding box
            response = wcs.getCoverage(
                identifier=identifier,
                crs='urn:ogc:def:crs:EPSG:4326',
                bbox=bbox,
                resx=0.002, resy=0.002,
                format='GEOTIFF_INT16')

            # Save the coverage for Peru to disk
            with open(file_name, 'wb') as file:
                file.write(response.read())
            print(f"Download complete: {file_name}")

            # Open the file from disk
            ph = rasterio.open(file_name, driver="GTiff")

            # Use the plot module to plot the map
            plot.show(ph, title=title, cmap='gist_ncar')
            plt.show()
    else:
        print(f"WCS URL for {property} not defined.")
@@ -0,0 +1,66 @@
########################################################################
## Script name: Rasterize_shapefile
## Purpose of script: Rasterize the shapefiles of three geographical regions in Peru
## Author: Chenyu Shen
## Date Created: 2023-10-27
## Notes:
########################################################################

### =========================================================================
### A - Preparation
### =========================================================================

## Install and load packages

# Vector of required packages
packs <- c("terra")

# Install new packages
new.packs <- packs[!(packs %in% installed.packages()[, "Package"])]
if (length(new.packs)) install.packages(new.packs)

# Load required packages
invisible(lapply(packs, require, character.only = TRUE))

# Set working directory
setwd("C:/Work/Peru-env-futures")

# Read in the shapefile
regions <- vect("src/utils/Geographical_Region/region-geografica.shp")

# Read in the reference raster
ref_raster <- rast("src/utils/ref_grid.tif")


### =========================================================================
### B - Check and Transform CRS
### =========================================================================

# Check CRS of both files
crs_regions <- crs(regions)
crs_ref <- crs(ref_raster)

# Transform if necessary
if (crs_regions != crs_ref) {
  regions <- project(regions, crs_ref)
}


### =========================================================================
### C - Rasterize the shapefile
### =========================================================================

# Access and view the attributes table of the shapefile
attributes <- values(regions)
print(attributes)

# Rasterize
rasterized_regions <- rasterize(regions, ref_raster, field = "nombre", fun = "min")

# Plot rasterized data
plot(rasterized_regions, col = rainbow(3))

# Overlay vector data (e.g., boundaries or points)
plot(regions, add = TRUE)

writeRaster(rasterized_regions, "src/utils/rasterized_regions.tif")
@@ -0,0 +1,42 @@
import os
import pandas as pd

# Set the directory where your TIF files are located
directory_path = 'C:/Users/chshen/OneDrive - ETH Zurich/Work/Peru_data_collab/Preds/Raw/Soil'
relative_path_root = 'Peru_data_collab/Preds/Raw/Soil'

# List all TIF files in the directory
tif_files = [f for f in os.listdir(directory_path) if f.endswith('.tif')]

# Load existing data from the Excel file
excel_path = "C:/Users/chshen/OneDrive - ETH Zurich/Work/predictor_table.xlsx"  # Replace with the path to your Excel file
existing_data = pd.read_excel(excel_path)

# Append each TIF file path as a new row to the DataFrame
for tif_file in tif_files:
    # Extract the file name without the .tif extension to use as the 'Variable_name'
    variable_name = os.path.splitext(tif_file)[0]

    # Construct the relative file path
    relative_file_path = os.path.join(relative_path_root, tif_file)

    # Define the new data entry
    new_data_entry = {
        'Variable_name': variable_name,
        'Data_citation': 'Soilgrid',
        'URL': 'https://maps.isric.org/',
        'Original_resolution': '250m',
        'Static_or_dynamic': 'static',  # or 'dynamic' based on your data
        'Temporal_coverage': '',
        'Temporal_resolution': '',
        'Prepared': 'No',
        'Raw_data_path': relative_file_path,
        # ... add other fields as necessary
    }

    # Convert the dictionary to a DataFrame to ensure similar structure
    new_data_df = pd.DataFrame([new_data_entry])
    existing_data = pd.concat([existing_data, new_data_df], ignore_index=True)

# Save the updated DataFrame back to an Excel file
existing_data.to_excel(excel_path, index=False)
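
Note that re-running this script on the same folder appends the same rows again. One possible guard, a sketch assuming 'Variable_name' uniquely identifies each predictor:

# Hypothetical safeguard: keep only the latest entry per variable before saving
existing_data = existing_data.drop_duplicates(subset='Variable_name', keep='last')
existing_data.to_excel(excel_path, index=False)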
@@ -0,0 +1,55 @@
########################################################################
## Script name: Aggregate_LULC
## Purpose of script: Aggregate the MapBiomas LULC data to 120m resolution
## Author: Chenyu Shen
## Date Created: 2023-11-20
## Notes:
########################################################################

### =========================================================================
### A - Preparation
### =========================================================================

## Install and load packages

# Vector of required packages
packs <- c("terra", "raster", "sp")

# Install new packages
new.packs <- packs[!(packs %in% installed.packages()[, "Package"])]
if (length(new.packs)) install.packages(new.packs)

# Load required packages
invisible(lapply(packs, require, character.only = TRUE))

# Set working directory (Data_dir is expected to be defined before this script is run)
setwd(Data_dir)

# Set output directory
output_dir <- "LULC/MapBiomas_120m"

### =========================================================================
### B - Aggregate the Data
### =========================================================================
for (year in 1985:2021) {
  # Construct file path for the input raster
  input_path <- sprintf("LULC/MapBiomas/peru_coverage_%d.tif", year)

  # Check if the file exists
  if (!file.exists(input_path)) {
    next # Skip to the next iteration if the file does not exist
  }

  # Load the raster
  lulc_30m <- raster(input_path)

  # Aggregate to 120m resolution, using the modal (most frequent) class
  # so that the categorical LULC values are preserved
  agg_factor <- 4 # 30m to 120m needs a factor of 4
  lulc_120m <- aggregate(lulc_30m, fact = agg_factor, fun = modal, na.rm = TRUE)

  # Construct file path for the output raster
  output_path <- sprintf("%s/peru_coverage_120m_%d.tif", output_dir, year)

  # Save the aggregated raster
  writeRaster(lulc_120m, filename = output_path, overwrite = TRUE)
}
@@ -0,0 +1,65 @@
########################################################################
## Script name: Distance_to_water
## Purpose of script: Calculate the distance from water bodies (navigable rivers,
##                    small rivers and streams, lakes and lagoons)
## Author: Chenyu Shen
## Date Created: 2023-11-15
## Notes:
########################################################################

### =========================================================================
### A - Preparation
### =========================================================================

## Install and load packages

# Vector of required packages
packs <- c("raster", "sf", "terra")

# Install new packages
new.packs <- packs[!(packs %in% installed.packages()[, "Package"])]
if (length(new.packs)) install.packages(new.packs)

# Load required packages
invisible(lapply(packs, require, character.only = TRUE))

# Set working directory (Data_dir is expected to be defined before this script is run)
setwd(Data_dir)


# Load the shapefiles of the water bodies
navigable_rivers <- st_read("Preds/Raw/Shapefiles/navigable_river/Rio_navegables.shp")
small_rivers <- st_read("Preds/Raw/Shapefiles/river_stream/Rios_Quebradas.shp")
lakes_lagoons <- st_read("Preds/Raw/Shapefiles/lake_lagoon/Lagos_lagunas_Project.shp")

# Load Peru's boundary and reproject it to match the CRS of the reference grid
peru_boundary <- st_read("Preds/Raw/Shapefiles/Country/nivel-politico-1.shp")
template_raster <- rast("ref_grid.tif")
peru_boundary_transformed <- st_transform(peru_boundary, crs = crs(template_raster))

### =========================================================================
### B - Calculate Distance
### =========================================================================

# Rasterize the shapefiles
raster_navigable_rivers <- rasterize(navigable_rivers, template_raster, field = 1)
raster_small_rivers <- rasterize(small_rivers, template_raster, field = 1)
raster_lakes_lagoons <- rasterize(lakes_lagoons, template_raster, field = 1)
# field = 1 is used to mark the presence of the feature: cells touched by a water
# body get the value 1, all other cells stay NA

# Clip the rasters to Peru's boundary
raster_navigable_rivers_clipped <- mask(raster_navigable_rivers, peru_boundary_transformed)
raster_small_rivers_clipped <- mask(raster_small_rivers, peru_boundary_transformed)
raster_lakes_lagoons_clipped <- mask(raster_lakes_lagoons, peru_boundary_transformed)

# Calculate distance within Peru's boundary: terra::distance() fills each NA cell
# with the distance to the nearest non-NA (water) cell
distance_navigable_rivers <- terra::distance(raster_navigable_rivers_clipped)
distance_small_rivers <- terra::distance(raster_small_rivers_clipped)
distance_lakes_lagoons <- terra::distance(raster_lakes_lagoons_clipped)


# Write the output as TIFF files
writeRaster(distance_navigable_rivers, "distance_navigable_rivers.tif")
writeRaster(distance_small_rivers, "distance_small_rivers.tif")
writeRaster(distance_lakes_lagoons, "distance_lakes_lagoons.tif")