
Commit

Adding veg pre-processing file
aivanova5 committed Mar 20, 2024
1 parent 7803034 commit 49ebe1a
Showing 4 changed files with 72 additions and 6 deletions.
3 changes: 2 additions & 1 deletion US/CA/SLAC/anticipation/.catalog
@@ -3,4 +3,5 @@ folium.glm:a=r
add_info.py:a=rx
status_log.py:a=rx
folium_data.py:a=rx
convert_to_csv.glm:a=rx
convert_to_csv.glm:a=rx
veg_data_preprocess.py:a=rx
30 changes: 30 additions & 0 deletions US/CA/SLAC/anticipation/header.glm
@@ -13,6 +13,8 @@
#define suppress_repeat_messages=TRUE
#define pythonpath="/usr/local/opt/gridlabd/current/share/gridlabd/template/US/CA/SLAC/anticipation"
//#exec printenv
#set minimum_timestep=3600


module status_log;
module climate;
@@ -23,10 +25,38 @@ module powerflow {
line_capacitance true;
message_flags VERBOSE;
}
#ifdef RELIABILITY_ON
module reliability {
report_event_log true;
}
object fault_check {
name test_fault;
check_mode ONCHANGE;
reliability_mode true;
// output_filename ${FAULT_OUT_PATH};
grid_association true;
};

object power_metrics {
name pwrmetrics;
base_time_value 1 h;
}

object metrics {
name testmetrics;
report_file metrics.txt;
module_metrics_object pwrmetrics;
metrics_of_interest "SAIFI,SAIDI,CAIDI,ASAI,MAIFI";
customer_group "class=meter";
metric_interval 5 h;
report_interval 5 h;
};
#endif
clock {
starttime "${STARTTIME}";
stoptime "${STOPTIME}";
timezone "${TIMEZONE}";
}


#ifdef WIND_SPEED
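The reliability additions above are wrapped in #ifdef RELIABILITY_ON, so they only take effect when that macro is defined at run time. Below is a minimal sketch of how the guarded block might be enabled, assuming GridLAB-D's -D option for defining globals; the model file name and the STARTTIME/STOPTIME/TIMEZONE values are illustrative, not part of this commit.

```python
# Sketch only: "anticipation.glm" is a hypothetical model that includes header.glm,
# and the clock values are placeholders. -D defines GridLAB-D globals, which is
# what the #ifdef RELIABILITY_ON test checks.
import subprocess

subprocess.run(
    [
        "gridlabd",
        "-D", "RELIABILITY_ON=TRUE",
        "-D", "STARTTIME=2024-01-01 00:00:00 PST",
        "-D", "STOPTIME=2024-01-08 00:00:00 PST",
        "-D", "TIMEZONE=PST+8PDT",
        "anticipation.glm",
    ],
    check=True,
)
```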
10 changes: 5 additions & 5 deletions US/CA/SLAC/anticipation/status_log.py
@@ -1,9 +1,8 @@
import pandas as pd
import datetime

pole_status = {}
metered_energy = {}
data = {}
pole_status = {}
objs = {}
wrn_count = 0

@@ -35,7 +34,7 @@ def on_init(t) :
return True

def on_commit(t) :
global pole_status, wrn_count
global pole_status, metered_energy, wrn_count
pole_status.update(get_info(t,'pole', 'status'))
# pole_status.update(get_info(t,'pole', 'total_moment'))
try :
@@ -47,7 +46,8 @@ def on_commit(t) :
return True

def on_term(t) :
global data
global pole_status, metered_energy

df_energy = dump_csv(data,'/tmp/output/pole_status.csv')
df_energy = dump_csv(pole_status,'/tmp/output/pole_status.csv')
dump_csv(metered_energy, '/tmp/output/metered_energy.csv')
return None
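With this change, on_term() writes the accumulated pole_status and metered_energy dictionaries rather than the old data dict. The dump_csv helper itself sits in an unexpanded part of status_log.py; the sketch below shows what it is assumed to do, for orientation only.

```python
# Hypothetical sketch of dump_csv (the real helper is not shown in this diff):
# flatten a dict of per-timestamp records into a DataFrame and write it to CSV.
import pandas as pd

def dump_csv(data: dict, filename: str) -> pd.DataFrame:
    df = pd.DataFrame.from_dict(data, orient="index")  # one row per key
    df.index.name = "timestamp"                         # assumed key meaning
    df.to_csv(filename)
    return df
```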
35 changes: 35 additions & 0 deletions US/CA/SLAC/anticipation/veg_data_preprocess.py
@@ -0,0 +1,35 @@
import pandas as pd
import sys

def extract_poles() :
input_file_name = sys.argv[1]
output_file_name = sys.argv[2]

df = pd.read_csv(input_file_name)

# Create a new DataFrame with only rows where 'class' is 'pole'
filtered_df = df[df['class'] == 'pole']

# Reset the index of the filtered DataFrame
filtered_df.reset_index(drop=True, inplace=True)

# Extract 'pole_length' from the corresponding 'pole_configuration' rows
pole_length_map = dict(zip(df[df['class'] == 'pole_configuration']['name'], df[df['class'] == 'pole_configuration']['pole_length']))

# Map 'pole_length' onto each pole via its 'configuration' name, using .map after creating a copy
filtered_df = filtered_df.copy() # Create a copy to avoid SettingWithCopyWarning

filtered_df['pole_length'] = filtered_df['configuration'].map(pole_length_map)

# Check for missing values in latitude and longitude columns and drop rows with any missing value
filtered_df.dropna(subset=["latitude", "longitude"], inplace=True)

# Reset the index
filtered_df.reset_index(drop=True, inplace=True)

# Write the final DataFrame to the output CSV
filtered_df.to_csv(output_file_name, index=False)


if __name__ == "__main__":
extract_poles()
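The new script takes its input and output paths from sys.argv, so it can be run standalone on a CSV dump of the model objects. A usage sketch with illustrative file names:

```python
# Usage sketch (file names are illustrative): argv[1] is the object dump to filter,
# argv[2] receives the pole rows with their mapped pole_length values.
import subprocess

subprocess.run(
    ["python3", "veg_data_preprocess.py", "model_objects.csv", "poles.csv"],
    check=True,
)
```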
