"""
Read in each telemetry database used for limit monitoring, store all data in all serializable
datastructure. Save this file to .pkl and .json files.
Each database must have been converted to sets of CSV files, one for each file. Filenames and
file contents should all be in lower case text.
At this point only the tables related to MSID definitions, limit and expected state definitions,
and calibaration definitions are included.
The resulting datastructure will have the following format:
all_databases[database_version][msid]['msid']
all_databases[database_version][msid]['technical_name']
all_databases[database_version][msid]['data_type']
all_databases[database_version][msid]['calibration_type']
all_databases[database_version][msid]['eng_unit']
all_databases[database_version][msid]['low_raw_count']
all_databases[database_version][msid]['high_raw_count']
all_databases[database_version][msid]['total_length']
all_databases[database_version][msid]['prop']
all_databases[database_version][msid]['counter_msid']
all_databases[database_version][msid]['range_msid']
all_databases[database_version][msid]['calibration_switch_msid']
all_databases[database_version][msid]['calibration_default_set_num']
all_databases[database_version][msid]['limit_switch_msid']
all_databases[database_version][msid]['limit_default_set_num']
all_databases[database_version][msid]['es_switch_msid']
all_databases[database_version][msid]['es_default_set_num']
all_databases[database_version][msid]['owner_id']
all_databases[database_version][msid]['description']
all_databases[database_version][msid]['ehs_header_flag']
all_databases[database_version][msid]['limit_lrvt_location']
all_databases[database_version][msid]['em_error_description']
all_databases[database_version][msid]['limit'][set_num]['caution_low']
all_databases[database_version][msid]['limit'][set_num]['caution_high']
all_databases[database_version][msid]['limit'][set_num]['warning_low']
all_databases[database_version][msid]['limit'][set_num]['warning_high']
all_databases[database_version][msid]['limit'][set_num]['delta']
all_databases[database_version][msid]['limit'][set_num]['toler']
all_databases[database_version][msid]['limit'][set_num]['em_all_samp_flag']
all_databases[database_version][msid]['lim_switch'][set_num]['limit_set_num']
all_databases[database_version][msid]['lim_switch'][set_num]['low_range']
all_databases[database_version][msid]['lim_switch'][set_num]['high_range']
all_databases[database_version][msid]['lim_switch'][set_num]['state_code']
all_databases[database_version][msid]['cal_switch'][set_num]['calibration_set_num']
all_databases[database_version][msid]['cal_switch'][set_num]['low_range']
all_databases[database_version][msid]['cal_switch'][set_num]['high_range']
all_databases[database_version][msid]['cal_switch'][set_num]['state_code']
all_databases[database_version][msid]['point_pair'][set_num][sequence_num]['calibration_set_num']
all_databases[database_version][msid]['point_pair'][set_num][sequence_num]['sequence_num']
all_databases[database_version][msid]['point_pair'][set_num][sequence_num]['raw_count']
all_databases[database_version][msid]['point_pair'][set_num][sequence_num]['eng_unit_value']
all_databases[database_version][msid]['poly_cal'][set_num]['calibration_set_num']
all_databases[database_version][msid]['poly_cal'][set_num]['end_unit_low']
all_databases[database_version][msid]['poly_cal'][set_num]['eng_unit_high']
all_databases[database_version][msid]['poly_cal'][set_num]['deg']
all_databases[database_version][msid]['poly_cal'][set_num]['coef0']
all_databases[database_version][msid]['poly_cal'][set_num]['coef1']
all_databases[database_version][msid]['poly_cal'][set_num]['coef2']
all_databases[database_version][msid]['poly_cal'][set_num]['coef3']
all_databases[database_version][msid]['poly_cal'][set_num]['coef4']
all_databases[database_version][msid]['poly_cal'][set_num]['coef5']
all_databases[database_version][msid]['poly_cal'][set_num]['coef6']
all_databases[database_version][msid]['poly_cal'][set_num]['coef7']
all_databases[database_version][msid]['poly_cal'][set_num]['coef8']
all_databases[database_version][msid]['poly_cal'][set_num]['coef9']
all_databases[database_version][msid]['exp_state'][set_num]['es_set_num']
all_databases[database_version][msid]['exp_state'][set_num]['expected_state']
all_databases[database_version][msid]['exp_state'][set_num]['toler']
all_databases[database_version][msid]['exp_state'][set_num]['em_all_samp_flag']
all_databases[database_version][msid]['state_code'][set_num][sequence_num]['calibration_set_num']
all_databases[database_version][msid]['state_code'][set_num][sequence_num]['sequence_num']
all_databases[database_version][msid]['state_code'][set_num][sequence_num]['low_raw_count']
all_databases[database_version][msid]['state_code'][set_num][sequence_num]['high_raw_count']
all_databases[database_version][msid]['state_code'][set_num][sequence_num]['state_code']
all_databases[database_version][msid]['es_switch'][set_num]['es_set_num']
all_databases[database_version][msid]['es_switch'][set_num]['low_range']
all_databases[database_version][msid]['es_switch'][set_num]['high_range']
all_databases[database_version][msid]['es_switch'][set_num]['state_code']
"""
import os
import pickle
import json

import pandas


def assignsetvals(db, table, field, sequence=False):
    """Convert a TDB table to dictionary entries.

    Modifies a pre-existing dictionary by adding a TDB table in the desired format.

    :param db: Pre-existing dictionary to update
    :param table: TDB table as a Pandas 2D dataframe
    :param field: Desired key name for the converted table
    :param sequence: Boolean indicating if a sequence of items exists in `table`, such as a set
        of point pair calibration values
    """
    for row in table.values:
        msid = row[0]
        setnum = int(row[1])
        if field not in db[msid]:
            db[msid].update({field: {}})
        if not sequence:
            db[msid][field][setnum] = dict(zip(table.columns[2:], row[2:]))
        else:
            seq = int(row[2])
            if setnum not in db[msid][field]:
                db[msid][field].update({setnum: {}})
            db[msid][field][setnum][seq] = dict(zip(table.columns[3:], row[3:]))
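
# Illustration of assignsetvals with hypothetical column names and values: for a table with
# columns ['msid', 'limit_set_num', 'caution_low', 'caution_high'] and a row
# ['some_msid', 1, 10.0, 90.0], calling assignsetvals(db, table, 'limit') yields
# db['some_msid']['limit'][1] == {'caution_low': 10.0, 'caution_high': 90.0}
# (db must already contain a 'some_msid' entry, as created in processdb below).
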

def readdb(rootdir):
    """Read the set of TDB CSV files and return Pandas dataframes.

    :param rootdir: String containing the location of the set of TDB CSV files
    :returns: Dictionary of TDB tables in Pandas dataframe format
    """
    tdbframes = {}
    tdbframes['tdbmsid'] = pandas.read_csv(os.path.join(rootdir, 'tdb_msid.csv'))
    tdbframes['tdblimit'] = pandas.read_csv(os.path.join(rootdir, 'tdb_limit.csv'))
    tdbframes['tdblimswitch'] = pandas.read_csv(os.path.join(rootdir, 'tdb_lim_switch.csv'))
    tdbframes['tdbpointpair'] = pandas.read_csv(os.path.join(rootdir, 'tdb_point_pair.csv'))
    tdbframes['tdbpolycal'] = pandas.read_csv(os.path.join(rootdir, 'tdb_poly_cal.csv'))
    tdbframes['tdbcalswitch'] = pandas.read_csv(os.path.join(rootdir, 'tdb_cal_switch.csv'))
    tdbframes['tdbexpstate'] = pandas.read_csv(os.path.join(rootdir, 'tdb_exp_state.csv'))
    tdbframes['tdbesswitch'] = pandas.read_csv(os.path.join(rootdir, 'tdb_es_switch.csv'))
    tdbframes['tdbstatecode'] = pandas.read_csv(os.path.join(rootdir, 'tdb_state_code.csv'))
    return tdbframes
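
# Minimal usage sketch (the directory name matches one of the version directories used in
# process_files below); each version directory is expected to contain the nine CSV files
# listed above:
#
#     frames = readdb('./p007')
#     frames['tdbmsid'].head()
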

def processdb(tdbframes):
    """Convert a TDB from Pandas dataframe format to a dictionary.

    :param tdbframes: TDB in Pandas dataframe format
    :returns: TDB in dictionary format (serializable)
    """
    tdb = {}
    # Seed the dictionary with one entry per MSID, keyed by MSID name, from the tdb_msid table.
    for row in tdbframes['tdbmsid'].values:
        tdb.update({row[0]: dict(zip(tdbframes['tdbmsid'].columns[1:], row[1:]))})
    assignsetvals(tdb, tdbframes['tdblimit'], 'limit')
    assignsetvals(tdb, tdbframes['tdblimswitch'], 'lim_switch')
    assignsetvals(tdb, tdbframes['tdbpointpair'], 'point_pair', sequence=True)
    assignsetvals(tdb, tdbframes['tdbpolycal'], 'poly_cal')
    assignsetvals(tdb, tdbframes['tdbcalswitch'], 'cal_switch')
    assignsetvals(tdb, tdbframes['tdbexpstate'], 'exp_state')
    assignsetvals(tdb, tdbframes['tdbesswitch'], 'es_switch')
    assignsetvals(tdb, tdbframes['tdbstatecode'], 'state_code', sequence=True)
    return tdb
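
# Example of converting a single database version end to end (the MSID key is a hypothetical
# placeholder):
#
#     tdb = processdb(readdb('./p009'))
#     limit_sets = tdb['some_msid']['limit']   # dict keyed by limit set number
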

def process_files(rootdir):
    """Return a dictionary of all TDBs: P007, P009, P010, P011, P012, P013, P014.

    :param rootdir: String containing the location of all TDB directories
    :returns: Dictionary of serializable TDBs
    """
    tdbframes = readdb(os.path.join(rootdir, 'p007'))
    tdb007 = processdb(tdbframes)
    tdbframes = readdb(os.path.join(rootdir, 'p009'))
    tdb009 = processdb(tdbframes)
    tdbframes = readdb(os.path.join(rootdir, 'p010'))
    tdb010 = processdb(tdbframes)
    tdbframes = readdb(os.path.join(rootdir, 'p011'))
    tdb011 = processdb(tdbframes)
    tdbframes = readdb(os.path.join(rootdir, 'p012'))
    tdb012 = processdb(tdbframes)
    tdbframes = readdb(os.path.join(rootdir, 'p013'))
    tdb013 = processdb(tdbframes)
    tdbframes = readdb(os.path.join(rootdir, 'p014'))
    tdb014 = processdb(tdbframes)
    return {'p007': tdb007, 'p009': tdb009, 'p010': tdb010, 'p011': tdb011, 'p012': tdb012,
            'p013': tdb013, 'p014': tdb014}

if __name__ == '__main__':
    tdb_all = process_files('./')
    # Open the pickle output in binary mode; protocol 2 keeps it readable from Python 2.
    with open('tdb_all.pkl', 'wb') as fid:
        pickle.dump(tdb_all, fid, protocol=2)
    with open('tdb_all.json', 'w') as fid:
        json.dump(tdb_all, fid)
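
# A minimal sketch for reloading the saved output in a later session (file names match the
# ones written above):
#
#     import pickle
#     with open('tdb_all.pkl', 'rb') as fid:
#         tdb_all = pickle.load(fid)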